diff --git a/.docker/Dockerfile-acala.j2 b/.docker/Dockerfile-acala.j2
new file mode 100644
index 0000000000..dd980e8f2a
--- /dev/null
+++ b/.docker/Dockerfile-acala.j2
@@ -0,0 +1,41 @@
+# ===== Rust builder =====
+FROM ubuntu:20.04 as rust-builder
+LABEL maintainer="Unique.Network"
+
+ENV CARGO_HOME="/cargo-home"
+ENV PATH="/cargo-home/bin:$PATH"
+ENV TZ=UTC
+RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
+
+RUN apt-get update && \
+    apt-get install -y curl cmake pkg-config libssl-dev git clang llvm libudev-dev protobuf-compiler && \
+    apt-get clean && \
+    rm -r /var/lib/apt/lists/*
+
+RUN curl https://sh.rustup.rs -sSf | sh -s -- -y --default-toolchain none
+
+RUN rustup toolchain uninstall $(rustup toolchain list) && \
+    rustup toolchain install {{ RUST_TOOLCHAIN }} && \
+    rustup default {{ RUST_TOOLCHAIN }} && \
+    rustup target list --installed && \
+    rustup show
+RUN rustup target add wasm32-unknown-unknown --toolchain {{ RUST_TOOLCHAIN }}
+
+RUN mkdir /unique_parachain
+WORKDIR /unique_parachain
+
+# ===== BUILD ACALA =====
+FROM rust-builder as builder-acala-bin
+
+WORKDIR /unique_parachain
+
+RUN git clone -b {{ ACALA_BUILD_BRANCH }} --depth 1 https://github.com/AcalaNetwork/Acala.git && \
+    cd Acala && \
+    make init && \
+    make build-release
+
+# ===== BIN ======
+
+FROM ubuntu:20.04 as builder-acala
+
+COPY --from=builder-acala-bin /unique_parachain/Acala/target/production/acala /unique_parachain/Acala/target/production/acala
diff --git a/.docker/Dockerfile-chain-dev b/.docker/Dockerfile-chain-dev
new file mode 100644
index 0000000000..b475af82a2
--- /dev/null
+++ b/.docker/Dockerfile-chain-dev
@@ -0,0 +1,31 @@
+FROM ubuntu:20.04
+
+ARG RUST_TOOLCHAIN=
+ARG FEATURE=
+
+ENV DEBIAN_FRONTEND=noninteractive
+ENV TZ=Etc/UTC
+ENV FEATURE=$FEATURE
+ENV CARGO_HOME="/cargo-home"
+ENV PATH="/cargo-home/bin:$PATH"
+
+RUN echo "$FEATURE\n" && echo "$RUST_TOOLCHAIN\n"
+
+RUN apt-get update && apt-get install -y git curl libssl-dev llvm pkg-config libclang-dev clang git make cmake protobuf-compiler
+
+RUN curl https://sh.rustup.rs -sSf | sh -s -- -y --default-toolchain none
+
+RUN rustup toolchain uninstall $(rustup toolchain list) && \
+    rustup toolchain install $RUST_TOOLCHAIN && \
+    rustup default $RUST_TOOLCHAIN && \
+    rustup target add wasm32-unknown-unknown --toolchain $RUST_TOOLCHAIN
+
+RUN mkdir /dev_chain
+COPY . /dev_chain
+
+WORKDIR /dev_chain
+
+RUN cargo build --release
+RUN echo "$FEATURE"
+
+CMD cargo run --release --features=$FEATURE -- --dev -linfo --unsafe-ws-external --rpc-cors=all --unsafe-rpc-external
diff --git a/.docker/Dockerfile-chain-dev-unit b/.docker/Dockerfile-chain-dev-unit
new file mode 100644
index 0000000000..033e5407e0
--- /dev/null
+++ b/.docker/Dockerfile-chain-dev-unit
@@ -0,0 +1,26 @@
+FROM ubuntu:20.04
+
+ENV DEBIAN_FRONTEND=noninteractive
+ENV TZ=Etc/UTC
+
+RUN apt-get update && apt-get install -y git curl libssl-dev llvm pkg-config libclang-dev clang git make cmake protobuf-compiler
+
+ENV CARGO_HOME="/cargo-home"
+ENV PATH="/cargo-home/bin:$PATH"
+
+RUN curl https://sh.rustup.rs -sSf | sh -s -- -y --default-toolchain none
+
+ARG RUST_TOOLCHAIN=
+ARG FEATURE=
+
+RUN rustup toolchain uninstall $(rustup toolchain list) && \
+    rustup toolchain install $RUST_TOOLCHAIN && \
+    rustup default $RUST_TOOLCHAIN && \
+    rustup target add wasm32-unknown-unknown --toolchain $RUST_TOOLCHAIN
+
+RUN mkdir /dev_chain
+COPY . /dev_chain
+
+WORKDIR /dev_chain
+
+CMD cargo test --features=limit-testing --workspace
diff --git a/.docker/Dockerfile-cumulus.j2 b/.docker/Dockerfile-cumulus.j2
new file mode 100644
index 0000000000..8caa44d0bf
--- /dev/null
+++ b/.docker/Dockerfile-cumulus.j2
@@ -0,0 +1,40 @@
+# ===== Rust builder =====
+FROM ubuntu:20.04 as rust-builder
+LABEL maintainer="Unique.Network"
+
+ENV CARGO_HOME="/cargo-home"
+ENV PATH="/cargo-home/bin:$PATH"
+ENV TZ=UTC
+RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
+
+RUN apt-get update && \
+    apt-get install -y curl cmake pkg-config libssl-dev git clang llvm libudev-dev protobuf-compiler && \
+    apt-get clean && \
+    rm -r /var/lib/apt/lists/*
+
+RUN curl https://sh.rustup.rs -sSf | sh -s -- -y --default-toolchain none
+
+RUN rustup toolchain uninstall $(rustup toolchain list) && \
+    rustup toolchain install {{ RUST_TOOLCHAIN }} && \
+    rustup default {{ RUST_TOOLCHAIN }} && \
+    rustup target list --installed && \
+    rustup show
+RUN rustup target add wasm32-unknown-unknown --toolchain {{ RUST_TOOLCHAIN }}
+
+RUN mkdir /unique_parachain
+WORKDIR /unique_parachain
+
+# ===== BUILD CUMULUS =====
+FROM rust-builder as builder-cumulus-bin
+
+WORKDIR /unique_parachain
+
+RUN git clone -b {{ CUMULUS_BUILD_BRANCH }} --depth 1 https://github.com/paritytech/cumulus.git && \
+    cd cumulus && \
+    cargo build --release
+
+# ===== BIN ======
+
+FROM ubuntu:20.04 as builder-cumulus
+
+COPY --from=builder-cumulus-bin /unique_parachain/cumulus/target/release/polkadot-parachain /unique_parachain/cumulus/target/release/polkadot-parachain
diff --git a/.docker/Dockerfile-moonbeam.j2 b/.docker/Dockerfile-moonbeam.j2
new file mode 100644
index 0000000000..ee422d1ab7
--- /dev/null
+++ b/.docker/Dockerfile-moonbeam.j2
@@ -0,0 +1,41 @@
+# ===== Rust builder =====
+FROM ubuntu:20.04 as rust-builder
+LABEL maintainer="Unique.Network"
+
+ENV CARGO_HOME="/cargo-home"
+ENV PATH="/cargo-home/bin:$PATH"
+ENV TZ=UTC
+RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
+
+RUN apt-get update && \
+    apt-get install -y curl cmake pkg-config libssl-dev git clang llvm libudev-dev protobuf-compiler && \
+    apt-get clean && \
+    rm -r /var/lib/apt/lists/*
+
+RUN curl https://sh.rustup.rs -sSf | sh -s -- -y --default-toolchain none
+
+RUN rustup toolchain uninstall $(rustup toolchain list) && \
+    rustup toolchain install {{ RUST_TOOLCHAIN }} && \
+    rustup default {{ RUST_TOOLCHAIN }} && \
+    rustup target list --installed && \
+    rustup show
+RUN rustup target add wasm32-unknown-unknown --toolchain {{ RUST_TOOLCHAIN }}
+
+RUN mkdir /unique_parachain
+WORKDIR /unique_parachain
+
+
+# ===== BUILD MOONBEAM =====
+FROM rust-builder as builder-moonbeam-bin
+
+WORKDIR /unique_parachain
+
+RUN git clone -b {{ MOONBEAM_BUILD_BRANCH }} --depth 1 https://github.com/PureStake/moonbeam.git && \
+    cd moonbeam && \
+    cargo build --release
+
+# ===== BIN ======
+
+FROM ubuntu:20.04 as builder-moonbeam
+
+COPY --from=builder-moonbeam-bin /unique_parachain/moonbeam/target/release/moonbeam /unique_parachain/moonbeam/target/release/moonbeam
diff --git a/.docker/Dockerfile-parachain b/.docker/Dockerfile-parachain index fb84f75a48..9175e71466 100644 --- a/.docker/Dockerfile-parachain +++ b/.docker/Dockerfile-parachain @@ -76,3 +76,4 @@ CMD export NVM_DIR="$HOME/.nvm" && \ [ -s "$NVM_DIR/nvm.sh" ] && \. 
"$NVM_DIR/nvm.sh" && \ cd /polkadot-launch && \ yarn start launch-config.json + diff --git a/.docker/Dockerfile-parachain-node-only b/.docker/Dockerfile-parachain-node-only new file mode 100644 index 0000000000..a5610a669f --- /dev/null +++ b/.docker/Dockerfile-parachain-node-only @@ -0,0 +1,98 @@ +ARG POLKADOT_BUILD_BRANCH +FROM uniquenetwork/builder-polkadot:${POLKADOT_BUILD_BRANCH} as polkadot + +# ===== Rust builder ===== +FROM ubuntu:20.04 as rust-builder +LABEL maintainer="Unique.Network" + +ARG RUST_TOOLCHAIN= + +ENV RUST_TOOLCHAIN $RUST_TOOLCHAIN +ENV CARGO_HOME="/cargo-home" +ENV PATH="/cargo-home/bin:$PATH" +ENV TZ=UTC +RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone + +RUN apt-get update && \ + apt-get install -y curl cmake pkg-config libssl-dev git clang protobuf-compiler && \ + apt-get clean && \ + rm -r /var/lib/apt/lists/* + +RUN curl https://sh.rustup.rs -sSf | sh -s -- -y --default-toolchain none + +RUN rustup toolchain uninstall $(rustup toolchain list) && \ + rustup toolchain install $RUST_TOOLCHAIN && \ + rustup default $RUST_TOOLCHAIN && \ + rustup target list --installed && \ + rustup show +RUN rustup target add wasm32-unknown-unknown --toolchain $RUST_TOOLCHAIN + +RUN mkdir /unique_parachain +WORKDIR /unique_parachain + + +# ===== BUILD current version ====== +FROM rust-builder as builder-unique-current + +ARG PROFILE=release +ARG FEATURE= +ARG MAINNET_BRANCH= +ARG REPO_URL= + +WORKDIR /unique_parachain + +RUN git clone $REPO_URL -b $MAINNET_BRANCH . && \ + cargo build --features=$FEATURE --$PROFILE + +# ===== BUILD target version ====== +FROM rust-builder as builder-unique-target + +ARG PROFILE=release +ARG FEATURE= + +COPY . /unique_parachain +WORKDIR /unique_parachain + +RUN cargo build --features=$FEATURE --$PROFILE + +# ===== RUN ====== + +FROM ubuntu:20.04 + +ARG RUNTIME= +ENV RUNTIME $RUNTIME +ARG POLKADOT_BUILD_BRANCH= +ENV POLKADOT_BUILD_BRANCH $POLKADOT_BUILD_BRANCH + +RUN apt-get -y update && \ + apt-get -y install curl git && \ + curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.35.3/install.sh | bash && \ + export NVM_DIR="$HOME/.nvm" && \ + [ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" && \ + nvm install v16.16.0 && \ + nvm use v16.16.0 + +RUN git clone https://github.com/uniquenetwork/polkadot-launch -b unique-network + +RUN export NVM_DIR="$HOME/.nvm" && \ + [ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" && \ + cd /polkadot-launch && \ + npm install --global yarn && \ + yarn install + +RUN echo "$RUNTIME" +RUN echo "$POLKADOT_BUILD_BRANCH" + +COPY --from=builder-unique-current /unique_parachain/target/release/unique-collator /unique-chain/current/release/ +COPY --from=builder-unique-target /unique_parachain/target/release/unique-collator /unique-chain/target/release/ +COPY --from=builder-unique-target /unique_parachain/target/release/wbuild/"$RUNTIME"-runtime/"$RUNTIME"_runtime.compact.compressed.wasm /unique-chain/target/release/wbuild/"$RUNTIME"-runtime/"$RUNTIME"_runtime.compact.compressed.wasm + +COPY --from=polkadot /unique_parachain/polkadot/target/release/polkadot /polkadot/target/release/ +COPY --from=polkadot /unique_parachain/polkadot/target/release/wbuild/westend-runtime/westend_runtime.compact.compressed.wasm /polkadot/target/release/wbuild/westend-runtime/westend_runtime.compact.compressed.wasm + +CMD export NVM_DIR="$HOME/.nvm" && \ + [ -s "$NVM_DIR/nvm.sh" ] && \. 
"$NVM_DIR/nvm.sh" && \ + cd /polkadot-launch && \ + yarn start launch-config.json --test-upgrade-parachains -w -n + + diff --git a/.docker/Dockerfile-parachain-upgrade b/.docker/Dockerfile-parachain-upgrade index 4c60f106ab..96b46360d6 100644 --- a/.docker/Dockerfile-parachain-upgrade +++ b/.docker/Dockerfile-parachain-upgrade @@ -94,4 +94,4 @@ COPY --from=polkadot /unique_parachain/polkadot/target/release/wbuild/westend-ru CMD export NVM_DIR="$HOME/.nvm" && \ [ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" && \ cd /polkadot-launch && \ - yarn start launch-config.json --test-upgrade-parachains \ No newline at end of file + yarn start launch-config.json --test-upgrade-parachains diff --git a/.docker/Dockerfile-parachain-upgrade-data b/.docker/Dockerfile-parachain-upgrade-data index 010ec5738a..85e0851218 100644 --- a/.docker/Dockerfile-parachain-upgrade-data +++ b/.docker/Dockerfile-parachain-upgrade-data @@ -99,4 +99,4 @@ COPY --from=polkadot /unique_parachain/polkadot/target/release/wbuild/westend-ru CMD export NVM_DIR="$HOME/.nvm" PATH="$PATH:/chainql/target/release" REPLICA_FROM && \ [ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" && \ cd /polkadot-launch && \ - yarn start launch-config.json --test-upgrade-parachains \ No newline at end of file + yarn start launch-config.json --test-upgrade-parachains diff --git a/.docker/Dockerfile-polkadot.j2 b/.docker/Dockerfile-polkadot.j2 new file mode 100644 index 0000000000..93306dd4d3 --- /dev/null +++ b/.docker/Dockerfile-polkadot.j2 @@ -0,0 +1,40 @@ +# ===== Rust builder ===== +FROM ubuntu:20.04 as rust-builder +LABEL maintainer="Unique.Network" + +ENV CARGO_HOME="/cargo-home" +ENV PATH="/cargo-home/bin:$PATH" +ENV TZ=UTC +RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone + +RUN apt-get update && \ + apt-get install -y curl cmake pkg-config libssl-dev git clang llvm libudev-dev protobuf-compiler && \ + apt-get clean && \ + rm -r /var/lib/apt/lists/* + +RUN curl https://sh.rustup.rs -sSf | sh -s -- -y --default-toolchain none + +RUN rustup toolchain uninstall $(rustup toolchain list) && \ + rustup toolchain install {{ RUST_TOOLCHAIN }} && \ + rustup default {{ RUST_TOOLCHAIN }} && \ + rustup target list --installed && \ + rustup show +RUN rustup target add wasm32-unknown-unknown --toolchain {{ RUST_TOOLCHAIN }} + +RUN mkdir /unique_parachain +WORKDIR /unique_parachain + +# ===== BUILD POLKADOT ===== +FROM rust-builder as builder-polkadot-bin + +WORKDIR /unique_parachain + +RUN git clone -b {{ POLKADOT_BUILD_BRANCH }} --depth 1 https://github.com/paritytech/polkadot.git && \ + cd polkadot && \ + cargo build --release + +# ===== BIN ====== + +FROM ubuntu:20.04 as builder-polkadot + +COPY --from=builder-polkadot-bin /unique_parachain/polkadot/target/release/polkadot /unique_parachain/polkadot/target/release/polkadot diff --git a/.docker/Dockerfile-testnet.j2 b/.docker/Dockerfile-testnet.j2 index 733ecaa06d..fdc52ff8d8 100644 --- a/.docker/Dockerfile-testnet.j2 +++ b/.docker/Dockerfile-testnet.j2 @@ -70,4 +70,4 @@ EXPOSE 30333 CMD export NVM_DIR="$HOME/.nvm" && \ [ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" && \ cd /polkadot-launch && \ - yarn start launch-config.json \ No newline at end of file + yarn start launch-config.json diff --git a/.docker/Dockerfile-try-runtime b/.docker/Dockerfile-try-runtime index fc0437e5ce..65442d8dbe 100644 --- a/.docker/Dockerfile-try-runtime +++ b/.docker/Dockerfile-try-runtime @@ -41,11 +41,9 @@ ENV REPLICA_FROM $REPLICA_FROM COPY . 
/unique_parachain WORKDIR /unique_parachain -RUN echo "[BE CAREFUL] applying disable-ic patch\n" && \ - git apply .docker/patch/disable-ic.patch RUN echo "Requested features: $FEATURE\n" && \ echo "Fork from: $REPLICA_FROM\n" && \ - cargo build --features=try-runtime,$FEATURE --release + cargo build --features=$FEATURE --release -CMD cargo run --features=try-runtime,$FEATURE --release -- try-runtime on-runtime-upgrade live --uri $REPLICA_FROM +CMD cargo run --features=try-runtime,$FEATURE --release -- try-runtime -ltry-runtime::cli=debug --no-spec-check-panic on-runtime-upgrade live --uri $REPLICA_FROM diff --git a/.docker/Dockerfile-xcm.j2 b/.docker/Dockerfile-xcm.j2 new file mode 100644 index 0000000000..b4f01c323e --- /dev/null +++ b/.docker/Dockerfile-xcm.j2 @@ -0,0 +1,84 @@ +# ===== Rust builder ===== +FROM ubuntu:20.04 as rust-builder +LABEL maintainer="Unique.Network" + +ENV CARGO_HOME="/cargo-home" +ENV PATH="/cargo-home/bin:$PATH" +ENV TZ=UTC +RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone + +RUN apt-get update && \ + apt-get install -y curl cmake pkg-config libssl-dev git clang llvm libudev-dev protobuf-compiler && \ + apt-get clean && \ + rm -r /var/lib/apt/lists/* + +RUN curl https://sh.rustup.rs -sSf | sh -s -- -y --default-toolchain none + +RUN rustup toolchain uninstall $(rustup toolchain list) && \ + rustup toolchain install {{ RUST_TOOLCHAIN }} && \ + rustup default {{ RUST_TOOLCHAIN }} && \ + rustup target list --installed && \ + rustup show +RUN rustup target add wasm32-unknown-unknown --toolchain {{ RUST_TOOLCHAIN }} + +RUN mkdir /unique_parachain +WORKDIR /unique_parachain + +# ===== BUILD ====== +FROM rust-builder as builder-unique + +ARG PROFILE=release + +WORKDIR /unique_parachain + +COPY ./xcm-config/launch-config-xcm-{{ NETWORK }}.json ./launch-config-xcm-{{ NETWORK }}.json +COPY ./xcm-config/5validators.jsonnet ./5validators.jsonnet +COPY ./xcm-config/minBondFix.jsonnet ./minBondFix.jsonnet + +RUN git clone -b {{ BRANCH }} https://github.com/UniqueNetwork/unique-chain.git && \ + cd unique-chain && \ + cargo build --features={{ FEATURE }} --$PROFILE + +# ===== RUN ====== + +FROM ubuntu:20.04 + +RUN apt-get -y update && \ + apt-get -y install curl git && \ + curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.35.3/install.sh | bash && \ + export NVM_DIR="$HOME/.nvm" && \ + [ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" && \ + nvm install v16.16.0 && \ + nvm use v16.16.0 + +RUN git clone https://github.com/uniquenetwork/polkadot-launch -b {{ POLKADOT_LAUNCH_BRANCH }} + +RUN export NVM_DIR="$HOME/.nvm" && \ + [ -s "$NVM_DIR/nvm.sh" ] && \. 
"$NVM_DIR/nvm.sh" && \ + cd /polkadot-launch && \ + npm install --global yarn && \ + yarn install + +COPY --from=builder-unique /unique_parachain/launch-config-xcm-{{ NETWORK }}.json /polkadot-launch/ +COPY --from=builder-unique /unique_parachain/5validators.jsonnet /polkadot-launch/5validators.jsonnet +COPY --from=builder-unique /unique_parachain/minBondFix.jsonnet /polkadot-launch/minBondFix.jsonnet + +COPY --from=builder-unique /unique_parachain/unique-chain/target/release/unique-collator /unique-chain/target/release/ + +COPY --from=uniquenetwork/builder-polkadot:{{ POLKADOT_BUILD_BRANCH }} /unique_parachain/polkadot/target/release/polkadot /polkadot/target/release/ +COPY --from=uniquenetwork/builder-moonbeam:{{ MOONBEAM_BUILD_BRANCH }} /unique_parachain/moonbeam/target/release/moonbeam /moonbeam/target/release/ +COPY --from=uniquenetwork/builder-cumulus:{{ CUMULUS_BUILD_BRANCH }} /unique_parachain/cumulus/target/release/polkadot-parachain /cumulus/target/release/cumulus +COPY --from=uniquenetwork/builder-acala:{{ ACALA_BUILD_BRANCH }} /unique_parachain/Acala/target/production/acala /acala/target/release/ +COPY --from=uniquenetwork/builder-chainql:latest /chainql/target/release/chainql /chainql/target/release/ + +EXPOSE 9844 +EXPOSE 9933 +EXPOSE 9944 +EXPOSE 9946 +EXPOSE 9947 +EXPOSE 9948 + +CMD export NVM_DIR="$HOME/.nvm" PATH="$PATH:/chainql/target/release" && \ + [ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" && \ + cd /polkadot-launch && \ + yarn start launch-config-xcm-{{ NETWORK }}.json diff --git a/.docker/additional/xcm-rococo/.env b/.docker/additional/xcm-rococo/.env new file mode 100644 index 0000000000..2458f34215 --- /dev/null +++ b/.docker/additional/xcm-rococo/.env @@ -0,0 +1,16 @@ +RUST_TOOLCHAIN=nightly-2022-07-24 +UNIQUE_BRANCH="develop" + +POLKADOT_BUILD_BRANCH=release-v0.9.30 + +KARURA_BUILD_BRANCH=2.9.1 +ACALA_BUILD_BRANCH=2.9.2 + +MOONRIVER_BUILD_BRANCH=runtime-1701 +MOONBEAM_BUILD_BRANCH=runtime-1701 + +STATEMINE_BUILD_BRANCH=parachains-v9270 +STATEMINT_BUILD_BRANCH=release-parachains-v9230 +WESTMINT_BUILD_BRANCH=parachains-v9270 + +POLKADOT_LAUNCH_BRANCH="unique-network" diff --git a/.docker/additional/xcm-rococo/Dockerfile-xcm-opal-rococo b/.docker/additional/xcm-rococo/Dockerfile-xcm-opal-rococo new file mode 100644 index 0000000000..e7f4f14f1f --- /dev/null +++ b/.docker/additional/xcm-rococo/Dockerfile-xcm-opal-rococo @@ -0,0 +1,87 @@ +ARG CHAIN=opal +ARG LAUNCH_CONFIG_FILE=launch-config-xcm-opal-rococo.json + +# ===== Rust builder ===== +FROM ubuntu:20.04 as rust-builder +LABEL maintainer="Unique.Network" + +ARG RUST_TOOLCHAIN +ENV RUST_TOOLCHAIN $RUST_TOOLCHAIN +ENV CARGO_HOME="/cargo-home" +ENV PATH="/cargo-home/bin:$PATH" +ENV TZ=UTC +RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone + +RUN apt-get update && \ + apt-get install -y curl cmake pkg-config libssl-dev git clang llvm libudev-dev protobuf-compiler && \ + apt-get clean && \ + rm -r /var/lib/apt/lists/* + +RUN curl https://sh.rustup.rs -sSf | sh -s -- -y --default-toolchain none + +RUN rustup toolchain uninstall $(rustup toolchain list) && \ + rustup toolchain install ${RUST_TOOLCHAIN} && \ + rustup default ${RUST_TOOLCHAIN} && \ + rustup target list --installed && \ + rustup show +RUN rustup target add wasm32-unknown-unknown --toolchain ${RUST_TOOLCHAIN} + +RUN mkdir /unique_parachain +WORKDIR /unique_parachain + +# ===== BUILD ====== +FROM rust-builder as builder-unique + +ARG UNIQUE_BRANCH +ARG CHAIN +ARG LAUNCH_CONFIG_FILE +ARG PROFILE=release + +WORKDIR 
/unique_parachain +#COPY . . + +RUN git clone -b ${UNIQUE_BRANCH} https://github.com/UniqueNetwork/unique-chain.git . && \ +# cd unique-chain && \ + cargo build --features=${CHAIN}-runtime --$PROFILE + +# ===== RUN ====== +FROM ubuntu:20.04 + +ARG POLKADOT_LAUNCH_BRANCH +ARG LAUNCH_CONFIG_FILE +ENV POLKADOT_LAUNCH_BRANCH $POLKADOT_LAUNCH_BRANCH +ENV LAUNCH_CONFIG_FILE $LAUNCH_CONFIG_FILE + +RUN apt-get -y update && \ + apt-get -y install curl git && \ + curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.35.3/install.sh | bash && \ + export NVM_DIR="$HOME/.nvm" && \ + [ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" && \ + nvm install v16.16.0 && \ + nvm use v16.16.0 + +RUN git clone https://github.com/uniquenetwork/polkadot-launch -b ${POLKADOT_LAUNCH_BRANCH} + +RUN export NVM_DIR="$HOME/.nvm" && \ + [ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" && \ + cd /polkadot-launch && \ + npm install --global yarn && \ + yarn install + +COPY --from=builder-unique /unique_parachain/.docker/xcm-config/${LAUNCH_CONFIG_FILE} /polkadot-launch/ + +COPY --from=builder-unique /unique_parachain/target/release/unique-collator /unique-chain/target/release/ +COPY --from=uniquenetwork/builder-polkadot:${POLKADOT_BUILD_BRANCH} /unique_parachain/polkadot/target/release/polkadot /polkadot/target/release/ +COPY --from=uniquenetwork/builder-cumulus:${STATEMINE_BUILD_BRANCH} /unique_parachain/cumulus/target/release/polkadot-parachain /cumulus/target/release/cumulus +COPY --from=uniquenetwork/builder-chainql:latest /chainql/target/release/chainql /chainql/target/release/ + +EXPOSE 9844 +EXPOSE 9944 +EXPOSE 9946 +EXPOSE 9947 +EXPOSE 9948 + +CMD export NVM_DIR="$HOME/.nvm" PATH="$PATH:/chainql/target/release" && \ + [ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" && \ + cd /polkadot-launch && \ + yarn start ${LAUNCH_CONFIG_FILE} diff --git a/.docker/additional/xcm-rococo/Dockerfile-xcm-quartz-rococo b/.docker/additional/xcm-rococo/Dockerfile-xcm-quartz-rococo new file mode 100644 index 0000000000..335fb8162c --- /dev/null +++ b/.docker/additional/xcm-rococo/Dockerfile-xcm-quartz-rococo @@ -0,0 +1,91 @@ +ARG CHAIN=quartz +ARG LAUNCH_CONFIG_FILE=launch-config-xcm-quartz-rococo.json + +# ===== Rust builder ===== +FROM ubuntu:20.04 as rust-builder +LABEL maintainer="Unique.Network" + +ARG RUST_TOOLCHAIN +ENV RUST_TOOLCHAIN $RUST_TOOLCHAIN +ENV CARGO_HOME="/cargo-home" +ENV PATH="/cargo-home/bin:$PATH" +ENV TZ=UTC +RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone + +RUN apt-get update && \ + apt-get install -y curl cmake pkg-config libssl-dev git clang llvm libudev-dev protobuf-compiler && \ + apt-get clean && \ + rm -r /var/lib/apt/lists/* + +RUN curl https://sh.rustup.rs -sSf | sh -s -- -y --default-toolchain none + +RUN rustup toolchain uninstall $(rustup toolchain list) && \ + rustup toolchain install ${RUST_TOOLCHAIN} && \ + rustup default ${RUST_TOOLCHAIN} && \ + rustup target list --installed && \ + rustup show +RUN rustup target add wasm32-unknown-unknown --toolchain ${RUST_TOOLCHAIN} + +RUN mkdir /unique_parachain +WORKDIR /unique_parachain + +# ===== BUILD ====== +FROM rust-builder as builder-unique + +ARG UNIQUE_BRANCH +ARG CHAIN +ARG LAUNCH_CONFIG_FILE +ARG PROFILE=release + +WORKDIR /unique_parachain +#COPY . . + +RUN git clone -b ${UNIQUE_BRANCH} https://github.com/UniqueNetwork/unique-chain.git . 
&& \ +# cd unique-chain && \ + cargo build --features=${CHAIN}-runtime --$PROFILE + +# ===== RUN ====== +FROM ubuntu:20.04 + +ARG POLKADOT_LAUNCH_BRANCH +ARG LAUNCH_CONFIG_FILE +ENV POLKADOT_LAUNCH_BRANCH $POLKADOT_LAUNCH_BRANCH +ENV LAUNCH_CONFIG_FILE $LAUNCH_CONFIG_FILE + +RUN apt-get -y update && \ + apt-get -y install curl git && \ + curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.35.3/install.sh | bash && \ + export NVM_DIR="$HOME/.nvm" && \ + [ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" && \ + nvm install v16.16.0 && \ + nvm use v16.16.0 + +RUN git clone https://github.com/uniquenetwork/polkadot-launch -b ${POLKADOT_LAUNCH_BRANCH} + +RUN export NVM_DIR="$HOME/.nvm" && \ + [ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" && \ + cd /polkadot-launch && \ + npm install --global yarn && \ + yarn install + +COPY --from=builder-unique /unique_parachain/.docker/xcm-config/${LAUNCH_CONFIG_FILE} /polkadot-launch/ +COPY --from=builder-unique /unique_parachain/.docker/xcm-config/5validators.jsonnet /polkadot-launch/5validators.jsonnet +COPY --from=builder-unique /unique_parachain/.docker/xcm-config/minBondFix.jsonnet /polkadot-launch/minBondFix.jsonnet + +COPY --from=builder-unique /unique_parachain/target/release/unique-collator /unique-chain/target/release/ +COPY --from=uniquenetwork/builder-polkadot:${POLKADOT_BUILD_BRANCH} /unique_parachain/polkadot/target/release/polkadot /polkadot/target/release/ +COPY --from=uniquenetwork/builder-moonbeam:${MOONRIVER_BUILD_BRANCH} /unique_parachain/moonbeam/target/release/moonbeam /moonbeam/target/release/ +COPY --from=uniquenetwork/builder-cumulus:${STATEMINE_BUILD_BRANCH} /unique_parachain/cumulus/target/release/polkadot-parachain /cumulus/target/release/cumulus +COPY --from=uniquenetwork/builder-acala:${KARURA_BUILD_BRANCH} /unique_parachain/Acala/target/production/acala /acala/target/release/ +COPY --from=uniquenetwork/builder-chainql:latest /chainql/target/release/chainql /chainql/target/release/ + +EXPOSE 9844 +EXPOSE 9944 +EXPOSE 9946 +EXPOSE 9947 +EXPOSE 9948 + +CMD export NVM_DIR="$HOME/.nvm" PATH="$PATH:/chainql/target/release" && \ + [ -s "$NVM_DIR/nvm.sh" ] && \. 
"$NVM_DIR/nvm.sh" && \ + cd /polkadot-launch && \ + yarn start ${LAUNCH_CONFIG_FILE} diff --git a/.docker/additional/xcm-rococo/Dockerfile-xcm-unique-rococo b/.docker/additional/xcm-rococo/Dockerfile-xcm-unique-rococo new file mode 100644 index 0000000000..92ed31d6a0 --- /dev/null +++ b/.docker/additional/xcm-rococo/Dockerfile-xcm-unique-rococo @@ -0,0 +1,91 @@ +ARG CHAIN=unique +ARG LAUNCH_CONFIG_FILE=launch-config-xcm-unique-rococo.json + +# ===== Rust builder ===== +FROM ubuntu:20.04 as rust-builder +LABEL maintainer="Unique.Network" + +ARG RUST_TOOLCHAIN +ENV RUST_TOOLCHAIN $RUST_TOOLCHAIN +ENV CARGO_HOME="/cargo-home" +ENV PATH="/cargo-home/bin:$PATH" +ENV TZ=UTC +RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone + +RUN apt-get update && \ + apt-get install -y curl cmake pkg-config libssl-dev git clang llvm libudev-dev protobuf-compiler && \ + apt-get clean && \ + rm -r /var/lib/apt/lists/* + +RUN curl https://sh.rustup.rs -sSf | sh -s -- -y --default-toolchain none + +RUN rustup toolchain uninstall $(rustup toolchain list) && \ + rustup toolchain install ${RUST_TOOLCHAIN} && \ + rustup default ${RUST_TOOLCHAIN} && \ + rustup target list --installed && \ + rustup show +RUN rustup target add wasm32-unknown-unknown --toolchain ${RUST_TOOLCHAIN} + +RUN mkdir /unique_parachain +WORKDIR /unique_parachain + +# ===== BUILD ====== +FROM rust-builder as builder-unique + +ARG UNIQUE_BRANCH +ARG CHAIN +ARG LAUNCH_CONFIG_FILE +ARG PROFILE=release + +WORKDIR /unique_parachain +#COPY . . + +RUN git clone -b ${UNIQUE_BRANCH} https://github.com/UniqueNetwork/unique-chain.git . && \ +# cd unique-chain && \ + cargo build --features=${CHAIN}-runtime --$PROFILE + +# ===== RUN ====== +FROM ubuntu:20.04 + +ARG POLKADOT_LAUNCH_BRANCH +ARG LAUNCH_CONFIG_FILE +ENV POLKADOT_LAUNCH_BRANCH $POLKADOT_LAUNCH_BRANCH +ENV LAUNCH_CONFIG_FILE $LAUNCH_CONFIG_FILE + +RUN apt-get -y update && \ + apt-get -y install curl git && \ + curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.35.3/install.sh | bash && \ + export NVM_DIR="$HOME/.nvm" && \ + [ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" && \ + nvm install v16.16.0 && \ + nvm use v16.16.0 + +RUN git clone https://github.com/uniquenetwork/polkadot-launch -b ${POLKADOT_LAUNCH_BRANCH} + +RUN export NVM_DIR="$HOME/.nvm" && \ + [ -s "$NVM_DIR/nvm.sh" ] && \. 
"$NVM_DIR/nvm.sh" && \ + cd /polkadot-launch && \ + npm install --global yarn && \ + yarn install + +COPY --from=builder-unique /unique_parachain/.docker/xcm-config/${LAUNCH_CONFIG_FILE} /polkadot-launch/ +COPY --from=builder-unique /unique_parachain/.docker/xcm-config/5validators.jsonnet /polkadot-launch/5validators.jsonnet +COPY --from=builder-unique /unique_parachain/.docker/xcm-config/minBondFix.jsonnet /polkadot-launch/minBondFix.jsonnet + +COPY --from=builder-unique /unique_parachain/target/release/unique-collator /unique-chain/target/release/ +COPY --from=uniquenetwork/builder-polkadot:${POLKADOT_BUILD_BRANCH} /unique_parachain/polkadot/target/release/polkadot /polkadot/target/release/ +COPY --from=uniquenetwork/builder-moonbeam:${MOONBEAM_BUILD_BRANCH} /unique_parachain/moonbeam/target/release/moonbeam /moonbeam/target/release/ +COPY --from=uniquenetwork/builder-cumulus:${STATEMINT_BUILD_BRANCH} /unique_parachain/cumulus/target/release/polkadot-parachain /cumulus/target/release/cumulus +COPY --from=uniquenetwork/builder-acala:${ACALA_BUILD_BRANCH} /unique_parachain/Acala/target/production/acala /acala/target/release/ +COPY --from=uniquenetwork/builder-chainql:latest /chainql/target/release/chainql /chainql/target/release/ + +EXPOSE 9844 +EXPOSE 9944 +EXPOSE 9946 +EXPOSE 9947 +EXPOSE 9948 + +CMD export NVM_DIR="$HOME/.nvm" PATH="$PATH:/chainql/target/release" && \ + [ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" && \ + cd /polkadot-launch && \ + yarn start ${LAUNCH_CONFIG_FILE} diff --git a/.docker/additional/xcm-rococo/docker-compose-xcm-opal-rococo.yml b/.docker/additional/xcm-rococo/docker-compose-xcm-opal-rococo.yml new file mode 100644 index 0000000000..40f7410577 --- /dev/null +++ b/.docker/additional/xcm-rococo/docker-compose-xcm-opal-rococo.yml @@ -0,0 +1,26 @@ +version: "3.5" + +services: + xcm_opal_rococo: + build: + context: . + dockerfile: .Dockerfile-xcm-opal-rococo + container_name: xcm-opal-rococo + image: xcm-opal-rococo:latest + env_file: .env + expose: + - 9844 + - 9944 + - 9946 + - 9947 + - 9948 + ports: + - 127.0.0.1:9844:9844 + - 127.0.0.1:9944:9944 + - 127.0.0.1:9946:9946 + - 127.0.0.1:9947:9947 + - 127.0.0.1:9948:9948 + logging: + options: + max-size: "1m" + max-file: "3" diff --git a/.docker/additional/xcm-rococo/docker-compose-xcm-quartz-rococo.yml b/.docker/additional/xcm-rococo/docker-compose-xcm-quartz-rococo.yml new file mode 100644 index 0000000000..561fa57ddd --- /dev/null +++ b/.docker/additional/xcm-rococo/docker-compose-xcm-quartz-rococo.yml @@ -0,0 +1,26 @@ +version: "3.5" + +services: + xcm_quartz_rococo: + build: + context: . + dockerfile: .Dockerfile-xcm-quartz-rococo + container_name: xcm-quartz-rococo + image: xcm-quartz-rococo:latest + env_file: .env + expose: + - 9844 + - 9944 + - 9946 + - 9947 + - 9948 + ports: + - 127.0.0.1:9844:9844 + - 127.0.0.1:9944:9944 + - 127.0.0.1:9946:9946 + - 127.0.0.1:9947:9947 + - 127.0.0.1:9948:9948 + logging: + options: + max-size: "1m" + max-file: "3" diff --git a/.docker/additional/xcm-rococo/docker-compose-xcm-unique-rococo.yml b/.docker/additional/xcm-rococo/docker-compose-xcm-unique-rococo.yml new file mode 100644 index 0000000000..116c59651f --- /dev/null +++ b/.docker/additional/xcm-rococo/docker-compose-xcm-unique-rococo.yml @@ -0,0 +1,26 @@ +version: "3.5" + +services: + xcm_unique_rococo: + build: + context: . 
+ dockerfile: .Dockerfile-xcm-unique-rococo + container_name: xcm-unique-rococo + image: xcm-unique-rococo:latest + env_file: .env + expose: + - 9844 + - 9944 + - 9946 + - 9947 + - 9948 + ports: + - 127.0.0.1:9844:9844 + - 127.0.0.1:9944:9944 + - 127.0.0.1:9946:9946 + - 127.0.0.1:9947:9947 + - 127.0.0.1:9948:9948 + logging: + options: + max-size: "1m" + max-file: "3" diff --git a/.docker/docker-compose-dev.yaml b/.docker/docker-compose-dev.yaml new file mode 100644 index 0000000000..87989b4a5d --- /dev/null +++ b/.docker/docker-compose-dev.yaml @@ -0,0 +1,9 @@ +version: "3.5" + +services: + node-dev: + build: + context: ../ + dockerfile: .docker/Dockerfile-chain-dev + image: node-dev + container_name: node-dev diff --git a/.docker/docker-compose-forkless.yml b/.docker/docker-compose-forkless.yml index d2a4e90f7a..dc1598a949 100644 --- a/.docker/docker-compose-forkless.yml +++ b/.docker/docker-compose-forkless.yml @@ -21,4 +21,4 @@ services: logging: options: max-size: "1m" - max-file: "3" \ No newline at end of file + max-file: "3" diff --git a/.docker/docker-compose-master.yml b/.docker/docker-compose-master.yml new file mode 100644 index 0000000000..13acfa1b60 --- /dev/null +++ b/.docker/docker-compose-master.yml @@ -0,0 +1,19 @@ +version: "3.5" + +services: + blockchain_nodes: + build: + context: ../ + dockerfile: .docker/Dockerfile-parachain + image: blockchain_nodes + container_name: blockchain_nodes + expose: + - 9944 + - 9933 + ports: + - 127.0.0.1:9944:9944 + - 127.0.0.1:9933:9933 + logging: + options: + max-size: "1m" + max-file: "3" diff --git a/.docker/docker-compose-try-runtime.yml b/.docker/docker-compose-try-runtime.yml index f3af2f137b..b14543d4ba 100644 --- a/.docker/docker-compose-try-runtime.yml +++ b/.docker/docker-compose-try-runtime.yml @@ -16,4 +16,4 @@ services: logging: options: max-size: "1m" - max-file: "3" \ No newline at end of file + max-file: "3" diff --git a/.docker/docker-compose.tmp-dev.j2 b/.docker/docker-compose.tmp-dev.j2 new file mode 100644 index 0000000000..6f706be8df --- /dev/null +++ b/.docker/docker-compose.tmp-dev.j2 @@ -0,0 +1,21 @@ +version: "3.5" + +services: + node-dev: + build: + args: + - "RUST_TOOLCHAIN={{ RUST_TOOLCHAIN }}" + - "FEATURE={{ FEATURE }}" + context: ../ + dockerfile: .docker/Dockerfile-chain-dev + expose: + - 9944 + - 9933 + ports: + - 127.0.0.1:9944:9944 + - 127.0.0.1:9933:9933 + logging: + options: + max-size: "1m" + max-file: "3" + command: cargo run --release --features={{ FEATURE }} -- --dev -linfo --unsafe-ws-external --rpc-cors=all --unsafe-rpc-external diff --git a/.docker/docker-compose.tmp-forkless-data.j2 b/.docker/docker-compose.tmp-forkless-data.j2 index 4637d9909b..6888b3a0ff 100644 --- a/.docker/docker-compose.tmp-forkless-data.j2 +++ b/.docker/docker-compose.tmp-forkless-data.j2 @@ -39,4 +39,4 @@ services: logging: options: max-size: "1m" - max-file: "3" \ No newline at end of file + max-file: "3" diff --git a/.docker/docker-compose.tmp-forkless-nodata.j2 b/.docker/docker-compose.tmp-forkless-nodata.j2 index 7bcf5b58d6..66f9ec634b 100644 --- a/.docker/docker-compose.tmp-forkless-nodata.j2 +++ b/.docker/docker-compose.tmp-forkless-nodata.j2 @@ -30,4 +30,4 @@ services: logging: options: max-size: "1m" - max-file: "3" \ No newline at end of file + max-file: "3" diff --git a/.docker/docker-compose.tmp-master.j2 b/.docker/docker-compose.tmp-master.j2 new file mode 100644 index 0000000000..79fa55b184 --- /dev/null +++ b/.docker/docker-compose.tmp-master.j2 @@ -0,0 +1,11 @@ +version: "3.5" + +services: + 
blockchain_nodes: + build: + args: + - "RUST_TOOLCHAIN={{ RUST_TOOLCHAIN }}" + - "BRANCH={{ BRANCH }}" + - "REPO_URL={{ REPO_URL }}" + - "FEATURE={{ FEATURE }}" + - "POLKADOT_BUILD_BRANCH={{ POLKADOT_BUILD_BRANCH }}" diff --git a/.docker/docker-compose.tmp-node.j2 b/.docker/docker-compose.tmp-node.j2 new file mode 100644 index 0000000000..69da0764db --- /dev/null +++ b/.docker/docker-compose.tmp-node.j2 @@ -0,0 +1,37 @@ +version: "3.5" + +services: + node-parachain: + build: + args: + - "RUST_TOOLCHAIN={{ RUST_TOOLCHAIN }}" + - "BRANCH={{ BRANCH }}" + - "REPO_URL={{ REPO_URL }}" + - "FEATURE={{ FEATURE }}" + - "RUNTIME={{ RUNTIME }}" + - "POLKADOT_BUILD_BRANCH={{ POLKADOT_BUILD_BRANCH }}" + - "MAINNET_TAG={{ MAINNET_TAG }}" + - "MAINNET_BRANCH={{ MAINNET_BRANCH }}" + context: ../ + dockerfile: .docker/Dockerfile-parachain-node-only + image: node-parachain + container_name: node-parachain + volumes: + - type: bind + source: ./launch-config-forkless-nodata.json + target: /polkadot-launch/launch-config.json + read_only: true + expose: + - 9944 + - 9945 + - 9933 + - 9844 + ports: + - 127.0.0.1:9944:9944 + - 127.0.0.1:9945:9945 + - 127.0.0.1:9933:9933 + - 127.0.0.1:9844:9844 + logging: + options: + max-size: "1m" + max-file: "3" diff --git a/.docker/docker-compose.tmp-unit.j2 b/.docker/docker-compose.tmp-unit.j2 new file mode 100644 index 0000000000..c8bb94706c --- /dev/null +++ b/.docker/docker-compose.tmp-unit.j2 @@ -0,0 +1,14 @@ +version: "3.5" + +services: + node-dev: + build: + context: ../ + dockerfile: .docker/Dockerfile-chain-dev-unit + args: + - "RUST_TOOLCHAIN={{ RUST_TOOLCHAIN }}" + - "FEATURE={{ FEATURE }}" + logging: + options: + max-size: "1m" + max-file: "3" diff --git a/.docker/docker-compose.tmp-xcm-tests.j2 b/.docker/docker-compose.tmp-xcm-tests.j2 new file mode 100644 index 0000000000..7458da0db7 --- /dev/null +++ b/.docker/docker-compose.tmp-xcm-tests.j2 @@ -0,0 +1,25 @@ +version: "3.5" + +services: + xcm_nodes: + image: uniquenetwork/xcm-{{ NETWORK }}-testnet-local:latest + container_name: xcm-{{ NETWORK }}-testnet-local + expose: + - 9844 + - 9933 + - 9944 + - 9946 + - 9947 + - 9948 + ports: + - 127.0.0.1:9844:9844 + - 127.0.0.1:9933:9933 + - 127.0.0.1:9944:9944 + - 127.0.0.1:9946:9946 + - 127.0.0.1:9947:9947 + - 127.0.0.1:9948:9948 + logging: + options: + max-size: "1m" + max-file: "3" + diff --git a/.docker/docker-compose.try-runtime.j2 b/.docker/docker-compose.try-runtime.j2 index 41b1488957..ade4fba0e9 100644 --- a/.docker/docker-compose.try-runtime.j2 +++ b/.docker/docker-compose.try-runtime.j2 @@ -7,3 +7,4 @@ services: - "RUST_TOOLCHAIN={{ RUST_TOOLCHAIN }}" - "FEATURE={{ FEATURE }}" - "REPLICA_FROM={{ REPLICA_FROM }}" + diff --git a/.docker/forkless-config/fork.jsonnet b/.docker/forkless-config/fork.jsonnet index 375bcfed67..af13f5db71 100644 --- a/.docker/forkless-config/fork.jsonnet +++ b/.docker/forkless-config/fork.jsonnet @@ -93,4 +93,4 @@ outSpec { }, }, }, -} \ No newline at end of file +} diff --git a/.docker/forkless-config/launch-config-forkless-data.j2 b/.docker/forkless-config/launch-config-forkless-data.j2 index f6b6bf65a0..7f56597fc4 100644 --- a/.docker/forkless-config/launch-config-forkless-data.j2 +++ b/.docker/forkless-config/launch-config-forkless-data.j2 @@ -108,7 +108,11 @@ "--rpc-cors=all", "--unsafe-rpc-external", "--unsafe-ws-external", - "-lxcm=trace,parity_ws::handler=debug,jsonrpsee_core=trace,jsonrpsee-core=trace,jsonrpsee_ws_server=debug" + 
"-lxcm=trace,parity_ws::handler=debug,jsonrpsee_core=trace,jsonrpsee-core=trace,jsonrpsee_ws_server=debug", + "--", + "--port=31335", + "--ws-port=9745", + "--rpc-port=9734" ] }, { @@ -129,4 +133,4 @@ "simpleParachains": [], "hrmpChannels": [], "finalization": false -} \ No newline at end of file +} diff --git a/.docker/forkless-config/launch-config-forkless-nodata.j2 b/.docker/forkless-config/launch-config-forkless-nodata.j2 index beb1c3c898..2dc476f4b8 100644 --- a/.docker/forkless-config/launch-config-forkless-nodata.j2 +++ b/.docker/forkless-config/launch-config-forkless-nodata.j2 @@ -102,7 +102,11 @@ "--unsafe-rpc-external", "--unsafe-ws-external", "-lxcm=trace,parity_ws::handler=debug,jsonrpsee_core=trace,jsonrpsee-core=trace,jsonrpsee_ws_server=debug", - "--ws-max-connections=1000" + "--ws-max-connections=1000", + "--", + "--port=31335", + "--ws-port=9745", + "--rpc-port=9734" ] }, { @@ -115,7 +119,11 @@ "--unsafe-rpc-external", "--unsafe-ws-external", "-lxcm=trace,parity_ws::handler=debug,jsonrpsee_core=trace,jsonrpsee-core=trace,jsonrpsee_ws_server=debug", - "--ws-max-connections=1000" + "--ws-max-connections=1000", + "--", + "--port=31337", + "--ws-port=9747", + "--rpc-port=9737" ] } ] @@ -124,4 +132,4 @@ "simpleParachains": [], "hrmpChannels": [], "finalization": false -} \ No newline at end of file +} diff --git a/.docker/forkless-config/launch-config-node-update-only-v3.j2 b/.docker/forkless-config/launch-config-node-update-only-v3.j2 index 080d73ccc7..9b111697a1 100644 --- a/.docker/forkless-config/launch-config-node-update-only-v3.j2 +++ b/.docker/forkless-config/launch-config-node-update-only-v3.j2 @@ -102,7 +102,11 @@ "--unsafe-rpc-external", "--unsafe-ws-external", "-lxcm=trace,parity_ws::handler=debug,jsonrpsee_core=trace,jsonrpsee-core=trace,jsonrpsee_ws_server=debug", - "--ws-max-connections=1000" + "--ws-max-connections=1000", + "--", + "--port=31335", + "--ws-port=9745", + "--rpc-port=9734" ] }, { @@ -124,4 +128,4 @@ "simpleParachains": [], "hrmpChannels": [], "finalization": false -} \ No newline at end of file +} diff --git a/.docker/forkless-config/typeNames.jsonnet b/.docker/forkless-config/typeNames.jsonnet index b463287e23..82bcccefe5 100644 --- a/.docker/forkless-config/typeNames.jsonnet +++ b/.docker/forkless-config/typeNames.jsonnet @@ -48,4 +48,4 @@ types // local systemAccount = chain._decode(types['AccountInfo'], encoded); -// chain.System._encodeKey.Account(['0xd43593c715fdd31c61141abd04a99fd6822c8558854ccde39a5684e7a56da27d']) \ No newline at end of file +// chain.System._encodeKey.Account(['0xd43593c715fdd31c61141abd04a99fd6822c8558854ccde39a5684e7a56da27d']) diff --git a/.docker/testnet-config/launch-config.json b/.docker/testnet-config/launch-config.json index 7e8fa5b30e..9709a36df6 100644 --- a/.docker/testnet-config/launch-config.json +++ b/.docker/testnet-config/launch-config.json @@ -118,4 +118,4 @@ "simpleParachains": [], "hrmpChannels": [], "finalization": false -} \ No newline at end of file +} diff --git a/.docker/xcm-config/5validators.jsonnet b/.docker/xcm-config/5validators.jsonnet new file mode 100644 index 0000000000..582cc9d3c5 --- /dev/null +++ b/.docker/xcm-config/5validators.jsonnet @@ -0,0 +1,50 @@ + +function(spec) + spec { + genesis+: { + runtime+: { + staking+: { + validatorCount: 5, + invulnerables: [ + '5GNJqTPyNqANBkUVMN1LPPrxXnFouWXoe2wNSmmEoLctxiZY', + '5HpG9w8EBLe5XCrbczpwq5TSXvedjrBGCwqxK1iQ7qUsSWFc', + '5Ck5SLSHYac6WFt5UZRSsdJjwmpSZq85fd5TRNAdZQVzEAPT', + '5HKPmK9GYtE1PSLsS1qiYU9xQ9Si1NcEhdeCq9sw5bqu4ns8', + 
'5FCfAonRZgTFrTd9HREEyeJjDpT397KMzizE6T3DvebLFE7n', + ], + stakers: [ + [ + '5GNJqTPyNqANBkUVMN1LPPrxXnFouWXoe2wNSmmEoLctxiZY', + '5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY', + 100000000000000, + 'Validator', + ], + [ + '5HpG9w8EBLe5XCrbczpwq5TSXvedjrBGCwqxK1iQ7qUsSWFc', + '5FHneW46xGXgs5mUiveU4sbTyGBzmstUspZC92UhjJM694ty', + 100000000000000, + 'Validator', + ], + [ + '5Ck5SLSHYac6WFt5UZRSsdJjwmpSZq85fd5TRNAdZQVzEAPT', + '5FLSigC9HGRKVhB9FiEo4Y3koPsNmBmLJbpXg2mp1hXcS59Y', + 100000000000000, + 'Validator', + ], + [ + '5HKPmK9GYtE1PSLsS1qiYU9xQ9Si1NcEhdeCq9sw5bqu4ns8', + '5DAAnrj7VHTznn2AWBemMuyBwZWs6FNFjdyVXUeYum3PTXFy', + 100000000000000, + 'Validator', + ], + [ + '5FCfAonRZgTFrTd9HREEyeJjDpT397KMzizE6T3DvebLFE7n', + '5HGjWAeFDfFCWPsjFQdVV2Msvz2XtMktvgocEZcCj68kUMaw', + 100000000000000, + 'Validator', + ], + ], + }, + }, + }, + } diff --git a/.docker/xcm-config/launch-config-xcm-opal-rococo.json b/.docker/xcm-config/launch-config-xcm-opal-rococo.json new file mode 100644 index 0000000000..18b7e00f39 --- /dev/null +++ b/.docker/xcm-config/launch-config-xcm-opal-rococo.json @@ -0,0 +1,134 @@ +{ + "relaychain": { + "bin": "/polkadot/target/release/polkadot", + "chain": "rococo-local", + "chainInitializer": [ + "chainql", + "--tla-code=spec=import '${spec}'", + "5validators.jsonnet" + ], + "nodes": [ + { + "name": "alice", + "wsPort": 9844, + "rpcPort": 9843, + "port": 30444, + "flags": [ + "--unsafe-rpc-external", + "--unsafe-ws-external", + "-lparachain::candidate_validation=debug" + ] + }, + { + "name": "bob", + "wsPort": 9855, + "rpcPort": 9854, + "port": 30555, + "flags": [ + "--unsafe-rpc-external", + "--unsafe-ws-external", + "-lparachain::candidate_validation=debug" + ] + }, + { + "name": "charlie", + "wsPort": 9866, + "rpcPort": 9865, + "port": 30666, + "flags": [ + "--unsafe-rpc-external", + "--unsafe-ws-external", + "-lparachain::candidate_validation=debug" + ] + }, + { + "name": "dave", + "wsPort": 9877, + "rpcPort": 9876, + "port": 30777, + "flags": [ + "--unsafe-rpc-external", + "--unsafe-ws-external", + "-lparachain::candidate_validation=debug" + ] + }, + { + "name": "eve", + "wsPort": 9888, + "rpcPort": 9887, + "port": 30888, + "flags": [ + "--unsafe-rpc-external", + "--unsafe-ws-external", + "-lparachain::candidate_validation=debug" + ] + } + + ], + "genesis": { + "runtime": { + "runtime_genesis_config": { + "parachainsConfiguration": { + "config": { + "validation_upgrade_frequency": 1, + "validation_upgrade_delay": 1 + } + } + } + } + } + }, + "parachains": [ + { + "bin": "/unique-chain/target/release/unique-collator", + "id": "2095", + "balance": "1000000000000000000000000", + "nodes": [ + { + "port": 31200, + "wsPort": 9944, + "rpcPort": 9933, + "name": "alice", + "flags": [ + "--unsafe-rpc-external", + "--unsafe-ws-external" + ] + } + ] + }, + { + "bin": "/cumulus/target/release/cumulus", + "id": "1000", + "chain": "westmint-local", + "balance": "1000000000000000000000000", + "nodes": [ + { + "wsPort": 9948, + "port": 31204, + "name": "alice", + "flags": [ + "--unsafe-rpc-external", + "--unsafe-ws-external" + ] + } + ] + } + ], + "simpleParachains": [], + "hrmpChannels": [ + { + "sender": 2095, + "recipient": 1000, + "maxCapacity": 8, + "maxMessageSize": 512 + }, + { + "sender": 1000, + "recipient": 2095, + "maxCapacity": 8, + "maxMessageSize": 512 + } + ], + "finalization": false +} + diff --git a/.docker/xcm-config/launch-config-xcm-opal.json b/.docker/xcm-config/launch-config-xcm-opal.json new file mode 100644 index 0000000000..6711dbad57 --- /dev/null +++ 
b/.docker/xcm-config/launch-config-xcm-opal.json @@ -0,0 +1,135 @@ +{ + "relaychain": { + "bin": "/polkadot/target/release/polkadot", + "chain": "westend-local", + "chainInitializer": [ + "chainql", + "--tla-code=spec=import '${spec}'", + "5validators.jsonnet" + ], + "nodes": [ + { + "name": "alice", + "wsPort": 9844, + "rpcPort": 9843, + "port": 30444, + "flags": [ + "--unsafe-rpc-external", + "--unsafe-ws-external", + "-lparachain::candidate_validation=debug" + ] + }, + { + "name": "bob", + "wsPort": 9855, + "rpcPort": 9854, + "port": 30555, + "flags": [ + "--unsafe-rpc-external", + "--unsafe-ws-external", + "-lparachain::candidate_validation=debug" + ] + }, + { + "name": "charlie", + "wsPort": 9866, + "rpcPort": 9865, + "port": 30666, + "flags": [ + "--unsafe-rpc-external", + "--unsafe-ws-external", + "-lparachain::candidate_validation=debug" + ] + }, + { + "name": "dave", + "wsPort": 9877, + "rpcPort": 9876, + "port": 30777, + "flags": [ + "--unsafe-rpc-external", + "--unsafe-ws-external", + "-lparachain::candidate_validation=debug" + ] + }, + { + "name": "eve", + "wsPort": 9888, + "rpcPort": 9887, + "port": 30888, + "flags": [ + "--unsafe-rpc-external", + "--unsafe-ws-external", + "-lparachain::candidate_validation=debug" + ] + } + + ], + "genesis": { + "runtime": { + "runtime_genesis_config": { + "parachainsConfiguration": { + "config": { + "validation_upgrade_frequency": 1, + "validation_upgrade_delay": 1 + } + } + } + } + } + }, + "parachains": [ + { + "bin": "/unique-chain/target/release/unique-collator", + "id": "2095", + "balance": "1000000000000000000000000", + "nodes": [ + { + "port": 31200, + "wsPort": 9944, + "rpcPort": 9933, + "name": "alice", + "flags": [ + "--unsafe-rpc-external", + "-lxcm=trace,parity_ws::handler=debug,jsonrpsee_core=trace,jsonrpsee-core=trace,jsonrpsee_ws_server=debug", + "--unsafe-ws-external" + ] + } + ] + }, + { + "bin": "/cumulus/target/release/cumulus", + "id": "1000", + "chain": "westmint-local", + "balance": "1000000000000000000000000", + "nodes": [ + { + "wsPort": 9948, + "port": 31204, + "name": "alice", + "flags": [ + "--unsafe-rpc-external", + "--unsafe-ws-external" + ] + } + ] + } + ], + "simpleParachains": [], + "hrmpChannels": [ + { + "sender": 2095, + "recipient": 1000, + "maxCapacity": 8, + "maxMessageSize": 512 + }, + { + "sender": 1000, + "recipient": 2095, + "maxCapacity": 8, + "maxMessageSize": 512 + } + ], + "finalization": false +} + diff --git a/.docker/xcm-config/launch-config-xcm-quartz-rococo.json b/.docker/xcm-config/launch-config-xcm-quartz-rococo.json new file mode 100644 index 0000000000..52c55770fd --- /dev/null +++ b/.docker/xcm-config/launch-config-xcm-quartz-rococo.json @@ -0,0 +1,199 @@ +{ + "relaychain": { + "bin": "/polkadot/target/release/polkadot", + "chain": "rococo-local", + "chainInitializer": [ + "chainql", + "--tla-code=spec=import '${spec}'", + "5validators.jsonnet" + ], + "nodes": [ + { + "name": "alice", + "wsPort": 9844, + "rpcPort": 9843, + "port": 30444, + "flags": [ + "--unsafe-rpc-external", + "--unsafe-ws-external", + "-lparachain::candidate_validation=debug" + ] + }, + { + "name": "bob", + "wsPort": 9855, + "rpcPort": 9854, + "port": 30555, + "flags": [ + "--unsafe-rpc-external", + "--unsafe-ws-external", + "-lparachain::candidate_validation=debug" + ] + }, + { + "name": "charlie", + "wsPort": 9866, + "rpcPort": 9865, + "port": 30666, + "flags": [ + "--unsafe-rpc-external", + "--unsafe-ws-external", + "-lparachain::candidate_validation=debug" + ] + }, + { + "name": "dave", + "wsPort": 9877, + 
"rpcPort": 9876, + "port": 30777, + "flags": [ + "--unsafe-rpc-external", + "--unsafe-ws-external", + "-lparachain::candidate_validation=debug" + ] + }, + { + "name": "eve", + "wsPort": 9888, + "rpcPort": 9887, + "port": 30888, + "flags": [ + "--unsafe-rpc-external", + "--unsafe-ws-external", + "-lparachain::candidate_validation=debug" + ] + } + + ], + "genesis": { + "runtime": { + "runtime_genesis_config": { + "parachainsConfiguration": { + "config": { + "validation_upgrade_frequency": 1, + "validation_upgrade_delay": 1 + } + } + } + } + } + }, + "parachains": [ + { + "bin": "/unique-chain/target/release/unique-collator", + "id": "2095", + "balance": "1000000000000000000000000", + "nodes": [ + { + "port": 31200, + "wsPort": 9944, + "rpcPort": 9933, + "name": "alice", + "flags": [ + "--unsafe-rpc-external", + "--unsafe-ws-external" + ] + } + ] + }, + { + "bin": "/acala/target/release/acala", + "id": "2000", + "chain": "karura-dev", + "balance": "1000000000000000000000", + "nodes": [ + { + "wsPort": 9946, + "port": 31202, + "name": "alice", + "flags": [ + "--unsafe-rpc-external", + "--unsafe-ws-external" + ] + } + ] + }, + { + "bin": "/moonbeam/target/release/moonbeam", + "id": 2023, + "balance": "1000000000000000000000", + "chain": "moonriver-local", + "chainInitializer": [ + "chainql", + "--tla-code=spec=import '${spec}'", + "minBondFix.jsonnet" + ], + "nodes": [ + { + "wsPort": 9947, + "port": 31203, + "name": "alice", + "flags": [ + "--unsafe-rpc-external", + "--unsafe-ws-external", + "--", + "--execution=wasm" + ] + } + ] + }, + { + "bin": "/cumulus/target/release/cumulus", + "id": "1000", + "chain": "statemine-local", + "balance": "1000000000000000000000000", + "nodes": [ + { + "wsPort": 9948, + "port": 31204, + "name": "alice", + "flags": [ + "--unsafe-rpc-external", + "--unsafe-ws-external" + ] + } + ] + } + ], + "simpleParachains": [], + "hrmpChannels": [ + { + "sender": 2095, + "recipient": 2000, + "maxCapacity": 8, + "maxMessageSize": 512 + }, + { + "sender": 2000, + "recipient": 2095, + "maxCapacity": 8, + "maxMessageSize": 512 + }, + { + "sender": 2095, + "recipient": 2023, + "maxCapacity": 8, + "maxMessageSize": 512 + }, + { + "sender": 2023, + "recipient": 2095, + "maxCapacity": 8, + "maxMessageSize": 512 + }, + { + "sender": 2095, + "recipient": 1000, + "maxCapacity": 8, + "maxMessageSize": 512 + }, + { + "sender": 1000, + "recipient": 2095, + "maxCapacity": 8, + "maxMessageSize": 512 + } + ], + "finalization": false +} + diff --git a/.docker/xcm-config/launch-config-xcm-quartz.json b/.docker/xcm-config/launch-config-xcm-quartz.json new file mode 100644 index 0000000000..14b1280911 --- /dev/null +++ b/.docker/xcm-config/launch-config-xcm-quartz.json @@ -0,0 +1,200 @@ +{ + "relaychain": { + "bin": "/polkadot/target/release/polkadot", + "chain": "westend-local", + "chainInitializer": [ + "chainql", + "--tla-code=spec=import '${spec}'", + "5validators.jsonnet" + ], + "nodes": [ + { + "name": "alice", + "wsPort": 9844, + "rpcPort": 9843, + "port": 30444, + "flags": [ + "--unsafe-rpc-external", + "--unsafe-ws-external", + "-lparachain::candidate_validation=debug" + ] + }, + { + "name": "bob", + "wsPort": 9855, + "rpcPort": 9854, + "port": 30555, + "flags": [ + "--unsafe-rpc-external", + "--unsafe-ws-external", + "-lparachain::candidate_validation=debug" + ] + }, + { + "name": "charlie", + "wsPort": 9866, + "rpcPort": 9865, + "port": 30666, + "flags": [ + "--unsafe-rpc-external", + "--unsafe-ws-external", + "-lparachain::candidate_validation=debug" + ] + }, + { + "name": "dave", + 
"wsPort": 9877, + "rpcPort": 9876, + "port": 30777, + "flags": [ + "--unsafe-rpc-external", + "--unsafe-ws-external", + "-lparachain::candidate_validation=debug" + ] + }, + { + "name": "eve", + "wsPort": 9888, + "rpcPort": 9887, + "port": 30888, + "flags": [ + "--unsafe-rpc-external", + "--unsafe-ws-external", + "-lparachain::candidate_validation=debug" + ] + } + + ], + "genesis": { + "runtime": { + "runtime_genesis_config": { + "parachainsConfiguration": { + "config": { + "validation_upgrade_frequency": 1, + "validation_upgrade_delay": 1 + } + } + } + } + } + }, + "parachains": [ + { + "bin": "/unique-chain/target/release/unique-collator", + "id": "2095", + "balance": "1000000000000000000000000", + "nodes": [ + { + "port": 31200, + "wsPort": 9944, + "rpcPort": 9933, + "name": "alice", + "flags": [ + "-lxcm=trace,parity_ws::handler=debug,jsonrpsee_core=trace,jsonrpsee-core=trace,jsonrpsee_ws_server=debug", + "--unsafe-rpc-external", + "--unsafe-ws-external" + ] + } + ] + }, + { + "bin": "/acala/target/release/acala", + "id": "2000", + "chain": "karura-dev", + "balance": "1000000000000000000000", + "nodes": [ + { + "wsPort": 9946, + "port": 31202, + "name": "alice", + "flags": [ + "--unsafe-rpc-external", + "--unsafe-ws-external" + ] + } + ] + }, + { + "bin": "/moonbeam/target/release/moonbeam", + "id": 2023, + "balance": "1000000000000000000000", + "chain": "moonriver-local", + "chainInitializer": [ + "chainql", + "--tla-code=spec=import '${spec}'", + "minBondFix.jsonnet" + ], + "nodes": [ + { + "wsPort": 9947, + "port": 31203, + "name": "alice", + "flags": [ + "--unsafe-rpc-external", + "--unsafe-ws-external", + "--", + "--execution=wasm" + ] + } + ] + }, + { + "bin": "/cumulus/target/release/cumulus", + "id": "1000", + "chain": "statemine-local", + "balance": "1000000000000000000000000", + "nodes": [ + { + "wsPort": 9948, + "port": 31204, + "name": "alice", + "flags": [ + "--unsafe-rpc-external", + "--unsafe-ws-external" + ] + } + ] + } + ], + "simpleParachains": [], + "hrmpChannels": [ + { + "sender": 2095, + "recipient": 2000, + "maxCapacity": 8, + "maxMessageSize": 512 + }, + { + "sender": 2000, + "recipient": 2095, + "maxCapacity": 8, + "maxMessageSize": 512 + }, + { + "sender": 2095, + "recipient": 2023, + "maxCapacity": 8, + "maxMessageSize": 512 + }, + { + "sender": 2023, + "recipient": 2095, + "maxCapacity": 8, + "maxMessageSize": 512 + }, + { + "sender": 2095, + "recipient": 1000, + "maxCapacity": 8, + "maxMessageSize": 512 + }, + { + "sender": 1000, + "recipient": 2095, + "maxCapacity": 8, + "maxMessageSize": 512 + } + ], + "finalization": false +} + diff --git a/.docker/xcm-config/launch-config-xcm-unique-rococo.json b/.docker/xcm-config/launch-config-xcm-unique-rococo.json new file mode 100644 index 0000000000..db5eb193c5 --- /dev/null +++ b/.docker/xcm-config/launch-config-xcm-unique-rococo.json @@ -0,0 +1,207 @@ +{ + "relaychain": { + "bin": "/polkadot/target/release/polkadot", + "chain": "rococo-local", + "chainInitializer": [ + "chainql", + "--tla-code=spec=import '${spec}'", + "5validators.jsonnet" + ], + "nodes": [ + { + "name": "alice", + "wsPort": 9844, + "rpcPort": 9843, + "port": 30444, + "flags": [ + "--unsafe-rpc-external", + "--unsafe-ws-external", + "-lparachain::candidate_validation=debug" + ] + }, + { + "name": "bob", + "wsPort": 9855, + "rpcPort": 9854, + "port": 30555, + "flags": [ + "--unsafe-rpc-external", + "--unsafe-ws-external", + "-lparachain::candidate_validation=debug" + ] + }, + { + "name": "charlie", + "wsPort": 9866, + "rpcPort": 9865, + "port": 
30666, + "flags": [ + "--unsafe-rpc-external", + "--unsafe-ws-external", + "-lparachain::candidate_validation=debug" + ] + }, + { + "name": "dave", + "wsPort": 9877, + "rpcPort": 9876, + "port": 30777, + "flags": [ + "--unsafe-rpc-external", + "--unsafe-ws-external", + "-lparachain::candidate_validation=debug" + ] + }, + { + "name": "eve", + "wsPort": 9888, + "rpcPort": 9887, + "port": 30888, + "flags": [ + "--unsafe-rpc-external", + "--unsafe-ws-external", + "-lparachain::candidate_validation=debug" + ] + } + + ], + "genesis": { + "runtime": { + "runtime_genesis_config": { + "parachainsConfiguration": { + "config": { + "validation_upgrade_frequency": 1, + "validation_upgrade_delay": 1 + } + } + } + } + } + }, + "parachains": [ + { + "bin": "/unique-chain/target/release/unique-collator", + "id": "2037", + "balance": "1000000000000000000000000", + "nodes": [ + { + "port": 31200, + "wsPort": 9944, + "rpcPort": 9933, + "name": "alice", + "flags": [ + "-lruntime=trace", + "-lxcm=trace", + "--unsafe-rpc-external", + "--unsafe-ws-external" + ] + } + ] + }, + { + "bin": "/acala/target/release/acala", + "id": "2000", + "chain": "acala-dev", + "balance": "1000000000000000000000", + "chainInitializer": [ + "chainql", + "-e", + "(import '${spec}') {id+: '-local'}" + ], + "nodes": [ + { + "wsPort": 9946, + "port": 31202, + "name": "alice", + "flags": [ + "-lruntime=trace", + "-lxcm=trace", + "--unsafe-rpc-external", + "--unsafe-ws-external" + ] + } + ] + }, + { + "bin": "/moonbeam/target/release/moonbeam", + "id": "2004", + "balance": "1000000000000000000000", + "chain": "moonbeam-local", + "nodes": [ + { + "wsPort": 9947, + "port": 31203, + "name": "alice", + "flags": [ + "-lruntime=trace", + "-lxcm=trace", + "--unsafe-rpc-external", + "--unsafe-ws-external", + "--", + "--execution=wasm" + ] + } + ] + }, + { + "bin": "/cumulus/target/release/cumulus", + "id": "1000", + "chain": "statemint-local", + "balance": "1000000000000000000000000", + "nodes": [ + { + "wsPort": 9948, + "port": 31204, + "name": "alice", + "flags": [ + "-lruntime=trace", + "-lxcm=trace", + "--unsafe-rpc-external", + "--unsafe-ws-external" + ] + } + ] + } + ], + "simpleParachains": [], + "hrmpChannels": [ + { + "sender": 2037, + "recipient": 2000, + "maxCapacity": 8, + "maxMessageSize": 512 + }, + { + "sender": 2000, + "recipient": 2037, + "maxCapacity": 8, + "maxMessageSize": 512 + }, + { + "sender": 2037, + "recipient": 2004, + "maxCapacity": 8, + "maxMessageSize": 512 + }, + { + "sender": 2004, + "recipient": 2037, + "maxCapacity": 8, + "maxMessageSize": 512 + }, + { + "sender": 2037, + "recipient": 1000, + "maxCapacity": 8, + "maxMessageSize": 512 + }, + { + "sender": 1000, + "recipient": 2037, + "maxCapacity": 8, + "maxMessageSize": 512 + } + ], + "finalization": false +} + diff --git a/.docker/xcm-config/launch-config-xcm-unique.json b/.docker/xcm-config/launch-config-xcm-unique.json new file mode 100644 index 0000000000..6acb25461d --- /dev/null +++ b/.docker/xcm-config/launch-config-xcm-unique.json @@ -0,0 +1,200 @@ +{ + "relaychain": { + "bin": "/polkadot/target/release/polkadot", + "chain": "westend-local", + "chainInitializer": [ + "chainql", + "--tla-code=spec=import '${spec}'", + "5validators.jsonnet" + ], + "nodes": [ + { + "name": "alice", + "wsPort": 9844, + "rpcPort": 9843, + "port": 30444, + "flags": [ + "--unsafe-rpc-external", + "--unsafe-ws-external", + "-lparachain::candidate_validation=debug" + ] + }, + { + "name": "bob", + "wsPort": 9855, + "rpcPort": 9854, + "port": 30555, + "flags": [ + 
"--unsafe-rpc-external", + "--unsafe-ws-external", + "-lparachain::candidate_validation=debug" + ] + }, + { + "name": "charlie", + "wsPort": 9866, + "rpcPort": 9865, + "port": 30666, + "flags": [ + "--unsafe-rpc-external", + "--unsafe-ws-external", + "-lparachain::candidate_validation=debug" + ] + }, + { + "name": "dave", + "wsPort": 9877, + "rpcPort": 9876, + "port": 30777, + "flags": [ + "--unsafe-rpc-external", + "--unsafe-ws-external", + "-lparachain::candidate_validation=debug" + ] + }, + { + "name": "eve", + "wsPort": 9888, + "rpcPort": 9887, + "port": 30888, + "flags": [ + "--unsafe-rpc-external", + "--unsafe-ws-external", + "-lparachain::candidate_validation=debug" + ] + } + + ], + "genesis": { + "runtime": { + "runtime_genesis_config": { + "parachainsConfiguration": { + "config": { + "validation_upgrade_frequency": 1, + "validation_upgrade_delay": 1 + } + } + } + } + } + }, + "parachains": [ + { + "bin": "/unique-chain/target/release/unique-collator", + "id": "2037", + "balance": "1000000000000000000000000", + "nodes": [ + { + "port": 31200, + "wsPort": 9944, + "rpcPort": 9933, + "name": "alice", + "flags": [ + "-lxcm=trace,parity_ws::handler=debug,jsonrpsee_core=trace,jsonrpsee-core=trace,jsonrpsee_ws_server=debug", + "--unsafe-rpc-external", + "--unsafe-ws-external" + ] + } + ] + }, + { + "bin": "/acala/target/release/acala", + "id": "2000", + "chain": "acala-dev", + "balance": "1000000000000000000000", + "chainInitializer": [ + "chainql", + "-e", + "(import '${spec}') {id+: '-local'}" + ], + "nodes": [ + { + "wsPort": 9946, + "port": 31202, + "name": "alice", + "flags": [ + "--unsafe-rpc-external", + "--unsafe-ws-external" + ] + } + ] + }, + { + "bin": "/moonbeam/target/release/moonbeam", + "id": "2004", + "balance": "1000000000000000000000", + "chain": "moonbeam-local", + "nodes": [ + { + "wsPort": 9947, + "port": 31203, + "name": "alice", + "flags": [ + "--unsafe-rpc-external", + "--unsafe-ws-external", + "--", + "--execution=wasm" + ] + } + ] + }, + { + "bin": "/cumulus/target/release/cumulus", + "id": "1000", + "chain": "statemint-local", + "balance": "1000000000000000000000000", + "nodes": [ + { + "wsPort": 9948, + "port": 31204, + "name": "alice", + "flags": [ + "--unsafe-rpc-external", + "--unsafe-ws-external" + ] + } + ] + } + ], + "simpleParachains": [], + "hrmpChannels": [ + { + "sender": 2037, + "recipient": 2000, + "maxCapacity": 8, + "maxMessageSize": 512 + }, + { + "sender": 2000, + "recipient": 2037, + "maxCapacity": 8, + "maxMessageSize": 512 + }, + { + "sender": 2037, + "recipient": 2004, + "maxCapacity": 8, + "maxMessageSize": 512 + }, + { + "sender": 2004, + "recipient": 2037, + "maxCapacity": 8, + "maxMessageSize": 512 + }, + { + "sender": 2037, + "recipient": 1000, + "maxCapacity": 8, + "maxMessageSize": 512 + }, + { + "sender": 1000, + "recipient": 2037, + "maxCapacity": 8, + "maxMessageSize": 512 + } + ], + "finalization": false +} + diff --git a/.docker/xcm-config/minBondFix.jsonnet b/.docker/xcm-config/minBondFix.jsonnet new file mode 100644 index 0000000000..ed2ba50f3e --- /dev/null +++ b/.docker/xcm-config/minBondFix.jsonnet @@ -0,0 +1,10 @@ +function(spec) +spec { + genesis+: { + runtime+: { + parachainStaking+: { + candidates: std.map(function(candidate) [candidate[0], candidate[1] * 1000], super.candidates) + }, + }, + }, +} diff --git a/.env b/.env index 276c2bec40..b0ff06dcb0 100644 --- a/.env +++ b/.env @@ -1,21 +1,22 @@ -RUST_TOOLCHAIN=nightly-2022-05-11 -RUST_C=1.62.0-nightly -# old -POLKA_VERSION=release-v0.9.24 -UNIQUE_BRANCH=develop 
-USER=*** -PASS=*** - +RUST_TOOLCHAIN=nightly-2022-07-24 POLKADOT_BUILD_BRANCH=release-v0.9.30 -POLKADOT_MAINNET_BRANCH=release-v0.9.29 # unused within the current forkless setup -UNIQUE_MAINNET_TAG=quartz-v924012-2-old-tests-fixes +POLKADOT_MAINNET_BRANCH=release-v0.9.29 +STATEMINT_BUILD_BRANCH=release-parachains-v9271 +ACALA_BUILD_BRANCH=2.9.6 +MOONBEAM_BUILD_BRANCH=v0.26.1 +UNIQUE_MAINNET_TAG=v924010-old-tests-fixes UNIQUE_REPLICA_FROM=wss://eu-ws.unique.network:443 +KUSAMA_MAINNET_BRANCH=release-v0.9.30 +STATEMINE_BUILD_BRANCH=parachains-v9271 +KARURA_BUILD_BRANCH=release-karura-2.9.5 +MOONRIVER_BUILD_BRANCH=v0.26.1 QUARTZ_MAINNET_TAG=quartz-v924012-2-old-tests-fixes QUARTZ_REPLICA_FROM=wss://eu-ws-quartz.unique.network:443 +UNQWND_MAINNET_BRANCH=release-v0.9.30 +WESTMINT_BUILD_BRANCH=parachains-v9290 OPAL_MAINNET_TAG=quartz-v924012-2-old-tests-fixes OPAL_REPLICA_FROM=wss://eu-ws-opal.unique.network:443 - -POLKADOT_LAUNCH_BRANCH=unique-network \ No newline at end of file +POLKADOT_LAUNCH_BRANCH=unique-network diff --git a/.gitattributes b/.gitattributes index efdba87644..a22d53f542 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,2 +1,3 @@ * text=auto *.sh text eol=lf +*.ts linguist-detectable=false \ No newline at end of file diff --git a/.github/workflows/canary.yml b/.github/workflows/canary.yml new file mode 100644 index 0000000000..f659abcc45 --- /dev/null +++ b/.github/workflows/canary.yml @@ -0,0 +1,12 @@ +on: + workflow_call: + +jobs: + + market-e2e-test: + name: market e2e tests + uses: ./.github/workflows/market-test_v2.yml + secrets: inherit + + + diff --git a/.github/workflows/ci-develop.yml b/.github/workflows/ci-develop.yml new file mode 100644 index 0000000000..de1d80e000 --- /dev/null +++ b/.github/workflows/ci-develop.yml @@ -0,0 +1,56 @@ +# https://cryptousetech.atlassian.net/wiki/spaces/CI/pages/2586869783/CI+Develop +# Workflow which controls starts nested workflows. +name: develop + +# Triger: PR at 'develop' branch with following types of events. +on: + pull_request: + branches: [ 'develop' ] + types: [ opened, reopened, synchronize, ready_for_review, converted_to_draft ] + +#Concurency group for control execution queue over github runners. +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref }} + cancel-in-progress: true + +# List of a jobs included into Workflow. +jobs: + + yarn-test-dev: + if: github.event.pull_request.draft == false # Conditional check for draft per job. + uses: ./.github/workflows/dev-build-tests_v2.yml + + + unit-test: + if: github.event.pull_request.draft == false # Conditional check for draft per job. + uses: ./.github/workflows/unit-test_v2.yml + + canary: + if: ${{ (github.event.pull_request.draft == false && contains( github.event.pull_request.labels.*.name, 'canary')) }} # Conditional check for draft & labels per job. + uses: ./.github/workflows/canary.yml + secrets: inherit # pass all secrets from initial workflow to nested + + xcm: + if: ${{ (github.event.pull_request.draft == false && contains( github.event.pull_request.labels.*.name, 'xcm')) }} # Conditional check for draft & labels per job. + uses: ./.github/workflows/xcm.yml + secrets: inherit # pass all secrets from initial workflow to nested + + forkless: + if: ${{ (github.event.pull_request.draft == false && contains( github.event.pull_request.labels.*.name, 'forkless')) }} # Conditional check for draft & labels per job. 
+ uses: ./.github/workflows/forkless.yml + + node-only-update: + if: ${{ (github.event.pull_request.draft == false && contains( github.event.pull_request.labels.*.name, 'node-only-update')) }} # Conditional check for draft & labels per job. + uses: ./.github/workflows/node-only-update_v2.yml + + parallel_and_sequential_tests: + if: ${{ (github.event.pull_request.draft == false && contains( github.event.pull_request.labels.*.name, 'integration')) }} # Conditional check for draft & labels per job. + uses: ./.github/workflows/integration-tests.yml + + codestyle: + if: github.event.pull_request.draft == false # Conditional check for draft per job. + uses: ./.github/workflows/codestyle_v2.yml + + yarn_eslint: + if: github.event.pull_request.draft == false # Conditional check for draft per job. + uses: ./.github/workflows/test_codestyle_v2.yml diff --git a/.github/workflows/ci-master.yml b/.github/workflows/ci-master.yml index 9c6239b2d2..ede417e011 100644 --- a/.github/workflows/ci-master.yml +++ b/.github/workflows/ci-master.yml @@ -15,8 +15,23 @@ concurrency: # List of a jobs included into Workflow. jobs: + + unit-test: + uses: ./.github/workflows/unit-test_v2.yml + + node-only-update: + uses: ./.github/workflows/node-only-update_v2.yml + forkless: uses: ./.github/workflows/forkless.yml - # codestyle: - # uses: ./.github/workflows/codestyle.yml \ No newline at end of file + canary: + uses: ./.github/workflows/canary.yml + secrets: inherit # pass all secrets from initial workflow to nested + + xcm: + uses: ./.github/workflows/xcm.yml + secrets: inherit # pass all secrets from initial workflow to nested + + codestyle: + uses: ./.github/workflows/codestyle_v2.yml diff --git a/.github/workflows/codestyle.yml b/.github/workflows/codestyle_v2.yml similarity index 54% rename from .github/workflows/codestyle.yml rename to .github/workflows/codestyle_v2.yml index 3f8d9136de..0c2f21285a 100644 --- a/.github/workflows/codestyle.yml +++ b/.github/workflows/codestyle_v2.yml @@ -1,13 +1,18 @@ -name: Code style +# https://cryptousetech.atlassian.net/wiki/spaces/CI/pages/2586837012/Code+style+testing +# Nested workflow for checks related to formatting Rust code -on: [push] +name: cargo fmt + +# Triger: only call from main workflow(re-usable workflows) +on: + workflow_call: jobs: rustfmt: - runs-on: ubuntu-20.04 + runs-on: self-hosted-ci steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v3 - name: Install latest nightly uses: actions-rs/toolchain@v1 with: @@ -16,16 +21,21 @@ jobs: target: wasm32-unknown-unknown components: rustfmt, clippy - name: Run cargo fmt - run: cargo fmt -- --check + run: cargo fmt -- --check # In that mode it returns only exit code. + - name: Cargo fmt state + if: success() + run: echo "Nothing to do. Command 'cargo fmt -- --check' returned exit code 0." 
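Note on the rustfmt job above: it relies entirely on the exit code of cargo fmt -- --check, which returns non-zero when any file would be reformatted. A minimal local equivalent of that gate, assuming the same pinned nightly toolchain with the rustfmt component installed, is only a sketch:

    #!/usr/bin/env bash
    # Mirror the CI formatting gate locally: non-zero exit means formatting drift.
    if cargo fmt -- --check; then
        echo "cargo fmt -- --check passed: nothing to reformat."
    else
        echo "Formatting drift detected; run 'cargo fmt' and commit the result." >&2
        exit 1
    fi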
+ clippy: if: ${{ false }} - runs-on: ubuntu-20.04 + runs-on: self-hosted-ci steps: - - uses: actions/checkout@v1 + + - uses: actions/checkout@v3 - name: Install substrate dependencies - run: sudo apt-get install libssl-dev pkg-config libclang-dev clang + run: sudo apt-get install libssl-dev pkg-config libclang-dev clang protobuf-compiler - name: Install latest nightly uses: actions-rs/toolchain@v1 with: diff --git a/.github/workflows/dev-build-tests_v2.yml b/.github/workflows/dev-build-tests_v2.yml new file mode 100644 index 0000000000..5e6d18a07c --- /dev/null +++ b/.github/workflows/dev-build-tests_v2.yml @@ -0,0 +1,101 @@ +# Integration test in --dev mode +# https://cryptousetech.atlassian.net/wiki/spaces/CI/pages/2586411104/Integration+tests +name: yarn test dev + +# Triger: only call from main workflow(re-usable workflows) +on: + workflow_call: + + + +# A workflow run is made up of one or more jobs that can run sequentially or in parallel +jobs: + + dev_build_int_tests: + # The type of runner that the job will run on + runs-on: [self-hosted-ci,medium] + timeout-minutes: 1380 + + name: ${{ matrix.network }} + + continue-on-error: true #Do not stop testing of matrix runs failed. As it decided during PR review - it required 50/50& Let's check it with false. + + strategy: + matrix: + include: + - network: "opal" + features: "opal-runtime" + - network: "quartz" + features: "quartz-runtime" + - network: "unique" + features: "unique-runtime" + + steps: + + - name: Clean Workspace + uses: AutoModality/action-clean@v1.1.0 + + # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it + - uses: actions/checkout@v3 + with: + ref: ${{ github.head_ref }} #Checking out head commit + + - name: Read .env file + uses: xom9ikk/dotenv@v2 + + - name: Generate ENV related extend file for docker-compose + uses: cuchi/jinja2-action@v1.2.0 + with: + template: .docker/docker-compose.tmp-dev.j2 + output_file: .docker/docker-compose.${{ matrix.network }}.yml + variables: | + RUST_TOOLCHAIN=${{ env.RUST_TOOLCHAIN }} + FEATURE=${{ matrix.features }} + + + - name: Show build configuration + run: cat .docker/docker-compose.${{ matrix.network }}.yml + + - name: Build the stack + run: docker-compose -f ".docker/docker-compose-dev.yaml" -f ".docker/docker-compose.${{ matrix.network }}.yml" up -d --build --remove-orphans + + - uses: actions/setup-node@v3 + with: + node-version: 16 + + - name: Run tests + working-directory: tests + run: | + yarn install + yarn add mochawesome + node scripts/readyness.js + echo "Ready to start tests" + yarn polkadot-types + NOW=$(date +%s) && yarn test --reporter mochawesome --reporter-options reportFilename=test-${NOW} + env: + RPC_URL: http://127.0.0.1:9933/ + + - name: Test Report + uses: phoenix-actions/test-reporting@v8 + id: test-report + if: success() || failure() # run this step even if previous step failed + with: + name: int test results - ${{ matrix.network }} # Name of the check run which will be created + path: tests/mochawesome-report/test-*.json # Path to test results + reporter: mochawesome-json + fail-on-error: 'false' + + - name: Read output variables + run: | + echo "url is ${{ steps.test-report.outputs.runHtmlUrl }}" + + - name: Stop running containers + if: always() # run this step always + run: docker-compose -f ".docker/docker-compose-dev.yaml" -f ".docker/docker-compose.${{ matrix.network }}.yml" down + + - name: Remove builder cache + if: always() # run this step always + run: | + docker builder prune -f -a + docker system prune -f + docker 
image prune -f -a diff --git a/.github/workflows/execution-matrix.yml b/.github/workflows/execution-matrix.yml new file mode 100644 index 0000000000..f267cd1905 --- /dev/null +++ b/.github/workflows/execution-matrix.yml @@ -0,0 +1,42 @@ +name: Reusable workflow + +on: + workflow_call: + # Map the workflow outputs to job outputs + outputs: + matrix: + description: "The first output string" + value: ${{ jobs.create-matrix.outputs.matrix_output }} + +jobs: + + create-marix: + + name: Prepare execution matrix + + runs-on: self-hosted-ci + outputs: + matrix_output: ${{ steps.create_matrix.outputs.matrix }} + + steps: + + - name: Clean Workspace + uses: AutoModality/action-clean@v1.1.0 + + # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it + - uses: actions/checkout@v3 + with: + ref: ${{ github.head_ref }} #Checking out head commit + + - name: Read .env file + uses: xom9ikk/dotenv@v2 + + - name: Create Execution matrix + uses: CertainLach/create-matrix-action@v3 + id: create_matrix + with: + matrix: | + network {opal}, runtime {opal}, features {opal-runtime}, mainnet_branch {${{ env.OPAL_MAINNET_TAG }}}, replica_from_address {${{ env.OPAL_REPLICA_FROM }}} + network {quartz}, runtime {quartz}, features {quartz-runtime}, mainnet_branch {${{ env.QUARTZ_MAINNET_TAG }}}, replica_from_address {${{ env.QUARTZ_REPLICA_FROM }}} + network {unique}, runtime {unique}, features {unique-runtime}, mainnet_branch {${{ env.UNIQUE_MAINNET_TAG }}}, replica_from_address {${{ env.UNIQUE_REPLICA_FROM }}} + diff --git a/.github/workflows/forkless-update-data_v2.yml b/.github/workflows/forkless-update-data_v2.yml index 117b1fff74..4395a49792 100644 --- a/.github/workflows/forkless-update-data_v2.yml +++ b/.github/workflows/forkless-update-data_v2.yml @@ -38,6 +38,7 @@ jobs: network {opal}, runtime {opal}, features {opal-runtime}, mainnet_branch {${{ env.OPAL_MAINNET_TAG }}}, replica_from_address {${{ env.OPAL_REPLICA_FROM }}} network {quartz}, runtime {quartz}, features {quartz-runtime}, mainnet_branch {${{ env.QUARTZ_MAINNET_TAG }}}, replica_from_address {${{ env.QUARTZ_REPLICA_FROM }}} network {unique}, runtime {unique}, features {unique-runtime}, mainnet_branch {${{ env.UNIQUE_MAINNET_TAG }}}, replica_from_address {${{ env.UNIQUE_REPLICA_FROM }}} + forkless-update-data: needs: execution-marix # The type of runner that the job will run on @@ -80,6 +81,7 @@ jobs: RUNTIME=${{ matrix.runtime }} BRANCH=${{ github.head_ref }} REPLICA_FROM=${{ matrix.replica_from_address }} + - name: Show build configuration run: cat .docker/docker-compose.${{ matrix.network }}.yml @@ -91,6 +93,7 @@ jobs: variables: | FEATURE=${{ matrix.features }} RUNTIME=${{ matrix.runtime }} + - name: Show launch-config-forkless configuration run: cat .docker/launch-config-forkless-data.json @@ -165,4 +168,4 @@ jobs: run: | docker builder prune -f -a docker system prune -f - docker image prune -f -a \ No newline at end of file + docker image prune -f -a diff --git a/.github/workflows/forkless-update-nodata_v2.yml b/.github/workflows/forkless-update-nodata_v2.yml index 77dfe5b9c6..dc1476f774 100644 --- a/.github/workflows/forkless-update-nodata_v2.yml +++ b/.github/workflows/forkless-update-nodata_v2.yml @@ -37,6 +37,8 @@ jobs: network {opal}, runtime {opal}, features {opal-runtime}, mainnet_branch {${{ env.OPAL_MAINNET_TAG }}} network {quartz}, runtime {quartz}, features {quartz-runtime}, mainnet_branch {${{ env.QUARTZ_MAINNET_TAG }}} network {unique}, runtime {unique}, features {unique-runtime}, mainnet_branch {${{ 
env.UNIQUE_MAINNET_TAG }}} + + forkless-update-nodata: needs: execution-marix # The type of runner that the job will run on @@ -79,6 +81,7 @@ jobs: FEATURE=${{ matrix.features }} RUNTIME=${{ matrix.runtime }} BRANCH=${{ github.head_ref }} + - name: Show build configuration run: cat .docker/docker-compose.${{ matrix.network }}.yml @@ -90,6 +93,7 @@ jobs: variables: | FEATURE=${{ matrix.features }} RUNTIME=${{ matrix.runtime }} + - name: Show launch-config-forkless configuration run: cat .docker/launch-config-forkless-nodata.json @@ -158,25 +162,48 @@ jobs: if: success() || failure() run: cat './forkless-parachain-upgrade-nodata-logs.${{ matrix.features }}/node-parachain.log' - - name: Run tests + - name: Run Parallel tests + working-directory: tests + run: | + yarn install + yarn add mochawesome + node scripts/readyness.js + echo "Ready to start tests" + yarn polkadot-types + NOW=$(date +%s) && yarn testParallel --reporter mochawesome --reporter-options reportFilename=test-parallel-${NOW} + env: + RPC_URL: http://127.0.0.1:9933/ + + - name: Test Report Parallel + uses: phoenix-actions/test-reporting@v8 + id: test-report-parallel + if: success() || failure() # run this step even if previous step failed + with: + name: Report Parallel tests results - ${{ matrix.network }} # Name of the check run which will be created + path: tests/mochawesome-report/test-parallel-*.json # Path to test results + reporter: mochawesome-json + fail-on-error: 'false' + + - name: Run Sequential tests working-directory: tests + if: success() || failure() run: | yarn install yarn add mochawesome node scripts/readyness.js echo "Ready to start tests" yarn polkadot-types - NOW=$(date +%s) && yarn test --reporter mochawesome --reporter-options reportFilename=test-${NOW} + NOW=$(date +%s) && yarn testSequential --reporter mochawesome --reporter-options reportFilename=test-sequential-${NOW} env: RPC_URL: http://127.0.0.1:9933/ - - name: Test Report + - name: Test Report Sequential uses: phoenix-actions/test-reporting@v8 - id: test-report + id: test-report-sequential if: success() || failure() # run this step even if previous step failed with: - name: Report tests results - ${{ matrix.network }} # Name of the check run which will be created - path: tests/mochawesome-report/test-*.json # Path to test results + name: Report Sequential tests results - ${{ matrix.network }} # Name of the check run which will be created + path: tests/mochawesome-report/test-sequential-*.json # Path to test results reporter: mochawesome-json fail-on-error: 'false' @@ -189,4 +216,4 @@ jobs: run: | docker builder prune -f -a docker system prune -f - docker image prune -f -a \ No newline at end of file + docker image prune -f -a diff --git a/.github/workflows/forkless.yml b/.github/workflows/forkless.yml index 8aa217eadf..e974c9c4cf 100644 --- a/.github/workflows/forkless.yml +++ b/.github/workflows/forkless.yml @@ -17,4 +17,4 @@ jobs: try-runtime: name: try-runtime - uses: ./.github/workflows/try-runtime_v2.yml \ No newline at end of file + uses: ./.github/workflows/try-runtime_v2.yml diff --git a/.github/workflows/generate-execution-matrix.yml b/.github/workflows/generate-execution-matrix.yml new file mode 100644 index 0000000000..91127783a3 --- /dev/null +++ b/.github/workflows/generate-execution-matrix.yml @@ -0,0 +1,45 @@ +name: Prepare execution matrix + +on: + workflow_call: + # Map the workflow outputs to job outputs + outputs: + matrix_values: + description: "Matix output" + matrix: ${{ jobs.prepare-execution-matrix.outputs.matrix }} + + 
+#concurrency: +# group: ${{ github.workflow }}-${{ github.head_ref }} +# cancel-in-progress: true + + +jobs: + prepare-execution-matrix: + name: Generate output + runs-on: self-hosted-ci + # Map the job outputs to step outputs + outputs: + matrix: ${{ steps.create_matrix.outputs.matrix }} + + steps: + + - name: Clean Workspace + uses: AutoModality/action-clean@v1.1.0 + + # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it + - uses: actions/checkout@v3 + with: + ref: ${{ github.head_ref }} #Checking out head commit + + - name: Read .env file + uses: xom9ikk/dotenv@v2 + + - name: Create Execution matrix + uses: CertainLach/create-matrix-action@v3 + id: create_matrix + with: + matrix: | + network {opal}, runtime {opal}, features {opal-runtime}, mainnet_branch {${{ env.OPAL_MAINNET_TAG }}}, replica_from_address {${{ env.OPAL_REPLICA_FROM }}} + network {quartz}, runtime {quartz}, features {quartz-runtime}, mainnet_branch {${{ env.QUARTZ_MAINNET_TAG }}}, replica_from_address {${{ env.QUARTZ_REPLICA_FROM }}} + network {unique}, runtime {unique}, features {unique-runtime}, mainnet_branch {${{ env.UNIQUE_MAINNET_TAG }}}, replica_from_address {${{ env.UNIQUE_REPLICA_FROM }}} diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml new file mode 100644 index 0000000000..1fe61eef77 --- /dev/null +++ b/.github/workflows/integration-tests.yml @@ -0,0 +1,339 @@ +# Test workflow for debug parallel and sequental tests in scope of execution time reducing. +name: Integration tests + +# Triger: only call from main workflow(re-usable workflows) +on: + workflow_call: +#Define Workflow variables + +# A workflow run is made up of one or more jobs that can run sequentially or in parallel +jobs: + + nodes-execution-matrix: + + name: execution matrix + + runs-on: self-hosted-ci + outputs: + matrix: ${{ steps.create_matrix.outputs.matrix }} + + steps: + + - name: Clean Workspace + uses: AutoModality/action-clean@v1.1.0 + + # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it + - uses: actions/checkout@v3 + with: + ref: ${{ github.head_ref }} #Checking out head commit + + - name: Read .env file + uses: xom9ikk/dotenv@v1.0.2 + + - name: Create Execution matrix + uses: fabiocaccamo/create-matrix-action@v2 + id: create_matrix + with: + matrix: | + network {opal}, runtime {opal}, features {opal-runtime}, mainnet_branch {${{ env.OPAL_MAINNET_TAG }}} + network {quartz}, runtime {quartz}, features {quartz-runtime}, mainnet_branch {${{ env.QUARTZ_MAINNET_TAG }}} + network {unique}, runtime {unique}, features {unique-runtime}, mainnet_branch {${{ env.UNIQUE_MAINNET_TAG }}} + + parallel-test: + needs: nodes-execution-matrix + # The type of runner that the job will run on + runs-on: [self-hosted-ci,large] + + + + timeout-minutes: 1380 + + name: ${{ matrix.network }} - parallel + + continue-on-error: true #Do not stop testing of matrix runs failed. As it decided during PR review - it required 50/50& Let's check it with false. 
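The strategy blocks further below feed this output into matrix.include via fromJson, which expects a JSON array of objects keyed by the column names (network, runtime, features, mainnet_branch). A quick local sanity check of a captured matrix string, as a sketch (MATRIX_JSON is a hypothetical variable holding the step output, not something defined in these workflows):

    # Verify the matrix string parses as an array of objects with the keys the jobs consume.
    echo "$MATRIX_JSON" | jq -e 'type == "array" and all(.[]; has("network") and has("runtime") and has("features"))' \
      && echo "matrix shape looks usable for strategy.matrix.include"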
+ + strategy: + matrix: + include: ${{fromJson(needs.nodes-execution-matrix.outputs.matrix)}} + + steps: + + - name: Clean Workspace + uses: AutoModality/action-clean@v1.1.0 + + # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it + - uses: actions/checkout@v3 + with: + ref: ${{ github.head_ref }} #Checking out head commit + + - name: Read .env file + uses: xom9ikk/dotenv@v1.0.2 + + - name: Generate ENV related extend file for docker-compose + uses: cuchi/jinja2-action@v1.2.0 + with: + template: .docker/docker-compose.tmp-node.j2 + output_file: .docker/docker-compose.node.${{ matrix.network }}.yml + variables: | + REPO_URL=${{ github.server_url }}/${{ github.repository }}.git + RUST_TOOLCHAIN=${{ env.RUST_TOOLCHAIN }} + POLKADOT_BUILD_BRANCH=${{ env.POLKADOT_BUILD_BRANCH }} + POLKADOT_MAINNET_BRANCH=${{ env.POLKADOT_MAINNET_BRANCH }} + MAINNET_TAG=${{ matrix.mainnet_tag }} + MAINNET_BRANCH=${{ matrix.mainnet_branch }} + FEATURE=${{ matrix.features }} + RUNTIME=${{ matrix.runtime }} + BRANCH=${{ github.head_ref }} + + - name: Show build configuration + run: cat .docker/docker-compose.node.${{ matrix.network }}.yml + + - name: Generate launch-config-forkless-nodata.json + uses: cuchi/jinja2-action@v1.2.0 + with: + template: .docker/forkless-config/launch-config-node-update-only-v3.j2 + output_file: .docker/launch-config-forkless-nodata.json + variables: | + FEATURE=${{ matrix.features }} + RUNTIME=${{ matrix.runtime }} + + - name: Show launch-config-forkless configuration + run: cat .docker/launch-config-forkless-nodata.json + + - uses: actions/setup-node@v3 + with: + node-version: 16 + + - name: Build the stack + run: docker-compose -f ".docker/docker-compose-forkless.yml" -f ".docker/docker-compose.node.${{ matrix.network }}.yml" up -d --build --remove-orphans --force-recreate --timeout 300 + + # 🚀 POLKADOT LAUNCH COMPLETE 🚀 + - name: Check if docker logs consist messages related to testing of Node Parachain Upgrade. + if: success() + run: | + counter=160 + function check_container_status { + docker inspect -f {{.State.Running}} node-parachain + } + function do_docker_logs { + docker logs --details node-parachain 2>&1 + } + function is_started { + if [ "$(check_container_status)" == "true" ]; then + echo "Container: node-parachain RUNNING"; + echo "Check Docker logs" + DOCKER_LOGS=$(do_docker_logs) + if [[ ${DOCKER_LOGS} = *"POLKADOT LAUNCH COMPLETE"* ]];then + echo "🚀 POLKADOT LAUNCH COMPLETE 🚀" + return 0 + else + echo "Message not found in logs output, repeating..." + return 1 + fi + else + echo "Container node-parachain NOT RUNNING" + echo "Halting all future checks" + exit 1 + fi + echo "something goes wrong" + exit 1 + } + while ! 
is_started; do + echo "Waiting for special message in log files " + sleep 30s + counter=$(( $counter - 1 )) + echo "Counter: $counter" + if [ "$counter" -gt "0" ]; then + continue + else + break + fi + done + echo "Halting script" + exit 0 + shell: bash + + - name: Run Parallel tests on Node Parachain + working-directory: tests + run: | + yarn install + yarn add mochawesome + echo "Ready to start tests" + yarn polkadot-types + NOW=$(date +%s) && yarn testParallel --reporter mochawesome --reporter-options reportFilename=test-parallel-${NOW} + env: + RPC_URL: http://127.0.0.1:9933/ + + - name: Parallel Tests report + uses: phoenix-actions/test-reporting@v8 + id: test-report-parallel + if: success() || failure() # run this step even if previous step failed + with: + name: Parallel Tests report - ${{ matrix.network }} # Name of the check run which will be created + path: tests/mochawesome-report/test-parallel-*.json # Path to test results + reporter: mochawesome-json + fail-on-error: 'false' + + + - name: Stop running containers + if: always() # run this step always + run: docker-compose -f ".docker/docker-compose-forkless.yml" -f ".docker/docker-compose.node.${{ matrix.network }}.yml" down --volumes + + - name: Remove builder cache + if: always() # run this step always + run: | + docker builder prune -f -a + docker system prune -f + docker image prune -f -a + + - name: List files in Workspace + if: always() + run: ls -la ./ + + sequential-test: + needs: nodes-execution-matrix + # The type of runner that the job will run on + runs-on: [self-hosted-ci,large] + + timeout-minutes: 1380 + + name: ${{ matrix.network }} - sequential + + continue-on-error: true #Do not stop testing of matrix runs failed. As it decided during PR review - it required 50/50& Let's check it with false. 
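The "POLKADOT LAUNCH COMPLETE" polling loop used by the parallel and sequential jobs here can be written more compactly. A sketch under the same assumptions (a container named node-parachain, up to 160 retries of 30 s, so 4800 s total):

    # Compact equivalent of the readiness loop: abort if the container stops,
    # otherwise wait up to 4800 s for the launch-complete marker in its logs.
    timeout 4800 bash -c '
      until docker logs node-parachain 2>&1 | grep -q "POLKADOT LAUNCH COMPLETE"; do
        [ "$(docker inspect -f "{{.State.Running}}" node-parachain)" = "true" ] || exit 1
        sleep 30
      done
    '

Unlike the loop above, this variant fails the step (via the timeout exit code) if the marker never appears instead of falling through with exit 0.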
+ + strategy: + matrix: + include: ${{fromJson(needs.nodes-execution-matrix.outputs.matrix)}} + + steps: + + - name: Clean Workspace + uses: AutoModality/action-clean@v1.1.0 + + # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it + - uses: actions/checkout@v3 + with: + ref: ${{ github.head_ref }} #Checking out head commit + + - name: Read .env file + uses: xom9ikk/dotenv@v1.0.2 + + - name: Generate ENV related extend file for docker-compose + uses: cuchi/jinja2-action@v1.2.0 + with: + template: .docker/docker-compose.tmp-node.j2 + output_file: .docker/docker-compose.node.${{ matrix.network }}.yml + variables: | + REPO_URL=${{ github.server_url }}/${{ github.repository }}.git + RUST_TOOLCHAIN=${{ env.RUST_TOOLCHAIN }} + POLKADOT_BUILD_BRANCH=${{ env.POLKADOT_BUILD_BRANCH }} + POLKADOT_MAINNET_BRANCH=${{ env.POLKADOT_MAINNET_BRANCH }} + MAINNET_TAG=${{ matrix.mainnet_tag }} + MAINNET_BRANCH=${{ matrix.mainnet_branch }} + FEATURE=${{ matrix.features }} + RUNTIME=${{ matrix.runtime }} + BRANCH=${{ github.head_ref }} + + - name: Show build configuration + run: cat .docker/docker-compose.node.${{ matrix.network }}.yml + + - name: Generate launch-config-forkless-nodata.json + uses: cuchi/jinja2-action@v1.2.0 + with: + template: .docker/forkless-config/launch-config-node-update-only-v3.j2 + output_file: .docker/launch-config-forkless-nodata.json + variables: | + FEATURE=${{ matrix.features }} + RUNTIME=${{ matrix.runtime }} + + - name: Show launch-config-forkless configuration + run: cat .docker/launch-config-forkless-nodata.json + + - uses: actions/setup-node@v3 + with: + node-version: 16 + + - name: Build the stack + run: docker-compose -f ".docker/docker-compose-forkless.yml" -f ".docker/docker-compose.node.${{ matrix.network }}.yml" up -d --build --remove-orphans --force-recreate --timeout 300 + + # 🚀 POLKADOT LAUNCH COMPLETE 🚀 + - name: Check if docker logs consist messages related to testing of Node Parachain Upgrade. + if: success() + run: | + counter=160 + function check_container_status { + docker inspect -f {{.State.Running}} node-parachain + } + function do_docker_logs { + docker logs --details node-parachain 2>&1 + } + function is_started { + if [ "$(check_container_status)" == "true" ]; then + echo "Container: node-parachain RUNNING"; + echo "Check Docker logs" + DOCKER_LOGS=$(do_docker_logs) + if [[ ${DOCKER_LOGS} = *"POLKADOT LAUNCH COMPLETE"* ]];then + echo "🚀 POLKADOT LAUNCH COMPLETE 🚀" + return 0 + else + echo "Message not found in logs output, repeating..." + return 1 + fi + else + echo "Container node-parachain NOT RUNNING" + echo "Halting all future checks" + exit 1 + fi + echo "something goes wrong" + exit 1 + } + while ! 
is_started; do + echo "Waiting for special message in log files " + sleep 30s + counter=$(( $counter - 1 )) + echo "Counter: $counter" + if [ "$counter" -gt "0" ]; then + continue + else + break + fi + done + echo "Halting script" + exit 0 + shell: bash + + - name: Run Sequential tests on Node Parachain + working-directory: tests + run: | + yarn install + yarn add mochawesome + echo "Ready to start tests" + yarn polkadot-types + NOW=$(date +%s) && yarn testSequential --reporter mochawesome --reporter-options reportFilename=test-sequential-${NOW} + env: + RPC_URL: http://127.0.0.1:9933/ + + - name: Sequential Tests report + uses: phoenix-actions/test-reporting@v8 + id: test-report-sequential + if: success() || failure() # run this step even if previous step failed + with: + name: Parallel Tests report - ${{ matrix.network }} # Name of the check run which will be created + path: tests/mochawesome-report/test-sequential-*.json # Path to test results + reporter: mochawesome-json + fail-on-error: 'false' + + - name: Stop running containers + if: always() # run this step always + run: docker-compose -f ".docker/docker-compose-forkless.yml" -f ".docker/docker-compose.node.${{ matrix.network }}.yml" down --volumes + + - name: Remove builder cache + if: always() # run this step always + run: | + docker builder prune -f -a + docker system prune -f + docker image prune -f -a + + - name: List files in Workspace + if: always() + run: ls -la ./ diff --git a/.github/workflows/market-test_v2.yml b/.github/workflows/market-test_v2.yml new file mode 100644 index 0000000000..5d73f4cb83 --- /dev/null +++ b/.github/workflows/market-test_v2.yml @@ -0,0 +1,187 @@ +# https://cryptousetech.atlassian.net/wiki/spaces/CI/pages/2586509375/Market+e2e+test +# Nested workflow for lunching Market e2e tests from external repository https://github.com/UniqueNetwork/market-e2e-tests + +name: market api tests + +# Controls when the action will run. +on: + workflow_call: + + +# A workflow run is made up of one or more jobs that can run sequentially or in parallel +jobs: + + market_test: + # The type of runner that the job will run on + runs-on: [self-hosted-ci,large] + timeout-minutes: 360 + + name: ${{ matrix.network }} + + continue-on-error: true #Do not stop testing of matrix runs failed. As it decided during PR review - it required 50/50& Let's check it with false. 
+ + strategy: + matrix: + include: + - network: "opal" + features: "opal-runtime" + + steps: + - name: Clean Workspace + uses: AutoModality/action-clean@v1.1.0 + + # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it + - name: Checkout master repo + uses: actions/checkout@master + with: + ref: ${{ github.head_ref }} #Checking out head commit + + - name: Checkout Market e2e tests + uses: actions/checkout@v3 + with: + repository: 'UniqueNetwork/market-e2e-tests' + ssh-key: ${{ secrets.GH_PAT }} + path: 'qa-tests' + ref: 'master' + + - name: Read .env file + uses: xom9ikk/dotenv@v2 + + - name: Copy qa-tests/.env.example to qa-tests/.env + working-directory: qa-tests + run: cp .env.docker .env + + - name: Generate ENV related extend file for docker-compose + uses: cuchi/jinja2-action@v1.2.0 + with: + template: qa-tests/.docker/docker-compose.tmp-market.j2 + output_file: qa-tests/.docker/docker-compose.${{ matrix.network }}.yml + variables: | + REPO_URL=${{ github.server_url }}/${{ github.repository }}.git + RUST_TOOLCHAIN=${{ env.RUST_TOOLCHAIN }} + POLKADOT_BUILD_BRANCH=${{ env.POLKADOT_BUILD_BRANCH }} + FEATURE=${{ matrix.features }} + BRANCH=${{ github.head_ref }} + + + - name: Show build configuration + working-directory: qa-tests + run: cat .docker/docker-compose.${{ matrix.network }}.yml + + - name: Start node-parachain + working-directory: qa-tests + run: docker-compose -f ".docker/docker-compose.market.yml" -f ".docker/docker-compose.${{ matrix.network }}.yml" up -d --build --remove-orphans --force-recreate node-parachain + + - uses: actions/setup-node@v3 + with: + node-version: 16.17 + + - name: Setup TypeScript + working-directory: qa-tests + run: | + npm install -g ts-node + npm install + + - name: Copy qa-tests/.env.docker to qa-tests/.env + working-directory: qa-tests + run: | + rm -rf .env + cp .env.docker .env + +# Temporary disable node readyness check. Have to dig into the script logic. 
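Until that readiness script is revisited, the job below falls back to a fixed sleep of 1200 s. A lighter-weight alternative, sketched here on the assumption that the parachain RPC is published on 127.0.0.1:9933 as in the other workflows, is to poll the node's JSON-RPC endpoint directly with the standard system_health method:

    # Poll the parachain RPC until system_health answers, instead of sleeping a fixed 20 minutes.
    until curl -sf -H 'Content-Type: application/json' \
          -d '{"jsonrpc":"2.0","id":1,"method":"system_health","params":[]}' \
          http://127.0.0.1:9933/ >/dev/null; do
      echo "waiting for the node RPC to come up..."
      sleep 10
    done
    echo "Ready to start Market e2e tests"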
+# - name: Wait for chain up and running +# working-directory: tests +# run: | +# yarn install +# node scripts/readyness.js +# echo "Ready to start tests" +# env: +# RPC_URL: http://127.0.0.1:9933/ + + - name: Wait for chain up and running + run: | + sleep 1200s + echo "Ready to start Market e2e tests" + + - name: Show content of .env file and Generate accounts + working-directory: qa-tests + run: | + cat .env + ts-node ./src/scripts/create-market-accounts.ts + + - name: Copy qa-tests/.env to qa-tests/.env.docker + working-directory: qa-tests + run: | + rm -rf .env.docker + cp .env .env.docker + + - name: Get chain logs + if: always() # run this step always + run: | + docker exec node-parachain cat /polkadot-launch/9944.log + docker exec node-parachain cat /polkadot-launch/9945.log + docker exec node-parachain cat /polkadot-launch/alice.log + docker exec node-parachain cat /polkadot-launch/eve.log + docker exec node-parachain cat /polkadot-launch/dave.log + docker exec node-parachain cat /polkadot-launch/charlie.log + + - name: Deploy contracts + run: | + cd qa-tests + ts-node ./src/scripts/deploy-contract.ts + + - name: Timeout for debug + if: failure() + run: sleep 300s + + - name: Import test data + working-directory: qa-tests + run: ts-node ./src/scripts/create-test-collections.ts + + - name: Show content of qa-test .env + working-directory: qa-tests + run: cat .env + + - name: Read qa -test .env file Before market start + uses: xom9ikk/dotenv@v2 + with: + path: qa-tests/ + + - name: local-market:start + run: docker-compose -f "qa-tests/.docker/docker-compose.market.yml" -f "qa-tests/.docker/docker-compose.${{ matrix.network }}.yml" up -d --build + + - name: Wait for market readyness + working-directory: qa-tests + run: src/scripts/wait-market-ready.sh + shell: bash + + - name: Install dependecies + working-directory: qa-tests + run: | + npm ci + npm install -D @playwright/test + npx playwright install-deps + npx playwright install + + - name: Show content of qa-test .env + working-directory: qa-tests + run: cat .env + + - name: Test API interface + working-directory: qa-tests + run: | + npx playwright test --workers=8 --quiet .*.api.test.ts --reporter=github --config playwright.config.ts + + - name: Timeout for debug + if: failure() + run: sleep 300s + + - name: Stop running containers + if: always() # run this step always + run: docker-compose -f "qa-tests/.docker/docker-compose.market.yml" -f "qa-tests/.docker/docker-compose.${{ matrix.network }}.yml" down --volumes + + - name: Remove builder cache + if: always() # run this step always + run: | + docker builder prune -f + docker system prune -f diff --git a/.github/workflows/node-only-update_v2.yml b/.github/workflows/node-only-update_v2.yml new file mode 100644 index 0000000000..129b4cd6e9 --- /dev/null +++ b/.github/workflows/node-only-update_v2.yml @@ -0,0 +1,389 @@ +# https://cryptousetech.atlassian.net/wiki/spaces/CI/pages/2586837028/Nodes+only+update +# Node only update with restart polkadot-launch process. 
+ +name: nodes-only update + +# Triger: only call from main workflow(re-usable workflows) +on: + workflow_call: + + +# A workflow run is made up of one or more jobs that can run sequentially or in parallel +jobs: + + nodes-execution-matrix: + + name: execution matrix + + runs-on: self-hosted-ci + outputs: + matrix: ${{ steps.create_matrix.outputs.matrix }} + + steps: + + - name: Clean Workspace + uses: AutoModality/action-clean@v1.1.0 + + # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it + - uses: actions/checkout@v3 + with: + ref: ${{ github.head_ref }} #Checking out head commit + + - name: Read .env file + uses: xom9ikk/dotenv@v2 + + - name: Create Execution matrix + uses: CertainLach/create-matrix-action@v3 + id: create_matrix + with: + matrix: | + network {opal}, runtime {opal}, features {opal-runtime}, mainnet_branch {${{ env.OPAL_MAINNET_TAG }}} + network {quartz}, runtime {quartz}, features {quartz-runtime}, mainnet_branch {${{ env.QUARTZ_MAINNET_TAG }}} + network {unique}, runtime {unique}, features {unique-runtime}, mainnet_branch {${{ env.UNIQUE_MAINNET_TAG }}} + + + + nodes-only-update: + needs: nodes-execution-matrix + # The type of runner that the job will run on + runs-on: [self-hosted-ci,large] + + timeout-minutes: 2880 # 48 hours for execution jobs. + + name: ${{ matrix.network }} + + continue-on-error: true #Do not stop testing of matrix runs failed. As it decided during PR review - it required 50/50& Let's check it with false. + + strategy: + matrix: + include: ${{fromJson(needs.nodes-execution-matrix.outputs.matrix)}} + + steps: + + - name: Clean Workspace + uses: AutoModality/action-clean@v1.1.0 + + # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it + - uses: actions/checkout@v3 + with: + ref: ${{ github.head_ref }} #Checking out head commit + + - name: Read .env file + uses: xom9ikk/dotenv@v2 + + - name: Generate ENV related extend file for docker-compose + uses: cuchi/jinja2-action@v1.2.0 + with: + template: .docker/docker-compose.tmp-node.j2 + output_file: .docker/docker-compose.node.${{ matrix.network }}.yml + variables: | + REPO_URL=${{ github.server_url }}/${{ github.repository }}.git + RUST_TOOLCHAIN=${{ env.RUST_TOOLCHAIN }} + POLKADOT_BUILD_BRANCH=${{ env.POLKADOT_BUILD_BRANCH }} + POLKADOT_MAINNET_BRANCH=${{ env.POLKADOT_MAINNET_BRANCH }} + MAINNET_TAG=${{ matrix.mainnet_tag }} + MAINNET_BRANCH=${{ matrix.mainnet_branch }} + FEATURE=${{ matrix.features }} + RUNTIME=${{ matrix.runtime }} + BRANCH=${{ github.head_ref }} + + - name: Show build configuration + run: cat .docker/docker-compose.node.${{ matrix.network }}.yml + + - name: Generate launch-config-forkless-nodata.json + uses: cuchi/jinja2-action@v1.2.0 + with: + template: .docker/forkless-config/launch-config-forkless-nodata.j2 + output_file: .docker/launch-config-forkless-nodata.json + variables: | + FEATURE=${{ matrix.features }} + RUNTIME=${{ matrix.runtime }} + + - name: Show launch-config-forkless configuration + run: cat .docker/launch-config-forkless-nodata.json + + - uses: actions/setup-node@v3 + with: + node-version: 16 + + - name: Build the stack + run: docker-compose -f ".docker/docker-compose-forkless.yml" -f ".docker/docker-compose.node.${{ matrix.network }}.yml" up -d --build --remove-orphans --force-recreate --timeout 300 + + # 🚀 POLKADOT LAUNCH COMPLETE 🚀 + - name: Check if docker logs consist messages related to testing of Node Parachain Upgrade. 
+ if: success() + run: | + counter=160 + function check_container_status { + docker inspect -f {{.State.Running}} node-parachain + } + function do_docker_logs { + docker logs --details node-parachain 2>&1 + } + function is_started { + if [ "$(check_container_status)" == "true" ]; then + echo "Container: node-parachain RUNNING"; + echo "Check Docker logs" + DOCKER_LOGS=$(do_docker_logs) + if [[ ${DOCKER_LOGS} = *"POLKADOT LAUNCH COMPLETE"* ]];then + echo "🚀 POLKADOT LAUNCH COMPLETE 🚀" + return 0 + else + echo "Message not found in logs output, repeating..." + return 1 + fi + else + echo "Container node-parachain NOT RUNNING" + echo "Halting all future checks" + exit 1 + fi + echo "something goes wrong" + exit 1 + } + while ! is_started; do + echo "Waiting for special message in log files " + sleep 30s + counter=$(( $counter - 1 )) + echo "Counter: $counter" + if [ "$counter" -gt "0" ]; then + continue + else + break + fi + done + echo "Halting script" + exit 0 + shell: bash + + - name: Checkout at '${{ matrix.mainnet_branch }}' branch + uses: actions/checkout@master + with: + ref: ${{ matrix.mainnet_branch }} #Checking out head commit + path: ${{ matrix.mainnet_branch }} + + - name: Run tests before Node Parachain upgrade + working-directory: ${{ matrix.mainnet_branch }}/tests + run: | + yarn install + yarn add mochawesome + node scripts/readyness.js + echo "Ready to start tests" + yarn polkadot-types + NOW=$(date +%s) && yarn test --reporter mochawesome --reporter-options reportFilename=test-before-${NOW} + env: + RPC_URL: http://127.0.0.1:9933/ + + - name: Upload Test Report Before Node upgrade + uses: phoenix-actions/test-reporting@v8 + id: test-report-before + if: success() || failure() # run this step even if previous step failed + with: + name: Tests before node upgrade ${{ matrix.network }} # Name of the check run which will be created + path: ${{ matrix.mainnet_branch }}/tests/mochawesome-report/test-before-*.json # Path to test results + reporter: mochawesome-json + fail-on-error: 'false' + token: ${{ secrets.GITHUB_TOKEN }} + + # TODO uncomment thease steps after the merge + #- name: Run Parallel tests before Node Parachain upgrade + # working-directory: ${{ matrix.mainnet_branch }}/tests + # run: | + # yarn install + # yarn add mochawesome + # echo "Ready to start tests" + # yarn polkadot-types + # NOW=$(date +%s) && yarn testParallel --reporter mochawesome --reporter-options reportFilename=test-parallel-${NOW} + # env: + # RPC_URL: http://127.0.0.1:9933/ + + #- name: Upload Parallel Test Report Before Node upgrade + # uses: phoenix-actions/test-reporting@v8 + # id: test-parallel-report-before + # if: success() || failure() # run this step even if previous step failed + # with: + # name: Tests before node upgrade ${{ matrix.network }} # Name of the check run which will be created + # path: ${{ matrix.mainnet_branch }}/tests/mochawesome-report/test-parallel-*.json # Path to test results + # reporter: mochawesome-json + # fail-on-error: 'false' + + # - name: Run Sequential tests before Node Parachain upgrade + # if: success() || failure() + # working-directory: ${{ matrix.mainnet_branch }}/tests + # run: NOW=$(date +%s) && yarn testSequential --reporter mochawesome --reporter-options reportFilename=test-sequential-${NOW} + # env: + # RPC_URL: http://127.0.0.1:9933/ + + # - name: Upload Sequential Test Report Before Node upgrade + # uses: phoenix-actions/test-reporting@v8 + # id: test-sequential-report-before + # if: success() || failure() # run this step even if previous step 
failed + # with: + # name: Tests before node upgrade ${{ matrix.network }} # Name of the check run which will be created + # path: ${{ matrix.mainnet_branch }}/tests/mochawesome-report/test-sequential-*.json # Path to test results + # reporter: mochawesome-json + # fail-on-error: 'false' + + - name: Send SIGUSR1 to polkadot-launch process + if: success() || failure() + run: | + #Get PID of polkadot-launch + ContainerID=$(docker ps -aqf "name=node-parachain") + PID=$(docker exec node-parachain pidof 'polkadot-launch') + sleep 30s + echo -e "\n" + echo -e "Restart polkadot-launch process: $PID\n" + docker exec node-parachain kill -SIGUSR1 ${PID} + echo "SIGUSR1 sent to Polkadot-launch PID: $PID" + sleep 60s + echo -e "Show logs of node-parachain container.\n" + docker logs ${ContainerID} + + - name: Tail chain logs in case of docker image crashed after Polkadot Launch restart + if: failure() # run this step only at failure + run: | + docker exec node-parachain tail -n 1000 /polkadot-launch/9944.log + docker exec node-parachain tail -n 1000 /polkadot-launch/9945.log + docker exec node-parachain tail -n 1000 /polkadot-launch/alice.log + + - name: copy chain log files from container to the host + if: success() || failure() # run this step even if previous step failed + run: | + mkdir -p /tmp/node-only-update + docker cp node-parachain:/polkadot-launch/9944.log /tmp/node-only-update/ + docker cp node-parachain:/polkadot-launch/9945.log /tmp/node-only-update/ + docker cp node-parachain:/polkadot-launch/alice.log /tmp/node-only-update/ + + - name: Upload chain log files + if: success() || failure() + uses: actions/upload-artifact@v3 + with: + name: node-only-update-chain-logs + path: /tmp/node-only-update/ + if-no-files-found: warn + + - name: Check if docker logs consist messages related to testing of Node Parachain Upgrade. + if: success() + run: | + counter=160 + function check_container_status { + docker inspect -f {{.State.Running}} node-parachain + } + function do_docker_logs { + docker logs --details node-parachain 2>&1 + } + function is_started { + if [ "$(check_container_status)" == "true" ]; then + echo "Container: node-parachain RUNNING"; + echo "Check Docker logs" + DOCKER_LOGS=$(do_docker_logs) + if [[ ${DOCKER_LOGS} = *"All parachain collators restarted with the new binaries."* ]];then + echo "🌗 All parachain collators restarted with the new binaries." + return 0 + else + echo "Message not found in logs output, repeating..." + return 1 + fi + else + echo "Container node-parachain NOT RUNNING" + echo "Halting all future checks" + exit 1 + fi + echo "something goes wrong" + exit 1 + } + while ! is_started; do + echo "Waiting for special message in log files " + sleep 30s + counter=$(( $counter - 1 )) + echo "Counter: $counter" + if [ "$counter" -gt "0" ]; then + continue + else + break + fi + done + echo "Halting script" + exit 0 + shell: bash + + ## TODO: Remove next two blocks before switch to Parrallel & Sequental tests. Uncoment commented blocks. 
+ - name: Run tests after Node Parachain upgrade + if: success() || failure() # run this step even if previous step failed + working-directory: ${{ matrix.mainnet_branch }}/tests + run: | + yarn install + yarn add mochawesome + node scripts/readyness.js + echo "Ready to start tests" + yarn polkadot-types + NOW=$(date +%s) && yarn test --reporter mochawesome --reporter-options reportFilename=test-after-${NOW} + env: + RPC_URL: http://127.0.0.1:9933/ + + # - name: Test Report After Node upgrade + # uses: phoenix-actions/test-reporting@v8 + # id: test-report-after + # if: success() || failure() # run this step even if previous step failed + # with: + # name: Tests after node upgrade ${{ matrix.network }} # Name of the check run which will be created + # path: ${{ matrix.mainnet_branch }}/tests/mochawesome-report/test-after-*.json # Path to test results + # reporter: mochawesome-json + # fail-on-error: 'false' + # token: ${{ secrets.GITHUB_TOKEN }} + + # TODO uncomment thease steps after the merge + #- name: Run Parallel tests after Node Parachain upgrade + # working-directory: ${{ matrix.mainnet_branch }}/tests + # run: | + # yarn install + # yarn add mochawesome + # node scripts/readyness.js + # echo "Ready to start tests" + # yarn polkadot-types + # NOW=$(date +%s) && yarn testParallel --reporter mochawesome --reporter-options reportFilename=test-parallel-${NOW} + # env: + # RPC_URL: http://127.0.0.1:9933/ + + #- name: Test Report Parallel After Node upgrade + # uses: phoenix-actions/test-reporting@v8 + # id: test-report-parallel-after + # if: success() || failure() # run this step even if previous step failed + # with: + # name: Tests after node upgrade ${{ matrix.network }} # Name of the check run which will be created + # path: ${{ matrix.mainnet_branch }}/tests/mochawesome-report/test-parallel-*.json # Path to test results + # reporter: mochawesome-json + # fail-on-error: 'false' + + #- name: Run Sequential tests after Node Parachain upgrade + # if: success() || failure() + # working-directory: ${{ matrix.mainnet_branch }}/tests + # run: NOW=$(date +%s) && yarn testSequential --reporter mochawesome --reporter-options reportFilename=test-sequential-${NOW} + # env: + # RPC_URL: http://127.0.0.1:9933/ + + #- name: Upload Sequential Test Report After Node upgrade + # uses: phoenix-actions/test-reporting@v8 + # id: test-sequential-report-after + # if: success() || failure() # run this step even if previous step failed + # with: + # name: Tests before node upgrade ${{ matrix.network }} # Name of the check run which will be created + # path: ${{ matrix.mainnet_branch }}/tests/mochawesome-report/test-sequential-*.json # Path to test results + # reporter: mochawesome-json + # fail-on-error: 'false' + + + - name: Stop running containers + if: always() # run this step always + run: docker-compose -f ".docker/docker-compose-forkless.yml" -f ".docker/docker-compose.node.${{ matrix.network }}.yml" down --volumes + + - name: Remove builder cache + if: always() # run this step always + run: | + docker builder prune -f -a + docker system prune -f + docker image prune -f -a + + - name: Remove repo at the end + if: always() # run this step always + run: | + ls -ls ./ diff --git a/.github/workflows/node_build_test.yml b/.github/workflows/node_build_test.yml deleted file mode 100644 index 8aeea95240..0000000000 --- a/.github/workflows/node_build_test.yml +++ /dev/null @@ -1,44 +0,0 @@ -name: Build & test - -# Controls when the action will run. 
-on: - # Triggers the workflow on push or pull request events but only for the master branch - push: - branches: [ develop ] - # pull_request: - # branches: [ develop ] - # Allows you to run this workflow manually from the Actions tab - workflow_dispatch: - -# A workflow run is made up of one or more jobs that can run sequentially or in parallel -jobs: - build: - # The type of runner that the job will run on - runs-on: ubuntu-20.04 - - # if: github.event_name == 'pull_request' && github.event.action == 'closed' && github.event.pull_request.merged == true - - # Steps represent a sequence of tasks that will be executed as part of the job - steps: - # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it - - uses: actions/checkout@v2 - - #runs ssh connection - - name: Go to server - uses: appleboy/ssh-action@master - with: - host: ${{ secrets.SERVER_IP }} - username: ${{ secrets.SERVER_USERNAME }} - key: ${{ secrets.KEY }} - port: ${{ secrets.SERVER_PORT }} - command_timeout: 300m - script: | - eval $(ssh-agent -s) - ssh-add /home/devops/.ssh/git_hub - git clone git@github.com:UniqueNetwork/unique-chain.git - cd unique-chain - git checkout develop - # git pull --all - chmod +x ci_node.sh - ./ci_node.sh - rm -rf /home/polkadot/unique-chain diff --git a/.github/workflows/notify.yml b/.github/workflows/notify.yml deleted file mode 100644 index d99788de9a..0000000000 --- a/.github/workflows/notify.yml +++ /dev/null @@ -1,13 +0,0 @@ -name: telegram message -on: - push: - branches: [ develop ] -jobs: - build: - runs-on: ubuntu-latest - steps: - - uses: avkviring/telegram-github-action@v0.0.13 - env: - telegram_to: ${{ secrets.TELEGRAM_TO }} - telegram_token: ${{ secrets.TELEGRAM_TOKEN }} - event: ${{ toJson(github.event) }} diff --git a/.github/workflows/test_codestyle_v2.yml b/.github/workflows/test_codestyle_v2.yml new file mode 100644 index 0000000000..22bc9b7854 --- /dev/null +++ b/.github/workflows/test_codestyle_v2.yml @@ -0,0 +1,24 @@ +# https://cryptousetech.atlassian.net/wiki/spaces/CI/pages/2586804253/Yarn+eslint +# Yarn Eslint over tests +# +name: yarn eslint + +# Triger: only call from main workflow(re-usable workflows) +on: + workflow_call: + +jobs: + code_style: + runs-on: [ self-hosted-ci ] + + steps: + - uses: actions/checkout@v3 + + - uses: actions/setup-node@v3 + with: + node-version: 16 + + - name: Install modules + run: cd tests && yarn + - name: Run ESLint + run: cd tests && yarn eslint --ext .ts,.js src/ diff --git a/.github/workflows/testnet-build.yml b/.github/workflows/testnet-build.yml index 9156685c17..54b73de187 100644 --- a/.github/workflows/testnet-build.yml +++ b/.github/workflows/testnet-build.yml @@ -55,6 +55,7 @@ jobs: network {opal}, runtime {opal}, features {opal-runtime} network {quartz}, runtime {quartz}, features {quartz-runtime} network {unique}, runtime {unique}, features {unique-runtime} + testnet-build: needs: prepare-execution-marix # The type of runner that the job will run on @@ -99,6 +100,7 @@ jobs: FEATURE=${{ matrix.features }} RUNTIME=${{ matrix.runtime }} BRANCH=${{ github.head_ref }} + - name: Show build configuration run: cat .docker/Dockerfile-testnet.${{ matrix.network }}.yml @@ -136,4 +138,4 @@ jobs: if: always() # run this step always run: | docker builder prune -f - docker system prune -f \ No newline at end of file + docker system prune -f diff --git a/.github/workflows/tests_codestyle.yml b/.github/workflows/tests_codestyle.yml deleted file mode 100644 index b9a5e84731..0000000000 --- 
a/.github/workflows/tests_codestyle.yml +++ /dev/null @@ -1,14 +0,0 @@ -name: Tests code style - -on: [push] - -jobs: - build: - runs-on: ubuntu-20.04 - - steps: - - uses: actions/checkout@v2 - - name: Install modules - run: cd tests && yarn - - name: Run ESLint - run: cd tests && yarn eslint --ext .ts,.js src/ \ No newline at end of file diff --git a/.github/workflows/try-runtime_v2.yml b/.github/workflows/try-runtime_v2.yml index aed4b13fb0..26bef18c07 100644 --- a/.github/workflows/try-runtime_v2.yml +++ b/.github/workflows/try-runtime_v2.yml @@ -50,6 +50,7 @@ jobs: RUST_TOOLCHAIN=${{ env.RUST_TOOLCHAIN }} FEATURE=${{ matrix.features }} REPLICA_FROM=${{ matrix.replica_from_address }} + - name: Show build configuration run: cat .docker/docker-compose.try-runtime.${{ matrix.network }}.yml @@ -75,4 +76,4 @@ jobs: run: | docker builder prune -f -a docker system prune -f - docker image prune -f -a \ No newline at end of file + docker image prune -f -a diff --git a/.github/workflows/unit-test_v2.yml b/.github/workflows/unit-test_v2.yml new file mode 100644 index 0000000000..ad5153af09 --- /dev/null +++ b/.github/workflows/unit-test_v2.yml @@ -0,0 +1,60 @@ +# https://cryptousetech.atlassian.net/wiki/spaces/CI/pages/2586738699/Unit+Tests +# Re-Usable Workflow for lanching Unit tests +name: unit tests + +# Controls when the action will run. +# Triger: only call from main workflow(re-usable workflows) +on: + workflow_call: + +# A workflow run is made up of one or more jobs that can run sequentially or in parallel +jobs: + + unit_tests: + # The type of runner that the job will run on + runs-on: [self-hosted-ci,medium] + timeout-minutes: 1380 + + name: ${{ github.base_ref }} + + continue-on-error: true #Do not stop testing of matrix runs failed. As it decided during PR review - it required 50/50& Let's check it with false. + + + steps: + + - name: Clean Workspace + uses: AutoModality/action-clean@v1.1.0 + + # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it + - uses: actions/checkout@v3 + with: + ref: ${{ github.head_ref }} #Checking out head commit + + - name: Read .env file + uses: xom9ikk/dotenv@v2 + + - name: Generate ENV related extend file for docker-compose + uses: cuchi/jinja2-action@v1.2.0 + with: + template: .docker/docker-compose.tmp-unit.j2 + output_file: .docker/docker-compose.unit.yml + variables: | + RUST_TOOLCHAIN=${{ env.RUST_TOOLCHAIN }} + + + - name: Show build configuration + run: cat .docker/docker-compose.unit.yml + + - name: Build the stack + run: docker-compose -f ".docker/docker-compose-dev.yaml" -f ".docker/docker-compose.unit.yml" up --build --force-recreate --timeout 300 --remove-orphans --exit-code-from node-dev + + - name: Stop running containers + if: always() # run this step always + run: docker-compose -f ".docker/docker-compose-dev.yaml" -f ".docker/docker-compose.unit.yml" down + + - name: Remove builder cache + if: always() # run this step always + run: | + docker builder prune -f -a + docker system prune -f + docker image prune -f -a diff --git a/.github/workflows/xcm.yml b/.github/workflows/xcm.yml new file mode 100644 index 0000000000..154585d751 --- /dev/null +++ b/.github/workflows/xcm.yml @@ -0,0 +1,400 @@ +name: xcm-testnet-build + +# Controls when the action will run. 
+on: + workflow_call: + + # Allows you to run this workflow manually from the Actions tab + workflow_dispatch: + +#Define Workflow variables +env: + REPO_URL: ${{ github.server_url }}/${{ github.repository }} + +# A workflow run is made up of one or more jobs that can run sequentially or in parallel +jobs: + + prepare-execution-marix: + + name: Prepare execution matrix + + runs-on: [XL] + outputs: + matrix: ${{ steps.create_matrix.outputs.matrix }} + + steps: + + - name: Clean Workspace + uses: AutoModality/action-clean@v1.1.0 + + # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it + - uses: actions/checkout@v3 + with: + ref: ${{ github.head_ref }} #Checking out head commit + + - name: Read .env file + uses: xom9ikk/dotenv@v2 + + - name: Create Execution matrix + uses: fabiocaccamo/create-matrix-action@v2 + id: create_matrix + with: + matrix: | + network {opal}, runtime {opal}, features {opal-runtime}, acala_version {${{ env.ACALA_BUILD_BRANCH }}}, moonbeam_version {${{ env.MOONBEAM_BUILD_BRANCH }}}, cumulus_version {${{ env.WESTMINT_BUILD_BRANCH }}}, runtest {testXcmOpal} + network {quartz}, runtime {quartz}, features {quartz-runtime}, acala_version {${{ env.KARURA_BUILD_BRANCH }}}, moonbeam_version {${{ env.MOONRIVER_BUILD_BRANCH }}}, cumulus_version {${{ env.STATEMINE_BUILD_BRANCH }}}, runtest {testXcmQuartz} + network {unique}, runtime {unique}, features {unique-runtime}, acala_version {${{ env.ACALA_BUILD_BRANCH }}}, moonbeam_version {${{ env.MOONBEAM_BUILD_BRANCH }}}, cumulus_version {${{ env.STATEMINT_BUILD_BRANCH }}}, runtest {testXcmUnique} + + xcm-build: + + needs: prepare-execution-marix + # The type of runner that the job will run on + runs-on: [XL] + + timeout-minutes: 600 + + name: ${{ matrix.network }}-build + + continue-on-error: true #Do not stop testing of matrix runs failed. As it decided during PR review - it required 50/50& Let's check it with false. 
+ + strategy: + matrix: + include: ${{fromJson(needs.prepare-execution-marix.outputs.matrix)}} + + steps: + - name: Skip if pull request is in Draft + if: github.event.pull_request.draft == true + run: exit 1 + + - name: Clean Workspace + uses: AutoModality/action-clean@v1.1.0 + + # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it + - uses: actions/checkout@v3 + with: + ref: ${{ github.head_ref }} #Checking out head commit + + - name: Read .env file + uses: xom9ikk/dotenv@v2 + + - name: Log in to Docker Hub + uses: docker/login-action@v2.0.0 + with: + username: ${{ secrets.CORE_DOCKERHUB_USERNAME }} + password: ${{ secrets.CORE_DOCKERHUB_TOKEN }} + + - name: Install jq + run: sudo apt install jq -y + + # Check POLKADOT version and build it if it doesn't exist in repository + - name: Generate ENV related extend Dockerfile file for POLKADOT + uses: cuchi/jinja2-action@v1.2.0 + with: + template: .docker/Dockerfile-polkadot.j2 + output_file: .docker/Dockerfile-polkadot.${{ env.POLKADOT_BUILD_BRANCH }}.yml + variables: | + RUST_TOOLCHAIN=${{ env.RUST_TOOLCHAIN }} + POLKADOT_BUILD_BRANCH=${{ env.POLKADOT_BUILD_BRANCH }} + + - name: Check if the dockerhub repository contains the needed version POLKADOT + run: | + # aquire token + TOKEN=$(curl -s -H "Content-Type: application/json" -X POST -d '{"username": "'${{ secrets.CORE_DOCKERHUB_USERNAME }}'", "password": "'${{ secrets.CORE_DOCKERHUB_TOKEN }}'"}' https://hub.docker.com/v2/users/login/ | jq -r .token) + export TOKEN=$TOKEN + + # Get TAGS from DOCKERHUB POLKADOT repository + POLKADOT_TAGS=$(curl -s -H "Authorization: JWT ${TOKEN}" https://hub.docker.com/v2/repositories/uniquenetwork/builder-polkadot/tags/?page_size=100 | jq -r '."results"[]["name"]') + # Show TAGS + echo "POLKADOT TAGS:" + echo $POLKADOT_TAGS + # Check correct version POLKADOT and build it if it doesn't exist in POLKADOT TAGS + if [[ ${POLKADOT_TAGS[*]} =~ (^|[[:space:]])"${{ env.POLKADOT_BUILD_BRANCH }}"($|[[:space:]]) ]]; then + echo "Repository has needed POLKADOT version"; + docker pull uniquenetwork/builder-polkadot:${{ env.POLKADOT_BUILD_BRANCH }} + else + echo "Repository has not needed POLKADOT version, so build it"; + cd .docker/ && docker build --no-cache --file ./Dockerfile-polkadot.${{ env.POLKADOT_BUILD_BRANCH }}.yml --tag uniquenetwork/builder-polkadot:${{ env.POLKADOT_BUILD_BRANCH }} . 
+ echo "Push needed POLKADOT version to the repository"; + docker push uniquenetwork/builder-polkadot:${{ env.POLKADOT_BUILD_BRANCH }} + fi + shell: bash + + # Check ACALA version and build it if it doesn't exist in repository + - name: Generate ENV related extend Dockerfile file for ACALA + uses: cuchi/jinja2-action@v1.2.0 + with: + template: .docker/Dockerfile-acala.j2 + output_file: .docker/Dockerfile-acala.${{ matrix.acala_version }}.yml + variables: | + RUST_TOOLCHAIN=${{ env.RUST_TOOLCHAIN }} + ACALA_BUILD_BRANCH=${{ matrix.acala_version }} + + - name: Check if the dockerhub repository contains the needed ACALA version + run: | + # aquire token + TOKEN=$(curl -s -H "Content-Type: application/json" -X POST -d '{"username": "'${{ secrets.CORE_DOCKERHUB_USERNAME }}'", "password": "'${{ secrets.CORE_DOCKERHUB_TOKEN }}'"}' https://hub.docker.com/v2/users/login/ | jq -r .token) + export TOKEN=$TOKEN + + # Get TAGS from DOCKERHUB repository + ACALA_TAGS=$(curl -s -H "Authorization: JWT ${TOKEN}" https://hub.docker.com/v2/repositories/uniquenetwork/builder-acala/tags/?page_size=100 | jq -r '."results"[]["name"]') + # Show TAGS + echo "ACALA TAGS:" + echo $ACALA_TAGS + # Check correct version ACALA and build it if it doesn't exist in ACALA TAGS + if [[ ${ACALA_TAGS[*]} =~ (^|[[:space:]])"${{ matrix.acala_version }}"($|[[:space:]]) ]]; then + echo "Repository has needed ACALA version"; + docker pull uniquenetwork/builder-acala:${{ matrix.acala_version }} + else + echo "Repository has not needed ACALA version, so build it"; + cd .docker/ && docker build --no-cache --file ./Dockerfile-acala.${{ matrix.acala_version }}.yml --tag uniquenetwork/builder-acala:${{ matrix.acala_version }} . + echo "Push needed ACALA version to the repository"; + docker push uniquenetwork/builder-acala:${{ matrix.acala_version }} + fi + shell: bash + + # Check MOONBEAM version and build it if it doesn't exist in repository + - name: Generate ENV related extend Dockerfile file for MOONBEAM + uses: cuchi/jinja2-action@v1.2.0 + with: + template: .docker/Dockerfile-moonbeam.j2 + output_file: .docker/Dockerfile-moonbeam.${{ matrix.moonbeam_version }}.yml + variables: | + RUST_TOOLCHAIN=${{ env.RUST_TOOLCHAIN }} + MOONBEAM_BUILD_BRANCH=${{ matrix.moonbeam_version }} + + - name: Check if the dockerhub repository contains the needed MOONBEAM version + run: | + # aquire token + TOKEN=$(curl -s -H "Content-Type: application/json" -X POST -d '{"username": "'${{ secrets.CORE_DOCKERHUB_USERNAME }}'", "password": "'${{ secrets.CORE_DOCKERHUB_TOKEN }}'"}' https://hub.docker.com/v2/users/login/ | jq -r .token) + export TOKEN=$TOKEN + + # Get TAGS from DOCKERHUB repository + MOONBEAM_TAGS=$(curl -s -H "Authorization: JWT ${TOKEN}" https://hub.docker.com/v2/repositories/uniquenetwork/builder-moonbeam/tags/?page_size=100 | jq -r '."results"[]["name"]') + # Show TAGS + echo "MOONBEAM TAGS:" + echo $MOONBEAM_TAGS + # Check correct version MOONBEAM and build it if it doesn't exist in MOONBEAM TAGS + if [[ ${MOONBEAM_TAGS[*]} =~ (^|[[:space:]])"${{ matrix.moonbeam_version }}"($|[[:space:]]) ]]; then + echo "Repository has needed MOONBEAM version"; + docker pull uniquenetwork/builder-moonbeam:${{ matrix.moonbeam_version }} + else + echo "Repository has not needed MOONBEAM version, so build it"; + cd .docker/ && docker build --no-cache --file ./Dockerfile-moonbeam.${{ matrix.moonbeam_version }}.yml --tag uniquenetwork/builder-moonbeam:${{ matrix.moonbeam_version }} . 
+ echo "Push needed MOONBEAM version to the repository"; + docker push uniquenetwork/builder-moonbeam:${{ matrix.moonbeam_version }} + fi + shell: bash + + + # Check CUMULUS version and build it if it doesn't exist in repository + - name: Generate ENV related extend Dockerfile file for CUMULUS + uses: cuchi/jinja2-action@v1.2.0 + with: + template: .docker/Dockerfile-cumulus.j2 + output_file: .docker/Dockerfile-cumulus.${{ matrix.cumulus_version }}.yml + variables: | + RUST_TOOLCHAIN=${{ env.RUST_TOOLCHAIN }} + CUMULUS_BUILD_BRANCH=${{ matrix.cumulus_version }} + + - name: Check if the dockerhub repository contains the needed CUMULUS version + run: | + # aquire token + TOKEN=$(curl -s -H "Content-Type: application/json" -X POST -d '{"username": "'${{ secrets.CORE_DOCKERHUB_USERNAME }}'", "password": "'${{ secrets.CORE_DOCKERHUB_TOKEN }}'"}' https://hub.docker.com/v2/users/login/ | jq -r .token) + export TOKEN=$TOKEN + + # Get TAGS from DOCKERHUB repository + CUMULUS_TAGS=$(curl -s -H "Authorization: JWT ${TOKEN}" https://hub.docker.com/v2/repositories/uniquenetwork/builder-cumulus/tags/?page_size=100 | jq -r '."results"[]["name"]') + # Show TAGS + echo "CUMULUS TAGS:" + echo $CUMULUS_TAGS + # Check correct version CUMULUS and build it if it doesn't exist in CUMULUS TAGS + if [[ ${CUMULUS_TAGS[*]} =~ (^|[[:space:]])"${{ matrix.cumulus_version }}"($|[[:space:]]) ]]; then + echo "Repository has needed CUMULUS version"; + docker pull uniquenetwork/builder-cumulus:${{ matrix.cumulus_version }} + else + echo "Repository has not needed CUMULUS version, so build it"; + cd .docker/ && docker build --no-cache --file ./Dockerfile-cumulus.${{ matrix.cumulus_version }}.yml --tag uniquenetwork/builder-cumulus:${{ matrix.cumulus_version }} . + echo "Push needed CUMULUS version to the repository"; + docker push uniquenetwork/builder-cumulus:${{ matrix.cumulus_version }} + fi + shell: bash + + + # Build main image for XCM + - name: Generate ENV related extend Dockerfile file + uses: cuchi/jinja2-action@v1.2.0 + with: + template: .docker/Dockerfile-xcm.j2 + output_file: .docker/Dockerfile-xcm.${{ matrix.network }}.yml + variables: | + RUST_TOOLCHAIN=${{ env.RUST_TOOLCHAIN }} + NETWORK=${{ matrix.network }} + POLKADOT_BUILD_BRANCH=${{ env.POLKADOT_BUILD_BRANCH }} + POLKADOT_LAUNCH_BRANCH=${{ env.POLKADOT_LAUNCH_BRANCH }} + FEATURE=${{ matrix.features }} + RUNTIME=${{ matrix.runtime }} + BRANCH=${{ github.head_ref }} + ACALA_BUILD_BRANCH=${{ matrix.acala_version }} + MOONBEAM_BUILD_BRANCH=${{ matrix.moonbeam_version }} + CUMULUS_BUILD_BRANCH=${{ matrix.cumulus_version }} + + - name: Show build Dockerfile + run: cat .docker/Dockerfile-xcm.${{ matrix.network }}.yml + + - name: Show launch-config-xcm-${{ matrix.network }} configuration + run: cat .docker/xcm-config/launch-config-xcm-${{ matrix.network }}.json + + - name: Run find-and-replace to remove slashes from branch name + uses: mad9000/actions-find-and-replace-string@2 + id: branchname + with: + source: ${{ github.head_ref }} + find: '/' + replace: '-' + + - name: Pull chainql docker image + run: docker pull uniquenetwork/builder-chainql:latest + + - name: Build the stack + run: cd .docker/ && docker build --no-cache --file ./Dockerfile-xcm.${{ matrix.network }}.yml --tag uniquenetwork/xcm-${{ matrix.network }}-testnet-local:nightly-${{ steps.branchname.outputs.value }}-${{ github.sha }} --tag uniquenetwork/xcm-${{ matrix.network }}-testnet-local:latest . 
+ + - name: Push docker image version + run: docker push uniquenetwork/xcm-${{ matrix.network }}-testnet-local:nightly-${{ steps.branchname.outputs.value }}-${{ github.sha }} + + - name: Push docker image latest + run: docker push uniquenetwork/xcm-${{ matrix.network }}-testnet-local:latest + + - name: Remove builder cache + if: always() # run this step always + run: | + docker builder prune -f + docker system prune -f + + xcm-tests: + needs: [prepare-execution-marix, xcm-build] + # The type of runner that the job will run on + runs-on: [XL] + + timeout-minutes: 600 + + name: ${{ matrix.network }}-tests + + continue-on-error: true #Do not stop testing of matrix runs failed. As it decided during PR review - it required 50/50& Let's check it with false. + + strategy: + matrix: + include: ${{fromJson(needs.prepare-execution-marix.outputs.matrix)}} + + steps: + - name: Skip if pull request is in Draft + if: github.event.pull_request.draft == true + run: exit 1 + + - name: Clean Workspace + uses: AutoModality/action-clean@v1.1.0 + + # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it + - uses: actions/checkout@v3 + with: + ref: ${{ github.head_ref }} #Checking out head commit + + - name: Read .env file + uses: xom9ikk/dotenv@v2 + + - name: Generate ENV related extend file for docker-compose + uses: cuchi/jinja2-action@v1.2.0 + with: + template: .docker/docker-compose.tmp-xcm-tests.j2 + output_file: .docker/docker-compose.xcm-tests.${{ matrix.network }}.yml + variables: | + NETWORK=${{ matrix.network }} + + - name: Show build configuration + run: cat .docker/docker-compose.xcm-tests.${{ matrix.network }}.yml + + - uses: actions/setup-node@v3 + with: + node-version: 16 + + - name: Build the stack + run: docker-compose -f ".docker/docker-compose.xcm-tests.${{ matrix.network }}.yml" up -d --remove-orphans --force-recreate --timeout 300 + + # 🚀 POLKADOT LAUNCH COMPLETE 🚀 + - name: Check if docker logs consist messages related to testing of xcm tests + if: success() + run: | + counter=160 + function check_container_status { + docker inspect -f {{.State.Running}} xcm-${{ matrix.network }}-testnet-local + } + function do_docker_logs { + docker logs --details xcm-${{ matrix.network }}-testnet-local 2>&1 + } + function is_started { + if [ "$(check_container_status)" == "true" ]; then + echo "Container: xcm-${{ matrix.network }}-testnet-local RUNNING"; + echo "Check Docker logs" + DOCKER_LOGS=$(do_docker_logs) + if [[ ${DOCKER_LOGS} = *"POLKADOT LAUNCH COMPLETE"* ]];then + echo "🚀 POLKADOT LAUNCH COMPLETE 🚀" + return 0 + else + echo "Message not found in logs output, repeating..." + return 1 + fi + else + echo "Container xcm-${{ matrix.network }}-testnet-local NOT RUNNING" + echo "Halting all future checks" + exit 1 + fi + echo "something goes wrong" + exit 1 + } + while ! 
is_started; do + echo "Waiting for special message in log files " + sleep 30s + counter=$(( $counter - 1 )) + echo "Counter: $counter" + if [ "$counter" -gt "0" ]; then + continue + else + break + fi + done + echo "Halting script" + exit 0 + shell: bash + + - name: Run XCM tests + working-directory: tests + run: | + yarn install + yarn add mochawesome + node scripts/readyness.js + echo "Ready to start tests" + yarn polkadot-types + NOW=$(date +%s) && yarn ${{ matrix.runtest }} --reporter mochawesome --reporter-options reportFilename=test-${NOW} + env: + RPC_URL: http://127.0.0.1:9933/ + + - name: XCM Test Report + uses: phoenix-actions/test-reporting@v8 + id: test-report + if: success() || failure() # run this step even if previous step failed + with: + name: XCM Tests ${{ matrix.network }} # Name of the check run which will be created + path: tests/mochawesome-report/test-*.json # Path to test results + reporter: mochawesome-json + fail-on-error: 'false' + + - name: Stop running containers + if: always() # run this step always + run: docker-compose -f ".docker/docker-compose.xcm-tests.${{ matrix.network }}.yml" down + + - name: Clean Workspace + if: always() + uses: AutoModality/action-clean@v1.1.0 + + - name: Remove builder cache + if: always() # run this step always + run: | + docker system prune -a -f diff --git a/.maintain/frame-weight-template.hbs b/.maintain/frame-weight-template.hbs index 09eccff3a9..e6ce89a0b1 100644 --- a/.maintain/frame-weight-template.hbs +++ b/.maintain/frame-weight-template.hbs @@ -14,6 +14,7 @@ #![cfg_attr(rustfmt, rustfmt_skip)] #![allow(unused_parens)] #![allow(unused_imports)] +#![allow(missing_docs)] #![allow(clippy::unnecessary_cast)] use frame_support::{traits::Get, weights::{Weight, constants::RocksDbWeight}}; @@ -46,22 +47,22 @@ impl WeightInfo for SubstrateWeight { {{~#each benchmark.components as |c| ~}} {{~#if (not c.is_used)}}_{{/if}}{{c.name}}: u32, {{/each~}} ) -> Weight { - ({{underscore benchmark.base_weight}} as Weight) + Weight::from_ref_time({{underscore benchmark.base_weight}} as u64) {{#each benchmark.component_weight as |cw|}} // Standard Error: {{underscore cw.error}} - .saturating_add(({{underscore cw.slope}} as Weight).saturating_mul({{cw.name}} as Weight)) + .saturating_add(Weight::from_ref_time({{underscore cw.slope}} as u64).saturating_mul({{cw.name}} as u64)) {{/each}} {{#if (ne benchmark.base_reads "0")}} - .saturating_add(T::DbWeight::get().reads({{benchmark.base_reads}} as Weight)) + .saturating_add(T::DbWeight::get().reads({{benchmark.base_reads}} as u64)) {{/if}} {{#each benchmark.component_reads as |cr|}} - .saturating_add(T::DbWeight::get().reads(({{cr.slope}} as Weight).saturating_mul({{cr.name}} as Weight))) + .saturating_add(T::DbWeight::get().reads(({{cr.slope}} as u64).saturating_mul({{cr.name}} as u64))) {{/each}} {{#if (ne benchmark.base_writes "0")}} - .saturating_add(T::DbWeight::get().writes({{benchmark.base_writes}} as Weight)) + .saturating_add(T::DbWeight::get().writes({{benchmark.base_writes}} as u64)) {{/if}} {{#each benchmark.component_writes as |cw|}} - .saturating_add(T::DbWeight::get().writes(({{cw.slope}} as Weight).saturating_mul({{cw.name}} as Weight))) + .saturating_add(T::DbWeight::get().writes(({{cw.slope}} as u64).saturating_mul({{cw.name}} as u64))) {{/each}} } {{/each}} @@ -78,22 +79,22 @@ impl WeightInfo for () { {{~#each benchmark.components as |c| ~}} {{~#if (not c.is_used)}}_{{/if}}{{c.name}}: u32, {{/each~}} ) -> Weight { - ({{underscore benchmark.base_weight}} as Weight) + 
Weight::from_ref_time({{underscore benchmark.base_weight}} as u64) {{#each benchmark.component_weight as |cw|}} // Standard Error: {{underscore cw.error}} - .saturating_add(({{underscore cw.slope}} as Weight).saturating_mul({{cw.name}} as Weight)) + .saturating_add(Weight::from_ref_time({{underscore cw.slope}} as u64).saturating_mul({{cw.name}} as u64)) {{/each}} {{#if (ne benchmark.base_reads "0")}} - .saturating_add(RocksDbWeight::get().reads({{benchmark.base_reads}} as Weight)) + .saturating_add(RocksDbWeight::get().reads({{benchmark.base_reads}} as u64)) {{/if}} {{#each benchmark.component_reads as |cr|}} - .saturating_add(RocksDbWeight::get().reads(({{cr.slope}} as Weight).saturating_mul({{cr.name}} as Weight))) + .saturating_add(RocksDbWeight::get().reads(({{cr.slope}} as u64).saturating_mul({{cr.name}} as u64))) {{/each}} {{#if (ne benchmark.base_writes "0")}} - .saturating_add(RocksDbWeight::get().writes({{benchmark.base_writes}} as Weight)) + .saturating_add(RocksDbWeight::get().writes({{benchmark.base_writes}} as u64)) {{/if}} {{#each benchmark.component_writes as |cw|}} - .saturating_add(RocksDbWeight::get().writes(({{cw.slope}} as Weight).saturating_mul({{cw.name}} as Weight))) + .saturating_add(RocksDbWeight::get().writes(({{cw.slope}} as u64).saturating_mul({{cw.name}} as u64))) {{/each}} } {{/each}} diff --git a/.maintain/scripts/generate_sol.sh b/.maintain/scripts/generate_sol.sh index 5ce3460805..e9bb7b5675 100755 --- a/.maintain/scripts/generate_sol.sh +++ b/.maintain/scripts/generate_sol.sh @@ -1,11 +1,16 @@ #!/bin/sh set -eu +PRETTIER_CONFIG="$(pwd)""/.prettierrc" + tmp=$(mktemp) cargo test --package $PACKAGE -- $NAME --exact --nocapture --ignored | tee $tmp raw=$(mktemp --suffix .sol) sed -n '/=== SNIP START ===/, /=== SNIP END ===/{ /=== SNIP START ===/! { /=== SNIP END ===/! 
p } }' $tmp > $raw + formatted=$(mktemp) -prettier --use-tabs $raw > $formatted +prettier --config $PRETTIER_CONFIG $raw > $formatted + +sed -i -E -e "s/.+\/\/ FORMATTING: FORCE NEWLINE//g" $formatted mv $formatted $OUTPUT diff --git a/.prettierignore b/.prettierignore new file mode 100644 index 0000000000..dc0b79cafb --- /dev/null +++ b/.prettierignore @@ -0,0 +1 @@ +!**/*.sol diff --git a/.prettierrc b/.prettierrc new file mode 100644 index 0000000000..e97f200459 --- /dev/null +++ b/.prettierrc @@ -0,0 +1,16 @@ +{ + "useTabs": true, + "tabWidth": 2, + "singleQuote": true, + "trailingComma": "all", + "overrides": [ + { + "files": "*.sol", + "options": { + "singleQuote": false, + "printWidth": 120, + "explicitTypes": "always" + } + } + ] +} \ No newline at end of file diff --git a/Cargo.lock b/Cargo.lock index 66731bad2a..1c82c79cdf 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -33,7 +33,7 @@ version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b613b8e1e3cf911a086f53f03bf286f52fd7a7258e4fa606f0ef220d39d8877" dependencies = [ - "generic-array 0.14.5", + "generic-array 0.14.6", ] [[package]] @@ -43,7 +43,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9e8b47f52ea9bae42228d07ec09eb676433d7c4ed1ebdf0f1d1c29ed446f1ab8" dependencies = [ "cfg-if 1.0.0", - "cipher", + "cipher 0.3.0", "cpufeatures", "opaque-debug 0.3.0", ] @@ -56,7 +56,7 @@ checksum = "df5f85a83a7d8b0442b6aa7b504b8212c1733da07b98aae43d4bc21b2cb3cdf6" dependencies = [ "aead", "aes", - "cipher", + "cipher 0.3.0", "ctr", "ghash", "subtle", @@ -68,16 +68,16 @@ version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" dependencies = [ - "getrandom 0.2.7", + "getrandom 0.2.8", "once_cell", "version_check", ] [[package]] name = "aho-corasick" -version = "0.7.18" +version = "0.7.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f" +checksum = "b4f55bd91a0978cbfd91c457a164bab8b4001c833b7f323132c0a4e1922dd44e" dependencies = [ "memchr", ] @@ -88,6 +88,15 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fbf688625d06217d5b1bb0ea9d9c44a1635fd0ee3534466388d18203174f4d11" +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + [[package]] name = "ansi_term" version = "0.12.1" @@ -99,9 +108,23 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.58" +version = "1.0.66" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb07d2053ccdbe10e2af2995a2f116c1330396493dc1269f6a91d0ae82e19704" +checksum = "216261ddc8289130e551ddcd5ce8a064710c0d064a4d2895c67151c92b5443f6" + +[[package]] +name = "app-promotion-rpc" +version = "0.1.0" +dependencies = [ + "pallet-common", + "pallet-evm", + "parity-scale-codec 3.2.1", + "sp-api", + "sp-core", + "sp-runtime", + "sp-std", + "up-data-structs", +] [[package]] name = "approx" @@ -112,6 +135,12 @@ dependencies = [ "num-traits", ] +[[package]] +name = "array-bytes" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a913633b0c922e6b745072795f50d90ebea78ba31a57e2ac8c2fc7b50950949" + [[package]] name = "arrayref" version = "0.3.6" @@ 
-163,9 +192,9 @@ dependencies = [ [[package]] name = "async-channel" -version = "1.6.1" +version = "1.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2114d64672151c0c5eaa5e131ec84a74f06e1e559830dabba01ca30605d66319" +checksum = "e14485364214912d3b19cc3435dde4df66065127f05fa0d75c712f36f12c2f28" dependencies = [ "concurrent-queue", "event-listener", @@ -188,9 +217,9 @@ dependencies = [ [[package]] name = "async-global-executor" -version = "2.2.0" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5262ed948da60dd8956c6c5aca4d4163593dddb7b32d73267c93dab7b2e98940" +checksum = "0da5b41ee986eed3f524c380e6d64965aea573882a8907682ad100f7859305ca" dependencies = [ "async-channel", "async-executor", @@ -198,16 +227,16 @@ dependencies = [ "async-lock", "blocking", "futures-lite", - "num_cpus", "once_cell", ] [[package]] name = "async-io" -version = "1.7.0" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5e18f61464ae81cde0a23e713ae8fd299580c54d697a35820cfd0625b8b0e07" +checksum = "83e21f3a490c72b3b0cf44962180e60045de2925d8dff97918f7ee43c8f637c7" dependencies = [ + "autocfg", "concurrent-queue", "futures-lite", "libc", @@ -232,11 +261,12 @@ dependencies = [ [[package]] name = "async-process" -version = "1.4.0" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf2c06e30a24e8c78a3987d07f0930edf76ef35e027e7bdb063fccafdad1f60c" +checksum = "02111fd8655a613c25069ea89fc8d9bb89331fa77486eb3bc059ee757cfa481c" dependencies = [ "async-io", + "autocfg", "blocking", "cfg-if 1.0.0", "event-listener", @@ -298,9 +328,9 @@ checksum = "7a40729d2133846d9ed0ea60a8b9541bccddab49cd30f0715a1da672fe9a2524" [[package]] name = "async-trait" -version = "0.1.56" +version = "0.1.58" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96cf8829f67d2eab0b2dfa42c5d0ef737e0724e4a82b01b3e292456202b19716" +checksum = "1e805d94e6b5001b651426cf4cd446b1ab5f319d27bab5c644f61de0a804360c" dependencies = [ "proc-macro2", "quote", @@ -313,7 +343,7 @@ version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f0de5164e5edbf51c45fb8c2d9664ae1c095cce1b265ecf7569093c0d66ef690" dependencies = [ - "bytes 1.2.0", + "bytes", "futures-sink", "futures-util", "memchr", @@ -362,11 +392,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b62ddb9cb1ec0a098ad4bbf9344d0713fa193ae1a80af55febcff2627b6a00c1" dependencies = [ "futures-core", - "getrandom 0.2.7", + "getrandom 0.2.8", "instant", "pin-project-lite 0.2.9", "rand 0.8.5", - "tokio 1.20.1", + "tokio", ] [[package]] @@ -380,7 +410,7 @@ dependencies = [ "cfg-if 1.0.0", "libc", "miniz_oxide", - "object 0.29.0", + "object", "rustc-demangle", ] @@ -404,9 +434,15 @@ checksum = "6107fe1be6682a68940da878d9e9f5e90ca5745b3dec9fd1bb393c8777d4f581" [[package]] name = "base64" -version = "0.13.0" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "904dfeac50f3cdaba28fc6f57fdcddb75f49ed61346676a78c4ffe55877802fd" +checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" + +[[package]] +name = "base64ct" +version = "1.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b645a089122eccb6111b4f81cbc1a49f5900ac4666bb93ac027feaecf15607bf" [[package]] name = "beef" @@ -420,21 +456,24 @@ dependencies = [ [[package]] name = "beefy-gadget" version = "4.0.0-dev" 
-source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ + "array-bytes", + "async-trait", "beefy-primitives", "fnv", - "futures 0.3.21", + "futures 0.3.25", "futures-timer", - "hex", "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "parking_lot 0.12.1", "sc-chain-spec", "sc-client-api", + "sc-consensus", "sc-finality-grandpa", "sc-keystore", "sc-network", + "sc-network-common", "sc-network-gossip", "sc-utils", "sp-api", @@ -454,14 +493,14 @@ dependencies = [ [[package]] name = "beefy-gadget-rpc" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "beefy-gadget", "beefy-primitives", - "futures 0.3.21", + "futures 0.3.25", "jsonrpsee", "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "parking_lot 0.12.1", "sc-rpc", "sc-utils", @@ -474,14 +513,18 @@ dependencies = [ [[package]] name = "beefy-merkle-tree" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" +dependencies = [ + "beefy-primitives", + "sp-api", +] [[package]] name = "beefy-primitives" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-api", "sp-application-crypto", @@ -560,7 +603,7 @@ version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b9cf849ee05b2ee5fba5e36f97ff8ec2533916700fc0758d40d92136a42f3388" dependencies = [ - "digest 0.10.3", + "digest 0.10.5", ] [[package]] @@ -606,7 +649,7 @@ dependencies = [ "cc", "cfg-if 1.0.0", "constant_time_eq", - "digest 0.10.3", + "digest 0.10.5", ] [[package]] @@ -615,7 +658,7 @@ version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c0940dc441f31689269e10ac70eb1002a3a1d3ad1390e030043662eb7fe4688b" dependencies = [ - "block-padding 0.1.5", + "block-padding", "byte-tools", "byteorder", "generic-array 0.12.4", @@ -627,17 +670,16 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4" dependencies = [ - "block-padding 0.2.1", - "generic-array 0.14.5", + "generic-array 0.14.6", ] [[package]] name = "block-buffer" -version = "0.10.2" +version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bf7fe51849ea569fd452f37822f606a5cabb684dc918707a0193fd4664ff324" +checksum = "69cce20737498f97b993470a6e536b8523f0af7892a4f928cceb1ac5e52ebe7e" dependencies = [ - "generic-array 0.14.5", + "generic-array 0.14.6", ] [[package]] @@ -649,12 +691,6 @@ dependencies = [ "byte-tools", ] -[[package]] -name = "block-padding" -version = "0.2.1" 
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d696c370c750c948ada61c69a0ee2cbbb9c50b1019ddb86d9317157a99c2cae" - [[package]] name = "blocking" version = "1.2.0" @@ -670,166 +706,32 @@ dependencies = [ ] [[package]] -name = "bounded-vec" -version = "0.6.0" +name = "bondrewd" +version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3372be4090bf9d4da36bd8ba7ce6ca1669503d0cf6e667236c6df7f053153eb6" -dependencies = [ - "thiserror", -] - -[[package]] -name = "bp-header-chain" -version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" -dependencies = [ - "bp-runtime", - "finality-grandpa", - "frame-support", - "parity-scale-codec 3.1.5", - "scale-info", - "serde", - "sp-core", - "sp-finality-grandpa", - "sp-runtime", - "sp-std", -] - -[[package]] -name = "bp-message-dispatch" -version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" -dependencies = [ - "bp-runtime", - "frame-support", - "parity-scale-codec 3.1.5", - "scale-info", - "sp-std", -] - -[[package]] -name = "bp-messages" -version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" -dependencies = [ - "bitvec 1.0.1", - "bp-runtime", - "frame-support", - "frame-system", - "impl-trait-for-tuples", - "parity-scale-codec 3.1.5", - "scale-info", - "serde", - "sp-core", - "sp-std", -] - -[[package]] -name = "bp-polkadot-core" -version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" -dependencies = [ - "bp-messages", - "bp-runtime", - "frame-support", - "frame-system", - "parity-scale-codec 3.1.5", - "scale-info", - "sp-api", - "sp-core", - "sp-runtime", - "sp-std", - "sp-version", -] - -[[package]] -name = "bp-rococo" -version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" -dependencies = [ - "bp-messages", - "bp-polkadot-core", - "bp-runtime", - "frame-support", - "parity-scale-codec 3.1.5", - "smallvec", - "sp-api", - "sp-runtime", - "sp-std", - "sp-version", -] - -[[package]] -name = "bp-runtime" -version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" -dependencies = [ - "frame-support", - "hash-db", - "num-traits", - "parity-scale-codec 3.1.5", - "scale-info", - "sp-core", - "sp-io", - "sp-runtime", - "sp-state-machine", - "sp-std", - "sp-trie", -] - -[[package]] -name = "bp-test-utils" -version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +checksum = "6d1660fac8d3acced44dac64453fafedf5aab2de196b932c727e63e4ae42d1cc" dependencies = [ - "bp-header-chain", - "ed25519-dalek", - "finality-grandpa", - "parity-scale-codec 3.1.5", - "sp-application-crypto", - "sp-finality-grandpa", - "sp-runtime", - "sp-std", + "bondrewd-derive", ] [[package]] -name = "bp-wococo" -version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +name = "bondrewd-derive" +version = "0.3.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "723da0dee1eef38edc021b0793f892bdc024500c6a5b0727a2efe16f0e0a6977" dependencies = [ - "bp-messages", 
- "bp-polkadot-core", - "bp-rococo", - "bp-runtime", - "parity-scale-codec 3.1.5", - "sp-api", - "sp-runtime", - "sp-std", + "proc-macro2", + "quote", + "syn", ] [[package]] -name = "bridge-runtime-common" -version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +name = "bounded-vec" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3372be4090bf9d4da36bd8ba7ce6ca1669503d0cf6e667236c6df7f053153eb6" dependencies = [ - "bp-message-dispatch", - "bp-messages", - "bp-runtime", - "frame-support", - "frame-system", - "hash-db", - "pallet-bridge-dispatch", - "pallet-bridge-grandpa", - "pallet-bridge-messages", - "pallet-transaction-payment", - "parity-scale-codec 3.1.5", - "scale-info", - "sp-api", - "sp-core", - "sp-runtime", - "sp-state-machine", - "sp-std", - "sp-trie", + "thiserror", ] [[package]] @@ -858,9 +760,9 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.10.0" +version = "3.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37ccbd214614c6783386c1af30caf03192f17891059cecc394b4fb119e363de3" +checksum = "572f695136211188308f16ad2ca5c851a712c464060ae6974944458eb83880ba" [[package]] name = "byte-slice-cast" @@ -882,15 +784,9 @@ checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" [[package]] name = "bytes" -version = "0.5.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e4cec68f03f32e44924783795810fa50a7035d8c8ebe78580ad7e6c703fba38" - -[[package]] -name = "bytes" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0b3de4a0c5e67e16066a0715723abd91edc2f9001d09c46e1dca929351e130e" +checksum = "ec8a7b6a70fde80372154c65702f00a0f56f3e1c36abbc6c440484be248856db" [[package]] name = "bzip2-sys" @@ -911,9 +807,9 @@ checksum = "c1db59621ec70f09c5e9b597b220c7a2b43611f4710dc03ceb8748637775692c" [[package]] name = "camino" -version = "1.0.9" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "869119e97797867fd90f5e22af7d0bd274bd4635ebb9eb68c04f3f513ae6c412" +checksum = "88ad0e1e3e88dd237a156ab9f571021b8a158caa0ae44b1968a241efb5144c1e" dependencies = [ "serde", ] @@ -935,7 +831,7 @@ checksum = "4acbb09d9ee8e23699b9634375c72795d095bf268439da88562cf9b501f181fa" dependencies = [ "camino", "cargo-platform", - "semver 1.0.12", + "semver 1.0.14", "serde", "serde_json", ] @@ -958,6 +854,15 @@ dependencies = [ "nom", ] +[[package]] +name = "cfg-expr" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0aacacf4d96c24b2ad6eb8ee6df040e4f27b0d0b39a5710c30091baa830485db" +dependencies = [ + "smallvec", +] + [[package]] name = "cfg-if" version = "0.1.10" @@ -983,7 +888,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c80e5460aa66fe3b91d40bcbdab953a597b60053e34d684ac6903f863b680a6" dependencies = [ "cfg-if 1.0.0", - "cipher", + "cipher 0.3.0", "cpufeatures", "zeroize", ] @@ -996,29 +901,31 @@ checksum = "a18446b09be63d457bbec447509e85f662f32952b035ce892290396bc0b0cff5" dependencies = [ "aead", "chacha20", - "cipher", + "cipher 0.3.0", "poly1305", "zeroize", ] [[package]] name = "chrono" -version = "0.4.19" +version = "0.4.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "670ad68c9088c2a963aaa298cb369688cf3f9465ce5e2d4ca10e6e0098a1ce73" +checksum = 
"bfd4d1b31faaa3a89d7934dbded3111da0d2ef28e3ebccdb4f0179f5929d1ef1" dependencies = [ - "libc", + "iana-time-zone", + "js-sys", "num-integer", "num-traits", - "time", + "time 0.1.44", + "wasm-bindgen", "winapi", ] [[package]] name = "cid" -version = "0.8.5" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc949bff6704880faf064c42a4854032ab07bfcf3a4fcb82a57470acededb69c" +checksum = "f6ed9c8b2d17acb8110c46f1da5bf4a696d745e1474a16db0cd2b49cd0249bf2" dependencies = [ "core2", "multibase", @@ -1033,7 +940,17 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ee52072ec15386f770805afd189a01c8841be8696bed250fa2f13c4c0d6dfb7" dependencies = [ - "generic-array 0.14.5", + "generic-array 0.14.6", +] + +[[package]] +name = "cipher" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1873270f8f7942c191139cb8a40fd228da6c3fd2fc376d7e92d47aa14aeb59e" +dependencies = [ + "crypto-common", + "inout", ] [[package]] @@ -1047,9 +964,9 @@ dependencies = [ [[package]] name = "clang-sys" -version = "1.3.3" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a050e2153c5be08febd6734e29298e844fdb0fa21aeddd63b4eb7baa106c69b" +checksum = "fa2e27ae6ab525c3d369ded447057bca5438d86dc3a68f6faafb8269ba82ebf3" dependencies = [ "glob", "libc", @@ -1058,9 +975,9 @@ dependencies = [ [[package]] name = "clap" -version = "3.2.15" +version = "3.2.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44bbe24bbd31a185bc2c4f7c2abe80bea13a20d57ee4e55be70ac512bdc76417" +checksum = "86447ad904c7fb335a790c9d7fe3d0d971dc523b8ccd1561a520de9a85302750" dependencies = [ "atty", "bitflags", @@ -1075,11 +992,11 @@ dependencies = [ [[package]] name = "clap_derive" -version = "3.2.15" +version = "3.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ba52acd3b0a5c33aeada5cdaa3267cdc7c594a98731d4268cdc1532f4264cb4" +checksum = "ea0c8bce528c4be4da13ea6fead8965e95b6073585a2f05204bd8f4119f82a65" dependencies = [ - "heck 0.4.0", + "heck", "proc-macro-error", "proc-macro2", "quote", @@ -1116,22 +1033,32 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "codespan-reporting" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3538270d33cc669650c4b093848450d380def10c331d38c768e34cac80576e6e" +dependencies = [ + "termcolor", + "unicode-width", +] + [[package]] name = "comfy-table" -version = "5.0.1" +version = "6.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b103d85ca6e209388771bfb7aa6b68a7aeec4afbf6f0a0264bfbf50360e5212e" +checksum = "7b3d16bb3da60be2f7c7acfc438f2ae6f3496897ce68c291d0509bb67b4e248e" dependencies = [ - "strum 0.23.0", - "strum_macros 0.23.1", + "strum", + "strum_macros", "unicode-width", ] [[package]] name = "concurrent-queue" -version = "1.2.2" +version = "1.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30ed07550be01594c6026cff2a1d7fe9c8f683caa798e12b68694ac9e88286a3" +checksum = "af4780a44ab5696ea9e28294517f1fffb421a83a25af521333c838635509db9c" dependencies = [ "cache-padded", ] @@ -1190,68 +1117,71 @@ dependencies = [ [[package]] name = "cpufeatures" -version = "0.2.2" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59a6001667ab124aebae2a495118e11d30984c3a653e99d86d58971708cf5e4b" +checksum = 
"28d997bd5e24a5928dd43e46dc529867e207907fe0b239c3477d924f7f2ca320" dependencies = [ "libc", ] [[package]] name = "cranelift-bforest" -version = "0.82.3" +version = "0.88.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38faa2a16616c8e78a18d37b4726b98bfd2de192f2fdc8a39ddf568a408a0f75" +checksum = "44409ccf2d0f663920cab563d2b79fcd6b2e9a2bcc6e929fef76c8f82ad6c17a" dependencies = [ "cranelift-entity", ] [[package]] name = "cranelift-codegen" -version = "0.82.3" +version = "0.88.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26f192472a3ba23860afd07d2b0217dc628f21fcc72617aa1336d98e1671f33b" +checksum = "98de2018ad96eb97f621f7d6b900a0cc661aec8d02ea4a50e56ecb48e5a2fcaf" dependencies = [ + "arrayvec 0.7.2", + "bumpalo", "cranelift-bforest", "cranelift-codegen-meta", "cranelift-codegen-shared", "cranelift-entity", + "cranelift-isle", "gimli", "log", - "regalloc", + "regalloc2", "smallvec", "target-lexicon", ] [[package]] name = "cranelift-codegen-meta" -version = "0.82.3" +version = "0.88.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f32ddb89e9b89d3d9b36a5b7d7ea3261c98235a76ac95ba46826b8ec40b1a24" +checksum = "5287ce36e6c4758fbaf298bd1a8697ad97a4f2375a3d1b61142ea538db4877e5" dependencies = [ "cranelift-codegen-shared", ] [[package]] name = "cranelift-codegen-shared" -version = "0.82.3" +version = "0.88.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01fd0d9f288cc1b42d9333b7a776b17e278fc888c28e6a0f09b5573d45a150bc" +checksum = "2855c24219e2f08827f3f4ffb2da92e134ae8d8ecc185b11ec8f9878cf5f588e" [[package]] name = "cranelift-entity" -version = "0.82.3" +version = "0.88.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e3bfe172b83167604601faf9dc60453e0d0a93415b57a9c4d1a7ae6849185cf" +checksum = "0b65673279d75d34bf11af9660ae2dbd1c22e6d28f163f5c72f4e1dc56d56103" dependencies = [ "serde", ] [[package]] name = "cranelift-frontend" -version = "0.82.3" +version = "0.88.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a006e3e32d80ce0e4ba7f1f9ddf66066d052a8c884a110b91d05404d6ce26dce" +checksum = "3ed2b3d7a4751163f6c4a349205ab1b7d9c00eecf19dcea48592ef1f7688eefc" dependencies = [ "cranelift-codegen", "log", @@ -1259,11 +1189,17 @@ dependencies = [ "target-lexicon", ] +[[package]] +name = "cranelift-isle" +version = "0.88.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3be64cecea9d90105fc6a2ba2d003e98c867c1d6c4c86cc878f97ad9fb916293" + [[package]] name = "cranelift-native" -version = "0.82.3" +version = "0.88.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "501241b0cdf903412ec9075385ac9f2b1eb18a89044d1538e97fab603231f70c" +checksum = "c4a03a6ac1b063e416ca4b93f6247978c991475e8271465340caa6f92f3c16a4" dependencies = [ "cranelift-codegen", "libc", @@ -1272,9 +1208,9 @@ dependencies = [ [[package]] name = "cranelift-wasm" -version = "0.82.3" +version = "0.88.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16d9e4211bbc3268042a96dd4de5bd979cda22434991d035f5f8eacba987fad2" +checksum = "c699873f7b30bc5f20dd03a796b4183e073a46616c91704792ec35e45d13f913" dependencies = [ "cranelift-codegen", "cranelift-entity", @@ -1318,15 +1254,14 @@ dependencies = [ [[package]] name = "crossbeam-epoch" -version = "0.9.10" +version = "0.9.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"045ebe27666471bb549370b4b0b3e51b07f56325befa4284db65fc89c02511b1" +checksum = "f916dfc5d356b0ed9dae65f1db9fc9770aa2851d2662b988ccf4fe3516e86348" dependencies = [ "autocfg", "cfg-if 1.0.0", "crossbeam-utils", "memoffset", - "once_cell", "scopeguard", ] @@ -1342,12 +1277,11 @@ dependencies = [ [[package]] name = "crossbeam-utils" -version = "0.8.11" +version = "0.8.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51887d4adc7b564537b15adcfb307936f8075dfcd5f00dde9a9f1d29383682bc" +checksum = "edbafec5fa1f196ca66527c1b12c2ec4745ca14b50f1ad8f9f6f720b55d11fac" dependencies = [ "cfg-if 1.0.0", - "once_cell", ] [[package]] @@ -1362,8 +1296,8 @@ version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "03c6a1d5fa1de37e071642dfa44ec552ca5b299adb128fab16138e24b548fd21" dependencies = [ - "generic-array 0.14.5", - "rand_core 0.6.3", + "generic-array 0.14.6", + "rand_core 0.6.4", "subtle", "zeroize", ] @@ -1374,7 +1308,7 @@ version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" dependencies = [ - "generic-array 0.14.5", + "generic-array 0.14.6", "typenum", ] @@ -1384,7 +1318,7 @@ version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b584a330336237c1eecd3e94266efb216c56ed91225d634cb2991c5f3fd1aeab" dependencies = [ - "generic-array 0.14.5", + "generic-array 0.14.6", "subtle", ] @@ -1394,15 +1328,15 @@ version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b1d1a86f49236c215f271d40892d5fc950490551400b02ef360692c29815c714" dependencies = [ - "generic-array 0.14.5", + "generic-array 0.14.6", "subtle", ] [[package]] name = "ctor" -version = "0.1.22" +version = "0.1.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f877be4f7c9f246b183111634f75baa039715e3f46ce860677d3b19a69fb229c" +checksum = "6d2301688392eb071b0bf1a37be05c469d3cc4dbbd95df672fe28ab021e6a096" dependencies = [ "quote", "syn", @@ -1414,7 +1348,7 @@ version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "049bb91fb4aaf0e3c7efa6cd5ef877dbbbd15b39dad06d9948de4ec8a75761ea" dependencies = [ - "cipher", + "cipher 0.3.0", ] [[package]] @@ -1431,25 +1365,29 @@ dependencies = [ [[package]] name = "cumulus-client-cli" version = "0.1.0" -source = "git+https://github.com/paritytech/cumulus?branch=polkadot-v0.9.24#95ca5a085727c1494ddeeae4a2b2e69c4ee1933b" +source = "git+https://github.com/paritytech/cumulus?branch=polkadot-v0.9.30#7b1fc0ed107fe42bb7e6a5dfefb586f4c3ae4328" dependencies = [ "clap", + "parity-scale-codec 3.2.1", + "sc-chain-spec", "sc-cli", "sc-service", + "sp-core", + "sp-runtime", "url", ] [[package]] name = "cumulus-client-collator" version = "0.1.0" -source = "git+https://github.com/paritytech/cumulus?branch=polkadot-v0.9.24#95ca5a085727c1494ddeeae4a2b2e69c4ee1933b" +source = "git+https://github.com/paritytech/cumulus?branch=polkadot-v0.9.30#7b1fc0ed107fe42bb7e6a5dfefb586f4c3ae4328" dependencies = [ "cumulus-client-consensus-common", "cumulus-client-network", "cumulus-primitives-core", "cumulus-relay-chain-interface", - "futures 0.3.21", - "parity-scale-codec 3.1.5", + "futures 0.3.25", + "parity-scale-codec 3.2.1", "parking_lot 0.12.1", "polkadot-node-primitives", "polkadot-node-subsystem", @@ -1466,13 +1404,13 @@ dependencies = [ [[package]] name = "cumulus-client-consensus-aura" version = "0.1.0" -source = 
"git+https://github.com/paritytech/cumulus?branch=polkadot-v0.9.24#95ca5a085727c1494ddeeae4a2b2e69c4ee1933b" +source = "git+https://github.com/paritytech/cumulus?branch=polkadot-v0.9.30#7b1fc0ed107fe42bb7e6a5dfefb586f4c3ae4328" dependencies = [ "async-trait", "cumulus-client-consensus-common", "cumulus-primitives-core", - "futures 0.3.21", - "parity-scale-codec 3.1.5", + "futures 0.3.25", + "parity-scale-codec 3.2.1", "sc-client-api", "sc-consensus", "sc-consensus-aura", @@ -1495,13 +1433,13 @@ dependencies = [ [[package]] name = "cumulus-client-consensus-common" version = "0.1.0" -source = "git+https://github.com/paritytech/cumulus?branch=polkadot-v0.9.24#95ca5a085727c1494ddeeae4a2b2e69c4ee1933b" +source = "git+https://github.com/paritytech/cumulus?branch=polkadot-v0.9.30#7b1fc0ed107fe42bb7e6a5dfefb586f4c3ae4328" dependencies = [ "async-trait", "cumulus-relay-chain-interface", "dyn-clone", - "futures 0.3.21", - "parity-scale-codec 3.1.5", + "futures 0.3.25", + "parity-scale-codec 3.2.1", "polkadot-primitives", "sc-client-api", "sc-consensus", @@ -1516,14 +1454,14 @@ dependencies = [ [[package]] name = "cumulus-client-network" version = "0.1.0" -source = "git+https://github.com/paritytech/cumulus?branch=polkadot-v0.9.24#95ca5a085727c1494ddeeae4a2b2e69c4ee1933b" +source = "git+https://github.com/paritytech/cumulus?branch=polkadot-v0.9.30#7b1fc0ed107fe42bb7e6a5dfefb586f4c3ae4328" dependencies = [ "async-trait", "cumulus-relay-chain-interface", "derive_more", - "futures 0.3.21", + "futures 0.3.25", "futures-timer", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "parking_lot 0.12.1", "polkadot-node-primitives", "polkadot-parachain", @@ -1541,13 +1479,13 @@ dependencies = [ [[package]] name = "cumulus-client-pov-recovery" version = "0.1.0" -source = "git+https://github.com/paritytech/cumulus?branch=polkadot-v0.9.24#95ca5a085727c1494ddeeae4a2b2e69c4ee1933b" +source = "git+https://github.com/paritytech/cumulus?branch=polkadot-v0.9.30#7b1fc0ed107fe42bb7e6a5dfefb586f4c3ae4328" dependencies = [ "cumulus-primitives-core", "cumulus-relay-chain-interface", - "futures 0.3.21", + "futures 0.3.25", "futures-timer", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "polkadot-node-primitives", "polkadot-node-subsystem", "polkadot-overseer", @@ -1565,7 +1503,7 @@ dependencies = [ [[package]] name = "cumulus-client-service" version = "0.1.0" -source = "git+https://github.com/paritytech/cumulus?branch=polkadot-v0.9.24#95ca5a085727c1494ddeeae4a2b2e69c4ee1933b" +source = "git+https://github.com/paritytech/cumulus?branch=polkadot-v0.9.30#7b1fc0ed107fe42bb7e6a5dfefb586f4c3ae4328" dependencies = [ "cumulus-client-cli", "cumulus-client-collator", @@ -1573,11 +1511,9 @@ dependencies = [ "cumulus-client-pov-recovery", "cumulus-primitives-core", "cumulus-relay-chain-interface", - "parity-scale-codec 3.1.5", "parking_lot 0.12.1", "polkadot-overseer", "polkadot-primitives", - "sc-chain-spec", "sc-client-api", "sc-consensus", "sc-consensus-babe", @@ -1595,13 +1531,13 @@ dependencies = [ [[package]] name = "cumulus-pallet-aura-ext" version = "0.1.0" -source = "git+https://github.com/paritytech/cumulus?branch=polkadot-v0.9.24#95ca5a085727c1494ddeeae4a2b2e69c4ee1933b" +source = "git+https://github.com/paritytech/cumulus?branch=polkadot-v0.9.30#7b1fc0ed107fe42bb7e6a5dfefb586f4c3ae4328" dependencies = [ "frame-executive", "frame-support", "frame-system", "pallet-aura", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "serde", "sp-application-crypto", @@ -1613,13 +1549,13 @@ 
dependencies = [ [[package]] name = "cumulus-pallet-dmp-queue" version = "0.1.0" -source = "git+https://github.com/paritytech/cumulus?branch=polkadot-v0.9.24#95ca5a085727c1494ddeeae4a2b2e69c4ee1933b" +source = "git+https://github.com/paritytech/cumulus?branch=polkadot-v0.9.30#7b1fc0ed107fe42bb7e6a5dfefb586f4c3ae4328" dependencies = [ "cumulus-primitives-core", "frame-support", "frame-system", "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-io", "sp-runtime", @@ -1631,8 +1567,9 @@ dependencies = [ [[package]] name = "cumulus-pallet-parachain-system" version = "0.1.0" -source = "git+https://github.com/paritytech/cumulus?branch=polkadot-v0.9.24#95ca5a085727c1494ddeeae4a2b2e69c4ee1933b" +source = "git+https://github.com/paritytech/cumulus?branch=polkadot-v0.9.30#7b1fc0ed107fe42bb7e6a5dfefb586f4c3ae4328" dependencies = [ + "bytes", "cumulus-pallet-parachain-system-proc-macro", "cumulus-primitives-core", "cumulus-primitives-parachain-inherent", @@ -1642,7 +1579,7 @@ dependencies = [ "impl-trait-for-tuples", "log", "pallet-balances", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "polkadot-parachain", "scale-info", "serde", @@ -1661,7 +1598,7 @@ dependencies = [ [[package]] name = "cumulus-pallet-parachain-system-proc-macro" version = "0.1.0" -source = "git+https://github.com/paritytech/cumulus?branch=polkadot-v0.9.24#95ca5a085727c1494ddeeae4a2b2e69c4ee1933b" +source = "git+https://github.com/paritytech/cumulus?branch=polkadot-v0.9.30#7b1fc0ed107fe42bb7e6a5dfefb586f4c3ae4328" dependencies = [ "proc-macro-crate", "proc-macro2", @@ -1672,12 +1609,12 @@ dependencies = [ [[package]] name = "cumulus-pallet-xcm" version = "0.1.0" -source = "git+https://github.com/paritytech/cumulus?branch=polkadot-v0.9.24#95ca5a085727c1494ddeeae4a2b2e69c4ee1933b" +source = "git+https://github.com/paritytech/cumulus?branch=polkadot-v0.9.30#7b1fc0ed107fe42bb7e6a5dfefb586f4c3ae4328" dependencies = [ "cumulus-primitives-core", "frame-support", "frame-system", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "serde", "sp-io", @@ -1689,13 +1626,13 @@ dependencies = [ [[package]] name = "cumulus-pallet-xcmp-queue" version = "0.1.0" -source = "git+https://github.com/paritytech/cumulus?branch=polkadot-v0.9.24#95ca5a085727c1494ddeeae4a2b2e69c4ee1933b" +source = "git+https://github.com/paritytech/cumulus?branch=polkadot-v0.9.30#7b1fc0ed107fe42bb7e6a5dfefb586f4c3ae4328" dependencies = [ "cumulus-primitives-core", "frame-support", "frame-system", "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "rand_chacha 0.3.1", "scale-info", "sp-runtime", @@ -1707,10 +1644,10 @@ dependencies = [ [[package]] name = "cumulus-primitives-core" version = "0.1.0" -source = "git+https://github.com/paritytech/cumulus?branch=polkadot-v0.9.24#95ca5a085727c1494ddeeae4a2b2e69c4ee1933b" +source = "git+https://github.com/paritytech/cumulus?branch=polkadot-v0.9.30#7b1fc0ed107fe42bb7e6a5dfefb586f4c3ae4328" dependencies = [ "frame-support", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "polkadot-core-primitives", "polkadot-parachain", "polkadot-primitives", @@ -1723,13 +1660,13 @@ dependencies = [ [[package]] name = "cumulus-primitives-parachain-inherent" version = "0.1.0" -source = "git+https://github.com/paritytech/cumulus?branch=polkadot-v0.9.24#95ca5a085727c1494ddeeae4a2b2e69c4ee1933b" +source = "git+https://github.com/paritytech/cumulus?branch=polkadot-v0.9.30#7b1fc0ed107fe42bb7e6a5dfefb586f4c3ae4328" dependencies = [ "async-trait", 
"cumulus-primitives-core", "cumulus-relay-chain-interface", "cumulus-test-relay-sproof-builder", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "sc-client-api", "scale-info", "sp-api", @@ -1746,11 +1683,11 @@ dependencies = [ [[package]] name = "cumulus-primitives-timestamp" version = "0.1.0" -source = "git+https://github.com/paritytech/cumulus?branch=polkadot-v0.9.24#95ca5a085727c1494ddeeae4a2b2e69c4ee1933b" +source = "git+https://github.com/paritytech/cumulus?branch=polkadot-v0.9.30#7b1fc0ed107fe42bb7e6a5dfefb586f4c3ae4328" dependencies = [ "cumulus-primitives-core", - "futures 0.3.21", - "parity-scale-codec 3.1.5", + "futures 0.3.25", + "parity-scale-codec 3.2.1", "sp-inherents", "sp-std", "sp-timestamp", @@ -1759,11 +1696,12 @@ dependencies = [ [[package]] name = "cumulus-primitives-utility" version = "0.1.0" -source = "git+https://github.com/paritytech/cumulus?branch=polkadot-v0.9.24#95ca5a085727c1494ddeeae4a2b2e69c4ee1933b" +source = "git+https://github.com/paritytech/cumulus?branch=polkadot-v0.9.30#7b1fc0ed107fe42bb7e6a5dfefb586f4c3ae4328" dependencies = [ "cumulus-primitives-core", "frame-support", - "parity-scale-codec 3.1.5", + "log", + "parity-scale-codec 3.2.1", "polkadot-core-primitives", "polkadot-parachain", "polkadot-primitives", @@ -1771,19 +1709,20 @@ dependencies = [ "sp-std", "sp-trie", "xcm", + "xcm-builder", + "xcm-executor", ] [[package]] name = "cumulus-relay-chain-inprocess-interface" version = "0.1.0" -source = "git+https://github.com/paritytech/cumulus?branch=polkadot-v0.9.24#95ca5a085727c1494ddeeae4a2b2e69c4ee1933b" +source = "git+https://github.com/paritytech/cumulus?branch=polkadot-v0.9.30#7b1fc0ed107fe42bb7e6a5dfefb586f4c3ae4328" dependencies = [ "async-trait", "cumulus-primitives-core", "cumulus-relay-chain-interface", - "futures 0.3.21", + "futures 0.3.25", "futures-timer", - "parking_lot 0.12.1", "polkadot-cli", "polkadot-client", "polkadot-service", @@ -1791,7 +1730,6 @@ dependencies = [ "sc-client-api", "sc-consensus-babe", "sc-network", - "sc-service", "sc-sysinfo", "sc-telemetry", "sc-tracing", @@ -1807,19 +1745,18 @@ dependencies = [ [[package]] name = "cumulus-relay-chain-interface" version = "0.1.0" -source = "git+https://github.com/paritytech/cumulus?branch=polkadot-v0.9.24#95ca5a085727c1494ddeeae4a2b2e69c4ee1933b" +source = "git+https://github.com/paritytech/cumulus?branch=polkadot-v0.9.30#7b1fc0ed107fe42bb7e6a5dfefb586f4c3ae4328" dependencies = [ "async-trait", "cumulus-primitives-core", "derive_more", - "futures 0.3.21", + "futures 0.3.25", "jsonrpsee-core", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "parking_lot 0.12.1", "polkadot-overseer", "polkadot-service", "sc-client-api", - "sc-service", "sp-api", "sp-blockchain", "sp-core", @@ -1831,16 +1768,16 @@ dependencies = [ [[package]] name = "cumulus-relay-chain-rpc-interface" version = "0.1.0" -source = "git+https://github.com/paritytech/cumulus?branch=polkadot-v0.9.24#95ca5a085727c1494ddeeae4a2b2e69c4ee1933b" +source = "git+https://github.com/paritytech/cumulus?branch=polkadot-v0.9.30#7b1fc0ed107fe42bb7e6a5dfefb586f4c3ae4328" dependencies = [ "async-trait", "backoff", "cumulus-primitives-core", "cumulus-relay-chain-interface", - "futures 0.3.21", + "futures 0.3.25", "futures-timer", "jsonrpsee", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "parking_lot 0.12.1", "polkadot-service", "sc-client-api", @@ -1850,6 +1787,7 @@ dependencies = [ "sp-runtime", "sp-state-machine", "sp-storage", + "tokio", "tracing", "url", ] @@ -1857,10 +1795,10 @@ dependencies 
= [ [[package]] name = "cumulus-test-relay-sproof-builder" version = "0.1.0" -source = "git+https://github.com/paritytech/cumulus?branch=polkadot-v0.9.24#95ca5a085727c1494ddeeae4a2b2e69c4ee1933b" +source = "git+https://github.com/paritytech/cumulus?branch=polkadot-v0.9.30#7b1fc0ed107fe42bb7e6a5dfefb586f4c3ae4328" dependencies = [ "cumulus-primitives-core", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "polkadot-primitives", "sp-runtime", "sp-state-machine", @@ -1901,42 +1839,51 @@ checksum = "4033478fbf70d6acf2655ac70da91ee65852d69daf7a67bf7a2f518fb47aafcf" dependencies = [ "byteorder", "digest 0.9.0", - "rand_core 0.6.3", + "rand_core 0.6.4", "subtle", "zeroize", ] [[package]] -name = "darling" -version = "0.13.4" +name = "cxx" +version = "1.0.79" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a01d95850c592940db9b8194bc39f4bc0e89dee5c4265e4b1807c34a9aba453c" +checksum = "3f83d0ebf42c6eafb8d7c52f7e5f2d3003b89c7aa4fd2b79229209459a849af8" dependencies = [ - "darling_core", - "darling_macro", + "cc", + "cxxbridge-flags", + "cxxbridge-macro", + "link-cplusplus", ] [[package]] -name = "darling_core" -version = "0.13.4" +name = "cxx-build" +version = "1.0.79" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "859d65a907b6852c9361e3185c862aae7fafd2887876799fa55f5f99dc40d610" +checksum = "07d050484b55975889284352b0ffc2ecbda25c0c55978017c132b29ba0818a86" dependencies = [ - "fnv", - "ident_case", + "cc", + "codespan-reporting", + "once_cell", "proc-macro2", "quote", - "strsim", + "scratch", "syn", ] [[package]] -name = "darling_macro" -version = "0.13.4" +name = "cxxbridge-flags" +version = "1.0.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "99d2199b00553eda8012dfec8d3b1c75fce747cf27c169a270b3b99e3448ab78" + +[[package]] +name = "cxxbridge-macro" +version = "1.0.79" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c972679f83bdf9c42bd905396b6c3588a843a17f0f16dfcfa3e2c5d57441835" +checksum = "dcb67a6de1f602736dd7eaead0080cf3435df806c61b24b13328db128c58868f" dependencies = [ - "darling_core", + "proc-macro2", "quote", "syn", ] @@ -2015,16 +1962,16 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" dependencies = [ - "generic-array 0.14.5", + "generic-array 0.14.6", ] [[package]] name = "digest" -version = "0.10.3" +version = "0.10.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2fb860ca6fafa5552fb6d0e816a69c8e49f0908bf524e30a90d97c85892d506" +checksum = "adfbc57365a37acbd2ebf2b64d7e69bb766e2fea813521ed536f5d0520dcf86c" dependencies = [ - "block-buffer 0.10.2", + "block-buffer 0.10.3", "crypto-common", "subtle", ] @@ -2088,9 +2035,9 @@ checksum = "9ea835d29036a4087793836fa931b08837ad5e957da9e23886b29586fb9b6650" [[package]] name = "dtoa" -version = "1.0.2" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5caaa75cbd2b960ff1e5392d2cfb1f44717fffe12fc1f32b7b5d1267f99732a6" +checksum = "f8a6eee2d5d0d113f015688310da018bd1d864d86bd567c8fca9c266889e1bfa" [[package]] name = "dyn-clonable" @@ -2115,9 +2062,9 @@ dependencies = [ [[package]] name = "dyn-clone" -version = "1.0.8" +version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d07a982d1fb29db01e5a59b1918e03da4df7297eaeee7686ac45542fd4e59c8" +checksum = 
"4f94fa09c2aeea5b8839e414b7b841bf429fd25b9c522116ac97ee87856d88b2" [[package]] name = "ecdsa" @@ -2154,11 +2101,25 @@ dependencies = [ "zeroize", ] +[[package]] +name = "ed25519-zebra" +version = "3.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "403ef3e961ab98f0ba902771d29f842058578bb1ce7e3c59dad5a6a93e784c69" +dependencies = [ + "curve25519-dalek 3.2.0", + "hex", + "rand_core 0.6.4", + "sha2 0.9.9", + "thiserror", + "zeroize", +] + [[package]] name = "either" -version = "1.7.0" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f107b87b6afc2a64fd13cac55fe06d6c8859f12d4b14cbcdd2c67d0976781be" +checksum = "90e5c1c8368803113bf0c9584fc495a58b86dc8a29edbf8fe877d21d9507e797" [[package]] name = "elliptic-curve" @@ -2170,9 +2131,9 @@ dependencies = [ "crypto-bigint", "der", "ff", - "generic-array 0.14.5", + "generic-array 0.14.6", "group", - "rand_core 0.6.3", + "rand_core 0.6.4", "sec1", "subtle", "zeroize", @@ -2184,7 +2145,7 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "21cdad81446a7f7dc43f6a77409efeb9733d2fa65553efef6018ef257c959b73" dependencies = [ - "heck 0.4.0", + "heck", "proc-macro2", "quote", "syn", @@ -2212,9 +2173,9 @@ dependencies = [ [[package]] name = "enumn" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "052bc8773a98bd051ff37db74a8a25f00e6bfa2cbd03373390c72e9f7afbf344" +checksum = "038b1afa59052df211f9efd58f8b1d84c242935ede1c3dbaed26b018a9e06ae2" dependencies = [ "proc-macro2", "quote", @@ -2223,9 +2184,9 @@ dependencies = [ [[package]] name = "env_logger" -version = "0.9.0" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b2cf0344971ee6c64c31be0d530793fba457d322dfec2810c453d0ef228f9c3" +checksum = "c90bf5f19754d10198ccb95b70664fc925bd1fc090a0fd9a6ebc54acc8cd6272" dependencies = [ "atty", "humantime", @@ -2282,16 +2243,16 @@ version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "23750149fe8834c0e24bb9adcbacbe06c45b9861f15df53e09f26cb7c4ab91ef" dependencies = [ - "bytes 1.2.0", + "bytes", "ethereum-types", "hash-db", "hash256-std-hasher", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "rlp", "rlp-derive", "scale-info", "serde", - "sha3 0.10.1", + "sha3", "triehash", ] @@ -2313,14 +2274,14 @@ dependencies = [ [[package]] name = "event-listener" -version = "2.5.2" +version = "2.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77f3309417938f28bf8228fcff79a4a37103981e3e186d2ccd19c74b38f4eb71" +checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" [[package]] name = "evm" version = "0.35.0" -source = "git+https://github.com/uniquenetwork/evm?branch=unique-polkadot-v0.9.24#e9252ed42dc26fc85b6703b1ba50660a08209e55" +source = "git+https://github.com/uniquenetwork/evm?branch=unique-polkadot-v0.9.30#e9252ed42dc26fc85b6703b1ba50660a08209e55" dependencies = [ "auto_impl", "environmental", @@ -2329,46 +2290,47 @@ dependencies = [ "evm-gasometer", "evm-runtime", "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "primitive-types", "rlp", "scale-info", "serde", - "sha3 0.10.1", + "sha3", ] [[package]] name = "evm-coder" -version = "0.1.0" +version = "0.1.3" dependencies = [ "ethereum", - "evm-coder-macros", + "evm-coder-procedural", "evm-core", + "frame-support", "hex", "hex-literal", "impl-trait-for-tuples", 
"primitive-types", + "sp-std", ] [[package]] -name = "evm-coder-macros" -version = "0.1.0" +name = "evm-coder-procedural" +version = "0.2.0" dependencies = [ "Inflector", - "darling", "hex", "proc-macro2", "quote", - "sha3 0.9.1", + "sha3", "syn", ] [[package]] name = "evm-core" version = "0.35.0" -source = "git+https://github.com/uniquenetwork/evm?branch=unique-polkadot-v0.9.24#e9252ed42dc26fc85b6703b1ba50660a08209e55" +source = "git+https://github.com/uniquenetwork/evm?branch=unique-polkadot-v0.9.30#e9252ed42dc26fc85b6703b1ba50660a08209e55" dependencies = [ - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "primitive-types", "scale-info", "serde", @@ -2377,7 +2339,7 @@ dependencies = [ [[package]] name = "evm-gasometer" version = "0.35.0" -source = "git+https://github.com/uniquenetwork/evm?branch=unique-polkadot-v0.9.24#e9252ed42dc26fc85b6703b1ba50660a08209e55" +source = "git+https://github.com/uniquenetwork/evm?branch=unique-polkadot-v0.9.30#e9252ed42dc26fc85b6703b1ba50660a08209e55" dependencies = [ "environmental", "evm-core", @@ -2388,13 +2350,13 @@ dependencies = [ [[package]] name = "evm-runtime" version = "0.35.0" -source = "git+https://github.com/uniquenetwork/evm?branch=unique-polkadot-v0.9.24#e9252ed42dc26fc85b6703b1ba50660a08209e55" +source = "git+https://github.com/uniquenetwork/evm?branch=unique-polkadot-v0.9.30#e9252ed42dc26fc85b6703b1ba50660a08209e55" dependencies = [ "auto_impl", "environmental", "evm-core", "primitive-types", - "sha3 0.10.1", + "sha3", ] [[package]] @@ -2403,7 +2365,7 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e43f2f1833d64e33f15592464d6fdd70f349dda7b1a53088eb83cd94014008c5" dependencies = [ - "futures 0.3.21", + "futures 0.3.25", ] [[package]] @@ -2480,7 +2442,7 @@ dependencies = [ [[package]] name = "fc-consensus" version = "2.0.0-dev" -source = "git+https://github.com/uniquenetwork/frontier?branch=unique-polkadot-v0.9.24#b1941ba0e691e4a95b414343fd89aceb94c4172b" +source = "git+https://github.com/uniquenetwork/frontier?branch=unique-polkadot-v0.9.30#65930cb2982258bee67b73a1f017711f6f4aa0a4" dependencies = [ "async-trait", "fc-db", @@ -2499,12 +2461,12 @@ dependencies = [ [[package]] name = "fc-db" version = "2.0.0-dev" -source = "git+https://github.com/uniquenetwork/frontier?branch=unique-polkadot-v0.9.24#b1941ba0e691e4a95b414343fd89aceb94c4172b" +source = "git+https://github.com/uniquenetwork/frontier?branch=unique-polkadot-v0.9.30#65930cb2982258bee67b73a1f017711f6f4aa0a4" dependencies = [ "fp-storage", "kvdb-rocksdb", "parity-db", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "parking_lot 0.12.1", "sc-client-db", "sp-core", @@ -2515,12 +2477,12 @@ dependencies = [ [[package]] name = "fc-mapping-sync" version = "2.0.0-dev" -source = "git+https://github.com/uniquenetwork/frontier?branch=unique-polkadot-v0.9.24#b1941ba0e691e4a95b414343fd89aceb94c4172b" +source = "git+https://github.com/uniquenetwork/frontier?branch=unique-polkadot-v0.9.30#65930cb2982258bee67b73a1f017711f6f4aa0a4" dependencies = [ "fc-db", "fp-consensus", "fp-rpc", - "futures 0.3.21", + "futures 0.3.25", "futures-timer", "log", "sc-client-api", @@ -2532,7 +2494,7 @@ dependencies = [ [[package]] name = "fc-rpc" version = "2.0.0-dev" -source = "git+https://github.com/uniquenetwork/frontier?branch=unique-polkadot-v0.9.24#b1941ba0e691e4a95b414343fd89aceb94c4172b" +source = "git+https://github.com/uniquenetwork/frontier?branch=unique-polkadot-v0.9.30#65930cb2982258bee67b73a1f017711f6f4aa0a4" dependencies = [ 
"ethereum", "ethereum-types", @@ -2541,19 +2503,20 @@ dependencies = [ "fc-rpc-core", "fp-rpc", "fp-storage", - "futures 0.3.21", + "futures 0.3.25", "hex", "jsonrpsee", "libsecp256k1", "log", "lru 0.7.8", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "prometheus", "rand 0.8.5", "rlp", "rustc-hex", "sc-client-api", "sc-network", + "sc-network-common", "sc-rpc", "sc-service", "sc-transaction-pool", @@ -2561,18 +2524,19 @@ dependencies = [ "sp-api", "sp-block-builder", "sp-blockchain", + "sp-consensus", "sp-core", "sp-io", "sp-runtime", "sp-storage", "substrate-prometheus-endpoint", - "tokio 1.20.1", + "tokio", ] [[package]] name = "fc-rpc-core" version = "1.1.0-dev" -source = "git+https://github.com/uniquenetwork/frontier?branch=unique-polkadot-v0.9.24#b1941ba0e691e4a95b414343fd89aceb94c4172b" +source = "git+https://github.com/uniquenetwork/frontier?branch=unique-polkadot-v0.9.30#65930cb2982258bee67b73a1f017711f6f4aa0a4" dependencies = [ "ethereum", "ethereum-types", @@ -2598,7 +2562,7 @@ version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "131655483be284720a17d74ff97592b8e76576dc25563148601df2d7c9080924" dependencies = [ - "rand_core 0.6.3", + "rand_core 0.6.4", "subtle", ] @@ -2612,19 +2576,31 @@ dependencies = [ "log", ] +[[package]] +name = "filetime" +version = "0.2.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b9663d381d07ae25dc88dbdf27df458faa83a9b25336bcac83d5e452b5fc9d3" +dependencies = [ + "cfg-if 1.0.0", + "libc", + "redox_syscall", + "windows-sys 0.42.0", +] + [[package]] name = "finality-grandpa" -version = "0.15.0" +version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9def033d8505edf199f6a5d07aa7e6d2d6185b164293b77f0efd108f4f3e11d" +checksum = "b22349c6a11563a202d95772a68e0fcf56119e74ea8a2a19cf2301460fcd0df5" dependencies = [ "either", - "futures 0.3.21", + "futures 0.3.25", "futures-timer", "log", "num-traits", - "parity-scale-codec 3.1.5", - "parking_lot 0.11.2", + "parity-scale-codec 3.2.1", + "parking_lot 0.12.1", "scale-info", ] @@ -2659,18 +2635,19 @@ dependencies = [ [[package]] name = "flexi_logger" -version = "0.15.12" +version = "0.22.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aaab3caedb4149800f91e8e4899f29cd9ddf3b569b04c365ca9334f92f7542bf" +checksum = "0c76a80dd14a27fc3d8bc696502132cb52b3f227256fd8601166c3a35e45f409" dependencies = [ + "ansi_term", "atty", - "chrono", "glob", "lazy_static", "log", "regex", + "rustversion", "thiserror", - "yansi", + "time 0.3.9", ] [[package]] @@ -2682,28 +2659,27 @@ checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" [[package]] name = "fork-tree" version = "3.0.0" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", ] [[package]] name = "form_urlencoded" -version = "1.0.1" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5fc25a87fa4fd2094bffb06925852034d90a17f0d1e05197d4956d3555752191" +checksum = "a9c384f161156f5260c24a097c56119f9be8c798586aecc13afbcbe7b7e26bf8" dependencies = [ - "matches", "percent-encoding", ] [[package]] name = "fp-consensus" version = "2.0.0-dev" -source = 
"git+https://github.com/uniquenetwork/frontier?branch=unique-polkadot-v0.9.24#b1941ba0e691e4a95b414343fd89aceb94c4172b" +source = "git+https://github.com/uniquenetwork/frontier?branch=unique-polkadot-v0.9.30#65930cb2982258bee67b73a1f017711f6f4aa0a4" dependencies = [ "ethereum", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "sp-core", "sp-runtime", "sp-std", @@ -2712,12 +2688,12 @@ dependencies = [ [[package]] name = "fp-evm" version = "3.0.0-dev" -source = "git+https://github.com/uniquenetwork/frontier?branch=unique-polkadot-v0.9.24#b1941ba0e691e4a95b414343fd89aceb94c4172b" +source = "git+https://github.com/uniquenetwork/frontier?branch=unique-polkadot-v0.9.30#65930cb2982258bee67b73a1f017711f6f4aa0a4" dependencies = [ "evm", "frame-support", "impl-trait-for-tuples", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "serde", "sp-core", "sp-std", @@ -2726,7 +2702,7 @@ dependencies = [ [[package]] name = "fp-evm-mapping" version = "0.1.0" -source = "git+https://github.com/uniquenetwork/frontier?branch=unique-polkadot-v0.9.24#b1941ba0e691e4a95b414343fd89aceb94c4172b" +source = "git+https://github.com/uniquenetwork/frontier?branch=unique-polkadot-v0.9.30#65930cb2982258bee67b73a1f017711f6f4aa0a4" dependencies = [ "frame-support", "sp-core", @@ -2735,12 +2711,12 @@ dependencies = [ [[package]] name = "fp-rpc" version = "3.0.0-dev" -source = "git+https://github.com/uniquenetwork/frontier?branch=unique-polkadot-v0.9.24#b1941ba0e691e4a95b414343fd89aceb94c4172b" +source = "git+https://github.com/uniquenetwork/frontier?branch=unique-polkadot-v0.9.30#65930cb2982258bee67b73a1f017711f6f4aa0a4" dependencies = [ "ethereum", "ethereum-types", "fp-evm", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-api", "sp-core", @@ -2752,11 +2728,11 @@ dependencies = [ [[package]] name = "fp-self-contained" version = "1.0.0-dev" -source = "git+https://github.com/uniquenetwork/frontier?branch=unique-polkadot-v0.9.24#b1941ba0e691e4a95b414343fd89aceb94c4172b" +source = "git+https://github.com/uniquenetwork/frontier?branch=unique-polkadot-v0.9.30#65930cb2982258bee67b73a1f017711f6f4aa0a4" dependencies = [ "ethereum", "frame-support", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "parity-util-mem", "scale-info", "serde", @@ -2768,26 +2744,27 @@ dependencies = [ [[package]] name = "fp-storage" version = "2.0.0" -source = "git+https://github.com/uniquenetwork/frontier?branch=unique-polkadot-v0.9.24#b1941ba0e691e4a95b414343fd89aceb94c4172b" +source = "git+https://github.com/uniquenetwork/frontier?branch=unique-polkadot-v0.9.30#65930cb2982258bee67b73a1f017711f6f4aa0a4" dependencies = [ - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", ] [[package]] name = "frame-benchmarking" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "frame-support", "frame-system", "linregress", "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "paste", "scale-info", "serde", "sp-api", "sp-application-crypto", + "sp-core", "sp-io", "sp-runtime", "sp-runtime-interface", @@ -2798,25 +2775,26 @@ dependencies = [ [[package]] name = "frame-benchmarking-cli" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" 
+source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "Inflector", + "array-bytes", "chrono", "clap", "comfy-table", "frame-benchmarking", "frame-support", "frame-system", + "gethostname", "handlebars", "hash-db", - "hex", "itertools", "kvdb", "lazy_static", "linked-hash-map", "log", "memory-db", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "rand 0.8.5", "rand_pcg 0.3.1", "sc-block-builder", @@ -2848,7 +2826,7 @@ dependencies = [ [[package]] name = "frame-election-provider-solution-type" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "proc-macro-crate", "proc-macro2", @@ -2859,12 +2837,12 @@ dependencies = [ [[package]] name = "frame-election-provider-support" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "frame-election-provider-solution-type", "frame-support", "frame-system", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-arithmetic", "sp-npos-elections", @@ -2875,11 +2853,12 @@ dependencies = [ [[package]] name = "frame-executive" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "frame-support", "frame-system", - "parity-scale-codec 3.1.5", + "frame-try-runtime", + "parity-scale-codec 3.2.1", "scale-info", "sp-core", "sp-io", @@ -2895,7 +2874,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "df6bb8542ef006ef0de09a5c4420787d79823c0ed7924225822362fd2bf2ff2d" dependencies = [ "cfg-if 1.0.0", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "serde", ] @@ -2903,7 +2882,7 @@ dependencies = [ [[package]] name = "frame-support" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "bitflags", "frame-metadata", @@ -2912,11 +2891,12 @@ dependencies = [ "k256", "log", "once_cell", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "paste", "scale-info", "serde", "smallvec", + "sp-api", "sp-arithmetic", "sp-core", "sp-core-hashing-proc-macro", @@ -2927,16 +2907,19 @@ dependencies = [ "sp-state-machine", "sp-std", "sp-tracing", + "sp-weights", "tt-call", ] [[package]] name = "frame-support-procedural" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "Inflector", + "cfg-expr", "frame-support-procedural-tools", + "itertools", "proc-macro2", "quote", "syn", @@ -2945,7 +2928,7 @@ 
dependencies = [ [[package]] name = "frame-support-procedural-tools" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "frame-support-procedural-tools-derive", "proc-macro-crate", @@ -2957,7 +2940,7 @@ dependencies = [ [[package]] name = "frame-support-procedural-tools-derive" version = "3.0.0" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "proc-macro2", "quote", @@ -2967,11 +2950,11 @@ dependencies = [ [[package]] name = "frame-system" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "frame-support", "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "serde", "sp-core", @@ -2979,17 +2962,18 @@ dependencies = [ "sp-runtime", "sp-std", "sp-version", + "sp-weights", ] [[package]] name = "frame-system-benchmarking" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "frame-benchmarking", "frame-support", "frame-system", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-core", "sp-runtime", @@ -2999,18 +2983,19 @@ dependencies = [ [[package]] name = "frame-system-rpc-runtime-api" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "sp-api", ] [[package]] name = "frame-try-runtime" version = "0.10.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "frame-support", + "parity-scale-codec 3.2.1", "sp-api", "sp-runtime", "sp-std", @@ -3018,9 +3003,9 @@ dependencies = [ [[package]] name = "fs-err" -version = "2.7.0" +version = "2.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5bd79fa345a495d3ae89fb7165fec01c0e72f41821d642dda363a1e97975652e" +checksum = "64db3e262960f0662f43a6366788d5f10f7f244b8f7d7d987f560baf5ded5c50" [[package]] name = "fs-swap" @@ -3070,9 +3055,9 @@ checksum = "3a471a38ef8ed83cd6e40aa59c1ffe17db6855c18e3604d9c4ed8c08ebc28678" [[package]] name = "futures" -version = "0.3.21" +version = "0.3.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f73fe65f54d1e12b726f517d3e2135ca3125a437b6d998caf1962961f7172d9e" +checksum = 
"38390104763dc37a5145a53c29c63c1290b5d316d6086ec32c293f6736051bb0" dependencies = [ "futures-channel", "futures-core", @@ -3085,9 +3070,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.21" +version = "0.3.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3083ce4b914124575708913bca19bfe887522d6e2e6d0952943f5eac4a74010" +checksum = "52ba265a92256105f45b719605a571ffe2d1f0fea3807304b522c1d778f79eed" dependencies = [ "futures-core", "futures-sink", @@ -3095,15 +3080,15 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.21" +version = "0.3.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c09fd04b7e4073ac7156a9539b57a484a8ea920f79c7c675d05d289ab6110d3" +checksum = "04909a7a7e4633ae6c4a9ab280aeb86da1236243a77b694a49eacd659a4bd3ac" [[package]] name = "futures-executor" -version = "0.3.21" +version = "0.3.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9420b90cfa29e327d0429f19be13e7ddb68fa1cccb09d65e5706b8c7a749b8a6" +checksum = "7acc85df6714c176ab5edf386123fafe217be88c0840ec11f199441134a074e2" dependencies = [ "futures-core", "futures-task", @@ -3113,9 +3098,9 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.21" +version = "0.3.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc4045962a5a5e935ee2fdedaa4e08284547402885ab326734432bed5d12966b" +checksum = "00f5fb52a06bdcadeb54e8d3671f8888a39697dcb0b81b23b55174030427f4eb" [[package]] name = "futures-lite" @@ -3134,9 +3119,9 @@ dependencies = [ [[package]] name = "futures-macro" -version = "0.3.21" +version = "0.3.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33c1e13800337f4d4d7a316bf45a567dbcb6ffe087f16424852d97e97a91f512" +checksum = "bdfb8ce053d86b91919aad980c220b1fb8401a9394410e1c289ed7e66b61835d" dependencies = [ "proc-macro2", "quote", @@ -3145,9 +3130,9 @@ dependencies = [ [[package]] name = "futures-rustls" -version = "0.22.1" +version = "0.22.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e01fe9932a224b72b45336d96040aa86386d674a31d0af27d800ea7bc8ca97fe" +checksum = "d2411eed028cdf8c8034eaf21f9915f956b6c3abec4d4c7949ee67f0721127bd" dependencies = [ "futures-io", "rustls", @@ -3156,15 +3141,15 @@ dependencies = [ [[package]] name = "futures-sink" -version = "0.3.21" +version = "0.3.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21163e139fa306126e6eedaf49ecdb4588f939600f0b1e770f4205ee4b7fa868" +checksum = "39c15cf1a4aa79df40f1bb462fb39676d0ad9e366c2a33b590d7c66f4f81fcf9" [[package]] name = "futures-task" -version = "0.3.21" +version = "0.3.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57c66a976bf5909d801bbef33416c41372779507e7a6b3a5e25e4749c58f776a" +checksum = "2ffb393ac5d9a6eaa9d3fdf37ae2776656b706e200c8e16b1bdb227f5198e6ea" [[package]] name = "futures-timer" @@ -3174,9 +3159,9 @@ checksum = "e64b03909df88034c26dc1547e8970b91f98bdb65165d6a4e9110d94263dbb2c" [[package]] name = "futures-util" -version = "0.3.21" +version = "0.3.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8b7abd5d659d9b90c8cba917f6ec750a74e2dc23902ef9cd4cc8c8b22e6036a" +checksum = "197676987abd2f9cadff84926f410af1c183608d36641465df73ae8211dc65d6" dependencies = [ "futures 0.1.31", "futures-channel", @@ -3191,6 +3176,15 @@ dependencies = [ "slab", ] +[[package]] +name = "fxhash" +version = "0.2.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c" +dependencies = [ + "byteorder", +] + [[package]] name = "generic-array" version = "0.12.4" @@ -3202,14 +3196,24 @@ dependencies = [ [[package]] name = "generic-array" -version = "0.14.5" +version = "0.14.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd48d33ec7f05fbfa152300fdad764757cbded343c1aa1cff2fbaf4134851803" +checksum = "bff49e947297f3312447abdca79f45f4738097cc82b06e72054d2223f601f1b9" dependencies = [ "typenum", "version_check", ] +[[package]] +name = "gethostname" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1ebd34e35c46e00bb73e81363248d627782724609fe1b6396f553f68fe3862e" +dependencies = [ + "libc", + "winapi", +] + [[package]] name = "getrandom" version = "0.1.16" @@ -3225,9 +3229,9 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.7" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4eb1a864a501629691edf6c15a593b7a51eebaa1e8468e9ddc623de7c9b58ec6" +checksum = "c05aeb6a22b8f62540c194aac980f2115af067bfe15a0734d7277a768d396b31" dependencies = [ "cfg-if 1.0.0", "libc", @@ -3293,17 +3297,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bc5ac374b108929de78460075f3dc439fa66df9d8fc77e8f12caa5165fcf0c89" dependencies = [ "ff", - "rand_core 0.6.3", + "rand_core 0.6.4", "subtle", ] [[package]] name = "h2" -version = "0.3.13" +version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37a82c6d637fc9515a4694bbf1cb2457b79d81ce52b3108bdeea58b07dd34a57" +checksum = "5ca32592cf21ac7ccab1825cd87f6c9b3d9022c44d086172ed0966bec8af30be" dependencies = [ - "bytes 1.2.0", + "bytes", "fnv", "futures-core", "futures-sink", @@ -3311,16 +3315,16 @@ dependencies = [ "http", "indexmap", "slab", - "tokio 1.20.1", + "tokio", "tokio-util", "tracing", ] [[package]] name = "handlebars" -version = "4.3.3" +version = "4.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "360d9740069b2f6cbb63ce2dbaa71a20d3185350cbb990d7bebeb9318415eb17" +checksum = "433e4ab33f1213cdc25b5fa45c76881240cfe79284cf2b395e8b9e312a30a2fd" dependencies = [ "log", "pest", @@ -3345,15 +3349,6 @@ dependencies = [ "crunchy", ] -[[package]] -name = "hashbrown" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e" -dependencies = [ - "ahash", -] - [[package]] name = "hashbrown" version = "0.12.3" @@ -3363,15 +3358,6 @@ dependencies = [ "ahash", ] -[[package]] -name = "heck" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c" -dependencies = [ - "unicode-segmentation", -] - [[package]] name = "heck" version = "0.4.0" @@ -3432,7 +3418,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "17ea0a1394df5b6574da6e0c1ade9e78868c9fb0a4e5ef4428e32da4676b85b1" dependencies = [ "digest 0.9.0", - "generic-array 0.14.5", + "generic-array 0.14.6", "hmac 0.8.1", ] @@ -3453,9 +3439,9 @@ version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "75f43d41e26995c17e71ee126451dd3941010b0514a81a9d11f3b341debc2399" dependencies = [ - "bytes 1.2.0", + "bytes", "fnv", - "itoa 1.0.2", + "itoa", ] 
[[package]] @@ -3464,16 +3450,16 @@ version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1" dependencies = [ - "bytes 1.2.0", + "bytes", "http", "pin-project-lite 0.2.9", ] [[package]] name = "httparse" -version = "1.7.1" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "496ce29bb5a52785b44e0f7ca2847ae0bb839c9bd28f69acac9b99d461c0c04c" +checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" [[package]] name = "httpdate" @@ -3493,7 +3479,7 @@ version = "0.14.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "02c929dc5c39e335a03c405292728118860721b10190d98c2a0f0efd5baafbac" dependencies = [ - "bytes 1.2.0", + "bytes", "futures-channel", "futures-core", "futures-util", @@ -3502,10 +3488,10 @@ dependencies = [ "http-body", "httparse", "httpdate", - "itoa 1.0.2", + "itoa", "pin-project-lite 0.2.9", "socket2", - "tokio 1.20.1", + "tokio", "tower-service", "tracing", "want", @@ -3522,15 +3508,33 @@ dependencies = [ "log", "rustls", "rustls-native-certs", - "tokio 1.20.1", + "tokio", "tokio-rustls", ] [[package]] -name = "ident_case" -version = "1.0.1" +name = "iana-time-zone" +version = "0.1.51" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f5a6ef98976b22b3b7f2f3a806f858cb862044cfa66805aa3ad84cb3d3b785ed" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "wasm-bindgen", + "winapi", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" +checksum = "0703ae284fc167426161c2e3f1da3ea71d94b21bedbcc9494e92b28e334e3dca" +dependencies = [ + "cxx", + "cxx-build", +] [[package]] name = "idna" @@ -3543,6 +3547,16 @@ dependencies = [ "unicode-normalization", ] +[[package]] +name = "idna" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e14ddfc70884202db2244c223200c204c2bda1bc6e0998d11b5e024d657209e6" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + [[package]] name = "if-addrs" version = "0.7.0" @@ -3562,7 +3576,7 @@ dependencies = [ "async-io", "core-foundation", "fnv", - "futures 0.3.21", + "futures 0.3.25", "if-addrs", "ipnet", "log", @@ -3577,7 +3591,7 @@ version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba6a270039626615617f3f36d15fc827041df3b78c439da2cadfa47455a77f2f" dependencies = [ - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", ] [[package]] @@ -3616,10 +3630,19 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "10a35a97730320ffe8e2d410b5d3b69279b98d2c14bdb8b70ea89ecf7888d41e" dependencies = [ "autocfg", - "hashbrown 0.12.3", + "hashbrown", "serde", ] +[[package]] +name = "inout" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0c10553d664a4d0bcff9f4215d0aac67a639cc68ef660840afe309b807bc9f5" +dependencies = [ + "generic-array 0.14.6", +] + [[package]] name = "instant" version = "0.1.12" @@ -3646,9 +3669,9 @@ dependencies = [ [[package]] name = "io-lifetimes" -version = "0.5.3" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec58677acfea8a15352d42fc87d11d63596ade9239e0a7c9352914417515dbe6" +checksum = 
"1ea37f355c05dde75b84bba2d767906ad522e97cd9e2eef2be7a4ab7fb442c06" [[package]] name = "ip_network" @@ -3676,47 +3699,42 @@ checksum = "879d54834c8c76457ef4293a689b2a8c59b076067ad77b15efafbb05f92a592b" [[package]] name = "itertools" -version = "0.10.3" +version = "0.10.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9a9d19fa1e79b6215ff29b9d6880b706147f16e9b1dbb1e4e5947b5b02bc5e3" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" dependencies = [ "either", ] [[package]] name = "itoa" -version = "0.4.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4" - -[[package]] -name = "itoa" -version = "1.0.2" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "112c678d4050afce233f4f2852bb2eb519230b3cf12f33585275537d7e41578d" +checksum = "4217ad341ebadf8d8e724e264f13e593e0648f5b3e94b3896a5df283be015ecc" [[package]] name = "jobserver" -version = "0.1.24" +version = "0.1.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af25a77299a7f711a01975c35a6a424eb6862092cc2d6c72c4ed6cbc56dfc1fa" +checksum = "068b1ee6743e4d11fb9c6a1e6064b3693a1b600e7f5f5988047d98b3dc9fb90b" dependencies = [ "libc", ] [[package]] name = "js-sys" -version = "0.3.59" +version = "0.3.60" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "258451ab10b34f8af53416d1fdab72c22e805f0c92a1136d59470ec0b11138b2" +checksum = "49409df3e3bf0856b916e2ceaca09ee28e6871cf7d9ce97a692cacfdb2a25a47" dependencies = [ "wasm-bindgen", ] [[package]] name = "jsonrpsee" -version = "0.13.1" -source = "git+https://github.com/uniquenetwork/jsonrpsee?branch=unique-v0.13.1-fix-unknown-fields#ffcb6ffda701192cdb6ca66594345e6e454cac16" +version = "0.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8bd0d559d5e679b1ab2f869b486a11182923863b1b3ee8b421763cdd707b783a" dependencies = [ "jsonrpsee-core", "jsonrpsee-http-server", @@ -3729,18 +3747,19 @@ dependencies = [ [[package]] name = "jsonrpsee-client-transport" -version = "0.13.1" -source = "git+https://github.com/uniquenetwork/jsonrpsee?branch=unique-v0.13.1-fix-unknown-fields#ffcb6ffda701192cdb6ca66594345e6e454cac16" +version = "0.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8752740ecd374bcbf8b69f3e80b0327942df76f793f8d4e60d3355650c31fb74" dependencies = [ "futures-util", "http", "jsonrpsee-core", "jsonrpsee-types", - "pin-project 1.0.11", + "pin-project", "rustls-native-certs", "soketto", "thiserror", - "tokio 1.20.1", + "tokio", "tokio-rustls", "tokio-util", "tracing", @@ -3749,8 +3768,9 @@ dependencies = [ [[package]] name = "jsonrpsee-core" -version = "0.13.1" -source = "git+https://github.com/uniquenetwork/jsonrpsee?branch=unique-v0.13.1-fix-unknown-fields#ffcb6ffda701192cdb6ca66594345e6e454cac16" +version = "0.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3dc3e9cf2ba50b7b1d7d76a667619f82846caa39e8e8daa8a4962d74acaddca" dependencies = [ "anyhow", "arrayvec 0.7.2", @@ -3760,8 +3780,11 @@ dependencies = [ "futures-channel", "futures-timer", "futures-util", + "globset", + "http", "hyper", "jsonrpsee-types", + "lazy_static", "parking_lot 0.12.1", "rand 0.8.5", "rustc-hash", @@ -3769,32 +3792,35 @@ dependencies = [ "serde_json", "soketto", "thiserror", - "tokio 1.20.1", + "tokio", "tracing", + "tracing-futures", + "unicase", ] [[package]] name = 
"jsonrpsee-http-server" -version = "0.13.1" -source = "git+https://github.com/uniquenetwork/jsonrpsee?branch=unique-v0.13.1-fix-unknown-fields#ffcb6ffda701192cdb6ca66594345e6e454cac16" +version = "0.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03802f0373a38c2420c70b5144742d800b509e2937edc4afb116434f07120117" dependencies = [ "futures-channel", "futures-util", - "globset", "hyper", "jsonrpsee-core", "jsonrpsee-types", - "lazy_static", + "serde", "serde_json", - "tokio 1.20.1", + "tokio", "tracing", - "unicase", + "tracing-futures", ] [[package]] name = "jsonrpsee-proc-macros" -version = "0.13.1" -source = "git+https://github.com/uniquenetwork/jsonrpsee?branch=unique-v0.13.1-fix-unknown-fields#ffcb6ffda701192cdb6ca66594345e6e454cac16" +version = "0.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd67957d4280217247588ac86614ead007b301ca2fa9f19c19f880a536f029e3" dependencies = [ "proc-macro-crate", "proc-macro2", @@ -3804,8 +3830,9 @@ dependencies = [ [[package]] name = "jsonrpsee-types" -version = "0.13.1" -source = "git+https://github.com/uniquenetwork/jsonrpsee?branch=unique-v0.13.1-fix-unknown-fields#ffcb6ffda701192cdb6ca66594345e6e454cac16" +version = "0.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e290bba767401b646812f608c099b922d8142603c9e73a50fb192d3ac86f4a0d" dependencies = [ "anyhow", "beef", @@ -3817,9 +3844,11 @@ dependencies = [ [[package]] name = "jsonrpsee-ws-client" -version = "0.13.1" -source = "git+https://github.com/uniquenetwork/jsonrpsee?branch=unique-v0.13.1-fix-unknown-fields#ffcb6ffda701192cdb6ca66594345e6e454cac16" +version = "0.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ee5feddd5188e62ac08fcf0e56478138e581509d4730f3f7be9b57dd402a4ff" dependencies = [ + "http", "jsonrpsee-client-transport", "jsonrpsee-core", "jsonrpsee-types", @@ -3827,18 +3856,22 @@ dependencies = [ [[package]] name = "jsonrpsee-ws-server" -version = "0.13.1" -source = "git+https://github.com/uniquenetwork/jsonrpsee?branch=unique-v0.13.1-fix-unknown-fields#ffcb6ffda701192cdb6ca66594345e6e454cac16" +version = "0.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d488ba74fb369e5ab68926feb75a483458b88e768d44319f37e4ecad283c7325" dependencies = [ "futures-channel", "futures-util", + "http", "jsonrpsee-core", "jsonrpsee-types", "serde_json", "soketto", - "tokio 1.20.1", + "tokio", + "tokio-stream", "tokio-util", "tracing", + "tracing-futures", ] [[package]] @@ -3861,8 +3894,8 @@ checksum = "f9b7d56ba4a8344d6be9729995e6b06f928af29998cdf79fe390cbf6b1fee838" [[package]] name = "kusama-runtime" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "beefy-primitives", "bitvec 1.0.1", @@ -3889,6 +3922,7 @@ dependencies = [ "pallet-election-provider-multi-phase", "pallet-election-provider-support-benchmarking", "pallet-elections-phragmen", + "pallet-fast-unstake", "pallet-gilt", "pallet-grandpa", "pallet-identity", @@ -3896,9 +3930,9 @@ dependencies = [ "pallet-indices", "pallet-membership", "pallet-multisig", - "pallet-nicks", "pallet-nomination-pools", "pallet-nomination-pools-benchmarking", + "pallet-nomination-pools-runtime-api", "pallet-offences", "pallet-offences-benchmarking", 
"pallet-preimage", @@ -3919,7 +3953,7 @@ dependencies = [ "pallet-vesting", "pallet-xcm", "pallet-xcm-benchmarks", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "polkadot-primitives", "polkadot-runtime-common", "polkadot-runtime-parachains", @@ -3954,8 +3988,8 @@ dependencies = [ [[package]] name = "kusama-runtime-constants" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "frame-support", "polkadot-primitives", @@ -4026,9 +4060,9 @@ checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" [[package]] name = "libc" -version = "0.2.126" +version = "0.2.135" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "349d5a591cd28b49e1d1037471617a32ddcda5731b99419008085f72d5a53836" +checksum = "68783febc7782c6c5cb401fbda4de5a9898be1762314da0bb2c10ced61f18b0c" [[package]] name = "libloading" @@ -4052,24 +4086,24 @@ dependencies = [ [[package]] name = "libm" -version = "0.2.2" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33a33a362ce288760ec6a508b94caaec573ae7d3bbbd91b87aa0bad4456839db" +checksum = "292a948cd991e376cf75541fe5b97a1081d713c618b4f1b9500f8844e49eb565" [[package]] name = "libp2p" -version = "0.45.1" +version = "0.46.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41726ee8f662563fafba2d2d484b14037cc8ecb8c953fbfc8439d4ce3a0a9029" +checksum = "81327106887e42d004fbdab1fef93675be2e2e07c1b95fce45e2cc813485611d" dependencies = [ - "bytes 1.2.0", - "futures 0.3.21", + "bytes", + "futures 0.3.25", "futures-timer", - "getrandom 0.2.7", + "getrandom 0.2.8", "instant", "lazy_static", "libp2p-autonat", - "libp2p-core 0.33.0", + "libp2p-core", "libp2p-deflate", "libp2p-dns", "libp2p-floodsub", @@ -4095,22 +4129,22 @@ dependencies = [ "libp2p-yamux", "multiaddr", "parking_lot 0.12.1", - "pin-project 1.0.11", + "pin-project", "rand 0.7.3", "smallvec", ] [[package]] name = "libp2p-autonat" -version = "0.4.1" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d45945fd2f96c4b133c23d5c28a8b7fc8d7138e6dd8d5a8cd492dd384f888e3" +checksum = "4decc51f3573653a9f4ecacb31b1b922dd20c25a6322bb15318ec04287ec46f9" dependencies = [ "async-trait", - "futures 0.3.21", + "futures 0.3.25", "futures-timer", "instant", - "libp2p-core 0.33.0", + "libp2p-core", "libp2p-request-response", "libp2p-swarm", "log", @@ -4121,50 +4155,16 @@ dependencies = [ [[package]] name = "libp2p-core" -version = "0.32.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db5b02602099fb75cb2d16f9ea860a320d6eb82ce41e95ab680912c454805cd5" -dependencies = [ - "asn1_der", - "bs58", - "ed25519-dalek", - "either", - "fnv", - "futures 0.3.21", - "futures-timer", - "instant", - "lazy_static", - "log", - "multiaddr", - "multihash", - "multistream-select", - "parking_lot 0.12.1", - "pin-project 1.0.11", - "prost 0.9.0", - "prost-build 0.9.0", - "rand 0.8.5", - "ring", - "rw-stream-sink 0.2.1", - "sha2 0.10.2", - "smallvec", - "thiserror", - "unsigned-varint", - "void", - "zeroize", -] - -[[package]] -name = "libp2p-core" -version = "0.33.0" +version = "0.34.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42d46fca305dee6757022e2f5a4f6c023315084d0ed7441c3ab244e76666d979" 
+checksum = "fbf9b94cefab7599b2d3dff2f93bee218c6621d68590b23ede4485813cbcece6" dependencies = [ "asn1_der", "bs58", "ed25519-dalek", "either", "fnv", - "futures 0.3.21", + "futures 0.3.25", "futures-timer", "instant", "lazy_static", @@ -4174,13 +4174,13 @@ dependencies = [ "multihash", "multistream-select", "parking_lot 0.12.1", - "pin-project 1.0.11", + "pin-project", "prost 0.10.4", "prost-build 0.10.4", "rand 0.8.5", "ring", - "rw-stream-sink 0.3.0", - "sha2 0.10.2", + "rw-stream-sink", + "sha2 0.10.6", "smallvec", "thiserror", "unsigned-varint", @@ -4190,24 +4190,24 @@ dependencies = [ [[package]] name = "libp2p-deflate" -version = "0.33.0" +version = "0.34.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86adefc55ea4ed8201149f052fb441210727481dff1fb0b8318460206a79f5fb" +checksum = "d0183dc2a3da1fbbf85e5b6cf51217f55b14f5daea0c455a9536eef646bfec71" dependencies = [ "flate2", - "futures 0.3.21", - "libp2p-core 0.33.0", + "futures 0.3.25", + "libp2p-core", ] [[package]] name = "libp2p-dns" -version = "0.33.0" +version = "0.34.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbb462ec3a51fab457b4b44ac295e8b0a4b04dc175127e615cf996b1f0f1a268" +checksum = "6cbf54723250fa5d521383be789bf60efdabe6bacfb443f87da261019a49b4b5" dependencies = [ "async-std-resolver", - "futures 0.3.21", - "libp2p-core 0.33.0", + "futures 0.3.25", + "libp2p-core", "log", "parking_lot 0.12.1", "smallvec", @@ -4216,14 +4216,14 @@ dependencies = [ [[package]] name = "libp2p-floodsub" -version = "0.36.0" +version = "0.37.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a505d0c6f851cbf2919535150198e530825def8bd3757477f13dc3a57f46cbcc" +checksum = "98a4b6ffd53e355775d24b76f583fdda54b3284806f678499b57913adb94f231" dependencies = [ "cuckoofilter", "fnv", - "futures 0.3.21", - "libp2p-core 0.33.0", + "futures 0.3.25", + "libp2p-core", "libp2p-swarm", "log", "prost 0.10.4", @@ -4234,19 +4234,19 @@ dependencies = [ [[package]] name = "libp2p-gossipsub" -version = "0.38.1" +version = "0.39.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43e064ba4d7832e01c738626c6b274ae100baba05f5ffcc7b265c2a3ed398108" +checksum = "74b4b888cfbeb1f5551acd3aa1366e01bf88ede26cc3c4645d0d2d004d5ca7b0" dependencies = [ "asynchronous-codec", "base64", "byteorder", - "bytes 1.2.0", + "bytes", "fnv", - "futures 0.3.21", + "futures 0.3.25", "hex_fmt", "instant", - "libp2p-core 0.33.0", + "libp2p-core", "libp2p-swarm", "log", "prometheus-client", @@ -4254,7 +4254,7 @@ dependencies = [ "prost-build 0.10.4", "rand 0.7.3", "regex", - "sha2 0.10.2", + "sha2 0.10.6", "smallvec", "unsigned-varint", "wasm-timer", @@ -4262,14 +4262,14 @@ dependencies = [ [[package]] name = "libp2p-identify" -version = "0.36.1" +version = "0.37.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b84b53490442d086db1fa5375670c9666e79143dccadef3f7c74a4346899a984" +checksum = "c50b585518f8efd06f93ac2f976bd672e17cdac794644b3117edd078e96bda06" dependencies = [ "asynchronous-codec", - "futures 0.3.21", + "futures 0.3.25", "futures-timer", - "libp2p-core 0.33.0", + "libp2p-core", "libp2p-swarm", "log", "lru 0.7.8", @@ -4283,25 +4283,25 @@ dependencies = [ [[package]] name = "libp2p-kad" -version = "0.37.1" +version = "0.38.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f6b5d4de90fcd35feb65ea6223fd78f3b747a64ca4b65e0813fbe66a27d56aa" +checksum = 
"740862893bb5f06ac24acc9d49bdeadc3a5e52e51818a30a25c1f3519da2c851" dependencies = [ "arrayvec 0.7.2", "asynchronous-codec", - "bytes 1.2.0", + "bytes", "either", "fnv", - "futures 0.3.21", + "futures 0.3.25", "futures-timer", "instant", - "libp2p-core 0.33.0", + "libp2p-core", "libp2p-swarm", "log", "prost 0.10.4", "prost-build 0.10.4", "rand 0.7.3", - "sha2 0.10.2", + "sha2 0.10.6", "smallvec", "thiserror", "uint", @@ -4311,17 +4311,17 @@ dependencies = [ [[package]] name = "libp2p-mdns" -version = "0.37.0" +version = "0.38.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4783f8cf00c7b6c1ff0f1870b4fcf50b042b45533d2e13b6fb464caf447a6951" +checksum = "66e5e5919509603281033fd16306c61df7a4428ce274b67af5e14b07de5cdcb2" dependencies = [ "async-io", "data-encoding", "dns-parser", - "futures 0.3.21", + "futures 0.3.25", "if-watch", "lazy_static", - "libp2p-core 0.33.0", + "libp2p-core", "libp2p-swarm", "log", "rand 0.8.5", @@ -4332,11 +4332,11 @@ dependencies = [ [[package]] name = "libp2p-metrics" -version = "0.6.1" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "564a7e5284d7d9b3140fdfc3cb6567bc32555e86a21de5604c2ec85da05cf384" +checksum = "ef8aff4a1abef42328fbb30b17c853fff9be986dc39af17ee39f9c5f755c5e0c" dependencies = [ - "libp2p-core 0.33.0", + "libp2p-core", "libp2p-gossipsub", "libp2p-identify", "libp2p-kad", @@ -4348,14 +4348,14 @@ dependencies = [ [[package]] name = "libp2p-mplex" -version = "0.33.0" +version = "0.34.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ff9c893f2367631a711301d703c47432af898c9bb8253bea0e2c051a13f7640" +checksum = "61fd1b20638ec209c5075dfb2e8ce6a7ea4ec3cd3ad7b77f7a477c06d53322e2" dependencies = [ "asynchronous-codec", - "bytes 1.2.0", - "futures 0.3.21", - "libp2p-core 0.33.0", + "bytes", + "futures 0.3.25", + "libp2p-core", "log", "nohash-hasher", "parking_lot 0.12.1", @@ -4366,20 +4366,20 @@ dependencies = [ [[package]] name = "libp2p-noise" -version = "0.36.0" +version = "0.37.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf2cee1dad1c83325bbd182a8e94555778699cec8a9da00086efb7522c4c15ad" +checksum = "762408cb5d84b49a600422d7f9a42c18012d8da6ebcd570f9a4a4290ba41fb6f" dependencies = [ - "bytes 1.2.0", + "bytes", "curve25519-dalek 3.2.0", - "futures 0.3.21", + "futures 0.3.25", "lazy_static", - "libp2p-core 0.33.0", + "libp2p-core", "log", "prost 0.10.4", "prost-build 0.10.4", "rand 0.8.5", - "sha2 0.10.2", + "sha2 0.10.6", "snow", "static_assertions", "x25519-dalek", @@ -4388,14 +4388,14 @@ dependencies = [ [[package]] name = "libp2p-ping" -version = "0.36.0" +version = "0.37.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d41516c82fe8dd148ec925eead0c5ec08a0628f7913597e93e126e4dfb4e0787" +checksum = "100a6934ae1dbf8a693a4e7dd1d730fd60b774dafc45688ed63b554497c6c925" dependencies = [ - "futures 0.3.21", + "futures 0.3.25", "futures-timer", "instant", - "libp2p-core 0.33.0", + "libp2p-core", "libp2p-swarm", "log", "rand 0.7.3", @@ -4404,14 +4404,14 @@ dependencies = [ [[package]] name = "libp2p-plaintext" -version = "0.33.0" +version = "0.34.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db007e737adc5d28b2e03223b0210164928ad742591127130796a72aa8eaf54f" +checksum = "be27bf0820a6238a4e06365b096d428271cce85a129cf16f2fe9eb1610c4df86" dependencies = [ "asynchronous-codec", - "bytes 1.2.0", - "futures 0.3.21", - "libp2p-core 0.33.0", + "bytes", + "futures 
0.3.25", + "libp2p-core", "log", "prost 0.10.4", "prost-build 0.10.4", @@ -4421,34 +4421,34 @@ dependencies = [ [[package]] name = "libp2p-pnet" -version = "0.22.0" +version = "0.22.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f1a458bbda880107b5b36fcb9b5a1ef0c329685da0e203ed692a8ebe64cc92c" +checksum = "1a5a702574223aa55d8878bdc8bf55c84a6086f87ddaddc28ce730b4caa81538" dependencies = [ - "futures 0.3.21", + "futures 0.3.25", "log", - "pin-project 1.0.11", - "rand 0.7.3", + "pin-project", + "rand 0.8.5", "salsa20", - "sha3 0.9.1", + "sha3", ] [[package]] name = "libp2p-relay" -version = "0.9.1" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "624ead3406f64437a0d4567c31bd128a9a0b8226d5f16c074038f5d0fc32f650" +checksum = "4931547ee0cce03971ccc1733ff05bb0c4349fd89120a39e9861e2bbe18843c3" dependencies = [ "asynchronous-codec", - "bytes 1.2.0", + "bytes", "either", - "futures 0.3.21", + "futures 0.3.25", "futures-timer", "instant", - "libp2p-core 0.33.0", + "libp2p-core", "libp2p-swarm", "log", - "pin-project 1.0.11", + "pin-project", "prost 0.10.4", "prost-build 0.10.4", "prost-codec", @@ -4461,22 +4461,22 @@ dependencies = [ [[package]] name = "libp2p-rendezvous" -version = "0.6.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c59967ea2db2c7560f641aa58ac05982d42131863fcd3dd6dcf0dd1daf81c60c" +checksum = "9511c9672ba33284838e349623319c8cad2d18cfad243ae46c6b7e8a2982ea4e" dependencies = [ "asynchronous-codec", "bimap", - "futures 0.3.21", + "futures 0.3.25", "futures-timer", "instant", - "libp2p-core 0.33.0", + "libp2p-core", "libp2p-swarm", "log", "prost 0.10.4", "prost-build 0.10.4", "rand 0.8.5", - "sha2 0.10.2", + "sha2 0.10.6", "thiserror", "unsigned-varint", "void", @@ -4484,15 +4484,15 @@ dependencies = [ [[package]] name = "libp2p-request-response" -version = "0.18.0" +version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b02e0acb725e5a757d77c96b95298fd73a7394fe82ba7b8bbeea510719cbe441" +checksum = "508a189e2795d892c8f5c1fa1e9e0b1845d32d7b0b249dbf7b05b18811361843" dependencies = [ "async-trait", - "bytes 1.2.0", - "futures 0.3.21", + "bytes", + "futures 0.3.25", "instant", - "libp2p-core 0.33.0", + "libp2p-core", "libp2p-swarm", "log", "rand 0.7.3", @@ -4502,18 +4502,18 @@ dependencies = [ [[package]] name = "libp2p-swarm" -version = "0.36.1" +version = "0.37.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f4bb21c5abadbf00360c734f16bf87f1712ed4f23cd46148f625d2ddb867346" +checksum = "95ac5be6c2de2d1ff3f7693fda6faf8a827b1f3e808202277783fea9f527d114" dependencies = [ "either", "fnv", - "futures 0.3.21", + "futures 0.3.25", "futures-timer", "instant", - "libp2p-core 0.33.0", + "libp2p-core", "log", - "pin-project 1.0.11", + "pin-project", "rand 0.7.3", "smallvec", "thiserror", @@ -4522,9 +4522,9 @@ dependencies = [ [[package]] name = "libp2p-swarm-derive" -version = "0.27.2" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f693c8c68213034d472cbb93a379c63f4f307d97c06f1c41e4985de481687a5" +checksum = "9f54a64b6957249e0ce782f8abf41d97f69330d02bf229f0672d864f0650cc76" dependencies = [ "quote", "syn", @@ -4532,42 +4532,42 @@ dependencies = [ [[package]] name = "libp2p-tcp" -version = "0.33.0" +version = "0.34.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"4f4933e38ef21b50698aefc87799c24f2a365c9d3f6cf50471f3f6a0bc410892" +checksum = "8a6771dc19aa3c65d6af9a8c65222bfc8fcd446630ddca487acd161fa6096f3b" dependencies = [ "async-io", - "futures 0.3.21", + "futures 0.3.25", "futures-timer", "if-watch", "ipnet", "libc", - "libp2p-core 0.33.0", + "libp2p-core", "log", "socket2", ] [[package]] name = "libp2p-uds" -version = "0.32.0" +version = "0.33.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24bdab114f7f2701757d6541266e1131b429bbae382008f207f2114ee4222dcb" +checksum = "d125e3e5f0d58f3c6ac21815b20cf4b6a88b8db9dc26368ea821838f4161fd4d" dependencies = [ "async-std", - "futures 0.3.21", - "libp2p-core 0.32.1", + "futures 0.3.25", + "libp2p-core", "log", ] [[package]] name = "libp2p-wasm-ext" -version = "0.33.0" +version = "0.34.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f066f2b8b1a1d64793f05da2256e6842ecd0293d6735ca2e9bda89831a1bdc06" +checksum = "ec894790eec3c1608f8d1a8a0bdf0dbeb79ed4de2dce964222011c2896dfa05a" dependencies = [ - "futures 0.3.21", + "futures 0.3.25", "js-sys", - "libp2p-core 0.33.0", + "libp2p-core", "parity-send-wrapper", "wasm-bindgen", "wasm-bindgen-futures", @@ -4575,18 +4575,18 @@ dependencies = [ [[package]] name = "libp2p-websocket" -version = "0.35.0" +version = "0.36.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39d398fbb29f432c4128fabdaac2ed155c3bcaf1b9bd40eeeb10a471eefacbf5" +checksum = "9808e57e81be76ff841c106b4c5974fb4d41a233a7bdd2afbf1687ac6def3818" dependencies = [ "either", - "futures 0.3.21", + "futures 0.3.25", "futures-rustls", - "libp2p-core 0.33.0", + "libp2p-core", "log", "parking_lot 0.12.1", "quicksink", - "rw-stream-sink 0.3.0", + "rw-stream-sink", "soketto", "url", "webpki-roots", @@ -4594,12 +4594,12 @@ dependencies = [ [[package]] name = "libp2p-yamux" -version = "0.37.0" +version = "0.38.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fe653639ad74877c759720febb0cbcbf4caa221adde4eed2d3126ce5c6f381f" +checksum = "c6dea686217a06072033dc025631932810e2f6ad784e4fafa42e27d311c7a81c" dependencies = [ - "futures 0.3.21", - "libp2p-core 0.33.0", + "futures 0.3.25", + "libp2p-core", "parking_lot 0.12.1", "thiserror", "yamux", @@ -4679,6 +4679,15 @@ dependencies = [ "vcpkg", ] +[[package]] +name = "link-cplusplus" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9272ab7b96c9046fbc5bc56c06c117cb639fe2d509df0c421cad82d2915cf369" +dependencies = [ + "cc", +] + [[package]] name = "linked-hash-map" version = "0.5.6" @@ -4706,15 +4715,15 @@ dependencies = [ [[package]] name = "linux-raw-sys" -version = "0.0.42" +version = "0.0.46" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5284f00d480e1c39af34e72f8ad60b94f47007e3481cd3b731c1d67190ddc7b7" +checksum = "d4d2456c373231a208ad294c33dc5bff30051eafd954cd4caae83a712b12854d" [[package]] name = "lock_api" -version = "0.4.7" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "327fa5b6a6940e4699ec49a9beae1ea4845c6bab9314e4f84ac68742139d8c53" +checksum = "435011366fe56583b16cf956f9df0095b405b82d76425bc8981c0e22e60ec4df" dependencies = [ "autocfg", "scopeguard", @@ -4731,12 +4740,13 @@ dependencies = [ ] [[package]] -name = "lru" -version = "0.6.6" +name = "logtest" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"7ea2d928b485416e8908cff2d97d621db22b27f7b3b6729e438bcf42c671ba91" +checksum = "eb3e43a8657c1d64516dcc9db8ca03826a4aceaf89d5ce1b37b59f6ff0e43026" dependencies = [ - "hashbrown 0.11.2", + "lazy_static", + "log", ] [[package]] @@ -4745,7 +4755,16 @@ version = "0.7.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e999beba7b6e8345721bd280141ed958096a2e4abdf74f67ff4ce49b4b54e47a" dependencies = [ - "hashbrown 0.12.3", + "hashbrown", +] + +[[package]] +name = "lru" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6e8aaa3f231bb4bd57b84b2d5dc3ae7f350265df8aa96492e0bc394a1571909" +dependencies = [ + "hashbrown", ] [[package]] @@ -4759,9 +4778,9 @@ dependencies = [ [[package]] name = "lz4" -version = "1.23.3" +version = "1.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4edcb94251b1c375c459e5abe9fb0168c1c826c3370172684844f8f3f8d1a885" +checksum = "7e9e2dd86df36ce760a60f6ff6ad526f7ba1f14ba0356f8254fb6905e6494df1" dependencies = [ "libc", "lz4-sys", @@ -4769,9 +4788,9 @@ dependencies = [ [[package]] name = "lz4-sys" -version = "1.9.3" +version = "1.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7be8908e2ed6f31c02db8a9fa962f03e36c53fbfde437363eae3306b85d7e17" +checksum = "57d27b317e207b10f69f5e75494119e391a96f48861ae870d1da6edac98ca900" dependencies = [ "cc", "libc", @@ -4786,12 +4805,6 @@ dependencies = [ "libc", ] -[[package]] -name = "maplit" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d" - [[package]] name = "match_cfg" version = "0.1.0" @@ -4830,27 +4843,18 @@ checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" [[package]] name = "memfd" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6627dc657574b49d6ad27105ed671822be56e0d2547d413bfbf3e8d8fa92e7a" -dependencies = [ - "libc", -] - -[[package]] -name = "memmap2" -version = "0.2.3" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "723e3ebdcdc5c023db1df315364573789f8857c11b631a2fdfad7c00f5c046b4" +checksum = "480b5a5de855d11ff13195950bdc8b98b5e942ef47afc447f6615cdcc4e15d80" dependencies = [ - "libc", + "rustix", ] [[package]] name = "memmap2" -version = "0.5.5" +version = "0.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a79b39c93a7a5a27eeaf9a23b5ff43f1b9e0ad6b1cdd441140ae53c35613fc7" +checksum = "95af15f345b17af2efc8ead6080fb8bc376f8cec1b35277b935637595fe77498" dependencies = [ "libc", ] @@ -4871,24 +4875,24 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6566c70c1016f525ced45d7b7f97730a2bafb037c788211d0c186ef5b2189f0a" dependencies = [ "hash-db", - "hashbrown 0.12.3", + "hashbrown", "parity-util-mem", ] [[package]] name = "memory-lru" -version = "0.1.0" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "beeb98b3d1ed2c0054bd81b5ba949a0243c3ccad751d45ea898fa8059fa2860a" +checksum = "ce95ae042940bad7e312857b929ee3d11b8f799a80cb7b9c7ec5125516906395" dependencies = [ - "lru 0.6.6", + "lru 0.8.1", ] [[package]] name = "memory_units" -version = "0.3.0" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71d96e3f3c0b6325d8ccd83c33b28acb183edcb6c67938ba104ec546854b0882" +checksum = 
"8452105ba047068f40ff7093dd1d9da90898e63dd61736462e9cdda6a90ad3c3" [[package]] name = "merlin" @@ -4908,7 +4912,7 @@ version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "69672161530e8aeca1d1400fbf3f1a1747ff60ea604265a4e906c2442df20532" dependencies = [ - "futures 0.3.21", + "futures 0.3.25", "rand 0.8.5", "thrift", ] @@ -4921,9 +4925,9 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] name = "miniz_oxide" -version = "0.5.3" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f5c75688da582b8ffc1f1799e9db273f32133c49e048f614d22ec3256773ccc" +checksum = "96590ba8f175222643a85693f33d26e9c8a015f599c216509b1a6894af675d34" dependencies = [ "adler", ] @@ -4937,15 +4941,9 @@ dependencies = [ "libc", "log", "wasi 0.11.0+wasi-snapshot-preview1", - "windows-sys", + "windows-sys 0.36.1", ] -[[package]] -name = "more-asserts" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7843ec2de400bcbc6a6328c958dc38e5359da6e93e72e37bc5246bf1ae776389" - [[package]] name = "multiaddr" version = "0.14.0" @@ -4977,18 +4975,18 @@ dependencies = [ [[package]] name = "multihash" -version = "0.16.2" +version = "0.16.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3db354f401db558759dfc1e568d010a5d4146f4d3f637be1275ec4a3cf09689" +checksum = "1c346cf9999c631f002d8f977c4eaeaa0e6386f16007202308d0b3757522c2cc" dependencies = [ "blake2b_simd", "blake2s_simd", "blake3", "core2", - "digest 0.10.3", + "digest 0.10.5", "multihash-derive", - "sha2 0.10.2", - "sha3 0.10.1", + "sha2 0.10.6", + "sha3", "unsigned-varint", ] @@ -5018,10 +5016,10 @@ version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "363a84be6453a70e63513660f4894ef815daf88e3356bffcda9ca27d810ce83b" dependencies = [ - "bytes 1.2.0", - "futures 0.3.21", + "bytes", + "futures 0.3.25", "log", - "pin-project 1.0.11", + "pin-project", "smallvec", "unsigned-varint", ] @@ -5114,13 +5112,13 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "65b4b14489ab424703c092062176d52ba55485a89c076b4f9db05092b7223aa6" dependencies = [ - "bytes 1.2.0", - "futures 0.3.21", + "bytes", + "futures 0.3.25", "log", "netlink-packet-core", "netlink-sys", "thiserror", - "tokio 1.20.1", + "tokio", ] [[package]] @@ -5130,8 +5128,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "92b654097027250401127914afb37cb1f311df6610a9891ff07a757e94199027" dependencies = [ "async-io", - "bytes 1.2.0", - "futures 0.3.21", + "bytes", + "futures 0.3.25", "libc", "log", ] @@ -5180,6 +5178,17 @@ dependencies = [ "num-traits", ] +[[package]] +name = "num-bigint" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f93ab6289c7b344a8a9f60f88d80aa20032336fe78da341afc91c8a2341fc75f" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", +] + [[package]] name = "num-complex" version = "0.4.2" @@ -5191,12 +5200,12 @@ dependencies = [ [[package]] name = "num-format" -version = "0.4.0" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bafe4179722c2894288ee77a9f044f02811c86af699344c498b0840c698a2465" +checksum = "54b862ff8df690cf089058c98b183676a7ed0f974cc08b426800093227cbff3b" dependencies = [ - "arrayvec 0.4.12", - "itoa 0.4.8", + "arrayvec 0.7.2", + "itoa", ] [[package]] @@ -5216,7 +5225,7 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "5c000134b5dbf44adc5cb772486d335293351644b801551abe8f75c84cfa4aef" dependencies = [ "autocfg", - "num-bigint", + "num-bigint 0.2.6", "num-integer", "num-traits", ] @@ -5228,6 +5237,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0638a1c9d0a3c0914158145bc76cff373a75a627e6ecbfb71cbe6f453a5a19b0" dependencies = [ "autocfg", + "num-bigint 0.4.3", "num-integer", "num-traits", ] @@ -5253,14 +5263,12 @@ dependencies = [ ] [[package]] -name = "object" -version = "0.27.1" +name = "num_threads" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67ac1d3f9a1d3616fd9a60c8d74296f22406a238b6a72f5cc1e6f314df4ffbf9" +checksum = "2819ce041d2ee131036f4fc9d6ae7ae125a3a40e97ba64d04fe799ad9dabbb44" dependencies = [ - "crc32fast", - "indexmap", - "memchr", + "libc", ] [[package]] @@ -5269,19 +5277,23 @@ version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "21158b2c33aa6d4561f1c0a6ea283ca92bc54802a93b263e910746d679a7eb53" dependencies = [ + "crc32fast", + "hashbrown", + "indexmap", "memchr", ] [[package]] name = "once_cell" -version = "1.13.0" +version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18a6dbe30758c9f83eb00cbea4ac95966305f5a7772f3f42ebfc7fc7eddbd8e1" +checksum = "e82dad04139b71a90c080c8463fe0dc7902db5192d939bd0950f074d014339e1" [[package]] name = "opal-runtime" -version = "0.9.24" +version = "0.9.30" dependencies = [ + "app-promotion-rpc", "cumulus-pallet-aura-ext", "cumulus-pallet-dmp-queue", "cumulus-pallet-parachain-system", @@ -5291,6 +5303,7 @@ dependencies = [ "cumulus-primitives-timestamp", "cumulus-primitives-utility", "derivative", + "evm-coder", "fp-evm-mapping", "fp-rpc", "fp-self-contained", @@ -5302,18 +5315,26 @@ dependencies = [ "frame-system-rpc-runtime-api", "frame-try-runtime", "hex-literal", + "impl-trait-for-tuples", "log", + "logtest", + "orml-tokens", + "orml-traits", "orml-vesting", + "orml-xtokens", + "pallet-app-promotion", "pallet-aura", "pallet-balances", "pallet-base-fee", "pallet-common", + "pallet-configuration", "pallet-ethereum", "pallet-evm", "pallet-evm-coder-substrate", "pallet-evm-contract-helpers", "pallet-evm-migration", "pallet-evm-transaction-payment", + "pallet-foreign-assets", "pallet-fungible", "pallet-inflation", "pallet-maintenance", @@ -5333,7 +5354,7 @@ dependencies = [ "pallet-unique-scheduler", "pallet-xcm", "parachain-info", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "polkadot-parachain", "rmrk-rpc", "scale-info", @@ -5353,9 +5374,10 @@ dependencies = [ "sp-transaction-pool", "sp-version", "substrate-wasm-builder", - "unique-runtime-common", + "up-common", "up-data-structs", "up-rpc", + "up-sponsorship", "xcm", "xcm-builder", "xcm-executor", @@ -5382,14 +5404,14 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" [[package]] name = "orchestra" version = "0.0.1" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "async-trait", "dyn-clonable", - "futures 0.3.21", + "futures 0.3.25", "futures-timer", "orchestra-proc-macro", - "pin-project 1.0.11", + "pin-project", "prioritized-metered-channel", "thiserror", "tracing", @@ -5398,9 +5420,10 @@ dependencies = [ [[package]] name = 
"orchestra-proc-macro" version = "0.0.1" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "expander 0.0.6", + "itertools", "petgraph", "proc-macro-crate", "proc-macro2", @@ -5417,26 +5440,108 @@ dependencies = [ "num-traits", ] +[[package]] +name = "orml-tokens" +version = "0.4.1-dev" +source = "git+https://github.com/uniquenetwork/open-runtime-module-library?branch=polkadot-v0.9.30#4020ff64cfcad3dcc7f4f090cc9bc7699a78cc9c" +dependencies = [ + "frame-support", + "frame-system", + "orml-traits", + "parity-scale-codec 3.2.1", + "scale-info", + "serde", + "sp-runtime", + "sp-std", +] + +[[package]] +name = "orml-traits" +version = "0.4.1-dev" +source = "git+https://github.com/uniquenetwork/open-runtime-module-library?branch=polkadot-v0.9.30#4020ff64cfcad3dcc7f4f090cc9bc7699a78cc9c" +dependencies = [ + "frame-support", + "impl-trait-for-tuples", + "num-traits", + "orml-utilities", + "parity-scale-codec 3.2.1", + "scale-info", + "serde", + "sp-io", + "sp-runtime", + "sp-std", + "xcm", +] + +[[package]] +name = "orml-utilities" +version = "0.4.1-dev" +source = "git+https://github.com/uniquenetwork/open-runtime-module-library?branch=polkadot-v0.9.30#4020ff64cfcad3dcc7f4f090cc9bc7699a78cc9c" +dependencies = [ + "frame-support", + "parity-scale-codec 3.2.1", + "scale-info", + "serde", + "sp-io", + "sp-runtime", + "sp-std", +] + [[package]] name = "orml-vesting" version = "0.4.1-dev" -source = "git+https://github.com/uniquenetwork/open-runtime-module-library?branch=unique-polkadot-v0.9.24#e69cabf5dc293e54a3ce60e3db4bf2f381bd20eb" +source = "git+https://github.com/uniquenetwork/open-runtime-module-library?branch=polkadot-v0.9.30#4020ff64cfcad3dcc7f4f090cc9bc7699a78cc9c" +dependencies = [ + "frame-support", + "frame-system", + "parity-scale-codec 3.2.1", + "scale-info", + "serde", + "sp-io", + "sp-runtime", + "sp-std", +] + +[[package]] +name = "orml-xcm-support" +version = "0.4.1-dev" +source = "git+https://github.com/uniquenetwork/open-runtime-module-library?branch=polkadot-v0.9.30#4020ff64cfcad3dcc7f4f090cc9bc7699a78cc9c" +dependencies = [ + "frame-support", + "orml-traits", + "parity-scale-codec 3.2.1", + "sp-runtime", + "sp-std", + "xcm", + "xcm-executor", +] + +[[package]] +name = "orml-xtokens" +version = "0.4.1-dev" +source = "git+https://github.com/uniquenetwork/open-runtime-module-library?branch=polkadot-v0.9.30#4020ff64cfcad3dcc7f4f090cc9bc7699a78cc9c" dependencies = [ + "cumulus-primitives-core", "frame-support", "frame-system", - "parity-scale-codec 3.1.5", + "orml-traits", + "orml-xcm-support", + "pallet-xcm", + "parity-scale-codec 3.2.1", "scale-info", "serde", "sp-io", "sp-runtime", "sp-std", + "xcm", + "xcm-executor", ] [[package]] name = "os_str_bytes" -version = "6.2.0" +version = "6.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "648001efe5d5c0102d8cea768e348da85d90af8ba91f0bea908f157951493cd4" +checksum = "9ff7415e9ae3fff1225851df9e0d9e4e5479f947619774677a63572e55e80eff" [[package]] name = "owning_ref" @@ -5447,15 +5552,40 @@ dependencies = [ "stable_deref_trait", ] +[[package]] +name = "pallet-app-promotion" +version = "0.1.0" +dependencies = [ + "frame-benchmarking", + "frame-support", + "frame-system", + "pallet-balances", + "pallet-common", + "pallet-evm", + "pallet-evm-contract-helpers", + "pallet-evm-migration", + 
"pallet-randomness-collective-flip", + "pallet-timestamp", + "pallet-unique", + "parity-scale-codec 3.2.1", + "scale-info", + "serde", + "sp-core", + "sp-io", + "sp-runtime", + "sp-std", + "up-data-structs", +] + [[package]] name = "pallet-aura" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "frame-support", "frame-system", "pallet-timestamp", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-application-crypto", "sp-consensus-aura", @@ -5466,12 +5596,12 @@ dependencies = [ [[package]] name = "pallet-authority-discovery" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "frame-support", "frame-system", "pallet-session", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-application-crypto", "sp-authority-discovery", @@ -5482,12 +5612,12 @@ dependencies = [ [[package]] name = "pallet-authorship" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "frame-support", "frame-system", "impl-trait-for-tuples", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-authorship", "sp-runtime", @@ -5497,7 +5627,7 @@ dependencies = [ [[package]] name = "pallet-babe" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "frame-benchmarking", "frame-support", @@ -5506,7 +5636,7 @@ dependencies = [ "pallet-authorship", "pallet-session", "pallet-timestamp", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-application-crypto", "sp-consensus-babe", @@ -5521,7 +5651,7 @@ dependencies = [ [[package]] name = "pallet-bags-list" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "frame-benchmarking", "frame-election-provider-support", @@ -5529,7 +5659,7 @@ dependencies = [ "frame-system", "log", "pallet-balances", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-core", "sp-io", @@ -5541,13 +5671,13 @@ dependencies = [ [[package]] name = "pallet-balances" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "frame-benchmarking", "frame-support", "frame-system", "log", - "parity-scale-codec 3.1.5", + 
"parity-scale-codec 3.2.1", "scale-info", "sp-runtime", "sp-std", @@ -5556,12 +5686,12 @@ dependencies = [ [[package]] name = "pallet-base-fee" version = "1.0.0" -source = "git+https://github.com/uniquenetwork/frontier?branch=unique-polkadot-v0.9.24#b1941ba0e691e4a95b414343fd89aceb94c4172b" +source = "git+https://github.com/uniquenetwork/frontier?branch=unique-polkadot-v0.9.30#65930cb2982258bee67b73a1f017711f6f4aa0a4" dependencies = [ "fp-evm", "frame-support", "frame-system", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "serde", "sp-core", @@ -5571,13 +5701,13 @@ dependencies = [ [[package]] name = "pallet-beefy" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "beefy-primitives", "frame-support", "frame-system", "pallet-session", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "serde", "sp-runtime", @@ -5587,18 +5717,18 @@ dependencies = [ [[package]] name = "pallet-beefy-mmr" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ + "array-bytes", "beefy-merkle-tree", "beefy-primitives", "frame-support", "frame-system", - "hex", "log", "pallet-beefy", "pallet-mmr", "pallet-session", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "serde", "sp-core", @@ -5610,14 +5740,14 @@ dependencies = [ [[package]] name = "pallet-bounties" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "frame-benchmarking", "frame-support", "frame-system", "log", "pallet-treasury", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-core", "sp-io", @@ -5625,70 +5755,10 @@ dependencies = [ "sp-std", ] -[[package]] -name = "pallet-bridge-dispatch" -version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" -dependencies = [ - "bp-message-dispatch", - "bp-runtime", - "frame-support", - "frame-system", - "log", - "parity-scale-codec 3.1.5", - "scale-info", - "sp-core", - "sp-runtime", - "sp-std", -] - -[[package]] -name = "pallet-bridge-grandpa" -version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" -dependencies = [ - "bp-header-chain", - "bp-runtime", - "bp-test-utils", - "finality-grandpa", - "frame-support", - "frame-system", - "log", - "num-traits", - "parity-scale-codec 3.1.5", - "scale-info", - "serde", - "sp-finality-grandpa", - "sp-runtime", - "sp-std", - "sp-trie", -] - -[[package]] -name = "pallet-bridge-messages" -version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" -dependencies = [ - "bitvec 1.0.1", - "bp-message-dispatch", - "bp-messages", - "bp-runtime", - "frame-support", - "frame-system", - "log", - "num-traits", - 
"parity-scale-codec 3.1.5", - "scale-info", - "serde", - "sp-core", - "sp-runtime", - "sp-std", -] - [[package]] name = "pallet-child-bounties" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "frame-benchmarking", "frame-support", @@ -5696,7 +5766,7 @@ dependencies = [ "log", "pallet-bounties", "pallet-treasury", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-core", "sp-io", @@ -5707,13 +5777,13 @@ dependencies = [ [[package]] name = "pallet-collective" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "frame-benchmarking", "frame-support", "frame-system", "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-core", "sp-io", @@ -5723,7 +5793,7 @@ dependencies = [ [[package]] name = "pallet-common" -version = "0.1.0" +version = "0.1.8" dependencies = [ "ethereum", "evm-coder", @@ -5733,7 +5803,7 @@ dependencies = [ "frame-system", "pallet-evm", "pallet-evm-coder-substrate", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "serde", "sp-core", @@ -5742,15 +5812,31 @@ dependencies = [ "up-data-structs", ] +[[package]] +name = "pallet-configuration" +version = "0.1.1" +dependencies = [ + "fp-evm", + "frame-support", + "frame-system", + "parity-scale-codec 3.2.1", + "scale-info", + "smallvec", + "sp-arithmetic", + "sp-core", + "sp-runtime", + "sp-std", +] + [[package]] name = "pallet-democracy" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "frame-benchmarking", "frame-support", "frame-system", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "serde", "sp-io", @@ -5761,14 +5847,15 @@ dependencies = [ [[package]] name = "pallet-election-provider-multi-phase" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "frame-benchmarking", "frame-election-provider-support", "frame-support", "frame-system", "log", - "parity-scale-codec 3.1.5", + "pallet-election-provider-support-benchmarking", + "parity-scale-codec 3.2.1", "rand 0.7.3", "scale-info", "sp-arithmetic", @@ -5778,18 +5865,18 @@ dependencies = [ "sp-runtime", "sp-std", "static_assertions", - "strum 0.23.0", + "strum", ] [[package]] name = "pallet-election-provider-support-benchmarking" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "frame-benchmarking", "frame-election-provider-support", "frame-system", 
- "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "sp-npos-elections", "sp-runtime", ] @@ -5797,13 +5884,13 @@ dependencies = [ [[package]] name = "pallet-elections-phragmen" version = "5.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "frame-benchmarking", "frame-support", "frame-system", "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-core", "sp-io", @@ -5815,7 +5902,7 @@ dependencies = [ [[package]] name = "pallet-ethereum" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/frontier?branch=unique-polkadot-v0.9.24#b1941ba0e691e4a95b414343fd89aceb94c4172b" +source = "git+https://github.com/uniquenetwork/frontier?branch=unique-polkadot-v0.9.30#65930cb2982258bee67b73a1f017711f6f4aa0a4" dependencies = [ "ethereum", "ethereum-types", @@ -5831,11 +5918,11 @@ dependencies = [ "log", "pallet-evm", "pallet-timestamp", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "rlp", "scale-info", "serde", - "sha3 0.10.1", + "sha3", "sp-io", "sp-runtime", "sp-std", @@ -5844,7 +5931,7 @@ dependencies = [ [[package]] name = "pallet-evm" version = "6.0.0-dev" -source = "git+https://github.com/uniquenetwork/frontier?branch=unique-polkadot-v0.9.24#b1941ba0e691e4a95b414343fd89aceb94c4172b" +source = "git+https://github.com/uniquenetwork/frontier?branch=unique-polkadot-v0.9.30#65930cb2982258bee67b73a1f017711f6f4aa0a4" dependencies = [ "evm", "fp-evm", @@ -5856,12 +5943,12 @@ dependencies = [ "impl-trait-for-tuples", "log", "pallet-timestamp", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "primitive-types", "rlp", "scale-info", "serde", - "sha3 0.10.1", + "sha3", "sp-core", "sp-io", "sp-runtime", @@ -5870,7 +5957,7 @@ dependencies = [ [[package]] name = "pallet-evm-coder-substrate" -version = "0.1.0" +version = "0.1.3" dependencies = [ "ethereum", "evm-coder", @@ -5879,7 +5966,7 @@ dependencies = [ "frame-system", "pallet-ethereum", "pallet-evm", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-core", "sp-std", @@ -5888,8 +5975,9 @@ dependencies = [ [[package]] name = "pallet-evm-contract-helpers" -version = "0.1.0" +version = "0.3.0" dependencies = [ + "ethereum", "evm-coder", "fp-evm-mapping", "frame-support", @@ -5898,7 +5986,8 @@ dependencies = [ "pallet-common", "pallet-evm", "pallet-evm-coder-substrate", - "parity-scale-codec 3.1.5", + "pallet-evm-transaction-payment", + "parity-scale-codec 3.2.1", "scale-info", "sp-core", "sp-runtime", @@ -5909,14 +5998,14 @@ dependencies = [ [[package]] name = "pallet-evm-migration" -version = "0.1.0" +version = "0.1.1" dependencies = [ "fp-evm", "frame-benchmarking", "frame-support", "frame-system", "pallet-evm", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-core", "sp-io", @@ -5926,7 +6015,7 @@ dependencies = [ [[package]] name = "pallet-evm-transaction-payment" -version = "0.1.0" +version = "0.1.1" dependencies = [ "fp-evm", "fp-evm-mapping", @@ -5934,7 +6023,7 @@ dependencies = [ "frame-system", "pallet-ethereum", "pallet-evm", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-core", "sp-io", @@ -5944,8 +6033,57 @@ dependencies = [ ] [[package]] -name = "pallet-fungible" +name = "pallet-fast-unstake" +version = "4.0.0-dev" +source = 
"git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" +dependencies = [ + "frame-benchmarking", + "frame-election-provider-support", + "frame-support", + "frame-system", + "log", + "pallet-balances", + "pallet-staking", + "pallet-timestamp", + "parity-scale-codec 3.2.1", + "scale-info", + "sp-io", + "sp-runtime", + "sp-staking", + "sp-std", +] + +[[package]] +name = "pallet-foreign-assets" version = "0.1.0" +dependencies = [ + "frame-benchmarking", + "frame-support", + "frame-system", + "hex", + "log", + "orml-tokens", + "pallet-balances", + "pallet-common", + "pallet-fungible", + "pallet-timestamp", + "parity-scale-codec 3.2.1", + "scale-info", + "serde", + "serde_json", + "sp-core", + "sp-io", + "sp-runtime", + "sp-std", + "up-data-structs", + "xcm", + "xcm-builder", + "xcm-executor", +] + +[[package]] +name = "pallet-fungible" +version = "0.1.5" dependencies = [ "ethereum", "evm-coder", @@ -5956,7 +6094,7 @@ dependencies = [ "pallet-evm", "pallet-evm-coder-substrate", "pallet-structure", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-core", "sp-runtime", @@ -5967,12 +6105,12 @@ dependencies = [ [[package]] name = "pallet-gilt" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "frame-benchmarking", "frame-support", "frame-system", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-arithmetic", "sp-runtime", @@ -5982,7 +6120,7 @@ dependencies = [ [[package]] name = "pallet-grandpa" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "frame-benchmarking", "frame-support", @@ -5990,7 +6128,7 @@ dependencies = [ "log", "pallet-authorship", "pallet-session", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-application-crypto", "sp-core", @@ -6005,13 +6143,13 @@ dependencies = [ [[package]] name = "pallet-identity" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "enumflags2", "frame-benchmarking", "frame-support", "frame-system", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-io", "sp-runtime", @@ -6021,14 +6159,14 @@ dependencies = [ [[package]] name = "pallet-im-online" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "frame-benchmarking", "frame-support", "frame-system", "log", "pallet-authorship", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-application-crypto", "sp-core", @@ -6041,12 +6179,12 @@ dependencies = [ [[package]] name = "pallet-indices" version = "4.0.0-dev" -source = 
"git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "frame-benchmarking", "frame-support", "frame-system", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-core", "sp-io", @@ -6057,7 +6195,7 @@ dependencies = [ [[package]] name = "pallet-inflation" -version = "0.1.0" +version = "0.1.1" dependencies = [ "frame-benchmarking", "frame-support", @@ -6065,7 +6203,7 @@ dependencies = [ "pallet-balances", "pallet-randomness-collective-flip", "pallet-timestamp", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "serde", "sp-core", @@ -6081,7 +6219,7 @@ dependencies = [ "frame-benchmarking", "frame-support", "frame-system", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-std", ] @@ -6089,13 +6227,13 @@ dependencies = [ [[package]] name = "pallet-membership" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "frame-benchmarking", "frame-support", "frame-system", "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-core", "sp-io", @@ -6106,13 +6244,13 @@ dependencies = [ [[package]] name = "pallet-mmr" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "ckb-merkle-mountain-range", "frame-benchmarking", "frame-support", "frame-system", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-core", "sp-io", @@ -6124,10 +6262,10 @@ dependencies = [ [[package]] name = "pallet-mmr-rpc" version = "3.0.0" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "jsonrpsee", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "serde", "sp-api", "sp-blockchain", @@ -6139,26 +6277,12 @@ dependencies = [ [[package]] name = "pallet-multisig" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "frame-benchmarking", "frame-support", "frame-system", - "parity-scale-codec 3.1.5", - "scale-info", - "sp-io", - "sp-runtime", - "sp-std", -] - -[[package]] -name = "pallet-nicks" -version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" -dependencies = [ - "frame-support", - "frame-system", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-io", "sp-runtime", @@ -6168,14 +6292,15 @@ dependencies = [ [[package]] name = "pallet-nomination-pools" version = "1.0.0" -source = 
"git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "frame-support", "frame-system", "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-core", + "sp-io", "sp-runtime", "sp-staking", "sp-std", @@ -6184,7 +6309,7 @@ dependencies = [ [[package]] name = "pallet-nomination-pools-benchmarking" version = "1.0.0" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "frame-benchmarking", "frame-election-provider-support", @@ -6193,16 +6318,27 @@ dependencies = [ "pallet-bags-list", "pallet-nomination-pools", "pallet-staking", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-runtime", + "sp-runtime-interface", "sp-staking", "sp-std", ] +[[package]] +name = "pallet-nomination-pools-runtime-api" +version = "1.0.0-dev" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" +dependencies = [ + "parity-scale-codec 3.2.1", + "sp-api", + "sp-std", +] + [[package]] name = "pallet-nonfungible" -version = "0.1.0" +version = "0.1.5" dependencies = [ "ethereum", "evm-coder", @@ -6213,7 +6349,7 @@ dependencies = [ "pallet-evm", "pallet-evm-coder-substrate", "pallet-structure", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-core", "sp-runtime", @@ -6225,13 +6361,13 @@ dependencies = [ [[package]] name = "pallet-offences" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "frame-support", "frame-system", "log", "pallet-balances", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "serde", "sp-runtime", @@ -6242,7 +6378,7 @@ dependencies = [ [[package]] name = "pallet-offences-benchmarking" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "frame-benchmarking", "frame-election-provider-support", @@ -6255,7 +6391,7 @@ dependencies = [ "pallet-offences", "pallet-session", "pallet-staking", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-runtime", "sp-staking", @@ -6265,12 +6401,12 @@ dependencies = [ [[package]] name = "pallet-preimage" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "frame-benchmarking", "frame-support", "frame-system", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-core", "sp-io", @@ -6281,12 +6417,12 @@ dependencies = [ [[package]] name = "pallet-proxy" version = "4.0.0-dev" -source = 
"git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "frame-benchmarking", "frame-support", "frame-system", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-io", "sp-runtime", @@ -6296,11 +6432,11 @@ dependencies = [ [[package]] name = "pallet-randomness-collective-flip" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "frame-support", "frame-system", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "safe-mix", "scale-info", "sp-runtime", @@ -6310,12 +6446,12 @@ dependencies = [ [[package]] name = "pallet-recovery" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "frame-benchmarking", "frame-support", "frame-system", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-io", "sp-runtime", @@ -6324,15 +6460,19 @@ dependencies = [ [[package]] name = "pallet-refungible" -version = "0.1.0" +version = "0.2.4" dependencies = [ + "derivative", + "ethereum", + "evm-coder", "frame-benchmarking", "frame-support", "frame-system", "pallet-common", "pallet-evm", + "pallet-evm-coder-substrate", "pallet-structure", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-core", "sp-runtime", @@ -6343,7 +6483,7 @@ dependencies = [ [[package]] name = "pallet-rmrk-core" -version = "0.1.0" +version = "0.1.2" dependencies = [ "derivative", "frame-benchmarking", @@ -6353,7 +6493,7 @@ dependencies = [ "pallet-evm", "pallet-nonfungible", "pallet-structure", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "rmrk-traits", "scale-info", "sp-core", @@ -6364,7 +6504,7 @@ dependencies = [ [[package]] name = "pallet-rmrk-equip" -version = "0.1.0" +version = "0.1.2" dependencies = [ "frame-benchmarking", "frame-support", @@ -6373,7 +6513,7 @@ dependencies = [ "pallet-evm", "pallet-nonfungible", "pallet-rmrk-core", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "rmrk-traits", "scale-info", "sp-core", @@ -6385,13 +6525,13 @@ dependencies = [ [[package]] name = "pallet-scheduler" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "frame-benchmarking", "frame-support", "frame-system", "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-io", "sp-runtime", @@ -6401,14 +6541,14 @@ dependencies = [ [[package]] name = "pallet-session" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ 
"frame-support", "frame-system", "impl-trait-for-tuples", "log", "pallet-timestamp", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-core", "sp-io", @@ -6422,7 +6562,7 @@ dependencies = [ [[package]] name = "pallet-session-benchmarking" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "frame-benchmarking", "frame-support", @@ -6438,11 +6578,11 @@ dependencies = [ [[package]] name = "pallet-society" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "frame-support", "frame-system", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "rand_chacha 0.2.2", "scale-info", "sp-runtime", @@ -6452,7 +6592,7 @@ dependencies = [ [[package]] name = "pallet-staking" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "frame-benchmarking", "frame-election-provider-support", @@ -6461,7 +6601,7 @@ dependencies = [ "log", "pallet-authorship", "pallet-session", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "rand_chacha 0.2.2", "scale-info", "serde", @@ -6475,7 +6615,7 @@ dependencies = [ [[package]] name = "pallet-staking-reward-curve" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "proc-macro-crate", "proc-macro2", @@ -6486,7 +6626,7 @@ dependencies = [ [[package]] name = "pallet-staking-reward-fn" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "log", "sp-arithmetic", @@ -6494,14 +6634,14 @@ dependencies = [ [[package]] name = "pallet-structure" -version = "0.1.0" +version = "0.1.2" dependencies = [ "frame-benchmarking", "frame-support", "frame-system", "pallet-common", "pallet-evm", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-std", "up-data-structs", @@ -6510,11 +6650,11 @@ dependencies = [ [[package]] name = "pallet-sudo" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "frame-support", "frame-system", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-io", "sp-runtime", @@ -6524,14 +6664,14 @@ dependencies = [ [[package]] name = "pallet-template-transaction-payment" version = "3.0.0" -source = 
"git+https://github.com/uniquenetwork/pallet-sponsoring?branch=polkadot-v0.9.24#05cb0f02abecad915d32455df7a7724b3e2869aa" +source = "git+https://github.com/uniquenetwork/pallet-sponsoring?branch=polkadot-v0.9.30#39dd82158d6caa9d89105441bf2f7111a6e686e5" dependencies = [ "frame-benchmarking", "frame-support", "frame-system", "pallet-balances", "pallet-transaction-payment", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "serde", "sp-core", @@ -6544,13 +6684,13 @@ dependencies = [ [[package]] name = "pallet-timestamp" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "frame-benchmarking", "frame-support", "frame-system", "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-inherents", "sp-io", @@ -6562,14 +6702,14 @@ dependencies = [ [[package]] name = "pallet-tips" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "frame-benchmarking", "frame-support", "frame-system", "log", "pallet-treasury", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "serde", "sp-core", @@ -6581,11 +6721,11 @@ dependencies = [ [[package]] name = "pallet-transaction-payment" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "frame-support", "frame-system", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "serde", "sp-core", @@ -6597,11 +6737,11 @@ dependencies = [ [[package]] name = "pallet-transaction-payment-rpc" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "jsonrpsee", "pallet-transaction-payment-rpc-runtime-api", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "sp-api", "sp-blockchain", "sp-core", @@ -6612,10 +6752,10 @@ dependencies = [ [[package]] name = "pallet-transaction-payment-rpc-runtime-api" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "pallet-transaction-payment", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "sp-api", "sp-runtime", ] @@ -6623,14 +6763,14 @@ dependencies = [ [[package]] name = "pallet-treasury" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "frame-benchmarking", 
"frame-support", "frame-system", "impl-trait-for-tuples", "pallet-balances", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "serde", "sp-runtime", @@ -6639,7 +6779,7 @@ dependencies = [ [[package]] name = "pallet-unique" -version = "0.1.0" +version = "0.2.0" dependencies = [ "ethereum", "evm-coder", @@ -6650,7 +6790,8 @@ dependencies = [ "pallet-evm", "pallet-evm-coder-substrate", "pallet-nonfungible", - "parity-scale-codec 3.1.5", + "pallet-refungible", + "parity-scale-codec 3.2.1", "scale-info", "serde", "sp-core", @@ -6662,13 +6803,13 @@ dependencies = [ [[package]] name = "pallet-unique-scheduler" -version = "0.1.0" +version = "0.1.1" dependencies = [ "frame-benchmarking", "frame-support", "frame-system", "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "serde", "sp-core", @@ -6682,12 +6823,12 @@ dependencies = [ [[package]] name = "pallet-utility" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "frame-benchmarking", "frame-support", "frame-system", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-core", "sp-io", @@ -6698,13 +6839,13 @@ dependencies = [ [[package]] name = "pallet-vesting" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "frame-benchmarking", "frame-support", "frame-system", "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-runtime", "sp-std", @@ -6712,13 +6853,13 @@ dependencies = [ [[package]] name = "pallet-xcm" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "frame-support", "frame-system", "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "serde", "sp-core", @@ -6730,14 +6871,14 @@ dependencies = [ [[package]] name = "pallet-xcm-benchmarks" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "frame-benchmarking", "frame-support", "frame-system", "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-runtime", "sp-std", @@ -6748,21 +6889,21 @@ dependencies = [ [[package]] name = "parachain-info" version = "0.1.0" -source = "git+https://github.com/paritytech/cumulus?branch=polkadot-v0.9.24#95ca5a085727c1494ddeeae4a2b2e69c4ee1933b" +source = "git+https://github.com/paritytech/cumulus?branch=polkadot-v0.9.30#7b1fc0ed107fe42bb7e6a5dfefb586f4c3ae4328" dependencies = [ "cumulus-primitives-core", "frame-support", "frame-system", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "serde", ] [[package]] name = "parity-db" -version = "0.3.16" +version = "0.3.17" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "2bb474d0ed0836e185cb998a6b140ed1073d1fbf27d690ecf9ede8030289382c" +checksum = "2c8fdb726a43661fa54b43e7114e6b88b2289cae388eb3ad766d9d1754d83fce" dependencies = [ "blake2-rfc", "crc32fast", @@ -6771,8 +6912,8 @@ dependencies = [ "libc", "log", "lz4", - "memmap2 0.2.3", - "parking_lot 0.11.2", + "memmap2", + "parking_lot 0.12.1", "rand 0.8.5", "snap", ] @@ -6793,13 +6934,14 @@ dependencies = [ [[package]] name = "parity-scale-codec" -version = "3.1.5" +version = "3.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9182e4a71cae089267ab03e67c99368db7cd877baf50f931e5d6d4b71e195ac0" +checksum = "366e44391a8af4cfd6002ef6ba072bae071a96aafca98d7d448a34c5dca38b6a" dependencies = [ "arrayvec 0.7.2", "bitvec 1.0.1", "byte-slice-cast", + "bytes", "impl-trait-for-tuples", "parity-scale-codec-derive 3.1.3", "serde", @@ -6843,7 +6985,7 @@ checksum = "c32561d248d352148124f036cac253a644685a21dc9fea383eb4907d7bd35a8f" dependencies = [ "cfg-if 1.0.0", "ethereum-types", - "hashbrown 0.12.3", + "hashbrown", "impl-trait-for-tuples", "lru 0.7.8", "parity-util-mem-derive", @@ -6875,9 +7017,9 @@ dependencies = [ [[package]] name = "parity-wasm" -version = "0.42.2" +version = "0.45.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be5e13c266502aadf83426d87d81a0f5d1ef45b8027f5a471c360abfe4bfae92" +checksum = "e1ad0aff30c1da14b1254fcb2af73e1fa9a28670e584a626f53a369d0e157304" [[package]] name = "parking" @@ -6903,7 +7045,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" dependencies = [ "lock_api", - "parking_lot_core 0.9.3", + "parking_lot_core 0.9.4", ] [[package]] @@ -6922,22 +7064,22 @@ dependencies = [ [[package]] name = "parking_lot_core" -version = "0.9.3" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09a279cbf25cb0757810394fbc1e359949b59e348145c643a939a525692e6929" +checksum = "4dc9e0dc2adc1c69d09143aff38d3d30c5c3f0df0dad82e6d25547af174ebec0" dependencies = [ "cfg-if 1.0.0", "libc", "redox_syscall", "smallvec", - "windows-sys", + "windows-sys 0.42.0", ] [[package]] name = "paste" -version = "1.0.7" +version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c520e05135d6e763148b6426a837e239041653ba7becd2e538c076c738025fc" +checksum = "b1de2e551fb905ac83f73f7aedf2f0cb4a0da7e35efa24a202a936269f1f18e1" [[package]] name = "pbkdf2" @@ -6965,24 +7107,25 @@ checksum = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099" [[package]] name = "percent-encoding" -version = "2.1.0" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e" +checksum = "478c572c3d73181ff3c2539045f6eb99e5491218eae919370993b890cdbdd98e" [[package]] name = "pest" -version = "2.1.3" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "10f4872ae94d7b90ae48754df22fd42ad52ce740b8f370b03da4835417403e53" +checksum = "dbc7bc69c062e492337d74d59b120c274fd3d261b6bf6d3207d499b4b379c41a" dependencies = [ + "thiserror", "ucd-trie", ] [[package]] name = "pest_derive" -version = "2.1.0" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "833d1ae558dc601e9a60366421196a8d94bc0ac980476d0b67e1d0988d72b2d0" +checksum = 
"60b75706b9642ebcb34dab3bc7750f811609a0eb1dd8b88c2d15bf628c1c65b2" dependencies = [ "pest", "pest_generator", @@ -6990,9 +7133,9 @@ dependencies = [ [[package]] name = "pest_generator" -version = "2.1.3" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99b8db626e31e5b81787b9783425769681b347011cc59471e33ea46d2ea0cf55" +checksum = "f4f9272122f5979a6511a749af9db9bfc810393f63119970d7085fed1c4ea0db" dependencies = [ "pest", "pest_meta", @@ -7003,13 +7146,13 @@ dependencies = [ [[package]] name = "pest_meta" -version = "2.1.3" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54be6e404f5317079812fc8f9f5279de376d8856929e21c184ecf6bbd692a11d" +checksum = "4c8717927f9b79515e565a64fe46c38b8cd0427e64c40680b14a7365ab09ac8d" dependencies = [ - "maplit", + "once_cell", "pest", - "sha-1 0.8.2", + "sha1", ] [[package]] @@ -7024,38 +7167,18 @@ dependencies = [ [[package]] name = "pin-project" -version = "0.4.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ef0f924a5ee7ea9cbcea77529dba45f8a9ba9f622419fe3386ca581a3ae9d5a" -dependencies = [ - "pin-project-internal 0.4.30", -] - -[[package]] -name = "pin-project" -version = "1.0.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78203e83c48cffbe01e4a2d35d566ca4de445d79a85372fc64e378bfc812a260" -dependencies = [ - "pin-project-internal 1.0.11", -] - -[[package]] -name = "pin-project-internal" -version = "0.4.30" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "851c8d0ce9bebe43790dedfc86614c23494ac9f423dd618d3a61fc693eafe61e" +checksum = "ad29a609b6bcd67fee905812e544992d216af9d755757c05ed2d0e15a74c6ecc" dependencies = [ - "proc-macro2", - "quote", - "syn", + "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.0.11" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "710faf75e1b33345361201d36d04e98ac1ed8909151a017ed384700836104c74" +checksum = "069bdb1e05adc7a8990dce9cc75370895fbe4e3d58b9b73bf1aee56359344a55" dependencies = [ "proc-macro2", "quote", @@ -7080,6 +7203,17 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" +[[package]] +name = "pkcs8" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7cabda3fb821068a9a4fab19a683eac3af12edf0f34b94a8be53c4972b8149d0" +dependencies = [ + "der", + "spki", + "zeroize", +] + [[package]] name = "pkg-config" version = "0.3.25" @@ -7094,10 +7228,10 @@ checksum = "e8d0eef3571242013a0d5dc84861c3ae4a652e56e12adf8bdc26ff5f8cb34c94" [[package]] name = "polkadot-approval-distribution" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ - "futures 0.3.21", + "futures 0.3.25", "polkadot-node-network-protocol", "polkadot-node-primitives", "polkadot-node-subsystem", @@ -7109,10 +7243,10 @@ dependencies = [ [[package]] name = "polkadot-availability-bitfield-distribution" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = 
"git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ - "futures 0.3.21", + "futures 0.3.25", "polkadot-node-network-protocol", "polkadot-node-subsystem", "polkadot-node-subsystem-util", @@ -7123,14 +7257,14 @@ dependencies = [ [[package]] name = "polkadot-availability-distribution" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "derive_more", "fatality", - "futures 0.3.21", + "futures 0.3.25", "lru 0.7.8", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "polkadot-erasure-coding", "polkadot-node-network-protocol", "polkadot-node-primitives", @@ -7146,13 +7280,13 @@ dependencies = [ [[package]] name = "polkadot-availability-recovery" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "fatality", - "futures 0.3.21", + "futures 0.3.25", "lru 0.7.8", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "polkadot-erasure-coding", "polkadot-node-network-protocol", "polkadot-node-primitives", @@ -7167,12 +7301,12 @@ dependencies = [ [[package]] name = "polkadot-cli" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "clap", "frame-benchmarking-cli", - "futures 0.3.21", + "futures 0.3.25", "log", "polkadot-client", "polkadot-node-core-pvf", @@ -7184,6 +7318,7 @@ dependencies = [ "sc-sysinfo", "sc-tracing", "sp-core", + "sp-keyring", "sp-trie", "substrate-build-script-utils", "thiserror", @@ -7192,8 +7327,8 @@ dependencies = [ [[package]] name = "polkadot-client" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "beefy-primitives", "frame-benchmarking", @@ -7232,12 +7367,12 @@ dependencies = [ [[package]] name = "polkadot-collator-protocol" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "always-assert", "fatality", - "futures 0.3.21", + "futures 0.3.25", "futures-timer", "polkadot-node-network-protocol", "polkadot-node-primitives", @@ -7253,10 +7388,10 @@ dependencies = [ [[package]] name = "polkadot-core-primitives" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "parity-util-mem", "scale-info", 
"sp-core", @@ -7266,14 +7401,14 @@ dependencies = [ [[package]] name = "polkadot-dispute-distribution" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "derive_more", "fatality", - "futures 0.3.21", + "futures 0.3.25", "lru 0.7.8", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "polkadot-erasure-coding", "polkadot-node-network-protocol", "polkadot-node-primitives", @@ -7289,10 +7424,10 @@ dependencies = [ [[package]] name = "polkadot-erasure-coding" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "polkadot-node-primitives", "polkadot-primitives", "reed-solomon-novelpoly", @@ -7303,10 +7438,10 @@ dependencies = [ [[package]] name = "polkadot-gossip-support" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ - "futures 0.3.21", + "futures 0.3.25", "futures-timer", "polkadot-node-network-protocol", "polkadot-node-subsystem", @@ -7323,14 +7458,15 @@ dependencies = [ [[package]] name = "polkadot-network-bridge" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "always-assert", "async-trait", - "bytes 1.2.0", - "futures 0.3.21", - "parity-scale-codec 3.1.5", + "bytes", + "fatality", + "futures 0.3.25", + "parity-scale-codec 3.2.1", "parking_lot 0.12.1", "polkadot-node-network-protocol", "polkadot-node-subsystem", @@ -7338,17 +7474,19 @@ dependencies = [ "polkadot-overseer", "polkadot-primitives", "sc-network", + "sc-network-common", "sp-consensus", + "thiserror", "tracing-gum", ] [[package]] name = "polkadot-node-collation-generation" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ - "futures 0.3.21", - "parity-scale-codec 3.1.5", + "futures 0.3.25", + "parity-scale-codec 3.2.1", "polkadot-erasure-coding", "polkadot-node-primitives", "polkadot-node-subsystem", @@ -7362,17 +7500,17 @@ dependencies = [ [[package]] name = "polkadot-node-core-approval-voting" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "bitvec 1.0.1", "derive_more", - "futures 0.3.21", + "futures 0.3.25", "futures-timer", "kvdb", "lru 0.7.8", "merlin", - "parity-scale-codec 3.1.5", + "parity-scale-codec 
3.2.1", "polkadot-node-jaeger", "polkadot-node-primitives", "polkadot-node-subsystem", @@ -7391,14 +7529,14 @@ dependencies = [ [[package]] name = "polkadot-node-core-av-store" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "bitvec 1.0.1", - "futures 0.3.21", + "futures 0.3.25", "futures-timer", "kvdb", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "polkadot-erasure-coding", "polkadot-node-primitives", "polkadot-node-subsystem", @@ -7411,12 +7549,12 @@ dependencies = [ [[package]] name = "polkadot-node-core-backing" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "bitvec 1.0.1", "fatality", - "futures 0.3.21", + "futures 0.3.25", "polkadot-erasure-coding", "polkadot-node-primitives", "polkadot-node-subsystem", @@ -7430,10 +7568,10 @@ dependencies = [ [[package]] name = "polkadot-node-core-bitfield-signing" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ - "futures 0.3.21", + "futures 0.3.25", "polkadot-node-subsystem", "polkadot-node-subsystem-util", "polkadot-primitives", @@ -7445,12 +7583,12 @@ dependencies = [ [[package]] name = "polkadot-node-core-candidate-validation" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "async-trait", - "futures 0.3.21", - "parity-scale-codec 3.1.5", + "futures 0.3.25", + "parity-scale-codec 3.2.1", "polkadot-node-core-pvf", "polkadot-node-primitives", "polkadot-node-subsystem", @@ -7463,10 +7601,10 @@ dependencies = [ [[package]] name = "polkadot-node-core-chain-api" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ - "futures 0.3.21", + "futures 0.3.25", "polkadot-node-subsystem", "polkadot-node-subsystem-util", "polkadot-primitives", @@ -7478,13 +7616,13 @@ dependencies = [ [[package]] name = "polkadot-node-core-chain-selection" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ - "futures 0.3.21", + "futures 0.3.25", "futures-timer", "kvdb", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "polkadot-node-primitives", "polkadot-node-subsystem", "polkadot-node-subsystem-util", @@ -7495,14 +7633,14 @@ dependencies = [ [[package]] name = "polkadot-node-core-dispute-coordinator" 
-version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "fatality", - "futures 0.3.21", + "futures 0.3.25", "kvdb", "lru 0.7.8", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "polkadot-node-primitives", "polkadot-node-subsystem", "polkadot-node-subsystem-util", @@ -7514,11 +7652,11 @@ dependencies = [ [[package]] name = "polkadot-node-core-parachains-inherent" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "async-trait", - "futures 0.3.21", + "futures 0.3.25", "futures-timer", "polkadot-node-subsystem", "polkadot-primitives", @@ -7531,12 +7669,12 @@ dependencies = [ [[package]] name = "polkadot-node-core-provisioner" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "bitvec 1.0.1", "fatality", - "futures 0.3.21", + "futures 0.3.25", "futures-timer", "polkadot-node-primitives", "polkadot-node-subsystem", @@ -7549,21 +7687,22 @@ dependencies = [ [[package]] name = "polkadot-node-core-pvf" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "always-assert", "assert_matches", "async-process", "async-std", - "futures 0.3.21", + "futures 0.3.25", "futures-timer", - "parity-scale-codec 3.1.5", - "pin-project 1.0.11", + "parity-scale-codec 3.2.1", + "pin-project", "polkadot-core-primitives", - "polkadot-node-subsystem-util", + "polkadot-node-metrics", "polkadot-parachain", "rand 0.8.5", + "rayon", "sc-executor", "sc-executor-common", "sc-executor-wasmtime", @@ -7580,10 +7719,10 @@ dependencies = [ [[package]] name = "polkadot-node-core-pvf-checker" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ - "futures 0.3.21", + "futures 0.3.25", "polkadot-node-primitives", "polkadot-node-subsystem", "polkadot-node-subsystem-util", @@ -7596,31 +7735,30 @@ dependencies = [ [[package]] name = "polkadot-node-core-runtime-api" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ - "futures 0.3.21", + "futures 0.3.25", "memory-lru", "parity-util-mem", "polkadot-node-subsystem", + "polkadot-node-subsystem-types", "polkadot-node-subsystem-util", "polkadot-primitives", - "sp-api", - "sp-authority-discovery", "sp-consensus-babe", "tracing-gum", ] [[package]] name = 
"polkadot-node-jaeger" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "async-std", "lazy_static", "log", "mick-jaeger", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "parking_lot 0.12.1", "polkadot-node-primitives", "polkadot-primitives", @@ -7631,14 +7769,14 @@ dependencies = [ [[package]] name = "polkadot-node-metrics" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "bs58", - "futures 0.3.21", + "futures 0.3.25", "futures-timer", "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "polkadot-primitives", "prioritized-metered-channel", "sc-cli", @@ -7650,33 +7788,35 @@ dependencies = [ [[package]] name = "polkadot-node-network-protocol" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "async-trait", "derive_more", "fatality", - "futures 0.3.21", - "parity-scale-codec 3.1.5", + "futures 0.3.25", + "hex", + "parity-scale-codec 3.2.1", "polkadot-node-jaeger", "polkadot-node-primitives", "polkadot-primitives", "rand 0.8.5", "sc-authority-discovery", "sc-network", - "strum 0.24.1", + "sc-network-common", + "strum", "thiserror", "tracing-gum", ] [[package]] name = "polkadot-node-primitives" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "bounded-vec", - "futures 0.3.21", - "parity-scale-codec 3.1.5", + "futures 0.3.25", + "parity-scale-codec 3.2.1", "polkadot-parachain", "polkadot-primitives", "schnorrkel", @@ -7693,8 +7833,8 @@ dependencies = [ [[package]] name = "polkadot-node-subsystem" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "polkadot-node-jaeger", "polkadot-node-subsystem-types", @@ -7703,11 +7843,12 @@ dependencies = [ [[package]] name = "polkadot-node-subsystem-types" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ + "async-trait", "derive_more", - "futures 0.3.21", + "futures 0.3.25", "orchestra", "polkadot-node-jaeger", "polkadot-node-network-protocol", @@ -7716,27 +7857,30 @@ dependencies = [ "polkadot-statement-table", "sc-network", "smallvec", + "sp-api", + "sp-authority-discovery", + "sp-consensus-babe", "substrate-prometheus-endpoint", "thiserror", ] [[package]] name = 
"polkadot-node-subsystem-util" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "async-trait", "derive_more", "fatality", - "futures 0.3.21", + "futures 0.3.25", "itertools", "kvdb", "lru 0.7.8", "parity-db", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "parity-util-mem", "parking_lot 0.11.2", - "pin-project 1.0.11", + "pin-project", "polkadot-node-jaeger", "polkadot-node-metrics", "polkadot-node-network-protocol", @@ -7755,10 +7899,11 @@ dependencies = [ [[package]] name = "polkadot-overseer" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ - "futures 0.3.21", + "async-trait", + "futures 0.3.25", "futures-timer", "lru 0.7.8", "orchestra", @@ -7777,12 +7922,12 @@ dependencies = [ [[package]] name = "polkadot-parachain" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "derive_more", "frame-support", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "parity-util-mem", "polkadot-core-primitives", "scale-info", @@ -7794,8 +7939,8 @@ dependencies = [ [[package]] name = "polkadot-performance-test" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "env_logger", "kusama-runtime", @@ -7809,13 +7954,13 @@ dependencies = [ [[package]] name = "polkadot-primitives" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "bitvec 1.0.1", "frame-system", "hex-literal", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "parity-util-mem", "polkadot-core-primitives", "polkadot-parachain", @@ -7839,8 +7984,8 @@ dependencies = [ [[package]] name = "polkadot-rpc" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "beefy-gadget", "beefy-gadget-rpc", @@ -7871,8 +8016,8 @@ dependencies = [ [[package]] name = "polkadot-runtime" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "beefy-primitives", "bitvec 1.0.1", @@ -7898,13 +8043,16 @@ dependencies = [ "pallet-election-provider-multi-phase", 
"pallet-election-provider-support-benchmarking", "pallet-elections-phragmen", + "pallet-fast-unstake", "pallet-grandpa", "pallet-identity", "pallet-im-online", "pallet-indices", "pallet-membership", "pallet-multisig", - "pallet-nicks", + "pallet-nomination-pools", + "pallet-nomination-pools-benchmarking", + "pallet-nomination-pools-runtime-api", "pallet-offences", "pallet-offences-benchmarking", "pallet-preimage", @@ -7922,7 +8070,7 @@ dependencies = [ "pallet-utility", "pallet-vesting", "pallet-xcm", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "polkadot-primitives", "polkadot-runtime-common", "polkadot-runtime-constants", @@ -7957,8 +8105,8 @@ dependencies = [ [[package]] name = "polkadot-runtime-common" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "beefy-primitives", "bitvec 1.0.1", @@ -7981,7 +8129,7 @@ dependencies = [ "pallet-transaction-payment", "pallet-treasury", "pallet-vesting", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "polkadot-primitives", "polkadot-runtime-parachains", "rustc-hex", @@ -8004,8 +8152,8 @@ dependencies = [ [[package]] name = "polkadot-runtime-constants" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "frame-support", "polkadot-primitives", @@ -8016,11 +8164,11 @@ dependencies = [ [[package]] name = "polkadot-runtime-metrics" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "bs58", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "polkadot-primitives", "sp-std", "sp-tracing", @@ -8028,8 +8176,8 @@ dependencies = [ [[package]] name = "polkadot-runtime-parachains" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "bitflags", "bitvec 1.0.1", @@ -8046,7 +8194,7 @@ dependencies = [ "pallet-staking", "pallet-timestamp", "pallet-vesting", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "polkadot-primitives", "polkadot-runtime-metrics", "rand 0.8.5", @@ -8071,14 +8219,15 @@ dependencies = [ [[package]] name = "polkadot-service" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "async-trait", "beefy-gadget", "beefy-primitives", + "frame-support", "frame-system-rpc-runtime-api", - "futures 0.3.21", + "futures 0.3.25", "hex-literal", "kusama-runtime", "kvdb", @@ -8134,11 +8283,11 @@ dependencies = [ "sc-consensus", "sc-consensus-babe", "sc-consensus-slots", - "sc-consensus-uncles", "sc-executor", 
"sc-finality-grandpa", "sc-keystore", "sc-network", + "sc-network-common", "sc-offchain", "sc-service", "sc-sync-state-rpc", @@ -8174,14 +8323,14 @@ dependencies = [ [[package]] name = "polkadot-statement-distribution" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "arrayvec 0.5.2", "fatality", - "futures 0.3.21", + "futures 0.3.25", "indexmap", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "polkadot-node-network-protocol", "polkadot-node-primitives", "polkadot-node-subsystem", @@ -8195,18 +8344,18 @@ dependencies = [ [[package]] name = "polkadot-statement-table" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "polkadot-primitives", "sp-core", ] [[package]] name = "polkadot-test-runtime" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "beefy-primitives", "bitvec 1.0.1", @@ -8222,7 +8371,6 @@ dependencies = [ "pallet-balances", "pallet-grandpa", "pallet-indices", - "pallet-nicks", "pallet-offences", "pallet-session", "pallet-staking", @@ -8233,7 +8381,7 @@ dependencies = [ "pallet-transaction-payment-rpc-runtime-api", "pallet-vesting", "pallet-xcm", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "polkadot-parachain", "polkadot-primitives", "polkadot-runtime-common", @@ -8267,13 +8415,12 @@ dependencies = [ [[package]] name = "polkadot-test-service" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "frame-benchmarking", "frame-system", - "futures 0.1.31", - "futures 0.3.21", + "futures 0.3.25", "hex", "pallet-balances", "pallet-staking", @@ -8298,6 +8445,7 @@ dependencies = [ "sc-executor", "sc-finality-grandpa", "sc-network", + "sc-network-common", "sc-service", "sc-tracing", "sc-transaction-pool", @@ -8315,16 +8463,17 @@ dependencies = [ "substrate-test-client", "tempfile", "test-runtime-constants", - "tokio 1.20.1", + "tokio", "tracing-gum", ] [[package]] name = "polling" -version = "2.2.0" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "685404d509889fade3e86fe3a5803bca2ec09b0c0778d5ada6ec8bf7a8de5259" +checksum = "899b00b9c8ab553c743b3e11e87c5c7d423b2a2de229ba95b24a756344748011" dependencies = [ + "autocfg", "cfg-if 1.0.0", "libc", "log", @@ -8378,12 +8527,12 @@ dependencies = [ [[package]] name = "prioritized-metered-channel" version = "0.2.0" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" 
dependencies = [ "coarsetime", "crossbeam-queue", "derive_more", - "futures 0.3.21", + "futures 0.3.25", "futures-timer", "nanorand", "thiserror", @@ -8392,10 +8541,11 @@ dependencies = [ [[package]] name = "proc-macro-crate" -version = "1.1.3" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e17d47ce914bf4de440332250b0edd23ce48c005f59fab39d3335866b114f11a" +checksum = "eda0fc3b0fb7c975631757e14d9049da17374063edb6ebbcbc54d880d4fe94e9" dependencies = [ + "once_cell", "thiserror", "toml", ] @@ -8426,18 +8576,18 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.42" +version = "1.0.47" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c278e965f1d8cf32d6e0e96de3d3e79712178ae67986d9cf9151f51e95aac89b" +checksum = "5ea3d908b0e36316caf9e9e2c4625cdde190a7e6f440d794667ed17a1855e725" dependencies = [ "unicode-ident", ] [[package]] name = "prometheus" -version = "0.13.1" +version = "0.13.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cface98dfa6d645ea4c789839f176e4b072265d085bfcc48eaa8d137f58d3c39" +checksum = "449811d15fbdf5ceb5c1144416066429cf82316e2ec8ce0c1f6f8a02e7bbcf8c" dependencies = [ "cfg-if 1.0.0", "fnv", @@ -8454,7 +8604,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ac1abe0255c04d15f571427a2d1e00099016506cf3297b53853acd2b7eb87825" dependencies = [ "dtoa", - "itoa 1.0.2", + "itoa", "owning_ref", "prometheus-client-derive-text-encode", ] @@ -8472,39 +8622,41 @@ dependencies = [ [[package]] name = "prost" -version = "0.9.0" +version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "444879275cb4fd84958b1a1d5420d15e6fcf7c235fe47f053c9c2a80aceb6001" +checksum = "71adf41db68aa0daaefc69bb30bcd68ded9b9abaad5d1fbb6304c4fb390e083e" dependencies = [ - "bytes 1.2.0", - "prost-derive 0.9.0", + "bytes", + "prost-derive 0.10.1", ] [[package]] name = "prost" -version = "0.10.4" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71adf41db68aa0daaefc69bb30bcd68ded9b9abaad5d1fbb6304c4fb390e083e" +checksum = "399c3c31cdec40583bb68f0b18403400d01ec4289c383aa047560439952c4dd7" dependencies = [ - "bytes 1.2.0", - "prost-derive 0.10.1", + "bytes", + "prost-derive 0.11.0", ] [[package]] name = "prost-build" -version = "0.9.0" +version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62941722fb675d463659e49c4f3fe1fe792ff24fe5bbaa9c08cd3b98a1c354f5" +checksum = "8ae5a4388762d5815a9fc0dea33c56b021cdc8dde0c55e0c9ca57197254b0cab" dependencies = [ - "bytes 1.2.0", - "heck 0.3.3", + "bytes", + "cfg-if 1.0.0", + "cmake", + "heck", "itertools", "lazy_static", "log", "multimap", "petgraph", - "prost 0.9.0", - "prost-types 0.9.0", + "prost 0.10.4", + "prost-types 0.10.1", "regex", "tempfile", "which", @@ -8512,21 +8664,19 @@ dependencies = [ [[package]] name = "prost-build" -version = "0.10.4" +version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ae5a4388762d5815a9fc0dea33c56b021cdc8dde0c55e0c9ca57197254b0cab" +checksum = "7f835c582e6bd972ba8347313300219fed5bfa52caf175298d860b61ff6069bb" dependencies = [ - "bytes 1.2.0", - "cfg-if 1.0.0", - "cmake", - "heck 0.4.0", + "bytes", + "heck", "itertools", "lazy_static", "log", "multimap", "petgraph", - "prost 0.10.4", - "prost-types 0.10.1", + "prost 0.11.0", + "prost-types 0.11.1", "regex", "tempfile", "which", @@ -8539,7 +8689,7 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "00af1e92c33b4813cc79fda3f2dbf56af5169709be0202df730e9ebc3e4cd007" dependencies = [ "asynchronous-codec", - "bytes 1.2.0", + "bytes", "prost 0.10.4", "thiserror", "unsigned-varint", @@ -8547,9 +8697,9 @@ dependencies = [ [[package]] name = "prost-derive" -version = "0.9.0" +version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9cc1a3263e07e0bf68e96268f37665207b49560d98739662cdfaae215c720fe" +checksum = "7b670f45da57fb8542ebdbb6105a925fe571b67f9e7ed9f47a06a84e72b4e7cc" dependencies = [ "anyhow", "itertools", @@ -8560,9 +8710,9 @@ dependencies = [ [[package]] name = "prost-derive" -version = "0.10.1" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b670f45da57fb8542ebdbb6105a925fe571b67f9e7ed9f47a06a84e72b4e7cc" +checksum = "7345d5f0e08c0536d7ac7229952590239e77abf0a0100a1b1d890add6ea96364" dependencies = [ "anyhow", "itertools", @@ -8573,37 +8723,38 @@ dependencies = [ [[package]] name = "prost-types" -version = "0.9.0" +version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "534b7a0e836e3c482d2693070f982e39e7611da9695d4d1f5a4b186b51faef0a" +checksum = "2d0a014229361011dc8e69c8a1ec6c2e8d0f2af7c91e3ea3f5b2170298461e68" dependencies = [ - "bytes 1.2.0", - "prost 0.9.0", + "bytes", + "prost 0.10.4", ] [[package]] name = "prost-types" -version = "0.10.1" +version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d0a014229361011dc8e69c8a1ec6c2e8d0f2af7c91e3ea3f5b2170298461e68" +checksum = "4dfaa718ad76a44b3415e6c4d53b17c8f99160dcb3a99b10470fce8ad43f6e3e" dependencies = [ - "bytes 1.2.0", - "prost 0.10.4", + "bytes", + "prost 0.11.0", ] [[package]] name = "psm" -version = "0.1.20" +version = "0.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f446d0a6efba22928558c4fb4ce0b3fd6c89b0061343e390bf01a703742b8125" +checksum = "5787f7cda34e3033a72192c018bc5883100330f362ef279a8cbccfce8bb4e874" dependencies = [ "cc", ] [[package]] name = "quartz-runtime" -version = "0.9.24" +version = "0.9.30" dependencies = [ + "app-promotion-rpc", "cumulus-pallet-aura-ext", "cumulus-pallet-dmp-queue", "cumulus-pallet-parachain-system", @@ -8613,6 +8764,7 @@ dependencies = [ "cumulus-primitives-timestamp", "cumulus-primitives-utility", "derivative", + "evm-coder", "fp-evm-mapping", "fp-rpc", "fp-self-contained", @@ -8624,18 +8776,26 @@ dependencies = [ "frame-system-rpc-runtime-api", "frame-try-runtime", "hex-literal", + "impl-trait-for-tuples", "log", + "logtest", + "orml-tokens", + "orml-traits", "orml-vesting", + "orml-xtokens", + "pallet-app-promotion", "pallet-aura", "pallet-balances", "pallet-base-fee", "pallet-common", + "pallet-configuration", "pallet-ethereum", "pallet-evm", "pallet-evm-coder-substrate", "pallet-evm-contract-helpers", "pallet-evm-migration", "pallet-evm-transaction-payment", + "pallet-foreign-assets", "pallet-fungible", "pallet-inflation", "pallet-maintenance", @@ -8655,7 +8815,7 @@ dependencies = [ "pallet-unique-scheduler", "pallet-xcm", "parachain-info", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "polkadot-parachain", "rmrk-rpc", "scale-info", @@ -8675,9 +8835,10 @@ dependencies = [ "sp-transaction-pool", "sp-version", "substrate-wasm-builder", - "unique-runtime-common", + "up-common", "up-data-structs", "up-rpc", + "up-sponsorship", "xcm", "xcm-builder", "xcm-executor", @@ -8702,9 +8863,9 @@ dependencies 
= [ [[package]] name = "quote" -version = "1.0.20" +version = "1.0.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3bcdf212e9776fbcb2d23ab029360416bb1706b1aea2d1a5ba002727cbcab804" +checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179" dependencies = [ "proc-macro2", ] @@ -8743,7 +8904,7 @@ checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ "libc", "rand_chacha 0.3.1", - "rand_core 0.6.3", + "rand_core 0.6.4", ] [[package]] @@ -8763,7 +8924,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" dependencies = [ "ppv-lite86", - "rand_core 0.6.3", + "rand_core 0.6.4", ] [[package]] @@ -8777,11 +8938,11 @@ dependencies = [ [[package]] name = "rand_core" -version = "0.6.3" +version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom 0.2.7", + "getrandom 0.2.8", ] [[package]] @@ -8818,7 +8979,7 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "59cad018caf63deb318e5a4586d99a24424a364f40f1e5778c29aca23f4fc73e" dependencies = [ - "rand_core 0.6.3", + "rand_core 0.6.4", ] [[package]] @@ -8853,9 +9014,9 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "534cfe58d6a18cc17120fbf4635d53d14691c1fe4d951064df9bd326178d7d5a" +checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" dependencies = [ "bitflags", ] @@ -8866,7 +9027,7 @@ version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b" dependencies = [ - "getrandom 0.2.7", + "getrandom 0.2.8", "redox_syscall", "thiserror", ] @@ -8880,24 +9041,24 @@ dependencies = [ "derive_more", "fs-err", "itertools", - "static_init", + "static_init 0.5.2", "thiserror", ] [[package]] name = "ref-cast" -version = "1.0.8" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "776c8940430cf563f66a93f9111d1cd39306dc6c68149ecc6b934742a44a828a" +checksum = "12a733f1746c929b4913fe48f8697fcf9c55e3304ba251a79ffb41adfeaf49c2" dependencies = [ "ref-cast-impl", ] [[package]] name = "ref-cast-impl" -version = "1.0.8" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f26c4704460286103bff62ea1fb78d137febc86aaf76952e6c5a2249af01f54" +checksum = "5887de4a01acafd221861463be6113e6e87275e79804e56779f4cdc131c60368" dependencies = [ "proc-macro2", "quote", @@ -8905,13 +9066,14 @@ dependencies = [ ] [[package]] -name = "regalloc" -version = "0.0.34" +name = "regalloc2" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62446b1d3ebf980bdc68837700af1d77b37bc430e524bf95319c6eada2a4cc02" +checksum = "d43a209257d978ef079f3d446331d0f1794f5e0fc19b306a199983857833a779" dependencies = [ + "fxhash", "log", - "rustc-hash", + "slice-group-by", "smallvec", ] @@ -8941,27 +9103,15 @@ version = "0.6.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244" -[[package]] -name = "region" -version = 
"2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "877e54ea2adcd70d80e9179344c97f93ef0dffd6b03e1f4529e6e83ab2fa9ae0" -dependencies = [ - "bitflags", - "libc", - "mach", - "winapi", -] - [[package]] name = "remote-externalities" version = "0.10.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "env_logger", "jsonrpsee", "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "serde", "serde_json", "sp-core", @@ -8989,12 +9139,6 @@ dependencies = [ "quick-error", ] -[[package]] -name = "retain_mut" -version = "0.1.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4389f1d5789befaf6029ebd9f7dac4af7f7e3d61b69d4f30e2ac02b57e7712b0" - [[package]] name = "rfc6979" version = "0.1.0" @@ -9023,11 +9167,11 @@ dependencies = [ [[package]] name = "rlp" -version = "0.5.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "999508abb0ae792aabed2460c45b89106d97fe4adac593bdaef433c2605847b5" +checksum = "bb919243f34364b6bd2fc10ef797edbfa75f33c252e7998527479c6d6b47e1ec" dependencies = [ - "bytes 1.2.0", + "bytes", "rustc-hex", ] @@ -9044,7 +9188,7 @@ dependencies = [ [[package]] name = "rmrk-rpc" -version = "0.0.1" +version = "0.0.2" dependencies = [ "parity-scale-codec 2.3.1", "rmrk-traits", @@ -9059,7 +9203,7 @@ dependencies = [ name = "rmrk-traits" version = "0.1.0" dependencies = [ - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "serde", ] @@ -9076,16 +9220,11 @@ dependencies = [ [[package]] name = "rococo-runtime" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "beefy-merkle-tree", "beefy-primitives", - "bp-messages", - "bp-rococo", - "bp-runtime", - "bp-wococo", - "bridge-runtime-common", "frame-benchmarking", "frame-executive", "frame-support", @@ -9100,27 +9239,38 @@ dependencies = [ "pallet-balances", "pallet-beefy", "pallet-beefy-mmr", - "pallet-bridge-dispatch", - "pallet-bridge-grandpa", - "pallet-bridge-messages", + "pallet-bounties", + "pallet-child-bounties", "pallet-collective", + "pallet-democracy", + "pallet-elections-phragmen", + "pallet-gilt", "pallet-grandpa", + "pallet-identity", "pallet-im-online", "pallet-indices", "pallet-membership", "pallet-mmr", "pallet-multisig", "pallet-offences", + "pallet-preimage", "pallet-proxy", + "pallet-recovery", + "pallet-scheduler", "pallet-session", + "pallet-society", "pallet-staking", "pallet-sudo", "pallet-timestamp", + "pallet-tips", "pallet-transaction-payment", "pallet-transaction-payment-rpc-runtime-api", + "pallet-treasury", "pallet-utility", + "pallet-vesting", "pallet-xcm", - "parity-scale-codec 3.1.5", + "pallet-xcm-benchmarks", + "parity-scale-codec 3.2.1", "polkadot-parachain", "polkadot-primitives", "polkadot-runtime-common", @@ -9145,6 +9295,7 @@ dependencies = [ "sp-std", "sp-transaction-pool", "sp-version", + "static_assertions", "substrate-wasm-builder", "xcm", "xcm-builder", @@ -9153,8 +9304,8 @@ dependencies = [ [[package]] name = "rococo-runtime-constants" -version = "0.9.24" -source = 
"git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "frame-support", "polkadot-primitives", @@ -9165,9 +9316,9 @@ dependencies = [ [[package]] name = "rpassword" -version = "5.0.1" +version = "7.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffc936cf8a7ea60c58f030fd36a612a48f440610214dc54bc36431f9ea0c3efb" +checksum = "20c9f5d2a0c3e2ea729ab3706d22217177770654c3ef5056b68b69d07332d3f5" dependencies = [ "libc", "winapi", @@ -9180,7 +9331,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "322c53fd76a18698f1c27381d58091de3a043d356aa5bd0d510608b565f469a0" dependencies = [ "async-global-executor", - "futures 0.3.21", + "futures 0.3.25", "log", "netlink-packet-route", "netlink-proto", @@ -9221,28 +9372,28 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" dependencies = [ - "semver 1.0.12", + "semver 1.0.14", ] [[package]] name = "rustix" -version = "0.33.7" +version = "0.35.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "938a344304321a9da4973b9ff4f9f8db9caf4597dfd9dda6a60b523340a0fff0" +checksum = "fbb2fda4666def1433b1b05431ab402e42a1084285477222b72d6c564c417cef" dependencies = [ "bitflags", "errno", "io-lifetimes", "libc", "linux-raw-sys", - "winapi", + "windows-sys 0.36.1", ] [[package]] name = "rustls" -version = "0.20.6" +version = "0.20.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5aab8ee6c7097ed6057f43c187a62418d0c05a4bd5f18b3571db50ee0f9ce033" +checksum = "539a2bfe908f471bfa933876bd1eb6a19cf2176d375f82ef7f99530a40e48c2c" dependencies = [ "log", "ring", @@ -9264,29 +9415,18 @@ dependencies = [ [[package]] name = "rustls-pemfile" -version = "1.0.0" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7522c9de787ff061458fe9a829dc790a3f5b22dc571694fc5883f448b94d9a9" +checksum = "0864aeff53f8c05aa08d86e5ef839d3dfcf07aeba2db32f12db0ef716e87bd55" dependencies = [ "base64", ] [[package]] name = "rustversion" -version = "1.0.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24c8ad4f0c00e1eb5bc7614d236a7f1300e3dbd76b68cac8e06fb00b015ad8d8" - -[[package]] -name = "rw-stream-sink" -version = "0.2.1" +version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4da5fcb054c46f5a5dff833b129285a93d3f0179531735e6c866e8cc307d2020" -dependencies = [ - "futures 0.3.21", - "pin-project 0.4.30", - "static_assertions", -] +checksum = "97477e48b4cf8603ad5f7aaf897467cf42ab4218a38ef76fb14c2d6773a6d6a8" [[package]] name = "rw-stream-sink" @@ -9294,16 +9434,16 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "26338f5e09bb721b85b135ea05af7767c90b52f6de4f087d4f4a3a9d64e7dc04" dependencies = [ - "futures 0.3.21", - "pin-project 1.0.11", + "futures 0.3.25", + "pin-project", "static_assertions", ] [[package]] name = "ryu" -version = "1.0.10" +version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3f6f92acf49d1b98f7a81226834412ada05458b7364277387724a237f062695" +checksum = "4501abdff3ae82a1c1b477a17252eb69cee9e66eb915c1abaa4f44d873df9f09" [[package]] name = "safe-mix" 
@@ -9316,11 +9456,11 @@ dependencies = [ [[package]] name = "salsa20" -version = "0.9.0" +version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c0fbb5f676da676c260ba276a8f43a8dc67cf02d1438423aeb1c677a7212686" +checksum = "97a22f5af31f73a954c10289c93e8a50cc23d971e80ee446f1f6f7137a088213" dependencies = [ - "cipher", + "cipher 0.4.3", ] [[package]] @@ -9335,7 +9475,7 @@ dependencies = [ [[package]] name = "sc-allocator" version = "4.1.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "log", "sp-core", @@ -9346,20 +9486,20 @@ dependencies = [ [[package]] name = "sc-authority-discovery" version = "0.10.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "async-trait", - "futures 0.3.21", + "futures 0.3.25", "futures-timer", "ip_network", "libp2p", "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "prost 0.10.4", - "prost-build 0.9.0", + "prost-build 0.10.4", "rand 0.7.3", "sc-client-api", - "sc-network", + "sc-network-common", "sp-api", "sp-authority-discovery", "sp-blockchain", @@ -9373,12 +9513,12 @@ dependencies = [ [[package]] name = "sc-basic-authorship" version = "0.10.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ - "futures 0.3.21", + "futures 0.3.25", "futures-timer", "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "sc-block-builder", "sc-client-api", "sc-proposer-metrics", @@ -9396,9 +9536,9 @@ dependencies = [ [[package]] name = "sc-block-builder" version = "0.10.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "sc-client-api", "sp-api", "sp-block-builder", @@ -9412,13 +9552,13 @@ dependencies = [ [[package]] name = "sc-chain-spec" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "impl-trait-for-tuples", - "memmap2 0.5.5", - "parity-scale-codec 3.1.5", + "memmap2", + "parity-scale-codec 3.2.1", "sc-chain-spec-derive", - "sc-network", + "sc-network-common", "sc-telemetry", "serde", "serde_json", @@ -9429,7 +9569,7 @@ dependencies = [ [[package]] name = "sc-chain-spec-derive" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ 
"proc-macro-crate", "proc-macro2", @@ -9440,17 +9580,17 @@ dependencies = [ [[package]] name = "sc-cli" version = "0.10.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ + "array-bytes", "chrono", "clap", "fdlimit", - "futures 0.3.21", - "hex", + "futures 0.3.25", "libp2p", "log", "names", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "rand 0.7.3", "regex", "rpassword", @@ -9458,6 +9598,7 @@ dependencies = [ "sc-client-db", "sc-keystore", "sc-network", + "sc-network-common", "sc-service", "sc-telemetry", "sc-tracing", @@ -9473,19 +9614,19 @@ dependencies = [ "sp-version", "thiserror", "tiny-bip39", - "tokio 1.20.1", + "tokio", ] [[package]] name = "sc-client-api" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "fnv", - "futures 0.3.21", + "futures 0.3.25", "hash-db", "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "parking_lot 0.12.1", "sc-executor", "sc-transaction-pool-api", @@ -9507,7 +9648,7 @@ dependencies = [ [[package]] name = "sc-client-db" version = "0.10.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "hash-db", "kvdb", @@ -9516,7 +9657,7 @@ dependencies = [ "linked-hash-map", "log", "parity-db", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "parking_lot 0.12.1", "sc-client-api", "sc-state-db", @@ -9532,10 +9673,10 @@ dependencies = [ [[package]] name = "sc-consensus" version = "0.10.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "async-trait", - "futures 0.3.21", + "futures 0.3.25", "futures-timer", "libp2p", "log", @@ -9556,12 +9697,12 @@ dependencies = [ [[package]] name = "sc-consensus-aura" version = "0.10.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "async-trait", - "futures 0.3.21", + "futures 0.3.25", "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "sc-block-builder", "sc-client-api", "sc-consensus", @@ -9585,20 +9726,19 @@ dependencies = [ [[package]] name = "sc-consensus-babe" version = "0.10.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "async-trait", "fork-tree", - "futures 0.3.21", + "futures 0.3.25", "log", "merlin", - "num-bigint", + "num-bigint 0.2.6", "num-rational 0.2.4", 
"num-traits", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "parking_lot 0.12.1", "rand 0.7.3", - "retain_mut", "sc-client-api", "sc-consensus", "sc-consensus-epochs", @@ -9628,9 +9768,9 @@ dependencies = [ [[package]] name = "sc-consensus-babe-rpc" version = "0.10.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ - "futures 0.3.21", + "futures 0.3.25", "jsonrpsee", "sc-consensus-babe", "sc-consensus-epochs", @@ -9650,10 +9790,10 @@ dependencies = [ [[package]] name = "sc-consensus-epochs" version = "0.10.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "fork-tree", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "sc-client-api", "sc-consensus", "sp-blockchain", @@ -9663,14 +9803,14 @@ dependencies = [ [[package]] name = "sc-consensus-manual-seal" version = "0.10.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "assert_matches", "async-trait", - "futures 0.3.21", + "futures 0.3.25", "jsonrpsee", "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "sc-client-api", "sc-consensus", "sc-consensus-aura", @@ -9697,13 +9837,13 @@ dependencies = [ [[package]] name = "sc-consensus-slots" version = "0.10.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "async-trait", - "futures 0.3.21", + "futures 0.3.25", "futures-timer", "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "sc-client-api", "sc-consensus", "sc-telemetry", @@ -9715,29 +9855,17 @@ dependencies = [ "sp-inherents", "sp-runtime", "sp-state-machine", - "sp-timestamp", - "thiserror", -] - -[[package]] -name = "sc-consensus-uncles" -version = "0.10.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" -dependencies = [ - "sc-client-api", - "sp-authorship", - "sp-runtime", "thiserror", ] [[package]] name = "sc-executor" version = "0.10.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "lazy_static", "lru 0.7.8", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "parking_lot 0.12.1", "sc-executor-common", "sc-executor-wasmi", @@ -9760,14 +9888,13 @@ dependencies = [ [[package]] name = "sc-executor-common" version = "0.10.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = 
"git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "environmental", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "sc-allocator", "sp-maybe-compressed-blob", "sp-sandbox", - "sp-serializer", "sp-wasm-interface", "thiserror", "wasm-instrument", @@ -9777,10 +9904,10 @@ dependencies = [ [[package]] name = "sc-executor-wasmi" version = "0.10.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "sc-allocator", "sc-executor-common", "sp-runtime-interface", @@ -9792,13 +9919,15 @@ dependencies = [ [[package]] name = "sc-executor-wasmtime" version = "0.10.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "cfg-if 1.0.0", "libc", "log", - "parity-scale-codec 3.1.5", - "parity-wasm 0.42.2", + "once_cell", + "parity-scale-codec 3.2.1", + "parity-wasm 0.45.0", + "rustix", "sc-allocator", "sc-executor-common", "sp-runtime-interface", @@ -9810,18 +9939,18 @@ dependencies = [ [[package]] name = "sc-finality-grandpa" version = "0.10.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "ahash", + "array-bytes", "async-trait", "dyn-clone", "finality-grandpa", "fork-tree", - "futures 0.3.21", + "futures 0.3.25", "futures-timer", - "hex", "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "parking_lot 0.12.1", "rand 0.8.5", "sc-block-builder", @@ -9830,6 +9959,7 @@ dependencies = [ "sc-consensus", "sc-keystore", "sc-network", + "sc-network-common", "sc-network-gossip", "sc-telemetry", "sc-utils", @@ -9850,13 +9980,13 @@ dependencies = [ [[package]] name = "sc-finality-grandpa-rpc" version = "0.10.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "finality-grandpa", - "futures 0.3.21", + "futures 0.3.25", "jsonrpsee", "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "sc-client-api", "sc-finality-grandpa", "sc-rpc", @@ -9871,15 +10001,15 @@ dependencies = [ [[package]] name = "sc-informant" version = "0.10.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "ansi_term", - "futures 0.3.21", + "futures 0.3.25", "futures-timer", "log", "parity-util-mem", "sc-client-api", - "sc-network", + "sc-network-common", "sc-transaction-pool-api", "sp-blockchain", "sp-runtime", @@ -9888,10 +10018,10 @@ dependencies = [ [[package]] name = "sc-keystore" version = "4.0.0-dev" -source = 
"git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ + "array-bytes", "async-trait", - "hex", "parking_lot 0.12.1", "serde_json", "sp-application-crypto", @@ -9903,37 +10033,34 @@ dependencies = [ [[package]] name = "sc-network" version = "0.10.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ + "array-bytes", "async-trait", "asynchronous-codec", "bitflags", - "bytes 1.2.0", + "bytes", "cid", "either", "fnv", "fork-tree", - "futures 0.3.21", + "futures 0.3.25", "futures-timer", - "hex", "ip_network", "libp2p", "linked-hash-map", "linked_hash_set", "log", "lru 0.7.8", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "parking_lot 0.12.1", - "pin-project 1.0.11", + "pin-project", "prost 0.10.4", - "prost-build 0.9.0", "rand 0.7.3", "sc-block-builder", "sc-client-api", "sc-consensus", "sc-network-common", - "sc-network-light", - "sc-network-sync", "sc-peerset", "sc-utils", "serde", @@ -9943,40 +10070,72 @@ dependencies = [ "sp-blockchain", "sp-consensus", "sp-core", - "sp-finality-grandpa", "sp-runtime", "substrate-prometheus-endpoint", "thiserror", "unsigned-varint", - "void", "zeroize", ] +[[package]] +name = "sc-network-bitswap" +version = "0.10.0-dev" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" +dependencies = [ + "cid", + "futures 0.3.25", + "libp2p", + "log", + "prost 0.11.0", + "prost-build 0.11.1", + "sc-client-api", + "sc-network-common", + "sp-blockchain", + "sp-runtime", + "thiserror", + "unsigned-varint", + "void", +] + [[package]] name = "sc-network-common" version = "0.10.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ - "futures 0.3.21", + "async-trait", + "bitflags", + "bytes", + "futures 0.3.25", + "futures-timer", "libp2p", - "parity-scale-codec 3.1.5", - "prost-build 0.9.0", + "linked_hash_set", + "parity-scale-codec 3.2.1", + "prost-build 0.10.4", + "sc-consensus", "sc-peerset", + "serde", "smallvec", + "sp-blockchain", + "sp-consensus", + "sp-finality-grandpa", + "sp-runtime", + "substrate-prometheus-endpoint", + "thiserror", ] [[package]] name = "sc-network-gossip" version = "0.10.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "ahash", - "futures 0.3.21", + "futures 0.3.25", "futures-timer", "libp2p", "log", "lru 0.7.8", - "sc-network", + "sc-network-common", + "sc-peerset", "sp-runtime", "substrate-prometheus-endpoint", "tracing", @@ -9985,14 +10144,15 @@ dependencies = [ [[package]] name = "sc-network-light" version = "0.10.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source 
= "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ - "futures 0.3.21", + "array-bytes", + "futures 0.3.25", "libp2p", "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "prost 0.10.4", - "prost-build 0.9.0", + "prost-build 0.10.4", "sc-client-api", "sc-network-common", "sc-peerset", @@ -10005,18 +10165,17 @@ dependencies = [ [[package]] name = "sc-network-sync" version = "0.10.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ - "bitflags", - "either", + "array-bytes", "fork-tree", - "futures 0.3.21", + "futures 0.3.25", "libp2p", "log", "lru 0.7.8", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "prost 0.10.4", - "prost-build 0.9.0", + "prost-build 0.10.4", "sc-client-api", "sc-consensus", "sc-network-common", @@ -10031,25 +10190,46 @@ dependencies = [ "thiserror", ] +[[package]] +name = "sc-network-transactions" +version = "0.10.0-dev" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" +dependencies = [ + "array-bytes", + "futures 0.3.25", + "hex", + "libp2p", + "log", + "parity-scale-codec 3.2.1", + "pin-project", + "sc-network-common", + "sc-peerset", + "sp-consensus", + "sp-runtime", + "substrate-prometheus-endpoint", +] + [[package]] name = "sc-offchain" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ - "bytes 1.2.0", + "array-bytes", + "bytes", "fnv", - "futures 0.3.21", + "futures 0.3.25", "futures-timer", - "hex", "hyper", "hyper-rustls", + "libp2p", "num_cpus", "once_cell", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "parking_lot 0.12.1", "rand 0.7.3", "sc-client-api", - "sc-network", + "sc-network-common", + "sc-peerset", "sc-utils", "sp-api", "sp-core", @@ -10062,9 +10242,9 @@ dependencies = [ [[package]] name = "sc-peerset" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ - "futures 0.3.21", + "futures 0.3.25", "libp2p", "log", "sc-utils", @@ -10075,7 +10255,7 @@ dependencies = [ [[package]] name = "sc-proposer-metrics" version = "0.10.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "log", "substrate-prometheus-endpoint", @@ -10084,13 +10264,13 @@ dependencies = [ [[package]] name = "sc-rpc" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ - "futures 0.3.21", + "futures 0.3.25", 
"hash-db", "jsonrpsee", "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "parking_lot 0.12.1", "sc-block-builder", "sc-chain-spec", @@ -10114,12 +10294,12 @@ dependencies = [ [[package]] name = "sc-rpc-api" version = "0.10.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ - "futures 0.3.21", + "futures 0.3.25", "jsonrpsee", "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "parking_lot 0.12.1", "sc-chain-spec", "sc-transaction-pool-api", @@ -10137,33 +10317,33 @@ dependencies = [ [[package]] name = "sc-rpc-server" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ - "futures 0.3.21", + "futures 0.3.25", "jsonrpsee", "log", "serde_json", "substrate-prometheus-endpoint", - "tokio 1.20.1", + "tokio", ] [[package]] name = "sc-service" version = "0.10.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "async-trait", "directories", "exit-future", - "futures 0.3.21", + "futures 0.3.25", "futures-timer", "hash-db", "jsonrpsee", "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "parity-util-mem", "parking_lot 0.12.1", - "pin-project 1.0.11", + "pin-project", "rand 0.7.3", "sc-block-builder", "sc-chain-spec", @@ -10174,7 +10354,11 @@ dependencies = [ "sc-informant", "sc-keystore", "sc-network", + "sc-network-bitswap", "sc-network-common", + "sc-network-light", + "sc-network-sync", + "sc-network-transactions", "sc-offchain", "sc-rpc", "sc-rpc-server", @@ -10204,10 +10388,11 @@ dependencies = [ "sp-transaction-storage-proof", "sp-trie", "sp-version", + "static_init 1.0.3", "substrate-prometheus-endpoint", "tempfile", "thiserror", - "tokio 1.20.1", + "tokio", "tracing", "tracing-futures", ] @@ -10215,10 +10400,10 @@ dependencies = [ [[package]] name = "sc-state-db" version = "0.10.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "parity-util-mem", "parity-util-mem-derive", "parking_lot 0.12.1", @@ -10229,10 +10414,10 @@ dependencies = [ [[package]] name = "sc-sync-state-rpc" version = "0.10.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "jsonrpsee", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "sc-chain-spec", "sc-client-api", "sc-consensus-babe", @@ -10248,9 +10433,9 @@ dependencies = [ [[package]] name = "sc-sysinfo" version = "6.0.0-dev" -source = 
"git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ - "futures 0.3.21", + "futures 0.3.25", "libc", "log", "rand 0.7.3", @@ -10267,14 +10452,14 @@ dependencies = [ [[package]] name = "sc-telemetry" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "chrono", - "futures 0.3.21", + "futures 0.3.25", "libp2p", "log", "parking_lot 0.12.1", - "pin-project 1.0.11", + "pin-project", "rand 0.7.3", "serde", "serde_json", @@ -10285,7 +10470,7 @@ dependencies = [ [[package]] name = "sc-tracing" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "ansi_term", "atty", @@ -10316,7 +10501,7 @@ dependencies = [ [[package]] name = "sc-tracing-proc-macro" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "proc-macro-crate", "proc-macro2", @@ -10327,16 +10512,15 @@ dependencies = [ [[package]] name = "sc-transaction-pool" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ - "futures 0.3.21", + "futures 0.3.25", "futures-timer", "linked-hash-map", "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "parity-util-mem", "parking_lot 0.12.1", - "retain_mut", "sc-client-api", "sc-transaction-pool-api", "sc-utils", @@ -10354,9 +10538,9 @@ dependencies = [ [[package]] name = "sc-transaction-pool-api" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ - "futures 0.3.21", + "futures 0.3.25", "log", "serde", "sp-blockchain", @@ -10367,9 +10551,9 @@ dependencies = [ [[package]] name = "sc-utils" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ - "futures 0.3.21", + "futures 0.3.25", "futures-timer", "lazy_static", "log", @@ -10379,23 +10563,23 @@ dependencies = [ [[package]] name = "scale-info" -version = "2.1.2" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c46be926081c9f4dd5dd9b6f1d3e3229f2360bc6502dd8836f84a93b7c75e99a" +checksum = 
"333af15b02563b8182cd863f925bd31ef8fa86a0e095d30c091956057d436153" dependencies = [ "bitvec 1.0.1", "cfg-if 1.0.0", "derive_more", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info-derive", "serde", ] [[package]] name = "scale-info-derive" -version = "2.1.2" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50e334bb10a245e28e5fd755cabcafd96cfcd167c99ae63a46924ca8d8703a3c" +checksum = "53f56acbd0743d29ffa08f911ab5397def774ad01bab3786804cf6ee057fb5e1" dependencies = [ "proc-macro-crate", "proc-macro2", @@ -10410,7 +10594,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "88d6731146462ea25d9244b2ed5fd1d716d25c52e4d54aa4fb0f3c4e9854dbe2" dependencies = [ "lazy_static", - "windows-sys", + "windows-sys 0.36.1", ] [[package]] @@ -10437,6 +10621,12 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" +[[package]] +name = "scratch" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8132065adcfd6e02db789d9285a0deb2f3fcb04002865ab67d5fb103533898" + [[package]] name = "sct" version = "0.7.0" @@ -10454,25 +10644,26 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08da66b8b0965a5555b6bd6639e68ccba85e1e2506f5fbb089e93f8a04e1a2d1" dependencies = [ "der", - "generic-array 0.14.5", + "generic-array 0.14.6", + "pkcs8", "subtle", "zeroize", ] [[package]] name = "secp256k1" -version = "0.21.3" +version = "0.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c42e6f1735c5f00f51e43e28d6634141f2bcad10931b2609ddd74a86d751260" +checksum = "b7649a0b3ffb32636e60c7ce0d70511eda9c52c658cd0634e194d5a19943aeff" dependencies = [ "secp256k1-sys", ] [[package]] name = "secp256k1-sys" -version = "0.4.2" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "957da2573cde917463ece3570eab4a0b3f19de6f1646cde62e6fd3868f566036" +checksum = "83080e2c2fc1006e625be82e5d1eb6a43b7fd9578b617fcc55814daf286bba4b" dependencies = [ "cc", ] @@ -10488,9 +10679,9 @@ dependencies = [ [[package]] name = "security-framework" -version = "2.6.1" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dc14f172faf8a0194a3aded622712b0de276821addc574fa54fc0a1167e10dc" +checksum = "2bc1bb97804af6631813c55739f771071e0f2ed33ee20b68c86ec505d906356c" dependencies = [ "bitflags", "core-foundation", @@ -10529,9 +10720,9 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.12" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2333e6df6d6598f2b1974829f853c2b4c5f4a6e503c10af918081aa6f8564e1" +checksum = "e25dfac463d778e353db5be2449d1cce89bd6fd23c9f1ea21310ce6e5a1b29c4" dependencies = [ "serde", ] @@ -10544,18 +10735,18 @@ checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" [[package]] name = "serde" -version = "1.0.140" +version = "1.0.146" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc855a42c7967b7c369eb5860f7164ef1f6f81c20c7cc1141f2a604e18723b03" +checksum = "6df50b7a60a0ad48e1b42eb38373eac8ff785d619fb14db917b4e63d5439361f" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.140" +version = "1.0.146" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"6f2122636b9fe3b81f1cb25099fcf2d3f542cdb1d45940d56c713158884a05da" +checksum = "a714fd32ba1d66047ce7d53dabd809e9922d538f9047de13cc4cffca47b36205" dependencies = [ "proc-macro2", "quote", @@ -10564,11 +10755,11 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.82" +version = "1.0.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82c2c1fdcd807d1098552c5b9a36e425e42e9fbd7c6a37a8425f390f781f7fa7" +checksum = "6ce777b7b150d76b9cf60d28b55f5847135a003f7d7350c6be7a773508ce7d45" dependencies = [ - "itoa 1.0.2", + "itoa", "ryu", "serde", ] @@ -10582,18 +10773,6 @@ dependencies = [ "serde", ] -[[package]] -name = "sha-1" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7d94d0bede923b3cea61f3f1ff57ff8cdfd77b400fb8f9998949e0cf04163df" -dependencies = [ - "block-buffer 0.7.3", - "digest 0.8.1", - "fake-simd", - "opaque-debug 0.2.3", -] - [[package]] name = "sha-1" version = "0.9.8" @@ -10607,6 +10786,17 @@ dependencies = [ "opaque-debug 0.3.0", ] +[[package]] +name = "sha1" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3" +dependencies = [ + "cfg-if 1.0.0", + "cpufeatures", + "digest 0.10.5", +] + [[package]] name = "sha2" version = "0.8.2" @@ -10634,34 +10824,22 @@ dependencies = [ [[package]] name = "sha2" -version = "0.10.2" +version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55deaec60f81eefe3cce0dc50bda92d6d8e88f2a27df7c5033b42afeb1ed2676" +checksum = "82e6b795fe2e3b1e845bafcb27aa35405c4d47cdfc92af5fc8d3002f76cebdc0" dependencies = [ "cfg-if 1.0.0", "cpufeatures", - "digest 0.10.3", -] - -[[package]] -name = "sha3" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f81199417d4e5de3f04b1e871023acea7389672c4135918f05aa9cbf2f2fa809" -dependencies = [ - "block-buffer 0.9.0", - "digest 0.9.0", - "keccak", - "opaque-debug 0.3.0", + "digest 0.10.5", ] [[package]] name = "sha3" -version = "0.10.1" +version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "881bf8156c87b6301fc5ca6b27f11eeb2761224c7081e69b409d5a1951a70c86" +checksum = "bdf0c33fae925bdc080598b84bc15c55e7b9a4a43b3c704da051f977469691c9" dependencies = [ - "digest 0.10.3", + "digest 0.10.5", "keccak", ] @@ -10706,7 +10884,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "02658e48d89f2bec991f9a78e69cfa4c316f8d6a6c4ec12fae1aeb263d486788" dependencies = [ "digest 0.9.0", - "rand_core 0.6.3", + "rand_core 0.6.4", ] [[package]] @@ -10730,13 +10908,19 @@ dependencies = [ "autocfg", ] +[[package]] +name = "slice-group-by" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03b634d87b960ab1a38c4fe143b508576f075e7c978bfad18217645ebfdfa2ec" + [[package]] name = "slot-range-helper" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "enumn", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "paste", "sp-runtime", "sp-std", @@ -10753,9 +10937,9 @@ dependencies = [ [[package]] name = "smallvec" -version = "1.9.0" +version = "1.10.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "2fd0db749597d91ff862fd1d55ea87f7855a744a8425a64695b6fca237d1dad1" +checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0" [[package]] name = "snap" @@ -10773,18 +10957,18 @@ dependencies = [ "blake2", "chacha20poly1305", "curve25519-dalek 4.0.0-pre.1", - "rand_core 0.6.3", + "rand_core 0.6.4", "ring", "rustc_version 0.4.0", - "sha2 0.10.2", + "sha2 0.10.6", "subtle", ] [[package]] name = "socket2" -version = "0.4.4" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66d72b759436ae32898a2af0a14218dbf55efde3feeb170eb623637db85ee1e0" +checksum = "02e2d2db9033d13a1567121ddd7a095ee144db4e1ca1b1bda3419bc0da294ebd" dependencies = [ "libc", "winapi", @@ -10797,28 +10981,29 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "41d1c5305e39e09653383c2c7244f2f78b3bcae37cf50c64cb4789c9f5096ec2" dependencies = [ "base64", - "bytes 1.2.0", + "bytes", "flate2", - "futures 0.3.21", + "futures 0.3.25", "httparse", "log", "rand 0.8.5", - "sha-1 0.9.8", + "sha-1", ] [[package]] name = "sp-api" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "hash-db", "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "sp-api-proc-macro", "sp-core", "sp-runtime", "sp-state-machine", "sp-std", + "sp-trie", "sp-version", "thiserror", ] @@ -10826,7 +11011,7 @@ dependencies = [ [[package]] name = "sp-api-proc-macro" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "blake2", "proc-macro-crate", @@ -10838,9 +11023,9 @@ dependencies = [ [[package]] name = "sp-application-crypto" version = "6.0.0" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "serde", "sp-core", @@ -10851,11 +11036,11 @@ dependencies = [ [[package]] name = "sp-arithmetic" version = "5.0.0" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "integer-sqrt", "num-traits", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "serde", "sp-debug-derive", @@ -10866,9 +11051,9 @@ dependencies = [ [[package]] name = "sp-authority-discovery" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-api", "sp-application-crypto", @@ -10879,10 
+11064,10 @@ dependencies = [ [[package]] name = "sp-authorship" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "async-trait", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "sp-inherents", "sp-runtime", "sp-std", @@ -10891,9 +11076,9 @@ dependencies = [ [[package]] name = "sp-block-builder" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "sp-api", "sp-inherents", "sp-runtime", @@ -10903,12 +11088,12 @@ dependencies = [ [[package]] name = "sp-blockchain" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ - "futures 0.3.21", + "futures 0.3.25", "log", "lru 0.7.8", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "parking_lot 0.12.1", "sp-api", "sp-consensus", @@ -10921,13 +11106,13 @@ dependencies = [ [[package]] name = "sp-consensus" version = "0.10.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "async-trait", - "futures 0.3.21", + "futures 0.3.25", "futures-timer", "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "sp-core", "sp-inherents", "sp-runtime", @@ -10940,10 +11125,10 @@ dependencies = [ [[package]] name = "sp-consensus-aura" version = "0.10.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "async-trait", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-api", "sp-application-crypto", @@ -10958,11 +11143,11 @@ dependencies = [ [[package]] name = "sp-consensus-babe" version = "0.10.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "async-trait", "merlin", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "serde", "sp-api", @@ -10981,9 +11166,9 @@ dependencies = [ [[package]] name = "sp-consensus-slots" version = "0.10.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "serde", "sp-arithmetic", 
@@ -10995,9 +11180,9 @@ dependencies = [ [[package]] name = "sp-consensus-vrf" version = "0.10.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "schnorrkel", "sp-core", @@ -11008,25 +11193,25 @@ dependencies = [ [[package]] name = "sp-core" version = "6.0.0" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ + "array-bytes", "base58", "bitflags", - "blake2-rfc", + "blake2", "byteorder", "dyn-clonable", - "ed25519-dalek", - "futures 0.3.21", + "ed25519-zebra", + "futures 0.3.25", "hash-db", "hash256-std-hasher", - "hex", "impl-serde", "lazy_static", "libsecp256k1", "log", "merlin", "num-traits", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "parity-util-mem", "parking_lot 0.12.1", "primitive-types", @@ -11054,13 +11239,13 @@ dependencies = [ [[package]] name = "sp-core-hashing" version = "4.0.0" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "blake2", "byteorder", - "digest 0.10.3", - "sha2 0.10.2", - "sha3 0.10.1", + "digest 0.10.5", + "sha2 0.10.6", + "sha3", "sp-std", "twox-hash", ] @@ -11068,7 +11253,7 @@ dependencies = [ [[package]] name = "sp-core-hashing-proc-macro" version = "5.0.0" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "proc-macro2", "quote", @@ -11079,7 +11264,7 @@ dependencies = [ [[package]] name = "sp-database" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "kvdb", "parking_lot 0.12.1", @@ -11088,7 +11273,7 @@ dependencies = [ [[package]] name = "sp-debug-derive" version = "4.0.0" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "proc-macro2", "quote", @@ -11098,10 +11283,10 @@ dependencies = [ [[package]] name = "sp-externalities" version = "0.12.0" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "environmental", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "sp-std", "sp-storage", ] @@ -11109,11 +11294,11 @@ dependencies = [ [[package]] name = 
"sp-finality-grandpa" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "finality-grandpa", "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "serde", "sp-api", @@ -11127,11 +11312,11 @@ dependencies = [ [[package]] name = "sp-inherents" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "async-trait", "impl-trait-for-tuples", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "sp-core", "sp-runtime", "sp-std", @@ -11141,13 +11326,14 @@ dependencies = [ [[package]] name = "sp-io" version = "6.0.0" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ - "futures 0.3.21", + "bytes", + "futures 0.3.25", "hash-db", "libsecp256k1", "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "parking_lot 0.12.1", "secp256k1", "sp-core", @@ -11166,23 +11352,23 @@ dependencies = [ [[package]] name = "sp-keyring" version = "6.0.0" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "lazy_static", "sp-core", "sp-runtime", - "strum 0.23.0", + "strum", ] [[package]] name = "sp-keystore" version = "0.12.0" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "async-trait", - "futures 0.3.21", + "futures 0.3.25", "merlin", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "parking_lot 0.12.1", "schnorrkel", "serde", @@ -11194,7 +11380,7 @@ dependencies = [ [[package]] name = "sp-maybe-compressed-blob" version = "4.1.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "thiserror", "zstd", @@ -11203,10 +11389,10 @@ dependencies = [ [[package]] name = "sp-mmr-primitives" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "serde", "sp-api", "sp-core", @@ -11218,9 +11404,9 @@ dependencies = [ [[package]] name = "sp-npos-elections" version = "4.0.0-dev" -source = 
"git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "serde", "sp-arithmetic", @@ -11232,7 +11418,7 @@ dependencies = [ [[package]] name = "sp-offchain" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "sp-api", "sp-core", @@ -11242,7 +11428,7 @@ dependencies = [ [[package]] name = "sp-panic-handler" version = "4.0.0" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "backtrace", "lazy_static", @@ -11252,7 +11438,7 @@ dependencies = [ [[package]] name = "sp-rpc" version = "6.0.0" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "rustc-hash", "serde", @@ -11262,13 +11448,13 @@ dependencies = [ [[package]] name = "sp-runtime" version = "6.0.0" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "either", "hash256-std-hasher", "impl-trait-for-tuples", "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "parity-util-mem", "paste", "rand 0.7.3", @@ -11279,15 +11465,17 @@ dependencies = [ "sp-core", "sp-io", "sp-std", + "sp-weights", ] [[package]] name = "sp-runtime-interface" version = "6.0.0" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ + "bytes", "impl-trait-for-tuples", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "primitive-types", "sp-externalities", "sp-runtime-interface-proc-macro", @@ -11301,7 +11489,7 @@ dependencies = [ [[package]] name = "sp-runtime-interface-proc-macro" version = "5.0.0" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "Inflector", "proc-macro-crate", @@ -11313,10 +11501,10 @@ dependencies = [ [[package]] name = "sp-sandbox" version = "0.10.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 
3.2.1", "sp-core", "sp-io", "sp-std", @@ -11324,21 +11512,12 @@ dependencies = [ "wasmi", ] -[[package]] -name = "sp-serializer" -version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" -dependencies = [ - "serde", - "serde_json", -] - [[package]] name = "sp-session" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-api", "sp-core", @@ -11350,9 +11529,9 @@ dependencies = [ [[package]] name = "sp-staking" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-runtime", "sp-std", @@ -11361,12 +11540,12 @@ dependencies = [ [[package]] name = "sp-state-machine" version = "0.12.0" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "hash-db", "log", "num-traits", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "parking_lot 0.12.1", "rand 0.7.3", "smallvec", @@ -11383,15 +11562,15 @@ dependencies = [ [[package]] name = "sp-std" version = "4.0.0" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" [[package]] name = "sp-storage" version = "6.0.0" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "impl-serde", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "ref-cast", "serde", "sp-debug-derive", @@ -11401,7 +11580,7 @@ dependencies = [ [[package]] name = "sp-tasks" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "log", "sp-core", @@ -11414,12 +11593,12 @@ dependencies = [ [[package]] name = "sp-timestamp" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "async-trait", "futures-timer", "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "sp-api", "sp-inherents", "sp-runtime", @@ -11430,9 +11609,9 @@ dependencies = [ [[package]] name = "sp-tracing" version = "5.0.0" -source 
= "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "sp-std", "tracing", "tracing-core", @@ -11442,7 +11621,7 @@ dependencies = [ [[package]] name = "sp-transaction-pool" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "sp-api", "sp-runtime", @@ -11451,11 +11630,11 @@ dependencies = [ [[package]] name = "sp-transaction-storage-proof" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "async-trait", "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-core", "sp-inherents", @@ -11467,15 +11646,22 @@ dependencies = [ [[package]] name = "sp-trie" version = "6.0.0" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ + "ahash", "hash-db", + "hashbrown", + "lazy_static", + "lru 0.7.8", "memory-db", - "parity-scale-codec 3.1.5", + "nohash-hasher", + "parity-scale-codec 3.2.1", + "parking_lot 0.12.1", "scale-info", "sp-core", "sp-std", "thiserror", + "tracing", "trie-db", "trie-root", ] @@ -11483,11 +11669,11 @@ dependencies = [ [[package]] name = "sp-version" version = "5.0.0" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "impl-serde", - "parity-scale-codec 3.1.5", - "parity-wasm 0.42.2", + "parity-scale-codec 3.2.1", + "parity-wasm 0.45.0", "scale-info", "serde", "sp-core-hashing-proc-macro", @@ -11500,9 +11686,9 @@ dependencies = [ [[package]] name = "sp-version-proc-macro" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "proc-macro2", "quote", "syn", @@ -11511,27 +11697,53 @@ dependencies = [ [[package]] name = "sp-wasm-interface" version = "6.0.0" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "impl-trait-for-tuples", "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "sp-std", "wasmi", "wasmtime", ] +[[package]] +name = "sp-weights" +version = "4.0.0" +source = 
"git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" +dependencies = [ + "impl-trait-for-tuples", + "parity-scale-codec 3.2.1", + "scale-info", + "serde", + "smallvec", + "sp-arithmetic", + "sp-core", + "sp-debug-derive", + "sp-std", +] + [[package]] name = "spin" version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" +checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" + +[[package]] +name = "spki" +version = "0.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44d01ac02a6ccf3e07db148d2be087da624fea0221a16152ed01f0496a6b0a27" +dependencies = [ + "base64ct", + "der", +] [[package]] name = "ss58-registry" -version = "1.25.0" +version = "1.33.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a039906277e0d8db996cd9d1ef19278c10209d994ecfc1025ced16342873a17c" +checksum = "3ab7554f8a8b6f8d71cd5a8e6536ef116e2ce0504cf97ebf16311d58065dc8a6" dependencies = [ "Inflector", "num-format", @@ -11563,7 +11775,22 @@ dependencies = [ "cfg_aliases", "libc", "parking_lot 0.11.2", - "static_init_macro", + "static_init_macro 0.5.0", +] + +[[package]] +name = "static_init" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a2a1c578e98c1c16fc3b8ec1328f7659a500737d7a0c6d625e73e830ff9c1f6" +dependencies = [ + "bitflags", + "cfg_aliases", + "libc", + "parking_lot 0.11.2", + "parking_lot_core 0.8.5", + "static_init_macro 1.0.2", + "winapi", ] [[package]] @@ -11579,6 +11806,19 @@ dependencies = [ "syn", ] +[[package]] +name = "static_init_macro" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70a2595fc3aa78f2d0e45dd425b22282dd863273761cc77780914b2cf3003acf" +dependencies = [ + "cfg_aliases", + "memchr", + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "statrs" version = "0.15.0" @@ -11606,44 +11846,22 @@ dependencies = [ "syn", ] -[[package]] -name = "strum" -version = "0.23.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cae14b91c7d11c9a851d3fbc80a963198998c2a64eec840477fa92d8ce9b70bb" -dependencies = [ - "strum_macros 0.23.1", -] - [[package]] name = "strum" version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f" dependencies = [ - "strum_macros 0.24.2", -] - -[[package]] -name = "strum_macros" -version = "0.23.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5bb0dc7ee9c15cea6199cde9a127fa16a4c5819af85395457ad72d68edc85a38" -dependencies = [ - "heck 0.3.3", - "proc-macro2", - "quote", - "rustversion", - "syn", + "strum_macros", ] [[package]] name = "strum_macros" -version = "0.24.2" +version = "0.24.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4faebde00e8ff94316c01800f9054fd2ba77d30d9e922541913051d1d978918b" +checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59" dependencies = [ - "heck 0.4.0", + "heck", "proc-macro2", "quote", "rustversion", @@ -11666,7 +11884,7 @@ dependencies = [ [[package]] name = "substrate-build-script-utils" version = "3.0.0" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = 
"git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "platforms", ] @@ -11674,13 +11892,13 @@ dependencies = [ [[package]] name = "substrate-frame-rpc-system" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "frame-system-rpc-runtime-api", - "futures 0.3.21", + "futures 0.3.25", "jsonrpsee", "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "sc-client-api", "sc-rpc-api", "sc-transaction-pool-api", @@ -11695,24 +11913,24 @@ dependencies = [ [[package]] name = "substrate-prometheus-endpoint" version = "0.10.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "futures-util", "hyper", "log", "prometheus", "thiserror", - "tokio 1.20.1", + "tokio", ] [[package]] name = "substrate-state-trie-migration-rpc" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "jsonrpsee", "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "sc-client-api", "sc-rpc-api", "scale-info", @@ -11729,12 +11947,12 @@ dependencies = [ [[package]] name = "substrate-test-client" version = "2.0.1" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ + "array-bytes", "async-trait", - "futures 0.3.21", - "hex", - "parity-scale-codec 3.1.5", + "futures 0.3.25", + "parity-scale-codec 3.2.1", "sc-client-api", "sc-client-db", "sc-consensus", @@ -11755,17 +11973,17 @@ dependencies = [ [[package]] name = "substrate-test-utils" version = "4.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ - "futures 0.3.21", + "futures 0.3.25", "substrate-test-utils-derive", - "tokio 1.20.1", + "tokio", ] [[package]] name = "substrate-test-utils-derive" version = "0.10.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "proc-macro-crate", "proc-macro2", @@ -11776,13 +11994,14 @@ dependencies = [ [[package]] name = "substrate-wasm-builder" version = "5.0.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "ansi_term", 
"build-helper", "cargo_metadata", + "filetime", "sp-maybe-compressed-blob", - "strum 0.23.0", + "strum", "tempfile", "toml", "walkdir", @@ -11797,9 +12016,9 @@ checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601" [[package]] name = "syn" -version = "1.0.98" +version = "1.0.103" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c50aef8a904de4c23c788f104b7dddc7d6f79c647c7c8ce4cc8f73eb0ca773dd" +checksum = "a864042229133ada95abf3b54fdc62ef5ccabe9515b64717bcb9a1919e59445d" dependencies = [ "proc-macro2", "quote", @@ -11876,8 +12095,8 @@ dependencies = [ [[package]] name = "test-runtime-constants" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "frame-support", "polkadot-primitives", @@ -11888,8 +12107,9 @@ dependencies = [ [[package]] name = "tests" -version = "0.1.0" +version = "0.1.1" dependencies = [ + "evm-coder", "fp-evm-mapping", "frame-support", "frame-system", @@ -11905,36 +12125,36 @@ dependencies = [ "pallet-timestamp", "pallet-transaction-payment", "pallet-unique", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", "sp-core", "sp-io", "sp-runtime", "sp-std", - "unique-runtime-common", "up-data-structs", + "up-sponsorship", ] [[package]] name = "textwrap" -version = "0.15.0" +version = "0.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1141d4d61095b28419e22cb0bbf02755f5e54e0526f97f1e3d1d160e60885fb" +checksum = "949517c0cf1bf4ee812e2e07e08ab448e3ae0d23472aee8a06c985f0c8815b16" [[package]] name = "thiserror" -version = "1.0.31" +version = "1.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd829fe32373d27f76265620b5309d0340cb8550f523c1dda251d6298069069a" +checksum = "10deb33631e3c9018b9baf9dcbbc4f737320d2b576bac10f6aefa048fa407e3e" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.31" +version = "1.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0396bc89e626244658bef819e22d0cc459e795a5ebe878e6ec336d1674a8d79a" +checksum = "982d17546b47146b28f7c22e3d08465f6b8903d0ea13c1660d9d84a6e7adcdbb" dependencies = [ "proc-macro2", "quote", @@ -12000,6 +12220,24 @@ dependencies = [ "winapi", ] +[[package]] +name = "time" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2702e08a7a860f005826c6815dcac101b19b5eb330c27fe4a5928fec1d20ddd" +dependencies = [ + "itoa", + "libc", + "num_threads", + "time-macros", +] + +[[package]] +name = "time-macros" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42657b1a6f4d817cda8e7a0ace261fe0cc946cf3a80314390b22cc61ae080792" + [[package]] name = "tiny-bip39" version = "0.8.2" @@ -12045,48 +12283,24 @@ checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" [[package]] name = "tokio" -version = "0.2.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6703a273949a90131b290be1fe7b039d0fc884aa1935860dfcbe056f28cd8092" -dependencies = [ - "bytes 0.5.6", - "fnv", - "pin-project-lite 0.1.12", - "tokio-macros 0.2.6", -] - -[[package]] -name = "tokio" -version = "1.20.1" +version = "1.21.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"7a8325f63a7d4774dd041e363b2409ed1c5cbbd0f867795e661df066b2b0a581" +checksum = "a9e03c497dc955702ba729190dc4aac6f2a0ce97f913e5b1b5912fc5039d9099" dependencies = [ "autocfg", - "bytes 1.2.0", + "bytes", "libc", "memchr", "mio", "num_cpus", - "once_cell", "parking_lot 0.12.1", "pin-project-lite 0.2.9", "signal-hook-registry", "socket2", - "tokio-macros 1.8.0", + "tokio-macros", "winapi", ] -[[package]] -name = "tokio-macros" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e44da00bfc73a25f814cd8d7e57a68a5c31b74b3152a0a1d1f590c97ed06265a" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - [[package]] name = "tokio-macros" version = "1.8.0" @@ -12105,22 +12319,33 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c43ee83903113e03984cb9e5cebe6c04a5116269e900e3ddba8f068a62adda59" dependencies = [ "rustls", - "tokio 1.20.1", + "tokio", "webpki", ] +[[package]] +name = "tokio-stream" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d660770404473ccd7bc9f8b28494a811bc18542b915c0855c51e8f419d5223ce" +dependencies = [ + "futures-core", + "pin-project-lite 0.2.9", + "tokio", +] + [[package]] name = "tokio-util" -version = "0.7.3" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc463cd8deddc3770d20f9852143d50bf6094e640b485cb2e189a2099085ff45" +checksum = "0bb2e075f03b3d66d8d8785356224ba688d2906a371015e225beeb65ca92c740" dependencies = [ - "bytes 1.2.0", + "bytes", "futures-core", "futures-io", "futures-sink", "pin-project-lite 0.2.9", - "tokio 1.20.1", + "tokio", "tracing", ] @@ -12141,9 +12366,9 @@ checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" [[package]] name = "tracing" -version = "0.1.35" +version = "0.1.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a400e31aa60b9d44a52a8ee0343b5b18566b03a8321e0d321f695cf56e940160" +checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8" dependencies = [ "cfg-if 1.0.0", "pin-project-lite 0.2.9", @@ -12153,9 +12378,9 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.22" +version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11c75893af559bc8e10716548bdef5cb2b983f8e637db9d0e15126b61b484ee2" +checksum = "4017f8f45139870ca7e672686113917c71c7a6e02d4924eda67186083c03081a" dependencies = [ "proc-macro2", "quote", @@ -12164,9 +12389,9 @@ dependencies = [ [[package]] name = "tracing-core" -version = "0.1.28" +version = "0.1.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b7358be39f2f274f322d2aaed611acc57f382e8eb1e5b48cb9ae30933495ce7" +checksum = "24eb03ba0eab1fd845050058ce5e616558e8f8d8fca633e6b163fe25c797213a" dependencies = [ "once_cell", "valuable", @@ -12178,14 +12403,14 @@ version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97d095ae15e245a057c8e8451bab9b3ee1e1f68e9ba2b4fbc18d0ac5237835f2" dependencies = [ - "pin-project 1.0.11", + "pin-project", "tracing", ] [[package]] name = "tracing-gum" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "polkadot-node-jaeger", "polkadot-primitives", @@ -12195,8 +12420,8 @@ 
dependencies = [ [[package]] name = "tracing-gum-proc-macro" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "expander 0.0.6", "proc-macro-crate", @@ -12211,10 +12436,8 @@ version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922" dependencies = [ - "ahash", "lazy_static", "log", - "lru 0.7.8", "tracing-core", ] @@ -12253,12 +12476,12 @@ dependencies = [ [[package]] name = "trie-db" -version = "0.23.1" +version = "0.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d32d034c0d3db64b43c31de38e945f15b40cd4ca6d2dcfc26d4798ce8de4ab83" +checksum = "004e1e8f92535694b4cb1444dc5a8073ecf0815e3357f729638b9f8fc4062908" dependencies = [ "hash-db", - "hashbrown 0.12.3", + "hashbrown", "log", "rustc-hex", "smallvec", @@ -12296,7 +12519,7 @@ dependencies = [ "futures-channel", "futures-io", "futures-util", - "idna", + "idna 0.2.3", "ipnet", "lazy_static", "log", @@ -12335,12 +12558,13 @@ checksum = "59547bce71d9c38b83d9c0e92b6066c4253371f15005def0c30d9657f50c7642" [[package]] name = "try-runtime-cli" version = "0.10.0-dev" -source = "git+https://github.com/uniquenetwork/substrate?branch=polkadot-v0.9.24-hack-substitute#1fa76b0665d32b1e28c36da67e54da1816db3fa2" +source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.30#a3ed0119c45cdd0d571ad34e5b3ee7518c8cef8d" dependencies = [ "clap", + "frame-try-runtime", "jsonrpsee", "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "remote-externalities", "sc-chain-spec", "sc-cli", @@ -12370,7 +12594,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97fee6b57c6a41524a810daee9286c02d7752c4253064d0b05472833a438f675" dependencies = [ "cfg-if 1.0.0", - "digest 0.10.3", + "digest 0.10.5", "rand 0.8.5", "static_assertions", ] @@ -12383,35 +12607,35 @@ checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987" [[package]] name = "uc-rpc" -version = "0.1.0" +version = "0.1.4" dependencies = [ "anyhow", + "app-promotion-rpc", "jsonrpsee", "pallet-common", "pallet-evm", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "rmrk-rpc", "sp-api", "sp-blockchain", "sp-core", "sp-rpc", "sp-runtime", - "unique-runtime-common", "up-data-structs", "up-rpc", ] [[package]] name = "ucd-trie" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89570599c4fe5585de2b388aab47e99f7fa4e9238a1399f707a02e356058141c" +checksum = "9e79c4d996edb816c91e4308506774452e55e95c3c9de07b6729e17e15a5ef81" [[package]] name = "uint" -version = "0.9.3" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12f03af7ccf01dd611cc450a0d10dbc9b745770d096473e2faf0ca6e2d66d1e0" +checksum = "a45526d29728d135c2900b0d30573fe3ee79fceb12ef534c7bb30e810a91b601" dependencies = [ "byteorder", "crunchy", @@ -12436,41 +12660,36 @@ checksum = "099b7128301d285f79ddd55b9a83d5e6b9e97c92e0ea0daebee7263e932de992" [[package]] name = "unicode-ident" -version = "1.0.2" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15c61ba63f9235225a22310255a29b806b907c9b8c964bcbd0a2c70f3f2deea7" +checksum = 
"6ceab39d59e4c9499d4e5a8ee0e2735b891bb7308ac83dfb4e80cad195c9f6f3" [[package]] name = "unicode-normalization" -version = "0.1.21" +version = "0.1.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "854cbdc4f7bc6ae19c820d44abdc3277ac3e1b2b93db20a636825d9322fb60e6" +checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921" dependencies = [ "tinyvec", ] -[[package]] -name = "unicode-segmentation" -version = "1.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e8820f5d777f6224dc4be3632222971ac30164d4a258d595640799554ebfd99" - [[package]] name = "unicode-width" -version = "0.1.9" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ed742d4ea2bd1176e236172c8429aaf54486e7ac098db29ffe6529e0ce50973" +checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" [[package]] name = "unicode-xid" -version = "0.2.3" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "957e51f3646910546462e67d5f7599b9e4fb8acdd304b087a6494730f9eebf04" +checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" [[package]] name = "unique-node" -version = "0.9.24" +version = "0.9.30" dependencies = [ + "app-promotion-rpc", "clap", "cumulus-client-cli", "cumulus-client-collator", @@ -12492,15 +12711,15 @@ dependencies = [ "fp-rpc", "frame-benchmarking", "frame-benchmarking-cli", - "futures 0.3.21", + "futures 0.3.25", "jsonrpsee", "log", "opal-runtime", "pallet-ethereum", "pallet-transaction-payment-rpc", "pallet-transaction-payment-rpc-runtime-api", - "parity-scale-codec 3.1.5", - "parking_lot 0.11.2", + "parity-scale-codec 3.2.1", + "parking_lot 0.12.1", "polkadot-cli", "polkadot-parachain", "polkadot-primitives", @@ -12546,26 +12765,27 @@ dependencies = [ "substrate-build-script-utils", "substrate-frame-rpc-system", "substrate-prometheus-endpoint", - "tokio 1.20.1", + "tokio", "try-runtime-cli", "unique-rpc", "unique-runtime", - "unique-runtime-common", + "up-common", "up-data-structs", "up-rpc", ] [[package]] name = "unique-rpc" -version = "0.1.0" +version = "0.1.2" dependencies = [ + "app-promotion-rpc", "fc-db", "fc-mapping-sync", "fc-rpc", "fc-rpc-core", "fp-rpc", "fp-storage", - "futures 0.3.21", + "futures 0.3.25", "jsonrpsee", "pallet-common", "pallet-ethereum", @@ -12597,17 +12817,18 @@ dependencies = [ "sp-storage", "sp-transaction-pool", "substrate-frame-rpc-system", - "tokio 0.2.25", + "tokio", "uc-rpc", - "unique-runtime-common", + "up-common", "up-data-structs", "up-rpc", ] [[package]] name = "unique-runtime" -version = "0.9.24" +version = "0.9.30" dependencies = [ + "app-promotion-rpc", "cumulus-pallet-aura-ext", "cumulus-pallet-dmp-queue", "cumulus-pallet-parachain-system", @@ -12617,6 +12838,7 @@ dependencies = [ "cumulus-primitives-timestamp", "cumulus-primitives-utility", "derivative", + "evm-coder", "fp-evm-mapping", "fp-rpc", "fp-self-contained", @@ -12628,18 +12850,26 @@ dependencies = [ "frame-system-rpc-runtime-api", "frame-try-runtime", "hex-literal", + "impl-trait-for-tuples", "log", + "logtest", + "orml-tokens", + "orml-traits", "orml-vesting", + "orml-xtokens", + "pallet-app-promotion", "pallet-aura", "pallet-balances", "pallet-base-fee", "pallet-common", + "pallet-configuration", "pallet-ethereum", "pallet-evm", "pallet-evm-coder-substrate", "pallet-evm-contract-helpers", "pallet-evm-migration", "pallet-evm-transaction-payment", + "pallet-foreign-assets", "pallet-fungible", 
"pallet-inflation", "pallet-maintenance", @@ -12659,7 +12889,7 @@ dependencies = [ "pallet-unique-scheduler", "pallet-xcm", "parachain-info", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "polkadot-parachain", "rmrk-rpc", "scale-info", @@ -12679,47 +12909,22 @@ dependencies = [ "sp-transaction-pool", "sp-version", "substrate-wasm-builder", - "unique-runtime-common", + "up-common", "up-data-structs", "up-rpc", + "up-sponsorship", "xcm", "xcm-builder", "xcm-executor", ] -[[package]] -name = "unique-runtime-common" -version = "0.9.24" -dependencies = [ - "evm-coder", - "fp-rpc", - "frame-support", - "frame-system", - "pallet-common", - "pallet-evm", - "pallet-fungible", - "pallet-nonfungible", - "pallet-refungible", - "pallet-unique", - "pallet-unique-scheduler", - "parity-scale-codec 3.1.5", - "rmrk-rpc", - "scale-info", - "sp-consensus-aura", - "sp-core", - "sp-runtime", - "sp-std", - "up-data-structs", - "up-sponsorship", -] - [[package]] name = "universal-hash" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9f214e8f697e925001e66ec2c6e37a4ef93f0f78c2eed7814394e10c62025b05" dependencies = [ - "generic-array 0.14.5", + "generic-array 0.14.6", "subtle", ] @@ -12730,7 +12935,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d86a8dc7f45e4c1b0d30e43038c38f274e77af056aa5f74b93c2cf9eb3c1c836" dependencies = [ "asynchronous-codec", - "bytes 1.2.0", + "bytes", "futures-io", "futures-util", ] @@ -12741,15 +12946,29 @@ version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" +[[package]] +name = "up-common" +version = "0.9.30" +dependencies = [ + "fp-rpc", + "frame-support", + "pallet-evm", + "sp-consensus-aura", + "sp-core", + "sp-runtime", + "sp-std", +] + [[package]] name = "up-data-structs" -version = "0.1.0" +version = "0.2.2" dependencies = [ + "bondrewd", "derivative", "frame-support", "frame-system", "pallet-evm", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "rmrk-traits", "scale-info", "serde", @@ -12761,11 +12980,11 @@ dependencies = [ [[package]] name = "up-rpc" -version = "0.1.0" +version = "0.1.3" dependencies = [ "pallet-common", "pallet-evm", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "sp-api", "sp-core", "sp-runtime", @@ -12776,20 +12995,19 @@ dependencies = [ [[package]] name = "up-sponsorship" version = "0.1.0" -source = "git+https://github.com/uniquenetwork/pallet-sponsoring?branch=polkadot-v0.9.24#05cb0f02abecad915d32455df7a7724b3e2869aa" +source = "git+https://github.com/uniquenetwork/pallet-sponsoring?branch=polkadot-v0.9.30#39dd82158d6caa9d89105441bf2f7111a6e686e5" dependencies = [ "impl-trait-for-tuples", ] [[package]] name = "url" -version = "2.2.2" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a507c383b2d33b5fc35d1861e77e6b383d158b2da5e14fe51b83dfedf6fd578c" +checksum = "0d68c799ae75762b8c3fe375feb6600ef5602c883c5d21eb51c09f22b83c4643" dependencies = [ "form_urlencoded", - "idna", - "matches", + "idna 0.3.0", "percent-encoding", ] @@ -12874,9 +13092,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.82" +version = "0.2.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc7652e3f6c4706c8d9cd54832c4a4ccb9b5336e2c3bd154d5cccfbf1c1f5f7d" +checksum = 
"eaf9f5aceeec8be17c128b2e93e031fb8a4d469bb9c4ae2d7dc1888b26887268" dependencies = [ "cfg-if 1.0.0", "wasm-bindgen-macro", @@ -12884,9 +13102,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.82" +version = "0.2.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "662cd44805586bd52971b9586b1df85cdbbd9112e4ef4d8f41559c334dc6ac3f" +checksum = "4c8ffb332579b0557b52d268b91feab8df3615f265d5270fec2a8c95b17c1142" dependencies = [ "bumpalo", "log", @@ -12899,9 +13117,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-futures" -version = "0.4.32" +version = "0.4.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa76fb221a1f8acddf5b54ace85912606980ad661ac7a503b4570ffd3a624dad" +checksum = "23639446165ca5a5de86ae1d8896b737ae80319560fbaa4c2887b7da6e7ebd7d" dependencies = [ "cfg-if 1.0.0", "js-sys", @@ -12911,9 +13129,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.82" +version = "0.2.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b260f13d3012071dfb1512849c033b1925038373aea48ced3012c09df952c602" +checksum = "052be0f94026e6cbc75cdefc9bae13fd6052cdcaf532fa6c45e7ae33a1e6c810" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -12921,9 +13139,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.82" +version = "0.2.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5be8e654bdd9b79216c2929ab90721aa82faf65c48cdf08bdc4e7f51357b80da" +checksum = "07bc0c051dc5f23e307b13285f9d75df86bfdf816c5721e573dec1f9b8aa193c" dependencies = [ "proc-macro2", "quote", @@ -12934,9 +13152,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-shared" -version = "0.2.82" +version = "0.2.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6598dd0bd3c7d51095ff6531a5b23e02acdc81804e30d8f07afb77b7215a140a" +checksum = "1c38c045535d93ec4f0b4defec448e4291638ee608530863b1e2ba115d4fff7f" [[package]] name = "wasm-gc-api" @@ -12951,11 +13169,11 @@ dependencies = [ [[package]] name = "wasm-instrument" -version = "0.1.1" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "962e5b0401bbb6c887f54e69b8c496ea36f704df65db73e81fd5ff8dc3e63a9f" +checksum = "aa1dafb3e60065305741e83db35c6c2584bb3725b692b5b66148a38d72ace6cd" dependencies = [ - "parity-wasm 0.42.2", + "parity-wasm 0.45.0", ] [[package]] @@ -12964,7 +13182,7 @@ version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "be0ecb0db480561e9a7642b5d3e4187c128914e58aa84330b9493e3eb68c5e7f" dependencies = [ - "futures 0.3.21", + "futures 0.3.25", "js-sys", "parking_lot 0.11.2", "pin-utils", @@ -12975,55 +13193,63 @@ dependencies = [ [[package]] name = "wasmi" -version = "0.9.1" +version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca00c5147c319a8ec91ec1a0edbec31e566ce2c9cc93b3f9bb86a9efd0eb795d" +checksum = "06c326c93fbf86419608361a2c925a31754cf109da1b8b55737070b4d6669422" dependencies = [ - "downcast-rs", - "libc", - "libm", - "memory_units", - "num-rational 0.2.4", - "num-traits", - "parity-wasm 0.42.2", + "parity-wasm 0.45.0", "wasmi-validation", + "wasmi_core", ] [[package]] name = "wasmi-validation" -version = "0.4.1" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91ff416ad1ff0c42e5a926ed5d5fab74c0f098749aa0ad8b2a34b982ce0e867b" +dependencies = [ + "parity-wasm 
0.45.0", +] + +[[package]] +name = "wasmi_core" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "165343ecd6c018fc09ebcae280752702c9a2ef3e6f8d02f1cfcbdb53ef6d7937" +checksum = "57d20cb3c59b788653d99541c646c561c9dd26506f25c0cebfe810659c54c6d7" dependencies = [ - "parity-wasm 0.42.2", + "downcast-rs", + "libm", + "memory_units", + "num-rational 0.4.1", + "num-traits", ] [[package]] name = "wasmparser" -version = "0.83.0" +version = "0.89.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "718ed7c55c2add6548cca3ddd6383d738cd73b892df400e96b9aa876f0141d7a" +checksum = "ab5d3e08b13876f96dd55608d03cd4883a0545884932d5adf11925876c96daef" +dependencies = [ + "indexmap", +] [[package]] name = "wasmtime" -version = "0.35.3" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21ffb4705016d5ca91e18a72ed6822dab50e6d5ddd7045461b17ef19071cdef1" +checksum = "f1f511c4917c83d04da68333921107db75747c4e11a2f654a8e909cc5e0520dc" dependencies = [ "anyhow", - "backtrace", "bincode", "cfg-if 1.0.0", "indexmap", - "lazy_static", "libc", "log", - "object 0.27.1", + "object", "once_cell", "paste", "psm", "rayon", - "region", "serde", "target-lexicon", "wasmparser", @@ -13032,14 +13258,23 @@ dependencies = [ "wasmtime-environ", "wasmtime-jit", "wasmtime-runtime", - "winapi", + "windows-sys 0.36.1", +] + +[[package]] +name = "wasmtime-asm-macros" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39bf3debfe744bf19dd3732990ce6f8c0ced7439e2370ba4e1d8f5a3660a3178" +dependencies = [ + "cfg-if 1.0.0", ] [[package]] name = "wasmtime-cache" -version = "0.35.3" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85c6ab24291fa7cb3a181f5669f6c72599b7ef781669759b45c7828c5999d0c0" +checksum = "ece42fa4676a263f7558cdaaf5a71c2592bebcbac22a0580e33cf3406c103da2" dependencies = [ "anyhow", "base64", @@ -13051,15 +13286,15 @@ dependencies = [ "serde", "sha2 0.9.9", "toml", - "winapi", + "windows-sys 0.36.1", "zstd", ] [[package]] name = "wasmtime-cranelift" -version = "0.35.3" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f04c810078a491b7bc4866ebe045f714d2b95e6b539e1f64009a4a7606be11de" +checksum = "058217e28644b012bdcdf0e445f58d496d78c2e0b6a6dd93558e701591dad705" dependencies = [ "anyhow", "cranelift-codegen", @@ -13069,8 +13304,7 @@ dependencies = [ "cranelift-wasm", "gimli", "log", - "more-asserts", - "object 0.27.1", + "object", "target-lexicon", "thiserror", "wasmparser", @@ -13079,17 +13313,16 @@ dependencies = [ [[package]] name = "wasmtime-environ" -version = "0.35.3" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61448266ea164b1ac406363cdcfac81c7c44db4d94c7a81c8620ac6c5c6cdf59" +checksum = "c7af06848df28b7661471d9a80d30a973e0f401f2e3ed5396ad7e225ed217047" dependencies = [ "anyhow", "cranelift-entity", "gimli", "indexmap", "log", - "more-asserts", - "object 0.27.1", + "object", "serde", "target-lexicon", "thiserror", @@ -13099,9 +13332,9 @@ dependencies = [ [[package]] name = "wasmtime-jit" -version = "0.35.3" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "156b4623c6b0d4b8c24afb846c20525922f538ef464cc024abab7ea8de2109a2" +checksum = "9028fb63a54185b3c192b7500ef8039c7bb8d7f62bfc9e7c258483a33a3d13bb" dependencies = [ "addr2line", "anyhow", @@ -13110,8 +13343,7 @@ dependencies 
= [ "cpp_demangle", "gimli", "log", - "object 0.27.1", - "region", + "object", "rustc-demangle", "rustix", "serde", @@ -13120,28 +13352,27 @@ dependencies = [ "wasmtime-environ", "wasmtime-jit-debug", "wasmtime-runtime", - "winapi", + "windows-sys 0.36.1", ] [[package]] name = "wasmtime-jit-debug" -version = "0.35.3" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5dc31f811760a6c76b2672c404866fd19b75e5fb3b0075a3e377a6846490654" +checksum = "25e82d4ef93296785de7efca92f7679dc67fe68a13b625a5ecc8d7503b377a37" dependencies = [ - "lazy_static", - "object 0.27.1", + "object", + "once_cell", "rustix", ] [[package]] name = "wasmtime-runtime" -version = "0.35.3" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f907beaff69d4d920fa4688411ee4cc75c0f01859e424677f9e426e2ef749864" +checksum = "9f0e9bea7d517d114fe66b930b2124ee086516ee93eeebfd97f75f366c5b0553" dependencies = [ "anyhow", - "backtrace", "cc", "cfg-if 1.0.0", "indexmap", @@ -13150,21 +13381,21 @@ dependencies = [ "mach", "memfd", "memoffset", - "more-asserts", + "paste", "rand 0.8.5", - "region", "rustix", "thiserror", + "wasmtime-asm-macros", "wasmtime-environ", "wasmtime-jit-debug", - "winapi", + "windows-sys 0.36.1", ] [[package]] name = "wasmtime-types" -version = "0.35.3" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "514ef0e5fd197b9609dc9eb74beba0c84d5a12b2417cbae55534633329ba4852" +checksum = "69b83e93ed41b8fdc936244cfd5e455480cf1eca1fd60c78a0040038b4ce5075" dependencies = [ "cranelift-entity", "serde", @@ -13174,9 +13405,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.59" +version = "0.3.60" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed055ab27f941423197eb86b2035720b1a3ce40504df082cac2ecc6ed73335a1" +checksum = "bcda906d8be16e728fd5adc5b729afad4e444e106ab28cd1c7256e54fa61510f" dependencies = [ "js-sys", "wasm-bindgen", @@ -13194,9 +13425,9 @@ dependencies = [ [[package]] name = "webpki-roots" -version = "0.22.4" +version = "0.22.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1c760f0d366a6c24a02ed7816e23e691f5d92291f94d15e836006fd11b04daf" +checksum = "368bfe657969fb01238bb756d351dcade285e0f6fcbd36dcb23359a5169975be" dependencies = [ "webpki", ] @@ -13212,8 +13443,8 @@ dependencies = [ [[package]] name = "westend-runtime" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "beefy-primitives", "bitvec 1.0.1", @@ -13237,15 +13468,16 @@ dependencies = [ "pallet-election-provider-multi-phase", "pallet-election-provider-support-benchmarking", "pallet-elections-phragmen", + "pallet-fast-unstake", "pallet-grandpa", "pallet-identity", "pallet-im-online", "pallet-indices", "pallet-membership", "pallet-multisig", - "pallet-nicks", "pallet-nomination-pools", "pallet-nomination-pools-benchmarking", + "pallet-nomination-pools-runtime-api", "pallet-offences", "pallet-offences-benchmarking", "pallet-preimage", @@ -13266,7 +13498,7 @@ dependencies = [ "pallet-vesting", "pallet-xcm", "pallet-xcm-benchmarks", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "polkadot-parachain", "polkadot-primitives", "polkadot-runtime-common", @@ -13301,8 +13533,8 @@ dependencies = [ 
[[package]] name = "westend-runtime-constants" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "frame-support", "polkadot-primitives", @@ -13313,13 +13545,13 @@ dependencies = [ [[package]] name = "which" -version = "4.2.5" +version = "4.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c4fb54e6113b6a8772ee41c3404fb0301ac79604489467e0a9ce1f3e97c24ae" +checksum = "1c831fbbee9e129a8cf93e7747a82da9d95ba8e16621cae60ec2cdc849bacb7b" dependencies = [ "either", - "lazy_static", "libc", + "once_cell", ] [[package]] @@ -13385,6 +13617,27 @@ dependencies = [ "windows_x86_64_msvc 0.36.1", ] +[[package]] +name = "windows-sys" +version = "0.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc 0.42.0", + "windows_i686_gnu 0.42.0", + "windows_i686_msvc 0.42.0", + "windows_x86_64_gnu 0.42.0", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc 0.42.0", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d2aa71f6f0cbe00ae5167d90ef3cfe66527d6f613ca78ac8024c3ccab9a19e" + [[package]] name = "windows_aarch64_msvc" version = "0.34.0" @@ -13397,6 +13650,12 @@ version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9bb8c3fd39ade2d67e9874ac4f3db21f0d710bee00fe7cab16949ec184eeaa47" +[[package]] +name = "windows_aarch64_msvc" +version = "0.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd0f252f5a35cac83d6311b2e795981f5ee6e67eb1f9a7f64eb4500fbc4dcdb4" + [[package]] name = "windows_i686_gnu" version = "0.34.0" @@ -13409,6 +13668,12 @@ version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "180e6ccf01daf4c426b846dfc66db1fc518f074baa793aa7d9b9aaeffad6a3b6" +[[package]] +name = "windows_i686_gnu" +version = "0.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fbeae19f6716841636c28d695375df17562ca208b2b7d0dc47635a50ae6c5de7" + [[package]] name = "windows_i686_msvc" version = "0.34.0" @@ -13421,6 +13686,12 @@ version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e2e7917148b2812d1eeafaeb22a97e4813dfa60a3f8f78ebe204bcc88f12f024" +[[package]] +name = "windows_i686_msvc" +version = "0.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "84c12f65daa39dd2babe6e442988fc329d6243fdce47d7d2d155b8d874862246" + [[package]] name = "windows_x86_64_gnu" version = "0.34.0" @@ -13433,6 +13704,18 @@ version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4dcd171b8776c41b97521e5da127a2d86ad280114807d0b2ab1e462bc764d9e1" +[[package]] +name = "windows_x86_64_gnu" +version = "0.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf7b1b21b5362cbc318f686150e5bcea75ecedc74dd157d874d754a2ca44b0ed" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09d525d2ba30eeb3297665bd434a54297e4170c7f1a44cad4ef58095b4cd2028" + 
[[package]] name = "windows_x86_64_msvc" version = "0.34.0" @@ -13445,6 +13728,12 @@ version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c811ca4a8c853ef420abd8592ba53ddbbac90410fab6903b3e79972a631f7680" +[[package]] +name = "windows_x86_64_msvc" +version = "0.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f40009d85759725a34da6d89a94e63d7bdc50a862acf0dbc7c8e488f1edcb6f5" + [[package]] name = "winreg" version = "0.7.0" @@ -13482,27 +13771,28 @@ dependencies = [ [[package]] name = "xcm" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "derivative", "impl-trait-for-tuples", "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "scale-info", + "sp-runtime", "xcm-procedural", ] [[package]] name = "xcm-builder" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "frame-support", "frame-system", "log", "pallet-transaction-payment", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "polkadot-parachain", "scale-info", "sp-arithmetic", @@ -13515,14 +13805,14 @@ dependencies = [ [[package]] name = "xcm-executor" -version = "0.9.24" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "frame-benchmarking", "frame-support", "impl-trait-for-tuples", "log", - "parity-scale-codec 3.1.5", + "parity-scale-codec 3.2.1", "sp-arithmetic", "sp-core", "sp-io", @@ -13533,8 +13823,8 @@ dependencies = [ [[package]] name = "xcm-procedural" -version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.24#22836e55d41eef24ed5917fd654ee82a683a7cfe" +version = "0.9.30" +source = "git+https://github.com/paritytech/polkadot?branch=release-v0.9.30#064536093f5ff70d867f4bbce8d4c41a406d317a" dependencies = [ "Inflector", "proc-macro2", @@ -13544,11 +13834,11 @@ dependencies = [ [[package]] name = "yamux" -version = "0.10.1" +version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c0608f53c1dc0bad505d03a34bbd49fbf2ad7b51eb036123e896365532745a1" +checksum = "e5d9ba232399af1783a58d8eb26f6b5006fbefe2dc9ef36bd283324792d03ea5" dependencies = [ - "futures 0.3.21", + "futures 0.3.25", "log", "nohash-hasher", "parking_lot 0.12.1", @@ -13556,12 +13846,6 @@ dependencies = [ "static_assertions", ] -[[package]] -name = "yansi" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec" - [[package]] name = "zeroize" version = "1.5.7" @@ -13585,18 +13869,18 @@ dependencies = [ [[package]] name = "zstd" -version = "0.10.2+zstd.1.5.2" +version = "0.11.2+zstd.1.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f4a6bd64f22b5e3e94b4e238669ff9f10815c27a5180108b849d24174a83847" +checksum = 
"20cc960326ece64f010d2d2107537f26dc589a6573a316bd5b1dba685fa5fde4" dependencies = [ "zstd-safe", ] [[package]] name = "zstd-safe" -version = "4.1.6+zstd.1.5.2" +version = "5.0.2+zstd.1.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94b61c51bb270702d6167b8ce67340d2754b088d0c091b06e593aa772c3ee9bb" +checksum = "1d2a5585e04f9eea4b2a3d1eca508c4dee9592a89ef6f450c11719da0726f4db" dependencies = [ "libc", "zstd-sys", @@ -13604,9 +13888,9 @@ dependencies = [ [[package]] name = "zstd-sys" -version = "1.6.3+zstd.1.5.2" +version = "2.0.1+zstd.1.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc49afa5c8d634e75761feda8c592051e7eeb4683ba827211eb0d731d3402ea8" +checksum = "9fd07cbbc53846d9145dbffdf6dd09a7a0aa52be46741825f5c97bdd4f73f12b" dependencies = [ "cc", "libc", diff --git a/Cargo.toml b/Cargo.toml index 6a520f4b83..72e5e76207 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,3 +1,5 @@ +cargo-features = ["workspace-inheritance"] + [workspace] resolver = "2" members = [ @@ -6,191 +8,31 @@ members = [ 'client/*', 'primitives/*', 'crates/*', + 'runtime/opal', + 'runtime/quartz', + 'runtime/unique', 'runtime/tests', ] -exclude = [ - "runtime/unique", - "runtime/quartz" -] +default-members = ['node/*', 'runtime/opal'] [profile.release] panic = 'unwind' -[patch.crates-io] -jsonrpsee = {git = "https://github.com/uniquenetwork/jsonrpsee", branch = "unique-v0.13.1-fix-unknown-fields"} -jsonrpsee-types = {git = "https://github.com/uniquenetwork/jsonrpsee", branch = "unique-v0.13.1-fix-unknown-fields"} -jsonrpsee-core = {git = "https://github.com/uniquenetwork/jsonrpsee", branch = "unique-v0.13.1-fix-unknown-fields"} +[workspace.dependencies.orml-vesting] +git = "https://github.com/uniquenetwork/open-runtime-module-library" +branch = "polkadot-v0.9.30" +default-features = false + +[workspace.dependencies.orml-xtokens] +git = "https://github.com/uniquenetwork/open-runtime-module-library" +branch = "polkadot-v0.9.30" +default-features = false -[patch."https://github.com/paritytech/substrate"] -beefy-gadget = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -beefy-gadget-rpc = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -beefy-merkle-tree = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -beefy-primitives = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -fork-tree = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -frame-benchmarking = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -frame-benchmarking-cli = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -frame-election-provider-solution-type = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -frame-election-provider-support = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -frame-executive = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -frame-support = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -frame-support-procedural = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} 
-frame-support-procedural-tools = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -frame-support-procedural-tools-derive = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -frame-system = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -frame-system-benchmarking = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -frame-system-rpc-runtime-api = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -frame-try-runtime = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-aura = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-authority-discovery = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-authorship = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-babe = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-bags-list = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-balances = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-beefy = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-beefy-mmr = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-bounties = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-child-bounties = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-collective = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-democracy = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-election-provider-multi-phase = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-election-provider-support-benchmarking = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-elections-phragmen = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-gilt = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-grandpa = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-identity = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-im-online = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-indices = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-membership = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-mmr = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-mmr-rpc = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-multisig = {git = 
"https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-nicks = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-nomination-pools = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-nomination-pools-benchmarking = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-offences = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-offences-benchmarking = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-preimage = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-proxy = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-randomness-collective-flip = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-recovery = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-scheduler = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-session = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-session-benchmarking = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-society = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-staking = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-staking-reward-curve = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-staking-reward-fn = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-sudo = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-timestamp = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-tips = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-transaction-payment = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-transaction-payment-rpc = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-transaction-payment-rpc-runtime-api = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-treasury = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-utility = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -pallet-vesting = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -remote-externalities = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sc-allocator = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sc-authority-discovery = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sc-basic-authorship = {git = 
"https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sc-block-builder = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sc-chain-spec = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sc-chain-spec-derive = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sc-cli = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sc-client-api = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sc-client-db = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sc-consensus = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sc-consensus-aura = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sc-consensus-babe = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sc-consensus-babe-rpc = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sc-consensus-epochs = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sc-consensus-manual-seal = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sc-consensus-slots = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sc-consensus-uncles = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sc-executor = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sc-executor-common = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sc-executor-wasmi = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sc-executor-wasmtime = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sc-finality-grandpa = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sc-finality-grandpa-rpc = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sc-informant = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sc-keystore = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sc-network = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sc-network-common = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sc-network-gossip = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sc-network-light = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sc-network-sync = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sc-offchain = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sc-peerset = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sc-proposer-metrics = {git = 
"https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sc-rpc = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sc-rpc-api = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sc-rpc-server = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sc-service = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sc-state-db = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sc-sync-state-rpc = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sc-sysinfo = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sc-telemetry = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sc-tracing = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sc-tracing-proc-macro = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sc-transaction-pool = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sc-transaction-pool-api = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sc-utils = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-api = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-api-proc-macro = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-application-crypto = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-arithmetic = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-authority-discovery = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-authorship = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-block-builder = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-blockchain = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-consensus = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-consensus-aura = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-consensus-babe = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-consensus-slots = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-consensus-vrf = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-core = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-core-hashing = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-core-hashing-proc-macro = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-database = {git = "https://github.com/uniquenetwork/substrate", branch = 
"polkadot-v0.9.24-hack-substitute"} -sp-debug-derive = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-externalities = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-finality-grandpa = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-inherents = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-io = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-keyring = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-keystore = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-maybe-compressed-blob = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-mmr-primitives = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-npos-elections = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-offchain = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-panic-handler = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-rpc = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-runtime = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-runtime-interface = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-runtime-interface-proc-macro = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-sandbox = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-serializer = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-session = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-staking = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-state-machine = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-std = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-storage = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-tasks = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-timestamp = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-tracing = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-transaction-pool = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-transaction-storage-proof = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-trie = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-version = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-version-proc-macro = {git = 
"https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -sp-wasm-interface = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -substrate-build-script-utils = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -substrate-frame-rpc-system = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -substrate-prometheus-endpoint = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -substrate-state-trie-migration-rpc = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -substrate-test-client = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -substrate-test-utils = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -substrate-test-utils-derive = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -substrate-wasm-builder = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} -try-runtime-cli = {git = "https://github.com/uniquenetwork/substrate", branch = "polkadot-v0.9.24-hack-substitute"} +[workspace.dependencies.orml-tokens] +git = "https://github.com/uniquenetwork/open-runtime-module-library" +branch = "polkadot-v0.9.30" +default-features = false +[workspace.dependencies.orml-traits] +git = "https://github.com/uniquenetwork/open-runtime-module-library" +branch = "polkadot-v0.9.30" +default-features = false diff --git a/Dockerfile-parachain b/Dockerfile-parachain deleted file mode 100644 index bc687b5f19..0000000000 --- a/Dockerfile-parachain +++ /dev/null @@ -1,107 +0,0 @@ -# ===== Rust builder ===== -FROM phusion/baseimage:focal-1.1.0 as rust-builder -LABEL maintainer="Unique.Network" - -ARG RUST_TOOLCHAIN=nightly-2022-05-11 -#ARG RUST_C=1.62.0-nightly -ARG POLKA_VERSION=release-v0.9.24 -ARG UNIQUE_BRANCH=develop - -#ARG USER=*** -#ARG PASS=*** - - -ENV RUST_TOOLCHAIN $RUST_TOOLCHAIN -#ENV RUST_C $RUST_C -ENV POLKA_VERSION $POLKA_VERSION -ENV UNIQUE_BRANCH $UNIQUE_BRANCH - - -#RUN echo $RUST_TOOLCHAIN -#RUN echo $RUST_C -#RUN echo $POLKA_VERSION -#RUN echo $UNIQUE_BRANCH - -ENV CARGO_HOME="/cargo-home" -ENV PATH="/cargo-home/bin:$PATH" - -RUN curl https://sh.rustup.rs -sSf | sh -s -- -y --default-toolchain none - -RUN apt-get update && \ - apt-get dist-upgrade -y -o Dpkg::Options::="--force-confold" && \ - apt-get install -y cmake pkg-config libssl-dev git clang - -RUN rustup toolchain uninstall $(rustup toolchain list) && \ - rustup toolchain install $RUST_TOOLCHAIN && \ - rustup default $RUST_TOOLCHAIN && \ - rustup target list --installed && \ - rustup show -RUN rustup target add wasm32-unknown-unknown --toolchain $RUST_TOOLCHAIN - -RUN cargo install cargo-chef - -RUN mkdir unique_parachain -WORKDIR /unique_parachain - -# ===== Chef ===== -FROM rust-builder as chef - -COPY . . -RUN cargo chef prepare --recipe-path recipe.json - -# ===== BUILD ====== -FROM rust-builder as builder - -RUN mkdir unique_parachain -WORKDIR /unique_parachain - -COPY --from=chef /unique_parachain/recipe.json recipe.json -ARG PROFILE=release -RUN cargo chef cook "--$PROFILE" --recipe-path recipe.json - -COPY . . 
-RUN cargo build "--$PROFILE" - # && \ - # cargo test - -# ===== BUILD POLKADOT ===== -FROM rust-builder as builder-polkadot - -RUN mkdir unique_parachain -WORKDIR /unique_parachain - -RUN git clone -b $POLKA_VERSION --depth 1 https://github.com/paritytech/polkadot.git && \ - cd polkadot && \ - git tag -n && \ - cargo build --release - -# ===== RUN ====== - -FROM phusion/baseimage:focal-1.1.0 - -ARG PROFILE=release - -RUN apt-get -y update && \ - apt-get -y upgrade && \ - apt-get -y install curl git && \ - curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.35.3/install.sh | bash && \ - export NVM_DIR="$HOME/.nvm" && \ - [ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" && \ - nvm install v15.5.0 && \ - nvm use v15.5.0 - -RUN git clone https://github.com/paritytech/polkadot-launch - -RUN export NVM_DIR="$HOME/.nvm" && \ - [ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" && \ - cd /polkadot-launch && \ - npm install --global yarn && \ - yarn - -COPY --from=builder /unique_parachain/target/$PROFILE/unique-collator /unique-chain/target/$PROFILE/ -COPY --from=builder-polkadot /unique_parachain/polkadot/target/$PROFILE/polkadot /polkadot/target/$PROFILE/ - -CMD export NVM_DIR="$HOME/.nvm" && \ - [ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" && \ - cd /polkadot-launch && \ - yarn start launch-config.json diff --git a/Makefile b/Makefile index 152977c73f..0647595d64 100644 --- a/Makefile +++ b/Makefile @@ -9,9 +9,16 @@ _help: FUNGIBLE_EVM_STUBS=./pallets/fungible/src/stubs FUNGIBLE_EVM_ABI=./tests/src/eth/fungibleAbi.json +REFUNGIBLE_EVM_STUBS=./pallets/refungible/src/stubs +REFUNGIBLE_EVM_ABI=./tests/src/eth/refungibleAbi.json + NONFUNGIBLE_EVM_STUBS=./pallets/nonfungible/src/stubs NONFUNGIBLE_EVM_ABI=./tests/src/eth/nonFungibleAbi.json +REFUNGIBLE_EVM_STUBS=./pallets/refungible/src/stubs +REFUNGIBLE_EVM_ABI=./tests/src/eth/reFungibleAbi.json +REFUNGIBLE_TOKEN_EVM_ABI=./tests/src/eth/reFungibleTokenAbi.json + CONTRACT_HELPERS_STUBS=./pallets/evm-contract-helpers/src/stubs/ CONTRACT_HELPERS_ABI=./tests/src/eth/util/contractHelpersAbi.json @@ -21,7 +28,7 @@ COLLECTION_HELPER_ABI=./tests/src/eth/collectionHelpersAbi.json TESTS_API=./tests/src/eth/api/ .PHONY: regenerate_solidity -regenerate_solidity: UniqueFungible.sol UniqueNFT.sol ContractHelpers.sol CollectionHelpers.sol +regenerate_solidity: UniqueFungible.sol UniqueNFT.sol UniqueRefungible.sol UniqueRefungibleToken.sol ContractHelpers.sol CollectionHelpers.sol UniqueFungible.sol: PACKAGE=pallet-fungible NAME=erc::gen_iface OUTPUT=$(TESTS_API)/$@ ./.maintain/scripts/generate_sol.sh @@ -31,6 +38,14 @@ UniqueNFT.sol: PACKAGE=pallet-nonfungible NAME=erc::gen_iface OUTPUT=$(TESTS_API)/$@ ./.maintain/scripts/generate_sol.sh PACKAGE=pallet-nonfungible NAME=erc::gen_impl OUTPUT=$(NONFUNGIBLE_EVM_STUBS)/$@ ./.maintain/scripts/generate_sol.sh +UniqueRefungible.sol: + PACKAGE=pallet-refungible NAME=erc::gen_iface OUTPUT=$(TESTS_API)/$@ ./.maintain/scripts/generate_sol.sh + PACKAGE=pallet-refungible NAME=erc::gen_impl OUTPUT=$(REFUNGIBLE_EVM_STUBS)/$@ ./.maintain/scripts/generate_sol.sh + +UniqueRefungibleToken.sol: + PACKAGE=pallet-refungible NAME=erc_token::gen_iface OUTPUT=$(TESTS_API)/$@ ./.maintain/scripts/generate_sol.sh + PACKAGE=pallet-refungible NAME=erc_token::gen_impl OUTPUT=$(REFUNGIBLE_EVM_STUBS)/$@ ./.maintain/scripts/generate_sol.sh + ContractHelpers.sol: PACKAGE=pallet-evm-contract-helpers NAME=eth::contract_helpers_iface OUTPUT=$(TESTS_API)/$@ ./.maintain/scripts/generate_sol.sh PACKAGE=pallet-evm-contract-helpers 
NAME=eth::contract_helpers_impl OUTPUT=$(CONTRACT_HELPERS_STUBS)/$@ ./.maintain/scripts/generate_sol.sh @@ -47,6 +62,14 @@ UniqueNFT: UniqueNFT.sol INPUT=$(NONFUNGIBLE_EVM_STUBS)/$< OUTPUT=$(NONFUNGIBLE_EVM_STUBS)/UniqueNFT.raw ./.maintain/scripts/compile_stub.sh INPUT=$(NONFUNGIBLE_EVM_STUBS)/$< OUTPUT=$(NONFUNGIBLE_EVM_ABI) ./.maintain/scripts/generate_abi.sh +UniqueRefungible: UniqueRefungible.sol + INPUT=$(REFUNGIBLE_EVM_STUBS)/$< OUTPUT=$(REFUNGIBLE_EVM_STUBS)/UniqueRefungible.raw ./.maintain/scripts/compile_stub.sh + INPUT=$(REFUNGIBLE_EVM_STUBS)/$< OUTPUT=$(REFUNGIBLE_EVM_ABI) ./.maintain/scripts/generate_abi.sh + +UniqueRefungibleToken: UniqueRefungibleToken.sol + INPUT=$(REFUNGIBLE_EVM_STUBS)/$< OUTPUT=$(REFUNGIBLE_EVM_STUBS)/UniqueRefungibleToken.raw ./.maintain/scripts/compile_stub.sh + INPUT=$(REFUNGIBLE_EVM_STUBS)/$< OUTPUT=$(REFUNGIBLE_TOKEN_EVM_ABI) ./.maintain/scripts/generate_abi.sh + ContractHelpers: ContractHelpers.sol INPUT=$(CONTRACT_HELPERS_STUBS)/$< OUTPUT=$(CONTRACT_HELPERS_STUBS)/ContractHelpers.raw ./.maintain/scripts/compile_stub.sh INPUT=$(CONTRACT_HELPERS_STUBS)/$< OUTPUT=$(CONTRACT_HELPERS_ABI) ./.maintain/scripts/generate_abi.sh @@ -55,7 +78,7 @@ CollectionHelpers: CollectionHelpers.sol INPUT=$(COLLECTION_HELPER_STUBS)/$< OUTPUT=$(COLLECTION_HELPER_STUBS)/CollectionHelpers.raw ./.maintain/scripts/compile_stub.sh INPUT=$(COLLECTION_HELPER_STUBS)/$< OUTPUT=$(COLLECTION_HELPER_ABI) ./.maintain/scripts/generate_abi.sh -evm_stubs: UniqueFungible UniqueNFT ContractHelpers CollectionHelpers +evm_stubs: UniqueFungible UniqueNFT UniqueRefungible UniqueRefungibleToken ContractHelpers CollectionHelpers .PHONY: _bench _bench: @@ -105,9 +128,13 @@ bench-rmrk-core: bench-rmrk-equip: make _bench PALLET=proxy-rmrk-equip -.PHONY: bench-maintenance -bench-maintenance: - make _bench PALLET=maintenance +.PHONY: bench-foreign-assets +bench-foreign-assets: + make _bench PALLET=foreign-assets +.PHONY: bench-app-promotion +bench-app-promotion: + make _bench PALLET=app-promotion PALLET_DIR=app-promotion + .PHONY: bench -bench: bench-evm-migration bench-unique bench-structure bench-fungible bench-refungible bench-nonfungible bench-scheduler bench-rmrk-core bench-rmrk-equip bench-maintenance +bench: bench-evm-migration bench-unique bench-structure bench-fungible bench-refungible bench-nonfungible bench-scheduler bench-rmrk-core bench-rmrk-equip bench-foreign-assets diff --git a/README.md b/README.md index b29f00fea0..69c3ea1917 100644 --- a/README.md +++ b/README.md @@ -23,8 +23,8 @@ The Unique Chain also provides: Wider Unique Ecosystem (most of it was developed during Hackusama): -- [SubstraPunks Game hosted on IPFS](https://github.com/usetech-llc/substrapunks) -- [Unique Wallet and UI](https://uniqueapps.usetech.com/#/nft) +- [SubstraPunks Game hosted on IPFS](https://github.com/UniqueNetwork/substrapunks) +- [Unique Wallet and UI](https://wallet.unique.network) - [NFT Asset for Unity Framework](https://github.com/usetech-llc/nft_unity) Please see our [walk-through instructions](doc/hackusama_walk_through.md) to try everything out! @@ -42,23 +42,23 @@ so that we can keep the builds stable. 1. Install Rust: ```bash -sudo apt-get install git curl libssl-dev llvm pkg-config libclang-dev clang make cmake +sudo apt-get install git curl libssl-dev llvm pkg-config libclang-dev clang make cmake protobuf-compiler curl https://sh.rustup.rs -sSf | sh ``` 2. Remove all installed toolchains with `rustup toolchain list` and `rustup toolchain uninstall `. -3. 
Install toolchain nightly-2022-05-11 and make it default: +3. Install toolchain nightly-2022-07-24 and make it default: ```bash -rustup toolchain install nightly-2022-05-11 -rustup default nightly-2022-05-11 +rustup toolchain install nightly-2022-07-24 +rustup default nightly-2022-07-24 ``` 4. Add wasm target for nightly toolchain: ```bash -rustup target add wasm32-unknown-unknown --toolchain nightly-2022-05-11 +rustup target add wasm32-unknown-unknown --toolchain nightly-2022-07-24 ``` 5. Build: @@ -92,7 +92,7 @@ git checkout feature/runtime-upgrade-testing ``` git clone https://github.com/paritytech/polkadot.git cd polkadot -git checkout release-v0.9.24 +git checkout release-v0.9.30 cargo build --release ``` @@ -195,7 +195,7 @@ tokens -> accounts xtokens -> transfer currencyId: - ForeingAsset + ForeignAsset amount: diff --git a/client/rpc/CHANGELOG.md b/client/rpc/CHANGELOG.md new file mode 100644 index 0000000000..75b2f96aae --- /dev/null +++ b/client/rpc/CHANGELOG.md @@ -0,0 +1,33 @@ +# Change Log + +All notable changes to this project will be documented in this file. + + + +## [v0.1.4] 2022-09-08 + +### Added +- Support RPC for `AppPromotion` pallet. + +## [v0.1.3] 2022-08-16 + +### Other changes + +- build: Upgrade polkadot to v0.9.27 2c498572636f2b34d53b1c51b7283a761a7dc90a + +- build: Upgrade polkadot to v0.9.26 85515e54c4ca1b82a2630034e55dcc804c643bf8 + +- build: Upgrade polkadot to v0.9.25 cdfb9bdc7b205ff1b5134f034ef9973d769e5e6b + +## [0.1.2] - 2022-08-12 + +### Fixed + +- Method signature `total_pieces`. Before that the number of pieces greater than 2^53 -1 caused an error when calling this method. + +## [0.1.1] - 2022-07-14 + +### Added + +- Implementation of RPC method `token_owners` returning 10 owners in no particular order. + This was an internal request to improve the web interface and support fractionalization event. 
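The `total_pieces` fix noted above is a JSON precision issue: integers above 2^53 - 1 cannot be represented exactly as JSON numbers, so the RPC returns on-chain `u128` amounts as decimal strings (the `pass_method!` mapping for `total_pieces` later in this diff converts with `to_string()`). A minimal sketch of the idea, independent of the actual RPC types:

```rust
// Minimal sketch: why amounts wider than 53 bits go over the wire as strings.
// 2^64 cannot be represented exactly as a JSON (double-precision) number,
// so the RPC layer converts the on-chain u128 into a decimal string.
fn main() {
    let total_pieces: u128 = 1 << 64; // larger than JSON's safe 2^53 - 1
    let wire_value = total_pieces.to_string();
    assert_eq!(wire_value, "18446744073709551616");
    println!("unique_totalPieces -> {wire_value}");
}
```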
diff --git a/client/rpc/Cargo.toml b/client/rpc/Cargo.toml index 4b61998be4..c33a55967c 100644 --- a/client/rpc/Cargo.toml +++ b/client/rpc/Cargo.toml @@ -1,22 +1,22 @@ [package] name = "uc-rpc" -version = "0.1.0" +version = "0.1.4" license = "GPLv3" edition = "2021" [dependencies] -unique-runtime-common = { default-features = false, path = "../../runtime/common" } pallet-common = { default-features = false, path = '../../pallets/common' } up-data-structs = { default-features = false, path = '../../primitives/data-structs' } up-rpc = { path = "../../primitives/rpc" } +app-promotion-rpc = { path = "../../primitives/app_promotion_rpc"} rmrk-rpc = { path = "../../primitives/rmrk-rpc" } codec = { package = "parity-scale-codec", version = "3.1.2" } -jsonrpsee = { version = "0.13.0", features = ["server", "macros"] } +jsonrpsee = { version = "0.15.1", features = ["server", "macros"] } anyhow = "1.0.57" -sp-api = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-blockchain = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-core = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-rpc = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-runtime = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -pallet-evm = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } +sp-api = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-blockchain = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-core = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-rpc = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-runtime = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +pallet-evm = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } diff --git a/client/rpc/src/lib.rs b/client/rpc/src/lib.rs index ca56fb5975..21cfe32a87 100644 --- a/client/rpc/src/lib.rs +++ b/client/rpc/src/lib.rs @@ -23,6 +23,7 @@ use jsonrpsee::{ proc_macros::rpc, }; use anyhow::anyhow; +use sp_runtime::traits::{AtLeast32BitUnsigned, Member}; use up_data_structs::{ RpcCollection, CollectionId, CollectionStats, CollectionLimits, TokenId, Property, PropertyKeyPermission, TokenData, TokenChild, @@ -30,6 +31,7 @@ use up_data_structs::{ use sp_api::{BlockId, BlockT, ProvideRuntimeApi, ApiExt}; use sp_blockchain::HeaderBackend; use up_rpc::UniqueApi as UniqueRuntimeApi; +use app_promotion_rpc::AppPromotionApi as AppPromotionRuntimeApi; // RMRK use rmrk_rpc::RmrkApi as RmrkRuntimeApi; @@ -37,11 +39,13 @@ use up_data_structs::{ RmrkCollectionId, RmrkNftId, RmrkBaseId, RmrkNftChild, RmrkThemeName, RmrkResourceId, }; +pub use app_promotion_unique_rpc::AppPromotionApiServer; pub use rmrk_unique_rpc::RmrkApiServer; #[rpc(server)] #[async_trait] pub trait UniqueApi { + /// Get tokens owned by account. 
#[method(name = "unique_accountTokens")] fn account_tokens( &self, @@ -49,12 +53,16 @@ pub trait UniqueApi { account: CrossAccountId, at: Option, ) -> Result>; + + /// Get tokens contained within a collection. #[method(name = "unique_collectionTokens")] fn collection_tokens( &self, collection: CollectionId, at: Option, ) -> Result>; + + /// Check if the token exists. #[method(name = "unique_tokenExists")] fn token_exists( &self, @@ -63,6 +71,7 @@ pub trait UniqueApi { at: Option, ) -> Result; + /// Get the token owner. #[method(name = "unique_tokenOwner")] fn token_owner( &self, @@ -70,6 +79,17 @@ pub trait UniqueApi { token: TokenId, at: Option, ) -> Result>; + + /// Returns 10 tokens owners in no particular order. + #[method(name = "unique_tokenOwners")] + fn token_owners( + &self, + collection: CollectionId, + token: TokenId, + at: Option, + ) -> Result>; + + /// Get the topmost token owner in the hierarchy of a possibly nested token. #[method(name = "unique_topmostTokenOwner")] fn topmost_token_owner( &self, @@ -77,6 +97,8 @@ pub trait UniqueApi { token: TokenId, at: Option, ) -> Result>; + + /// Get tokens nested directly into the token. #[method(name = "unique_tokenChildren")] fn token_children( &self, @@ -85,6 +107,7 @@ pub trait UniqueApi { at: Option, ) -> Result>; + /// Get collection properties, optionally limited to the provided keys. #[method(name = "unique_collectionProperties")] fn collection_properties( &self, @@ -93,6 +116,7 @@ pub trait UniqueApi { at: Option, ) -> Result>; + /// Get token properties, optionally limited to the provided keys. #[method(name = "unique_tokenProperties")] fn token_properties( &self, @@ -102,6 +126,7 @@ pub trait UniqueApi { at: Option, ) -> Result>; + /// Get property permissions, optionally limited to the provided keys. #[method(name = "unique_propertyPermissions")] fn property_permissions( &self, @@ -110,6 +135,7 @@ pub trait UniqueApi { at: Option, ) -> Result>; + /// Get token data, including properties, optionally limited to the provided keys, and total pieces for an RFT. #[method(name = "unique_tokenData")] fn token_data( &self, @@ -119,8 +145,11 @@ pub trait UniqueApi { at: Option, ) -> Result>; + /// Get the amount of distinctive tokens present in a collection. #[method(name = "unique_totalSupply")] fn total_supply(&self, collection: CollectionId, at: Option) -> Result; + + /// Get the amount of any user tokens owned by an account. #[method(name = "unique_accountBalance")] fn account_balance( &self, @@ -128,6 +157,8 @@ pub trait UniqueApi { account: CrossAccountId, at: Option, ) -> Result; + + /// Get the amount of a specific token owned by an account. #[method(name = "unique_balance")] fn balance( &self, @@ -136,6 +167,8 @@ pub trait UniqueApi { token: TokenId, at: Option, ) -> Result; + + /// Get the amount of currently possible sponsored transactions on a token for the fee to be taken off a sponsor. #[method(name = "unique_allowance")] fn allowance( &self, @@ -146,18 +179,23 @@ pub trait UniqueApi { at: Option, ) -> Result; + /// Get the list of admin accounts of a collection. #[method(name = "unique_adminlist")] fn adminlist( &self, collection: CollectionId, at: Option, ) -> Result>; + + /// Get the list of accounts allowed to operate within a collection. #[method(name = "unique_allowlist")] fn allowlist( &self, collection: CollectionId, at: Option, ) -> Result>; + + /// Check if a user is allowed to operate within a collection. 
#[method(name = "unique_allowed")] fn allowed( &self, @@ -165,17 +203,24 @@ pub trait UniqueApi { user: CrossAccountId, at: Option, ) -> Result; + + /// Get the last token ID created in a collection. #[method(name = "unique_lastTokenId")] fn last_token_id(&self, collection: CollectionId, at: Option) -> Result; + + /// Get collection info by the specified ID. #[method(name = "unique_collectionById")] fn collection_by_id( &self, collection: CollectionId, at: Option, ) -> Result>>; + + /// Get chain stats about collections. #[method(name = "unique_collectionStats")] fn collection_stats(&self, at: Option) -> Result; + /// Get the number of blocks until sponsoring a transaction is available. #[method(name = "unique_nextSponsored")] fn next_sponsored( &self, @@ -184,12 +229,63 @@ pub trait UniqueApi { token: TokenId, at: Option, ) -> Result>; + + /// Get effective collection limits. If not explicitly set, get the chain defaults. #[method(name = "unique_effectiveCollectionLimits")] fn effective_collection_limits( &self, collection_id: CollectionId, at: Option, ) -> Result>; + + /// Get the total amount of pieces of an RFT. + #[method(name = "unique_totalPieces")] + fn total_pieces( + &self, + collection_id: CollectionId, + token_id: TokenId, + at: Option, + ) -> Result>; +} + +mod app_promotion_unique_rpc { + use super::*; + + #[rpc(server)] + #[async_trait] + pub trait AppPromotionApi { + /// Returns the total amount of staked tokens. + #[method(name = "appPromotion_totalStaked")] + fn total_staked( + &self, + staker: Option, + at: Option, + ) -> Result; + + ///Returns the total amount of staked tokens per block when staked. + #[method(name = "appPromotion_totalStakedPerBlock")] + fn total_staked_per_block( + &self, + staker: CrossAccountId, + at: Option, + ) -> Result>; + + /// Returns the total amount of tokens pending withdrawal from staking. + #[method(name = "appPromotion_pendingUnstake")] + fn pending_unstake( + &self, + staker: Option, + at: Option, + ) -> Result; + + /// Returns the total amount of tokens pending withdrawal from staking per block. + #[method(name = "appPromotion_pendingUnstakePerBlock")] + fn pending_unstake_per_block( + &self, + staker: CrossAccountId, + at: Option, + ) -> Result>; + } } mod rmrk_unique_rpc { @@ -209,20 +305,20 @@ mod rmrk_unique_rpc { Theme, > { + /// Get the latest created collection ID. #[method(name = "rmrk_lastCollectionIdx")] - /// Get the latest created collection id fn last_collection_idx(&self, at: Option) -> Result; + /// Get collection info by ID. #[method(name = "rmrk_collectionById")] - /// Get collection by id fn collection_by_id( &self, id: RmrkCollectionId, at: Option, ) -> Result>; + /// Get NFT info by collection and NFT IDs. #[method(name = "rmrk_nftById")] - /// Get NFT by collection id and NFT id fn nft_by_id( &self, collection_id: RmrkCollectionId, @@ -230,8 +326,8 @@ mod rmrk_unique_rpc { at: Option, ) -> Result>; + /// Get tokens owned by an account in a collection. #[method(name = "rmrk_accountTokens")] - /// Get tokens owned by an account in a collection fn account_tokens( &self, account_id: AccountId, @@ -239,8 +335,8 @@ mod rmrk_unique_rpc { at: Option, ) -> Result>; + /// Get tokens nested in an NFT - its direct children (not the children's children). #[method(name = "rmrk_nftChildren")] - /// Get NFT children fn nft_children( &self, collection_id: RmrkCollectionId, @@ -248,8 +344,8 @@ mod rmrk_unique_rpc { at: Option, ) -> Result>; + /// Get collection properties, created by the user - not the proxy-specific properties. 
#[method(name = "rmrk_collectionProperties")] - /// Get collection properties fn collection_properties( &self, collection_id: RmrkCollectionId, @@ -257,8 +353,8 @@ mod rmrk_unique_rpc { at: Option, ) -> Result>; + /// Get NFT properties, created by the user - not the proxy-specific properties. #[method(name = "rmrk_nftProperties")] - /// Get NFT properties fn nft_properties( &self, collection_id: RmrkCollectionId, @@ -267,8 +363,8 @@ mod rmrk_unique_rpc { at: Option, ) -> Result>; + /// Get data of resources of an NFT. #[method(name = "rmrk_nftResources")] - /// Get NFT resources fn nft_resources( &self, collection_id: RmrkCollectionId, @@ -276,8 +372,8 @@ mod rmrk_unique_rpc { at: Option, ) -> Result>; + /// Get the priority of a resource in an NFT. #[method(name = "rmrk_nftResourcePriority")] - /// Get NFT resource priority fn nft_resource_priority( &self, collection_id: RmrkCollectionId, @@ -286,14 +382,15 @@ mod rmrk_unique_rpc { at: Option, ) -> Result>; + /// Get base info by its ID. #[method(name = "rmrk_base")] - /// Get base info fn base(&self, base_id: RmrkBaseId, at: Option) -> Result>; + /// Get all parts of a base. #[method(name = "rmrk_baseParts")] - /// Get all Base's parts fn base_parts(&self, base_id: RmrkBaseId, at: Option) -> Result>; + /// Get the theme names belonging to a base. #[method(name = "rmrk_themeNames")] fn theme_names( &self, @@ -301,6 +398,7 @@ mod rmrk_unique_rpc { at: Option, ) -> Result>; + /// Get theme info, including properties, optionally limited to the provided keys. #[method(name = "rmrk_themes")] fn theme( &self, @@ -312,34 +410,28 @@ mod rmrk_unique_rpc { } } -pub struct Unique { - client: Arc, - _marker: std::marker::PhantomData
, -} - -impl Unique { - pub fn new(client: Arc) -> Self { - Self { - client, - _marker: Default::default(), +macro_rules! define_struct_for_server_api { + ($name:ident) => { + pub struct $name { + client: Arc, + _marker: std::marker::PhantomData
, } - } -} -pub struct Rmrk { - client: Arc, - _marker: std::marker::PhantomData
, -} - -impl Rmrk { - pub fn new(client: Arc) -> Self { - Self { - client, - _marker: Default::default(), + impl $name { + pub fn new(client: Arc) -> Self { + Self { + client, + _marker: Default::default(), + } + } } - } + }; } +define_struct_for_server_api!(Unique); +define_struct_for_server_api!(AppPromotion); +define_struct_for_server_api!(Rmrk); + macro_rules! pass_method { ( $method_name:ident( @@ -385,6 +477,12 @@ macro_rules! unique_api { }; } +macro_rules! app_promotion_api { + () => { + dyn AppPromotionRuntimeApi + }; +} + macro_rules! rmrk_api { () => { dyn RmrkRuntimeApi @@ -447,22 +545,59 @@ where keys: Option> ) -> Vec, unique_api); - pass_method!(token_data( - collection: CollectionId, - token_id: TokenId, + pass_method!( + token_data( + collection: CollectionId, + token_id: TokenId, - #[map(|keys| string_keys_to_bytes_keys(keys))] - keys: Option>, - ) -> TokenData, unique_api); + #[map(|keys| string_keys_to_bytes_keys(keys))] + keys: Option>, + ) -> TokenData, unique_api; + changed_in 3, token_data_before_version_3(collection, token_id, string_keys_to_bytes_keys(keys)) => |value| value.into() + ); pass_method!(adminlist(collection: CollectionId) -> Vec, unique_api); pass_method!(allowlist(collection: CollectionId) -> Vec, unique_api); pass_method!(allowed(collection: CollectionId, user: CrossAccountId) -> bool, unique_api); pass_method!(last_token_id(collection: CollectionId) -> TokenId, unique_api); - pass_method!(collection_by_id(collection: CollectionId) -> Option>, unique_api); + pass_method!( + collection_by_id(collection: CollectionId) -> Option>, unique_api; + changed_in 3, collection_by_id_before_version_3(collection) => |value| value.map(|coll| coll.into()) + ); pass_method!(collection_stats() -> CollectionStats, unique_api); pass_method!(next_sponsored(collection: CollectionId, account: CrossAccountId, token: TokenId) -> Option, unique_api); pass_method!(effective_collection_limits(collection_id: CollectionId) -> Option, unique_api); + pass_method!(total_pieces(collection_id: CollectionId, token_id: TokenId) -> Option => |o| o.map(|number| number.to_string()) , unique_api); + pass_method!(token_owners(collection: CollectionId, token: TokenId) -> Vec, unique_api); +} + +impl + app_promotion_unique_rpc::AppPromotionApiServer< + ::Hash, + BlockNumber, + CrossAccountId, + AccountId, + > for AppPromotion +where + Block: BlockT, + BlockNumber: Decode + Member + AtLeast32BitUnsigned, + AccountId: Decode, + C: 'static + ProvideRuntimeApi + HeaderBackend, + CrossAccountId: pallet_evm::account::CrossAccountId, + C::Api: AppPromotionRuntimeApi, +{ + pass_method!(total_staked(staker: Option) -> String => |v| v.to_string(), app_promotion_api); + pass_method!(total_staked_per_block(staker: CrossAccountId) -> Vec<(BlockNumber, String)> => + |v| v + .into_iter() + .map(|(b, a)| (b, a.to_string())) + .collect::>(), app_promotion_api); + pass_method!(pending_unstake(staker: Option) -> String => |v| v.to_string(), app_promotion_api); + pass_method!(pending_unstake_per_block(staker: CrossAccountId) -> Vec<(BlockNumber, String)> => + |v| v + .into_iter() + .map(|(b, a)| (b, a.to_string())) + .collect::>(), app_promotion_api); } #[allow(deprecated)] diff --git a/crates/evm-coder/CHANGELOG.md b/crates/evm-coder/CHANGELOG.md new file mode 100644 index 0000000000..711280f911 --- /dev/null +++ b/crates/evm-coder/CHANGELOG.md @@ -0,0 +1,30 @@ +# Change Log + +All notable changes to this project will be documented in this file. 
+ +## [0.1.3] - 2022-08-29 + +### Fixed + + - Parsing simple values. + + +## [v0.1.2] 2022-08-19 + +### Added + + - Implementation `AbiWrite` for tuples. + + ### Fixes + + - Tuple generation for solidity. + +## [v0.1.1] 2022-08-16 + +### Other changes + +- build: Upgrade polkadot to v0.9.27 2c498572636f2b34d53b1c51b7283a761a7dc90a + +- build: Upgrade polkadot to v0.9.26 85515e54c4ca1b82a2630034e55dcc804c643bf8 + +- build: Upgrade polkadot to v0.9.25 cdfb9bdc7b205ff1b5134f034ef9973d769e5e6b diff --git a/crates/evm-coder/Cargo.toml b/crates/evm-coder/Cargo.toml index aee1c37e18..5dcdc03330 100644 --- a/crates/evm-coder/Cargo.toml +++ b/crates/evm-coder/Cargo.toml @@ -1,20 +1,28 @@ [package] name = "evm-coder" -version = "0.1.0" +version = "0.1.3" license = "GPLv3" edition = "2021" [dependencies] -evm-coder-macros = { path = "../evm-coder-macros" } +# evm-coder reexports those proc-macro +evm-coder-procedural = { path = "./procedural" } +# Evm uses primitive-types for H160, H256 and others primitive-types = { version = "0.11.1", default-features = false } -hex-literal = "0.3.3" +# Evm doesn't have reexports for log and others ethereum = { version = "0.12.0", default-features = false } -evm-core = { default-features = false , git = "https://github.com/uniquenetwork/evm", branch = "unique-polkadot-v0.9.24" } -impl-trait-for-tuples = "0.2.1" +sp-std = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +frame-support = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +# Error types for execution +evm-core = { default-features = false , git = "https://github.com/uniquenetwork/evm", branch = "unique-polkadot-v0.9.30" } +# We have tuple-heavy code in solidity.rs +impl-trait-for-tuples = "0.2.2" [dev-dependencies] +# We want to assert some large binary blobs equality in tests hex = "0.4.3" +hex-literal = "0.3.4" [features] default = ["std"] -std = ["ethereum/std", "primitive-types/std", "evm-core/std"] +std = ["ethereum/std", "primitive-types/std", "evm-core/std", "frame-support/std"] diff --git a/crates/evm-coder/README.md b/crates/evm-coder/README.md new file mode 100644 index 0000000000..f9f30c7f9e --- /dev/null +++ b/crates/evm-coder/README.md @@ -0,0 +1,15 @@ +# evm-coder + +Library for seamless call translation between Rust and Solidity code + +By encoding solidity definitions in Rust, this library also provides generation of +solidity interfaces for ethereum developers + +## Overview + +Most of this library functionality shouldn't be used directly, but via macros + +- [`solidity_interface`] +- [`ToLog`] + + \ No newline at end of file diff --git a/crates/evm-coder-macros/Cargo.toml b/crates/evm-coder/procedural/Cargo.toml similarity index 55% rename from crates/evm-coder-macros/Cargo.toml rename to crates/evm-coder/procedural/Cargo.toml index f8f7c85d6d..0b9956cd67 100644 --- a/crates/evm-coder-macros/Cargo.toml +++ b/crates/evm-coder/procedural/Cargo.toml @@ -1,6 +1,6 @@ [package] -name = "evm-coder-macros" -version = "0.1.0" +name = "evm-coder-procedural" +version = "0.2.0" license = "GPLv3" edition = "2021" @@ -8,10 +8,12 @@ edition = "2021" proc-macro = true [dependencies] -sha3 = "0.9.1" +# Ethereum uses keccak (=sha3) for selectors +sha3 = "0.10.1" +# Value formatting +hex = "0.4.3" +Inflector = "0.11.4" +# General proc-macro utilities quote = "1.0" proc-macro2 = "1.0" syn = { version = "1.0", features = ["full"] } -hex = "0.4.3" -Inflector = "0.11.4" -darling = "0.13.0" diff 
--git a/crates/evm-coder-macros/src/lib.rs b/crates/evm-coder/procedural/src/lib.rs similarity index 70% rename from crates/evm-coder-macros/src/lib.rs rename to crates/evm-coder/procedural/src/lib.rs index a241a91059..63c916ca1c 100644 --- a/crates/evm-coder-macros/src/lib.rs +++ b/crates/evm-coder/procedural/src/lib.rs @@ -16,14 +16,13 @@ #![allow(dead_code)] -use darling::FromMeta; use inflector::cases; use proc_macro::TokenStream; use quote::quote; use sha3::{Digest, Keccak256}; use syn::{ - AttributeArgs, DeriveInput, GenericArgument, Ident, ItemImpl, Pat, Path, PathArguments, - PathSegment, Type, parse_macro_input, spanned::Spanned, + DeriveInput, GenericArgument, Ident, ItemImpl, Pat, Path, PathArguments, PathSegment, Type, + parse_macro_input, spanned::Spanned, }; mod solidity_interface; @@ -200,62 +199,10 @@ fn pascal_ident_to_snake_call(ident: &Ident) -> Ident { Ident::new(&name, ident.span()) } -/// Derives call enum implementing [`evm_coder::Callable`], [`evm_coder::Weighted`] -/// and [`evm_coder::Call`] from impl block -/// -/// ## Macro syntax -/// -/// `#[solidity_interface(name, is, inline_is, events)]` -/// - *name*: used in generated code, and for Call enum name -/// - *is*: used to provide call inheritance, not found methods will be delegated to all contracts -/// specified in is/inline_is -/// - *inline_is*: same as is, but selectors for passed contracts will be used by derived ERC165 -/// implementation -/// -/// `#[weight(value)]` -/// Can be added to every method of impl block, used for deriving [`evm_coder::Weighted`], which -/// is used by substrate bridge -/// - *value*: expression, which evaluates to weight required to call this method. -/// This expression can use call arguments to calculate non-constant execution time. -/// This expression should evaluate faster than actual execution does, and may provide worser case -/// than one is called -/// -/// `#[solidity_interface(rename_selector)]` -/// - *rename_selector*: by default, selector name will be generated by transforming method name -/// from snake_case to camelCase. Use this option, if other naming convention is required. -/// I.e: method `token_uri` will be automatically renamed to `tokenUri` in selector, but name -/// required by ERC721 standard is `tokenURI`, thus we need to specify `rename_selector = "tokenURI"` -/// explicitly -/// -/// Also, any contract method may have doc comments, which will be automatically added to generated -/// solidity interface definitions -/// -/// ## Example -/// -/// ```ignore -/// struct SuperContract; -/// struct InlineContract; -/// struct Contract; -/// -/// #[derive(ToLog)] -/// enum ContractEvents { -/// Event(#[indexed] uint32), -/// } -/// -/// #[solidity_interface(name = "MyContract", is(SuperContract), inline_is(InlineContract))] -/// impl Contract { -/// /// Multiply two numbers -/// #[weight(200 + a + b)] -/// #[solidity_interface(rename_selector = "mul")] -/// fn mul(&mut self, a: uint32, b: uint32) -> Result { -/// Ok(a.checked_mul(b).ok_or("overflow")?) 
-/// } -/// } -/// ``` +/// See documentation for this proc-macro reexported in `evm-coder` crate #[proc_macro_attribute] pub fn solidity_interface(args: TokenStream, stream: TokenStream) -> TokenStream { - let args = parse_macro_input!(args as AttributeArgs); - let args = solidity_interface::InterfaceInfo::from_list(&args).unwrap(); + let args = parse_macro_input!(args as solidity_interface::InterfaceInfo); let input: ItemImpl = match syn::parse(stream) { Ok(t) => t, @@ -284,10 +231,7 @@ pub fn weight(_args: TokenStream, stream: TokenStream) -> TokenStream { stream } -/// ## Syntax -/// -/// `#[indexed]` -/// Marks this field as indexed, so it will appear in [`ethereum::Log`] topics instead of data +/// See documentation for this proc-macro reexported in `evm-coder` crate #[proc_macro_derive(ToLog, attributes(indexed))] pub fn to_log(value: TokenStream) -> TokenStream { let input = parse_macro_input!(value as DeriveInput); diff --git a/crates/evm-coder-macros/src/solidity_interface.rs b/crates/evm-coder/procedural/src/solidity_interface.rs similarity index 70% rename from crates/evm-coder-macros/src/solidity_interface.rs rename to crates/evm-coder/procedural/src/solidity_interface.rs index cd9e1abdcd..3bac1ae410 100644 --- a/crates/evm-coder-macros/src/solidity_interface.rs +++ b/crates/evm-coder/procedural/src/solidity_interface.rs @@ -16,14 +16,19 @@ #![allow(dead_code)] -use quote::quote; -use darling::{FromMeta, ToTokens}; +// NOTE: In order to understand this Rust macro better, first read this chapter +// about Procedural Macros in Rust book: +// https://doc.rust-lang.org/reference/procedural-macros.html + +use quote::{quote, ToTokens}; use inflector::cases; use std::fmt::Write; use syn::{ Expr, FnArg, GenericArgument, Generics, Ident, ImplItem, ImplItemMethod, ItemImpl, Lit, Meta, - MetaNameValue, NestedMeta, PatType, Path, PathArguments, ReturnType, Type, spanned::Spanned, - parse_str, + MetaNameValue, PatType, PathArguments, ReturnType, Type, + spanned::Spanned, + parse::{Parse, ParseStream}, + parenthesized, Token, LitInt, LitStr, }; use crate::{ @@ -37,21 +42,9 @@ struct Is { pascal_call_name: Ident, snake_call_name: Ident, via: Option<(Type, Ident)>, + condition: Option, } impl Is { - fn new_via(path: &Path, via: Option<(Type, Ident)>) -> syn::Result { - let name = parse_ident_from_path(path, false)?.clone(); - Ok(Self { - pascal_call_name: pascal_ident_to_call(&name), - snake_call_name: pascal_ident_to_snake_call(&name), - name, - via, - }) - } - fn new(path: &Path) -> syn::Result { - Self::new_via(path, None) - } - fn expand_call_def(&self, gen_ref: &proc_macro2::TokenStream) -> proc_macro2::TokenStream { let name = &self.name; let pascal_call_name = &self.pascal_call_name; @@ -72,8 +65,13 @@ impl Is { generics: &proc_macro2::TokenStream, ) -> proc_macro2::TokenStream { let pascal_call_name = &self.pascal_call_name; + let condition = self.condition.as_ref().map(|condition| { + quote! { + (#condition) && + } + }); quote! { - <#pascal_call_name #generics>::supports_interface(interface_id) + #condition <#pascal_call_name #generics>::supports_interface(this, interface_id) } } @@ -101,8 +99,13 @@ impl Is { .as_ref() .map(|(_, i)| quote! {.#i()}) .unwrap_or_default(); + let condition = self.condition.as_ref().map(|condition| { + quote! { + if ({let this = &self; (#condition)}) + } + }); quote! 
{ - #call_name::#name(call) => return <#via_typ as ::evm_coder::Callable<#pascal_call_name #generics>>::call(self #via_map, Msg { + #call_name::#name(call) #condition => return <#via_typ as ::evm_coder::Callable<#pascal_call_name #generics>>::call(self #via_map, Msg { call, caller: c.caller, value: c.value, @@ -137,72 +140,192 @@ impl Is { #[derive(Default)] struct IsList(Vec); -impl FromMeta for IsList { - fn from_list(items: &[NestedMeta]) -> darling::Result { - let mut out = Vec::new(); - for item in items { - match item { - NestedMeta::Meta(Meta::Path(path)) => out.push(Is::new(path)?), - // TODO: replace meta parsing with manual - NestedMeta::Meta(Meta::List(list)) - if list.path.is_ident("via") && list.nested.len() == 3 => - { - let mut data = list.nested.iter(); - let typ = match data.next().expect("len == 3") { - NestedMeta::Lit(Lit::Str(s)) => { - let v = s.value(); - let typ: Type = parse_str(&v)?; - typ +impl Parse for IsList { + fn parse(input: ParseStream) -> syn::Result { + let mut out = vec![]; + loop { + if input.is_empty() { + break; + } + let name = input.parse::()?; + let lookahead = input.lookahead1(); + + let mut condition: Option = None; + let mut via: Option<(Type, Ident)> = None; + + if lookahead.peek(syn::token::Paren) { + let contents; + parenthesized!(contents in input); + let input = contents; + + while !input.is_empty() { + let lookahead = input.lookahead1(); + if lookahead.peek(Token![if]) { + input.parse::()?; + let contents; + parenthesized!(contents in input); + let contents = contents.parse::()?; + + if condition.replace(contents).is_some() { + return Err(syn::Error::new(input.span(), "condition is already set")); } - _ => { - return Err(syn::Error::new( - item.span(), - "via typ should be type in string", - ) - .into()) + } else if lookahead.peek(kw::via) { + input.parse::()?; + let contents; + parenthesized!(contents in input); + + let method = contents.parse::()?; + contents.parse::()?; + let ty = contents.parse::()?; + + if via.replace((ty, method)).is_some() { + return Err(syn::Error::new(input.span(), "via is already set")); } - }; - let via = match data.next().expect("len == 3") { - NestedMeta::Meta(Meta::Path(path)) => path - .get_ident() - .ok_or_else(|| syn::Error::new(item.span(), "via should be ident"))?, - _ => return Err(syn::Error::new(item.span(), "via should be ident").into()), - }; - let path = match data.next().expect("len == 3") { - NestedMeta::Meta(Meta::Path(path)) => path, - _ => return Err(syn::Error::new(item.span(), "path should be path").into()), - }; + } else { + return Err(lookahead.error()); + } - out.push(Is::new_via(path, Some((typ, via.clone())))?) 
- } - _ => { - return Err(syn::Error::new( - item.span(), - "expected either Name or via(\"Type\", getter, Name)", - ) - .into()) + if input.peek(Token![,]) { + input.parse::()?; + } else if !input.is_empty() { + return Err(syn::Error::new(input.span(), "expected end")); + } } + } else if lookahead.peek(Token![,]) || input.is_empty() { + // Pass + } else { + return Err(lookahead.error()); + }; + out.push(Is { + pascal_call_name: pascal_ident_to_call(&name), + snake_call_name: pascal_ident_to_snake_call(&name), + name, + via, + condition, + }); + if input.peek(Token![,]) { + input.parse::()?; + continue; + } else { + break; } } Ok(Self(out)) } } -#[derive(FromMeta)] pub struct InterfaceInfo { name: Ident, - #[darling(default)] is: IsList, - #[darling(default)] inline_is: IsList, - #[darling(default)] events: IsList, + expect_selector: Option, +} +impl Parse for InterfaceInfo { + fn parse(input: ParseStream) -> syn::Result { + let mut name = None; + let mut is = None; + let mut inline_is = None; + let mut events = None; + let mut expect_selector = None; + // TODO: create proc-macro to optimize proc-macro boilerplate? :D + loop { + let lookahead = input.lookahead1(); + if lookahead.peek(kw::name) { + let k = input.parse::()?; + input.parse::()?; + if name.replace(input.parse::()?).is_some() { + return Err(syn::Error::new(k.span(), "name is already set")); + } + } else if lookahead.peek(kw::is) { + let k = input.parse::()?; + let contents; + parenthesized!(contents in input); + if is.replace(contents.parse::()?).is_some() { + return Err(syn::Error::new(k.span(), "is is already set")); + } + } else if lookahead.peek(kw::inline_is) { + let k = input.parse::()?; + let contents; + parenthesized!(contents in input); + if inline_is.replace(contents.parse::()?).is_some() { + return Err(syn::Error::new(k.span(), "inline_is is already set")); + } + } else if lookahead.peek(kw::events) { + let k = input.parse::()?; + let contents; + parenthesized!(contents in input); + if events.replace(contents.parse::()?).is_some() { + return Err(syn::Error::new(k.span(), "events is already set")); + } + } else if lookahead.peek(kw::expect_selector) { + let k = input.parse::()?; + input.parse::()?; + let value = input.parse::()?; + if expect_selector + .replace(value.base10_parse::()?) 
+ .is_some() + { + return Err(syn::Error::new(k.span(), "expect_selector is already set")); + } + } else if input.is_empty() { + break; + } else { + return Err(lookahead.error()); + } + if input.peek(Token![,]) { + input.parse::()?; + } else { + break; + } + } + Ok(Self { + name: name.ok_or_else(|| syn::Error::new(input.span(), "missing name"))?, + is: is.unwrap_or_default(), + inline_is: inline_is.unwrap_or_default(), + events: events.unwrap_or_default(), + expect_selector, + }) + } } -#[derive(FromMeta)] struct MethodInfo { - #[darling(default)] rename_selector: Option, + hide: bool, +} +impl Parse for MethodInfo { + fn parse(input: ParseStream) -> syn::Result { + let mut rename_selector = None; + let mut hide = false; + while !input.is_empty() { + let lookahead = input.lookahead1(); + if lookahead.peek(kw::rename_selector) { + let k = input.parse::()?; + input.parse::()?; + if rename_selector + .replace(input.parse::()?.value()) + .is_some() + { + return Err(syn::Error::new(k.span(), "rename_selector is already set")); + } + } else if lookahead.peek(kw::hide) { + input.parse::()?; + hide = true; + } else { + return Err(lookahead.error()); + } + + if input.peek(Token![,]) { + input.parse::()?; + } else if !input.is_empty() { + return Err(syn::Error::new(input.span(), "expected end")); + } + } + Ok(Self { + rename_selector, + hide, + }) + } } enum AbiType { @@ -258,7 +381,7 @@ impl AbiType { "expected only one generic for vec", )); } - let arg = args.first().unwrap(); + let arg = args.first().expect("first arg"); let ty = match arg { GenericArgument::Type(ty) => ty, @@ -429,25 +552,24 @@ enum Mutability { Pure, } -pub struct WeightAttr(syn::Expr); - -mod keyword { +/// Group all keywords for this macro. Usage example: +/// #[solidity_interface(name = "B", inline_is(A))] +mod kw { syn::custom_keyword!(weight); -} -impl syn::parse::Parse for WeightAttr { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - input.parse::()?; - let content; - syn::bracketed!(content in input); - content.parse::()?; + syn::custom_keyword!(via); + syn::custom_keyword!(returns); + syn::custom_keyword!(name); + syn::custom_keyword!(is); + syn::custom_keyword!(inline_is); + syn::custom_keyword!(events); + syn::custom_keyword!(expect_selector); - let weight_content; - syn::parenthesized!(weight_content in content); - Ok(WeightAttr(weight_content.parse::()?)) - } + syn::custom_keyword!(rename_selector); + syn::custom_keyword!(hide); } +/// Rust methods are parsed into this structure when Solidity code is generated struct Method { name: Ident, camel_name: String, @@ -455,8 +577,10 @@ struct Method { screaming_name: Ident, selector_str: String, selector: u32, + hide: bool, args: Vec, has_normal_args: bool, + has_value_args: bool, mutability: Mutability, result: Type, weight: Option, @@ -466,14 +590,14 @@ impl Method { fn try_from(value: &ImplItemMethod) -> syn::Result { let mut info = MethodInfo { rename_selector: None, + hide: false, }; let mut docs = Vec::new(); let mut weight = None; for attr in &value.attrs { let ident = parse_ident_from_path(&attr.path, false)?; if ident == "solidity" { - let args = attr.parse_meta().unwrap(); - info = MethodInfo::from_meta(&args).unwrap(); + info = attr.parse_args::()?; } else if ident == "doc" { let args = attr.parse_meta().unwrap(); let value = match args { @@ -484,7 +608,7 @@ impl Method { }; docs.push(value); } else if ident == "weight" { - weight = Some(syn::parse2::(attr.to_token_stream())?.0); + weight = Some(attr.parse_args::()?); } } let ident = 
&value.sig.ident; @@ -553,6 +677,7 @@ impl Method { write!(selector_str, "{}", arg.selector_ty()).unwrap(); has_normal_args = true; } + let has_value_args = args.iter().any(|a| a.is_value()); selector_str.push(')'); let selector = fn_selector_str(&selector_str); @@ -563,8 +688,10 @@ impl Method { screaming_name: snake_ident_to_screaming(ident), selector_str, selector, + hide: info.hide, args, has_normal_args, + has_value_args, mutability, result: result.clone(), weight, @@ -578,9 +705,12 @@ impl Method { .filter(|a| !a.is_special()) .map(|a| a.expand_call_def()); let pascal_name = &self.pascal_name; + let docs = &self.docs; if self.has_normal_args { quote! { + #(#[doc = #docs])* + #[allow(missing_docs)] #pascal_name { #( #defs, @@ -715,15 +845,20 @@ impl Method { .iter() .filter(|a| !a.is_special()) .map(MethodArg::expand_solidity_argument); - let docs = self.docs.iter(); - let selector = format!("{} {:0>8x}", self.selector_str, self.selector); - + let docs = &self.docs; + let selector_str = &self.selector_str; + let selector = self.selector; + let hide = self.hide; + let is_payable = self.has_value_args; quote! { SolidityFunction { docs: &[#(#docs),*], + selector_str: #selector_str, selector: #selector, + hide: #hide, name: #camel_name, mutability: #mutability, + is_payable: #is_payable, args: ( #( #args, @@ -776,6 +911,7 @@ pub struct SolidityInterface { name: Box, info: InterfaceInfo, methods: Vec, + docs: Vec, } impl SolidityInterface { pub fn try_from(info: InterfaceInfo, value: &ItemImpl) -> syn::Result { @@ -786,11 +922,26 @@ impl SolidityInterface { methods.push(Method::try_from(method)?) } } + let mut docs = vec![]; + for attr in &value.attrs { + let ident = parse_ident_from_path(&attr.path, false)?; + if ident == "doc" { + let args = attr.parse_meta().unwrap(); + let value = match args { + Meta::NameValue(MetaNameValue { + lit: Lit::Str(str), .. + }) => str.value(), + _ => unreachable!(), + }; + docs.push(value); + } + } Ok(Self { generics: value.generics.clone(), name: value.self_ty.clone(), info, methods, + docs, }) } pub fn expand(self) -> proc_macro2::TokenStream { @@ -869,11 +1020,37 @@ impl SolidityInterface { .map(|is| Is::expand_generator(is, &gen_ref)); let solidity_event_generators = self.info.events.0.iter().map(Is::expand_event_generator); + let docs = &self.docs; + + if let Some(expect_selector) = &self.info.expect_selector { + if !self.info.inline_is.0.is_empty() { + return syn::Error::new( + name.span(), + "expect_selector is not compatible with inline_is", + ) + .to_compile_error(); + } + let selector = self + .methods + .iter() + .map(|m| m.selector) + .fold(0, |a, b| a ^ b); + + if *expect_selector != selector { + let mut methods = String::new(); + for meth in self.methods.iter() { + write!(methods, "\n- {}", meth.selector_str).expect("write to string"); + } + return syn::Error::new(name.span(), format!("expected selector mismatch, expected {expect_selector:0>8x}, but implementation has {selector:0>8x}{methods}")).to_compile_error(); + } + } // let methods = self.methods.iter().map(Method::solidity_def); quote! 
{ #[derive(Debug)] + #(#[doc = #docs])* pub enum #call_name #gen_ref { + /// Inherited method ERC165Call(::evm_coder::ERC165Call, ::core::marker::PhantomData<#gen_data>), #( #calls, @@ -886,25 +1063,19 @@ impl SolidityInterface { #( #consts )* + /// Return this call ERC165 selector pub fn interface_id() -> ::evm_coder::types::bytes4 { let mut interface_id = 0; #(#interface_id)* #(#inline_interface_id)* u32::to_be_bytes(interface_id) } - pub fn supports_interface(interface_id: ::evm_coder::types::bytes4) -> bool { - interface_id != u32::to_be_bytes(0xffffff) && ( - interface_id == ::evm_coder::ERC165Call::INTERFACE_ID || - interface_id == Self::interface_id() - #( - || #supports_interface - )* - ) - } + /// Generate solidity definitions for methods described in this interface pub fn generate_solidity_interface(tc: &evm_coder::solidity::TypeCollector, is_impl: bool) { use evm_coder::solidity::*; use core::fmt::Write; let interface = SolidityInterface { + docs: &[#(#docs),*], name: #solidity_name, selector: Self::interface_id(), is: &["Dummy", "ERC165", #( @@ -916,26 +1087,24 @@ impl SolidityInterface { #solidity_functions, )*), }; - if is_impl { - tc.collect("// Common stubs holder\ncontract Dummy {\n\tuint8 dummy;\n\tstring stub_error = \"this contract is implemented in native\";\n}\ncontract ERC165 is Dummy {\n\tfunction supportsInterface(bytes4 interfaceID) external view returns (bool) {\n\t\trequire(false, stub_error);\n\t\tinterfaceID;\n\t\treturn true;\n\t}\n}\n".into()); - } else { - tc.collect("// Common stubs holder\ninterface Dummy {\n}\ninterface ERC165 is Dummy {\n\tfunction supportsInterface(bytes4 interfaceID) external view returns (bool);\n}\n".into()); + + let mut out = ::evm_coder::types::string::new(); + if #solidity_name.starts_with("Inline") { + out.push_str("/// @dev inlined interface\n"); } + let _ = interface.format(is_impl, &mut out, tc); + tc.collect(out); #( - #solidity_generators + #solidity_event_generators )* #( - #solidity_event_generators + #solidity_generators )* - - let mut out = string::new(); - // In solidity interface usage (is) should be preceeded by interface definition - // This comment helps to sort it in a set - if #solidity_name.starts_with("Inline") { - out.push_str("// Inline\n"); + if is_impl { + tc.collect("/// @dev common stubs holder\ncontract Dummy {\n\tuint8 dummy;\n\tstring stub_error = \"this contract is implemented in native\";\n}\ncontract ERC165 is Dummy {\n\tfunction supportsInterface(bytes4 interfaceID) external view returns (bool) {\n\t\trequire(false, stub_error);\n\t\tinterfaceID;\n\t\treturn true;\n\t}\n}\n".into()); + } else { + tc.collect("/// @dev common stubs holder\ninterface Dummy {\n}\ninterface ERC165 is Dummy {\n\tfunction supportsInterface(bytes4 interfaceID) external view returns (bool);\n}\n".into()); } - let _ = interface.format(is_impl, &mut out, tc); - tc.collect(out); } } impl #gen_ref ::evm_coder::Call for #call_name #gen_ref { @@ -957,6 +1126,20 @@ impl SolidityInterface { return Ok(None); } } + impl #generics #call_name #gen_ref + #gen_where + { + /// Is this contract implements specified ERC165 selector + pub fn supports_interface(this: &#name, interface_id: ::evm_coder::types::bytes4) -> bool { + interface_id != u32::to_be_bytes(0xffffff) && ( + interface_id == ::evm_coder::ERC165Call::INTERFACE_ID || + interface_id == Self::interface_id() + #( + || #supports_interface + )* + ) + } + } impl #generics ::evm_coder::Weighted for #call_name #gen_ref #gen_where { @@ -967,7 +1150,7 @@ impl SolidityInterface { 
#weight_variants, )* // TODO: It should be very cheap, but not free - Self::ERC165Call(::evm_coder::ERC165Call::SupportsInterface {..}, _) => 100u64.into(), + Self::ERC165Call(::evm_coder::ERC165Call::SupportsInterface {..}, _) => ::frame_support::weights::Weight::from_ref_time(100).into(), #( #weight_variants_this, )* @@ -986,7 +1169,7 @@ impl SolidityInterface { )* #call_name::ERC165Call(::evm_coder::ERC165Call::SupportsInterface {interface_id}, _) => { let mut writer = ::evm_coder::abi::AbiWriter::default(); - writer.bool(&<#call_name #gen_ref>::supports_interface(interface_id)); + writer.bool(&<#call_name #gen_ref>::supports_interface(self, interface_id)); return Ok(writer.into()); } _ => {}, @@ -996,7 +1179,7 @@ impl SolidityInterface { #( #call_variants_this, )* - _ => unreachable!() + _ => Err(::evm_coder::execution::Error::from("method is not available").into()), } } } diff --git a/crates/evm-coder-macros/src/to_log.rs b/crates/evm-coder/procedural/src/to_log.rs similarity index 98% rename from crates/evm-coder-macros/src/to_log.rs rename to crates/evm-coder/procedural/src/to_log.rs index 6b9dab8a21..7fd8e0a76e 100644 --- a/crates/evm-coder-macros/src/to_log.rs +++ b/crates/evm-coder/procedural/src/to_log.rs @@ -199,6 +199,7 @@ impl Events { use evm_coder::solidity::*; use core::fmt::Write; let interface = SolidityInterface { + docs: &[], selector: [0; 4], name: #solidity_name, is: &[], @@ -207,7 +208,7 @@ impl Events { )*), }; let mut out = string::new(); - out.push_str("// Inline\n"); + out.push_str("/// @dev inlined interface\n"); let _ = interface.format(is_impl, &mut out, tc); tc.collect(out); } diff --git a/crates/evm-coder/src/abi.rs b/crates/evm-coder/src/abi.rs index 2969c6b35b..e1114333cb 100644 --- a/crates/evm-coder/src/abi.rs +++ b/crates/evm-coder/src/abi.rs @@ -14,8 +14,7 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . -//! TODO: I misunterstood therminology, abi IS rlp encoded, so -//! this module should be replaced with rlp crate +//! 
Implementation of EVM RLP reader/writer #![allow(dead_code)] @@ -32,6 +31,11 @@ use crate::execution::Result; const ABI_ALIGNMENT: usize = 32; +trait TypeHelper { + fn is_dynamic() -> bool; +} + +/// View into RLP data, which provides method to read typed items from it #[derive(Clone)] pub struct AbiReader<'i> { buf: &'i [u8], @@ -39,6 +43,7 @@ pub struct AbiReader<'i> { offset: usize, } impl<'i> AbiReader<'i> { + /// Start reading RLP buffer, assuming there is no padding bytes pub fn new(buf: &'i [u8]) -> Self { Self { buf, @@ -46,6 +51,7 @@ impl<'i> AbiReader<'i> { offset: 0, } } + /// Start reading RLP buffer, parsing first 4 bytes as selector pub fn new_call(buf: &'i [u8]) -> Result<(types::bytes4, Self)> { if buf.len() < 4 { return Err(Error::Error(ExitError::OutOfOffset)); @@ -75,8 +81,8 @@ impl<'i> AbiReader<'i> { return Err(Error::Error(ExitError::OutOfOffset)); } let mut block = [0; S]; - // Verify padding is empty - if !buf[pad_start..pad_size].iter().all(|&v| v == 0) { + let is_pad_zeroed = buf[pad_start..pad_size].iter().all(|&v| v == 0); + if !is_pad_zeroed { return Err(Error::Error(ExitError::InvalidRange)); } block.copy_from_slice(&buf[block_start..block_size]); @@ -109,10 +115,12 @@ impl<'i> AbiReader<'i> { ) } + /// Read [`H160`] at current position, then advance pub fn address(&mut self) -> Result { Ok(H160(self.read_padleft()?)) } + /// Read [`bool`] at current position, then advance pub fn bool(&mut self) -> Result { let data: [u8; 1] = self.read_padleft()?; match data[0] { @@ -122,76 +130,105 @@ impl<'i> AbiReader<'i> { } } + /// Read [`[u8; 4]`] at current position, then advance pub fn bytes4(&mut self) -> Result<[u8; 4]> { self.read_padright() } + /// Read [`Vec`] at current position, then advance pub fn bytes(&mut self) -> Result> { - let mut subresult = self.subresult()?; - let length = subresult.read_usize()?; + let mut subresult = self.subresult(None)?; + let length = subresult.uint32()? as usize; if subresult.buf.len() < subresult.offset + length { return Err(Error::Error(ExitError::OutOfOffset)); } Ok(subresult.buf[subresult.offset..subresult.offset + length].into()) } + + /// Read [`string`] at current position, then advance pub fn string(&mut self) -> Result { string::from_utf8(self.bytes()?).map_err(|_| Error::Error(ExitError::InvalidRange)) } + /// Read [`u8`] at current position, then advance pub fn uint8(&mut self) -> Result { Ok(self.read_padleft::<1>()?[0]) } + /// Read [`u32`] at current position, then advance pub fn uint32(&mut self) -> Result { Ok(u32::from_be_bytes(self.read_padleft()?)) } + /// Read [`u128`] at current position, then advance pub fn uint128(&mut self) -> Result { Ok(u128::from_be_bytes(self.read_padleft()?)) } + /// Read [`U256`] at current position, then advance pub fn uint256(&mut self) -> Result { let buf: [u8; 32] = self.read_padleft()?; Ok(U256::from_big_endian(&buf)) } + /// Read [`u64`] at current position, then advance pub fn uint64(&mut self) -> Result { Ok(u64::from_be_bytes(self.read_padleft()?)) } + /// Read [`usize`] at current position, then advance + #[deprecated = "dangerous, as usize may have different width in wasm and native execution"] pub fn read_usize(&mut self) -> Result { Ok(usize::from_be_bytes(self.read_padleft()?)) } - fn subresult(&mut self) -> Result> { - let offset = self.read_usize()?; + /// Slice recursive buffer, advance one word for buffer offset + /// If `size` is [`None`] then [`Self::offset`] and [`Self::subresult_offset`] evals from [`Self::buf`]. 
+ fn subresult(&mut self, size: Option) -> Result> { + let subresult_offset = self.subresult_offset; + let offset = if let Some(size) = size { + self.offset += size; + self.subresult_offset += size; + 0 + } else { + self.uint32()? as usize + }; + if offset + self.subresult_offset > self.buf.len() { return Err(Error::Error(ExitError::InvalidRange)); } + + let new_offset = offset + subresult_offset; Ok(AbiReader { buf: self.buf, - subresult_offset: offset + self.subresult_offset, - offset: offset + self.subresult_offset, + subresult_offset: new_offset, + offset: new_offset, }) } + /// Is this parser reached end of buffer? pub fn is_finished(&self) -> bool { self.buf.len() == self.offset } } +/// Writer for RLP encoded data #[derive(Default)] pub struct AbiWriter { static_part: Vec, dynamic_part: Vec<(usize, AbiWriter)>, + had_call: bool, } impl AbiWriter { + /// Initialize internal buffers for output data, assuming no padding required pub fn new() -> Self { Self::default() } + /// Initialize internal buffers, inserting method selector at beginning pub fn new_call(method_id: u32) -> Self { let mut val = Self::new(); val.static_part.extend(&method_id.to_be_bytes()); + val.had_call = true; val } @@ -209,62 +246,74 @@ impl AbiWriter { .extend(&[0; ABI_ALIGNMENT][0..ABI_ALIGNMENT - bytes.len()]); } + /// Write [`H160`] to end of buffer pub fn address(&mut self, address: &H160) { self.write_padleft(&address.0) } + /// Write [`bool`] to end of buffer pub fn bool(&mut self, value: &bool) { self.write_padleft(&[if *value { 1 } else { 0 }]) } + /// Write [`u8`] to end of buffer pub fn uint8(&mut self, value: &u8) { self.write_padleft(&[*value]) } + /// Write [`u32`] to end of buffer pub fn uint32(&mut self, value: &u32) { self.write_padleft(&u32::to_be_bytes(*value)) } + /// Write [`u128`] to end of buffer pub fn uint128(&mut self, value: &u128) { self.write_padleft(&u128::to_be_bytes(*value)) } + /// Write [`U256`] to end of buffer pub fn uint256(&mut self, value: &U256) { let mut out = [0; 32]; value.to_big_endian(&mut out); self.write_padleft(&out) } + /// Write [`usize`] to end of buffer + #[deprecated = "dangerous, as usize may have different width in wasm and native execution"] pub fn write_usize(&mut self, value: &usize) { self.write_padleft(&usize::to_be_bytes(*value)) } + /// Append recursive data, writing pending offset at end of buffer pub fn write_subresult(&mut self, result: Self) { self.dynamic_part.push((self.static_part.len(), result)); // Empty block, to be filled later self.write_padleft(&[]); } - pub fn memory(&mut self, value: &[u8]) { + fn memory(&mut self, value: &[u8]) { let mut sub = Self::new(); - sub.write_usize(&value.len()); + sub.uint32(&(value.len() as u32)); for chunk in value.chunks(ABI_ALIGNMENT) { sub.write_padright(chunk); } self.write_subresult(sub); } + /// Append recursive [`str`] at end of buffer pub fn string(&mut self, value: &str) { self.memory(value.as_bytes()) } + /// Append recursive [`[u8]`] at end of buffer pub fn bytes(&mut self, value: &[u8]) { self.memory(value) } + /// Finish writer, concatenating all internal buffers pub fn finish(mut self) -> Vec { for (static_offset, part) in self.dynamic_part { - let part_offset = self.static_part.len(); + let part_offset = self.static_part.len() - if self.had_call { 4 } else { 0 }; let encoded_dynamic_offset = usize::to_be_bytes(part_offset); self.static_part[static_offset + ABI_ALIGNMENT - encoded_dynamic_offset.len() @@ -276,30 +325,48 @@ impl AbiWriter { } } +/// [`AbiReader`] implements reading of many types, 
but it should +/// be limited to types defined in spec +/// +/// As this trait can't be made sealed, +/// instead of having `impl AbiRead for T`, we have `impl AbiRead for AbiReader` pub trait AbiRead { + /// Read item from current position, advanding decoder fn abi_read(&mut self) -> Result; + + /// Size for type aligned to [`ABI_ALIGNMENT`]. + fn size() -> usize; } macro_rules! impl_abi_readable { - ($ty:ty, $method:ident) => { + ($ty:ty, $method:ident, $dynamic:literal) => { + impl TypeHelper for $ty { + fn is_dynamic() -> bool { + $dynamic + } + } impl AbiRead<$ty> for AbiReader<'_> { fn abi_read(&mut self) -> Result<$ty> { self.$method() } + + fn size() -> usize { + ABI_ALIGNMENT + } } }; } -impl_abi_readable!(u8, uint8); -impl_abi_readable!(u32, uint32); -impl_abi_readable!(u64, uint64); -impl_abi_readable!(u128, uint128); -impl_abi_readable!(U256, uint256); -impl_abi_readable!([u8; 4], bytes4); -impl_abi_readable!(H160, address); -impl_abi_readable!(Vec, bytes); -impl_abi_readable!(bool, bool); -impl_abi_readable!(string, string); +impl_abi_readable!(u8, uint8, false); +impl_abi_readable!(u32, uint32, false); +impl_abi_readable!(u64, uint64, false); +impl_abi_readable!(u128, uint128, false); +impl_abi_readable!(U256, uint256, false); +impl_abi_readable!([u8; 4], bytes4, false); +impl_abi_readable!(H160, address, false); +impl_abi_readable!(Vec, bytes, true); +impl_abi_readable!(bool, bool, true); +impl_abi_readable!(string, string, true); mod sealed { /// Not all types can be placed in vec, i.e `Vec` is restricted, `bytes` should be used instead @@ -315,8 +382,8 @@ where Self: AbiRead, { fn abi_read(&mut self) -> Result> { - let mut sub = self.subresult()?; - let size = sub.read_usize()?; + let mut sub = self.subresult(None)?; + let size = sub.uint32()? as usize; sub.subresult_offset = sub.offset; let mut out = Vec::with_capacity(size); for _ in 0..size { @@ -324,21 +391,51 @@ where } Ok(out) } + + fn size() -> usize { + ABI_ALIGNMENT + } } macro_rules! impl_tuples { ($($ident:ident)+) => { + impl<$($ident: TypeHelper,)+> TypeHelper for ($($ident,)+) { + fn is_dynamic() -> bool { + false + $( + || <$ident>::is_dynamic() + )* + } + } impl<$($ident),+> sealed::CanBePlacedInVec for ($($ident,)+) {} impl<$($ident),+> AbiRead<($($ident,)+)> for AbiReader<'_> where - $(Self: AbiRead<$ident>),+ + $( + Self: AbiRead<$ident>, + )+ + ($($ident,)+): TypeHelper, { fn abi_read(&mut self) -> Result<($($ident,)+)> { - let mut subresult = self.subresult()?; + let size = if !<($($ident,)+)>::is_dynamic() { Some(>::size()) } else { None }; + let mut subresult = self.subresult(size)?; Ok(( $(>::abi_read(&mut subresult)?,)+ )) } + + fn size() -> usize { + 0 $(+ as AbiRead<$ident>>::size())+ + } + } + #[allow(non_snake_case)] + impl<$($ident),+> AbiWrite for &($($ident,)+) + where + $($ident: AbiWrite,)+ + { + fn abi_write(&self, writer: &mut AbiWriter) { + let ($($ident,)+) = self; + $($ident.abi_write(writer);)+ + } } }; } @@ -354,8 +451,13 @@ impl_tuples! {A B C D E F G H} impl_tuples! {A B C D E F G H I} impl_tuples! 
{A B C D E F G H I J} +/// For questions about inability to provide custom implementations, +/// see [`AbiRead`] pub trait AbiWrite { + /// Write value to end of specified encoder fn abi_write(&self, writer: &mut AbiWriter); + /// Specialization for [`crate::solidity_interface`] implementation, + /// see comment in `impl AbiWrite for ResultWithPostInfo` fn to_result(&self) -> ResultWithPostInfo { let mut writer = AbiWriter::new(); self.abi_write(&mut writer); @@ -363,13 +465,11 @@ pub trait AbiWrite { } } +/// This particular AbiWrite implementation should be split to another trait, +/// which only implements `to_result`, but due to lack of specialization feature +/// in stable Rust, we can't have blanket impl of this trait `for T where T: AbiWrite`, +/// so here we abusing default trait methods for it impl AbiWrite for ResultWithPostInfo { - // this particular AbiWrite implementation should be split to another trait, - // which only implements [`to_result`] - // - // But due to lack of specialization feature in stable Rust, we can't have - // blanket impl of this trait `for T where T: AbiWrite`, so here we abusing - // default trait methods for it fn abi_write(&self, _writer: &mut AbiWriter) { debug_assert!(false, "shouldn't be called, see comment") } @@ -420,6 +520,8 @@ impl AbiWrite for () { fn abi_write(&self, _writer: &mut AbiWriter) {} } +/// Helper macros to parse reader into variables +#[deprecated] #[macro_export] macro_rules! abi_decode { ($reader:expr, $($name:ident: $typ:ident),+ $(,)?) => { @@ -428,6 +530,9 @@ macro_rules! abi_decode { )+ } } + +/// Helper macros to construct RLP-encoded buffer +#[deprecated] #[macro_export] macro_rules! abi_encode { ($($typ:ident($value:expr)),* $(,)?) => {{ @@ -477,7 +582,7 @@ pub mod test { assert_eq!(encoded, alternative_encoded); let mut decoder = AbiReader::new(&encoded); - assert_eq!(decoder.bool().unwrap(), true); + assert!(decoder.bool().unwrap()); assert_eq!(decoder.string().unwrap(), "test"); } @@ -546,4 +651,49 @@ pub mod test { ] ); } + + #[test] + fn parse_vec_with_simple_type() { + use crate::types::address; + use primitive_types::{H160, U256}; + + let (call, mut decoder) = AbiReader::new_call(&hex!( + " + 1ACF2D55 + 0000000000000000000000000000000000000000000000000000000000000020 // offset of (address, uint256)[] + 0000000000000000000000000000000000000000000000000000000000000003 // length of (address, uint256)[] + + 0000000000000000000000002D2FF76104B7BACB2E8F6731D5BFC184EBECDDBC // address + 000000000000000000000000000000000000000000000000000000000000000A // uint256 + + 000000000000000000000000AB8E3D9134955566483B11E6825C9223B6737B10 // address + 0000000000000000000000000000000000000000000000000000000000000014 // uint256 + + 0000000000000000000000008C582BDF2953046705FC56F189385255EFC1BE18 // address + 000000000000000000000000000000000000000000000000000000000000001E // uint256 + " + )) + .unwrap(); + assert_eq!(call, u32::to_be_bytes(0x1ACF2D55)); + let data = + as AbiRead>>::abi_read(&mut decoder).unwrap(); + assert_eq!(data.len(), 3); + assert_eq!( + data, + vec![ + ( + H160(hex!("2D2FF76104B7BACB2E8F6731D5BFC184EBECDDBC")), + U256([10, 0, 0, 0]) + ), + ( + H160(hex!("AB8E3D9134955566483B11E6825C9223B6737B10")), + U256([20, 0, 0, 0]) + ), + ( + H160(hex!("8C582BDF2953046705FC56F189385255EFC1BE18")), + U256([30, 0, 0, 0]) + ), + ] + ); + } } diff --git a/crates/evm-coder/src/events.rs b/crates/evm-coder/src/events.rs index 05b057f0f4..0c4c955f7f 100644 --- a/crates/evm-coder/src/events.rs +++ b/crates/evm-coder/src/events.rs 
@@ -19,11 +19,23 @@ use primitive_types::{H160, H256}; use crate::types::*; +/// Implementation of this trait should not be written manually, +/// instead use [`crate::ToLog`] proc macros. +/// +/// See also [`evm_coder_procedural::ToLog`], [solidity docs on events](https://docs.soliditylang.org/en/develop/contracts.html#events) pub trait ToLog { + /// Convert event to [`ethereum::Log`]. + /// Because event by itself doesn't contain the current contract + /// address, it should be specified manually. fn to_log(&self, contract: H160) -> Log; } +/// Only items implementing `ToTopic` may be used as `#[indexed]` field +/// in [`crate::ToLog`] macro usage. +/// +/// See also [solidity docs on events](https://docs.soliditylang.org/en/develop/contracts.html#events) pub trait ToTopic { + /// Convert value to topic to be used in [`ethereum::Log`] fn to_topic(&self) -> H256; } diff --git a/crates/evm-coder/src/execution.rs index 61dddff4b6..c27ee59590 100644 --- a/crates/evm-coder/src/execution.rs +++ b/crates/evm-coder/src/execution.rs @@ -14,6 +14,8 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . +//! Contract execution related types + #[cfg(not(feature = "std"))] use alloc::string::{String, ToString}; use evm_core::{ExitError, ExitFatal}; @@ -22,10 +24,14 @@ use std::string::{String, ToString}; use crate::Weight; +/// Execution error, should be convertible between EVM and Substrate. #[derive(Debug, Clone)] pub enum Error { + /// Non-fatal contract error occurred Revert(String), + /// EVM fatal error Fatal(ExitFatal), + /// EVM normal error Error(ExitError), } @@ -38,9 +44,12 @@ where } } +/// To be used in [`crate::solidity_interface`] implementation. pub type Result = core::result::Result; +/// Static information collected from [`crate::weight`]. pub struct DispatchInfo { + /// Statically predicted call weight pub weight: Weight, } @@ -49,22 +58,37 @@ impl From for DispatchInfo { fn from(weight: Weight) -> Self { Self { weight } } } +impl From for DispatchInfo { + fn from(weight: u64) -> Self { + Self { + weight: Weight::from_ref_time(weight), + } + } +} impl From<()> for DispatchInfo { fn from(_: ()) -> Self { - Self { weight: 0 } + Self { + weight: Weight::zero(), + } } } +/// Weight information that is only available post dispatch. +/// Note: This can only be used to reduce the weight or fee, not increase it.
#[derive(Default, Clone)] pub struct PostDispatchInfo { + /// Actual weight consumed by call actual_weight: Option, } impl PostDispatchInfo { + /// Calculate amount to be returned back to user pub fn calc_unspent(&self, info: &DispatchInfo) -> Weight { info.weight - self.calc_actual_weight(info) } + /// Calculate actual consumed weight, saturating to weight reported + /// pre-dispatch pub fn calc_actual_weight(&self, info: &DispatchInfo) -> Weight { if let Some(actual_weight) = self.actual_weight { actual_weight.min(info.weight) @@ -74,9 +98,12 @@ impl PostDispatchInfo { } } +/// Wrapper for PostDispatchInfo and any user-provided data #[derive(Clone)] pub struct WithPostDispatchInfo { + /// User provided data pub data: T, + /// Info known after dispatch pub post_info: PostDispatchInfo, } @@ -89,5 +116,6 @@ impl From for WithPostDispatchInfo { } } +/// Return type of items in [`crate::solidity_interface`] definition pub type ResultWithPostInfo = core::result::Result, WithPostDispatchInfo>; diff --git a/crates/evm-coder/src/lib.rs b/crates/evm-coder/src/lib.rs index 136811bc37..fa247f2240 100644 --- a/crates/evm-coder/src/lib.rs +++ b/crates/evm-coder/src/lib.rs @@ -14,22 +14,103 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . +#![doc = include_str!("../README.md")] +#![deny(missing_docs)] #![cfg_attr(not(feature = "std"), no_std)] #[cfg(not(feature = "std"))] extern crate alloc; use abi::{AbiRead, AbiReader, AbiWriter}; -pub use evm_coder_macros::{event_topic, fn_selector, solidity_interface, solidity, weight, ToLog}; +pub use evm_coder_procedural::{event_topic, fn_selector}; pub mod abi; -pub mod events; -pub use events::ToLog; +pub use events::{ToLog, ToTopic}; use execution::DispatchInfo; pub mod execution; + +/// Derives call enum implementing [`crate::Callable`], [`crate::Weighted`] +/// and [`crate::Call`] from impl block. +/// +/// ## Macro syntax +/// +/// `#[solidity_interface(name, is, inline_is, events)]` +/// - *name* - used in generated code, and for Call enum name +/// - *is* - used to provide inheritance in Solidity +/// - *inline_is* - same as `is`, but ERC165::SupportsInterface will work differently: For `is` SupportsInterface(A) will return true +/// if A is one of the interfaces the contract is inherited from (e.g. B is created as `is(A)`). If B is created as `inline_is(A)` +/// SupportsInterface(A) will internally create a new interface that combines all methods of A and B, so SupportsInterface(A) will return +/// false. +/// +/// `#[weight(value)]` +/// Can be added to every method of impl block, used for deriving [`crate::Weighted`], which +/// is used by substrate bridge. +/// - *value*: expression, which evaluates to weight required to call this method. +/// This expression can use call arguments to calculate non-constant execution time. +/// This expression should evaluate faster than actual execution does, and may provide worse case +/// than one is called. +/// +/// `#[solidity_interface(rename_selector)]` +/// - *rename_selector* - by default, selector name will be generated by transforming method name +/// from snake_case to camelCase. Use this option, if other naming convention is required. +/// I.e: method `token_uri` will be automatically renamed to `tokenUri` in selector, but name +/// required by ERC721 standard is `tokenURI`, thus we need to specify `rename_selector = "tokenURI"` +/// explicitly. 
+/// +/// Both contract and contract methods may have doccomments, which will end up in a generated +/// solidity interface file, thus you should use [solidity syntax](https://docs.soliditylang.org/en/latest/natspec-format.html) for writing documentation in this macro +/// +/// ## Example +/// +/// ```ignore +/// struct SuperContract; +/// struct InlineContract; +/// struct Contract; +/// +/// #[derive(ToLog)] +/// enum ContractEvents { +/// Event(#[indexed] uint32), +/// } +/// +/// /// @dev This contract provides function to multiply two numbers +/// #[solidity_interface(name = MyContract, is(SuperContract), inline_is(InlineContract))] +/// impl Contract { +/// /// Multiply two numbers +/// /// @param a First number +/// /// @param b Second number +/// /// @return uint32 Product of two passed numbers +/// /// @dev This function returns error in case of overflow +/// #[weight(200 + a + b)] +/// #[solidity_interface(rename_selector = "mul")] +/// fn mul(&mut self, a: uint32, b: uint32) -> Result { +/// Ok(a.checked_mul(b).ok_or("overflow")?) +/// } +/// } +/// ``` +pub use evm_coder_procedural::solidity_interface; +/// See [`solidity_interface`] +pub use evm_coder_procedural::solidity; +/// See [`solidity_interface`] +pub use evm_coder_procedural::weight; + +/// Derives [`ToLog`] for enum +/// +/// Selectors will be derived from variant names, there is currently no way to have custom naming +/// for them +/// +/// `#[indexed]` +/// Marks this field as indexed, so it will appear in [`ethereum::Log`] topics instead of data +pub use evm_coder_procedural::ToLog; + +// Api of those modules shouldn't be consumed directly, it is only exported for usage in proc macros +#[doc(hidden)] +pub mod events; +#[doc(hidden)] pub mod solidity; -/// Solidity type definitions +/// Solidity type definitions (aliases from solidity name to rust type) +/// To be used in [`solidity_interface`] definitions, to make sure there is no +/// type conflict between Rust code and generated definitions pub mod types { - #![allow(non_camel_case_types)] + #![allow(non_camel_case_types, missing_docs)] #[cfg(not(feature = "std"))] use alloc::{vec::Vec}; @@ -54,6 +135,8 @@ pub mod types { pub type string = ::std::string::String; pub type bytes = Vec; + /// Solidity doesn't have `void` type, however we have special implementation + /// for empty tuple return type pub type void = (); //#region Special types @@ -63,36 +146,64 @@ pub mod types { pub type caller = address; //#endregion + /// Ethereum typed call message, similar to solidity + /// `msg` object. pub struct Msg { pub call: C, + /// Address of user, which called this contract. pub caller: H160, + /// Payment amount to contract. + /// Contract should reject payment, if target call is not payable, + /// and there is no `receiver()` function defined. 
pub value: U256, } } +/// Parseable EVM call, this trait should be implemented with [`solidity_interface`] macro pub trait Call: Sized { + /// Parse call buffer into typed call enum fn parse(selector: types::bytes4, input: &mut AbiReader) -> execution::Result>; } -pub type Weight = u64; +/// Intended to be used as `#[weight]` output type +/// Should be same between evm-coder and substrate to avoid confusion +/// +/// Isn't same thing as gas, some mapping is required between those types +pub type Weight = frame_support::weights::Weight; +/// In substrate, we have benchmarking, which allows +/// us to not rely on gas metering, but instead predict amount of gas to execute call pub trait Weighted: Call { + /// Predict weight of this call fn weight(&self) -> DispatchInfo; } +/// Type callable with ethereum message, may be implemented by [`solidity_interface`] macro +/// on interface implementation, or for externally-owned real EVM contract pub trait Callable { + /// Call contract using specified call data fn call(&mut self, call: types::Msg) -> execution::ResultWithPostInfo; } -/// Implementation is implicitly provided for all interfaces +/// Implementation of ERC165 is implicitly generated for all interfaces in [`solidity_interface`], +/// this structure holds parsed data for ERC165Call subvariant +/// +/// Note: no [`Callable`] implementation is provided, call implementation is inlined into every +/// implementing contract /// -/// Note: no Callable implementation is provided +/// See #[derive(Debug)] pub enum ERC165Call { - SupportsInterface { interface_id: types::bytes4 }, + /// ERC165 provides single method, which returns true, if contract + /// implements specified interface + SupportsInterface { + /// Requested interface + interface_id: types::bytes4, + }, } impl ERC165Call { + /// ERC165 selector is provided by standard pub const INTERFACE_ID: types::bytes4 = u32::to_be_bytes(0x01ffc9a7); } diff --git a/crates/evm-coder/src/solidity.rs b/crates/evm-coder/src/solidity.rs index 6625fd0ec8..0ceaab6f40 100644 --- a/crates/evm-coder/src/solidity.rs +++ b/crates/evm-coder/src/solidity.rs @@ -14,26 +14,31 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . +//! Implementation detail of [`crate::solidity_interface`] macro code-generation. +//! You should not rely on any public item from this module, as it is only intended to be used +//! by procedural macro, API and output format may be changed at any time. +//! +//! Purpose of this module is to receive solidity contract definition in module-specified +//! 
format, and then output string, representing interface of this contract in solidity language + #[cfg(not(feature = "std"))] -use alloc::{ - string::String, - vec::Vec, - collections::{BTreeSet, BTreeMap}, - format, -}; +use alloc::{string::String, vec::Vec, collections::BTreeMap, format}; #[cfg(feature = "std")] -use std::collections::{BTreeSet, BTreeMap}; +use std::collections::BTreeMap; use core::{ fmt::{self, Write}, marker::PhantomData, cell::{Cell, RefCell}, + cmp::Reverse, }; use impl_trait_for_tuples::impl_for_tuples; use crate::types::*; #[derive(Default)] pub struct TypeCollector { - structs: RefCell>, + /// Code => id + /// id ordering is required to perform topo-sort on the resulting data + structs: RefCell>, anonymous: RefCell, usize>>, id: Cell, } @@ -42,7 +47,8 @@ impl TypeCollector { Self::default() } pub fn collect(&self, item: string) { - self.structs.borrow_mut().insert(item); + let id = self.next_id(); + self.structs.borrow_mut().insert(item, id); } pub fn next_id(&self) -> usize { let v = self.id.get(); @@ -56,7 +62,7 @@ impl TypeCollector { } let id = self.next_id(); let mut str = String::new(); - writeln!(str, "// Anonymous struct").unwrap(); + writeln!(str, "/// @dev anonymous struct").unwrap(); writeln!(str, "struct Tuple{} {{", id).unwrap(); for (i, name) in names.iter().enumerate() { writeln!(str, "\t{} field_{};", name, i).unwrap(); @@ -66,15 +72,19 @@ impl TypeCollector { self.anonymous.borrow_mut().insert(names, id); format!("Tuple{}", id) } - pub fn finish(self) -> BTreeSet { - self.structs.into_inner() + pub fn finish(self) -> Vec { + let mut data = self.structs.into_inner().into_iter().collect::>(); + data.sort_by_key(|(_, id)| Reverse(*id)); + data.into_iter().map(|(code, _)| code).collect() } } pub trait SolidityTypeName: 'static { fn solidity_name(writer: &mut impl fmt::Write, tc: &TypeCollector) -> fmt::Result; + /// "simple" types are stored inline, no `memory` modifier should be used in solidity fn is_simple() -> bool; fn solidity_default(writer: &mut impl fmt::Write, tc: &TypeCollector) -> fmt::Result; + /// Specialization fn is_void() -> bool { false } @@ -125,6 +135,10 @@ impl SolidityTypeName for void { } mod sealed { + /// Not every type should be directly placed in vec. + /// Vec encoding is not memory efficient, as every item will be padded + /// to 32 bytes. + /// Instead you should use specialized types (`bytes` in case of `Vec`) pub trait CanBePlacedInVec {} } @@ -180,9 +194,16 @@ macro_rules! impl_tuples { fn is_simple() -> bool { false } + #[allow(unused_assignments)] fn solidity_default(writer: &mut impl fmt::Write, tc: &TypeCollector) -> fmt::Result { write!(writer, "{}(", tc.collect_tuple::())?; + let mut first = true; $( + if !first { + write!(writer, ",")?; + } else { + first = false; + } <$ident>::solidity_default(writer, tc)?; )* write!(writer, ")") @@ -204,7 +225,7 @@ impl_tuples! 
{A B C D E F G H I J} pub trait SolidityArguments { fn solidity_name(&self, writer: &mut impl fmt::Write, tc: &TypeCollector) -> fmt::Result; - fn solidity_get(&self, writer: &mut impl fmt::Write) -> fmt::Result; + fn solidity_get(&self, prefix: &str, writer: &mut impl fmt::Write) -> fmt::Result; fn solidity_default(&self, writer: &mut impl fmt::Write, tc: &TypeCollector) -> fmt::Result; fn is_empty(&self) -> bool { self.len() == 0 @@ -227,7 +248,7 @@ impl SolidityArguments for UnnamedArgument { Ok(()) } } - fn solidity_get(&self, _writer: &mut impl fmt::Write) -> fmt::Result { + fn solidity_get(&self, _prefix: &str, _writer: &mut impl fmt::Write) -> fmt::Result { Ok(()) } fn solidity_default(&self, writer: &mut impl fmt::Write, tc: &TypeCollector) -> fmt::Result { @@ -262,8 +283,8 @@ impl SolidityArguments for NamedArgument { Ok(()) } } - fn solidity_get(&self, writer: &mut impl fmt::Write) -> fmt::Result { - writeln!(writer, "\t\t{};", self.0) + fn solidity_get(&self, prefix: &str, writer: &mut impl fmt::Write) -> fmt::Result { + writeln!(writer, "\t{prefix}\t{};", self.0) } fn solidity_default(&self, writer: &mut impl fmt::Write, tc: &TypeCollector) -> fmt::Result { T::solidity_default(writer, tc) @@ -297,8 +318,8 @@ impl SolidityArguments for SolidityEventArgument { Ok(()) } } - fn solidity_get(&self, writer: &mut impl fmt::Write) -> fmt::Result { - writeln!(writer, "\t\t{};", self.1) + fn solidity_get(&self, prefix: &str, writer: &mut impl fmt::Write) -> fmt::Result { + writeln!(writer, "\t{prefix}\t{};", self.1) } fn solidity_default(&self, writer: &mut impl fmt::Write, tc: &TypeCollector) -> fmt::Result { T::solidity_default(writer, tc) @@ -316,7 +337,7 @@ impl SolidityArguments for () { fn solidity_name(&self, _writer: &mut impl fmt::Write, _tc: &TypeCollector) -> fmt::Result { Ok(()) } - fn solidity_get(&self, _writer: &mut impl fmt::Write) -> fmt::Result { + fn solidity_get(&self, _prefix: &str, _writer: &mut impl fmt::Write) -> fmt::Result { Ok(()) } fn solidity_default(&self, _writer: &mut impl fmt::Write, _tc: &TypeCollector) -> fmt::Result { @@ -344,9 +365,9 @@ impl SolidityArguments for Tuple { )* ); Ok(()) } - fn solidity_get(&self, writer: &mut impl fmt::Write) -> fmt::Result { + fn solidity_get(&self, prefix: &str, writer: &mut impl fmt::Write) -> fmt::Result { for_tuples!( #( - Tuple.solidity_get(writer)?; + Tuple.solidity_get(prefix, writer)?; )* ); Ok(()) } @@ -395,11 +416,14 @@ pub enum SolidityMutability { } pub struct SolidityFunction { pub docs: &'static [&'static str], - pub selector: &'static str, + pub selector_str: &'static str, + pub selector: u32, + pub hide: bool, pub name: &'static str, pub args: A, pub result: R, pub mutability: SolidityMutability, + pub is_payable: bool, } impl SolidityFunctions for SolidityFunction { fn solidity_name( @@ -408,14 +432,21 @@ impl SolidityFunctions for SolidityF writer: &mut impl fmt::Write, tc: &TypeCollector, ) -> fmt::Result { + let hide_comment = self.hide.then(|| "// ").unwrap_or(""); for doc in self.docs { - writeln!(writer, "\t//{}", doc)?; - } - if !self.docs.is_empty() { - writeln!(writer, "\t//")?; + writeln!(writer, "\t{hide_comment}///{}", doc)?; } - writeln!(writer, "\t// Selector: {}", self.selector)?; - write!(writer, "\tfunction {}(", self.name)?; + writeln!( + writer, + "\t{hide_comment}/// @dev EVM selector for this function is: 0x{:0>8x},", + self.selector + )?; + writeln!( + writer, + "\t{hide_comment}/// or in textual repr: {}", + self.selector_str + )?; + write!(writer, "\t{hide_comment}function {}(", 
self.name)?; self.args.solidity_name(writer, tc)?; write!(writer, ")")?; if is_impl { @@ -428,6 +459,9 @@ impl SolidityFunctions for SolidityF SolidityMutability::View => write!(writer, " view")?, SolidityMutability::Mutable => {} } + if self.is_payable { + write!(writer, " payable")?; + } if !self.result.is_empty() { write!(writer, " returns (")?; self.result.solidity_name(writer, tc)?; @@ -435,27 +469,30 @@ impl SolidityFunctions for SolidityF } if is_impl { writeln!(writer, " {{")?; - writeln!(writer, "\t\trequire(false, stub_error);")?; - self.args.solidity_get(writer)?; + writeln!(writer, "\t{hide_comment}\trequire(false, stub_error);")?; + self.args.solidity_get(hide_comment, writer)?; match &self.mutability { SolidityMutability::Pure => {} - SolidityMutability::View => writeln!(writer, "\t\tdummy;")?, - SolidityMutability::Mutable => writeln!(writer, "\t\tdummy = 0;")?, + SolidityMutability::View => writeln!(writer, "\t{hide_comment}\tdummy;")?, + SolidityMutability::Mutable => writeln!(writer, "\t{hide_comment}\tdummy = 0;")?, } if !self.result.is_empty() { - write!(writer, "\t\treturn ")?; + write!(writer, "\t{hide_comment}\treturn ")?; self.result.solidity_default(writer, tc)?; writeln!(writer, ";")?; } - writeln!(writer, "\t}}")?; + writeln!(writer, "\t{hide_comment}}}")?; } else { writeln!(writer, ";")?; } + if self.hide { + writeln!(writer, "// FORMATTING: FORCE NEWLINE")?; + } Ok(()) } } -#[impl_for_tuples(0, 24)] +#[impl_for_tuples(0, 48)] impl SolidityFunctions for Tuple { for_tuples!( where #( Tuple: SolidityFunctions ),* ); @@ -474,6 +511,7 @@ impl SolidityFunctions for Tuple { } pub struct SolidityInterface { + pub docs: &'static [&'static str], pub selector: bytes4, pub name: &'static str, pub is: &'static [&'static str], @@ -488,10 +526,13 @@ impl SolidityInterface { tc: &TypeCollector, ) -> fmt::Result { const ZERO_BYTES: [u8; 4] = [0; 4]; + for doc in self.docs { + writeln!(out, "///{}", doc)?; + } if self.selector != ZERO_BYTES { writeln!( out, - "// Selector: {:0>8x}", + "/// @dev the ERC-165 identifier for this interface is 0x{:0>8x}", u32::from_be_bytes(self.selector) )?; } diff --git a/crates/evm-coder/tests/conditional_is.rs b/crates/evm-coder/tests/conditional_is.rs new file mode 100644 index 0000000000..210decd309 --- /dev/null +++ b/crates/evm-coder/tests/conditional_is.rs @@ -0,0 +1,44 @@ +use evm_coder::{types::*, solidity_interface, execution::Result}; + +pub struct Contract(bool); + +#[solidity_interface(name = A)] +impl Contract { + fn method_a() -> Result { + Ok(()) + } +} + +#[solidity_interface(name = B)] +impl Contract { + fn method_b() -> Result { + Ok(()) + } +} + +#[solidity_interface(name = Contract, is( + A(if(this.0)), + B(if(!this.0)), +))] +impl Contract {} + +#[test] +fn conditional_erc165() { + assert!(ContractCall::supports_interface( + &Contract(true), + ACall::METHOD_A + )); + assert!(!ContractCall::supports_interface( + &Contract(false), + ACall::METHOD_A + )); + + assert!(ContractCall::supports_interface( + &Contract(false), + BCall::METHOD_B + )); + assert!(!ContractCall::supports_interface( + &Contract(true), + BCall::METHOD_B + )); +} diff --git a/crates/evm-coder/tests/generics.rs b/crates/evm-coder/tests/generics.rs index 5ca4bab56a..ab5cac9f82 100644 --- a/crates/evm-coder/tests/generics.rs +++ b/crates/evm-coder/tests/generics.rs @@ -17,16 +17,16 @@ use std::marker::PhantomData; use evm_coder::{execution::Result, generate_stubgen, solidity_interface, types::*}; -struct Generic(PhantomData); +pub struct Generic(PhantomData); 
-#[solidity_interface(name = "GenericIs")] +#[solidity_interface(name = GenericIs)] impl Generic { fn test_1(&self) -> Result { unreachable!() } } -#[solidity_interface(name = "Generic", is(GenericIs))] +#[solidity_interface(name = Generic, is(GenericIs))] impl> Generic { fn test_2(&self) -> Result { unreachable!() @@ -35,7 +35,7 @@ impl> Generic { generate_stubgen!(gen_iface, GenericCall<()>, false); -#[solidity_interface(name = "GenericWhere")] +#[solidity_interface(name = GenericWhere)] impl Generic where T: core::fmt::Debug, diff --git a/crates/evm-coder/tests/random.rs b/crates/evm-coder/tests/random.rs index 09ec1dd37a..47acff7280 100644 --- a/crates/evm-coder/tests/random.rs +++ b/crates/evm-coder/tests/random.rs @@ -16,19 +16,18 @@ #![allow(dead_code)] // This test only checks that macros is not panicking -use evm_coder::{ToLog, execution::Result, solidity_interface, types::*}; -use evm_coder_macros::{solidity, weight}; +use evm_coder::{ToLog, execution::Result, solidity_interface, types::*, solidity, weight}; -struct Impls; +pub struct Impls; -#[solidity_interface(name = "OurInterface")] +#[solidity_interface(name = OurInterface)] impl Impls { fn fn_a(&self, _input: uint256) -> Result { unreachable!() } } -#[solidity_interface(name = "OurInterface1")] +#[solidity_interface(name = OurInterface1)] impl Impls { fn fn_b(&self, _input: uint128) -> Result { unreachable!() @@ -48,7 +47,7 @@ enum OurEvents { } #[solidity_interface( - name = "OurInterface2", + name = OurInterface2, is(OurInterface), inline_is(OurInterface1), events(OurEvents) @@ -79,3 +78,9 @@ impl Impls { unreachable!() } } + +#[solidity_interface( + name = ValidSelector, + expect_selector = 0x00000000, +)] +impl Impls {} diff --git a/crates/evm-coder/tests/solidity_generation.rs b/crates/evm-coder/tests/solidity_generation.rs index 4a44582ba2..7ddc8ea500 100644 --- a/crates/evm-coder/tests/solidity_generation.rs +++ b/crates/evm-coder/tests/solidity_generation.rs @@ -16,9 +16,9 @@ use evm_coder::{execution::Result, generate_stubgen, solidity_interface, types::*}; -struct ERC20; +pub struct ERC20; -#[solidity_interface(name = "ERC20")] +#[solidity_interface(name = ERC20)] impl ERC20 { fn decimals(&self) -> Result { unreachable!() diff --git a/crates/struct-versioning/README.md b/crates/struct-versioning/README.md new file mode 100644 index 0000000000..4ea12d3c18 --- /dev/null +++ b/crates/struct-versioning/README.md @@ -0,0 +1,62 @@ +# struct-versioning + +The crate contains procedural macros for versioning data structures. +Macros [`versioned`] generate versioned variants of a struct. 
+ +Example: +``` +# use struct_versioning::versioned; +#[versioned(version = 5, first_version = 2)] +struct Example {} + +// versioned macro will generate suffixed versions of example struct, +// starting from `Version{first_version or 1}` to `Version{version}` inclusive +let _ver2 = ExampleVersion2 {}; +let _ver3 = ExampleVersion3 {}; +let _ver4 = ExampleVersion4 {}; +let _ver5 = ExampleVersion5 {}; + +// last version will also be aliased with original struct name +let _orig: Example = ExampleVersion5 {}; + +#[versioned(version = 2, upper)] +#[derive(PartialEq, Debug)] +struct Upper { + #[version(..2)] + removed: u32, + #[version(2.., upper(10))] + added: u32, + + #[version(..2)] + retyped: u32, + #[version(2.., upper(retyped as u64))] + retyped: u64, +} + +// #[version] attribute on field allows to specify, in which versions of structs this field should present +// versions here works as standard rust ranges, start is inclusive, end is exclusive +let _up1 = UpperVersion1 {removed: 1, retyped: 0}; +let _up2 = UpperVersion2 {added: 1, retyped: 0}; + +// and upper() allows to specify, which value should be assigned to this field in `From` impl +assert_eq!( + UpperVersion2::from(UpperVersion1 {removed: 0, retyped: 6}), + UpperVersion2 {added: 10, retyped: 6}, +); +``` + +In this case, the upgrade is described in `on_runtime_upgrade` using the `translate_values` substrate feature + +```ignore +#[pallet::hooks] +impl Hooks> for Pallet { + fn on_runtime_upgrade() -> Weight { + if StorageVersion::get::>() < StorageVersion::new(1) { + >::translate_values::(|v| { + Some(::from(v)) + }) + } + 0 + } +} +``` diff --git a/crates/struct-versioning/src/lib.rs b/crates/struct-versioning/src/lib.rs index 5a55e04c47..a3767827f1 100644 --- a/crates/struct-versioning/src/lib.rs +++ b/crates/struct-versioning/src/lib.rs @@ -1,3 +1,21 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +#![doc = include_str!("../README.md")] + use proc_macro::TokenStream; use quote::format_ident; use syn::{ diff --git a/doc/separate_rpc.md b/doc/separate_rpc.md new file mode 100644 index 0000000000..58d1066721 --- /dev/null +++ b/doc/separate_rpc.md @@ -0,0 +1,123 @@ +1. Create, in the `primitives` folder, a crate with a trait for RPC generation. + ```rust + sp_api::decl_runtime_apis! { + #[api_version(2)] + pub trait ModuleNameApi + where + CrossAccountId: pallet_evm::account::CrossAccountId, + { + fn method_name(user: Option) -> Result; + } + } + ``` + +2. client/rpc/src/lib.rs + * Add a trait with the required methods. Mark it with `#[rpc(server)]` and `#[async_trait]` directives. + ```rust + #[rpc(server)] + #[async_trait] + pub trait ModuleNameApi { + #[method(name = "moduleName_methodName")] + fn method_name(&self, user: Option, at: Option) + -> Result; + } + ``` + * Don't forget to write the correct method identifier in the form `moduleName_methodName`. 
+ * Add a structure for which the server API interface will be implemented. + ```rust + define_struct_for_server_api!(ModuleName); + ``` + * Define a macro to be used in the implementation of the server API interface. + ```rust + macro_rules! module_api { + () => { + dyn ModuleNameRuntimeApi + }; + } + ``` + * Implement a server API interface. + ```rust + impl + ModuleNameApiServer<::Hash, CrossAccountId> for ModuleName + where + Block: BlockT, + C: 'static + ProvideRuntimeApi + HeaderBackend, + C::Api: AppPromotionRuntimeApi, + CrossAccountId: pallet_evm::account::CrossAccountId, + { + pass_method!(method_name(user: Option) -> String => |v| v.to_string(), app_promotion_api); + } + ``` + +3. runtime/common/runtime_apis.rs + * Implement the `ModuleNameApi` interface for `Runtime`. Optionally, you can mark a feature flag to disable the functionality. + ```rust + impl MethodApi for Runtime { + fn method_name(user: Option) -> Result { + #[cfg(not(feature = "module"))] + return unsupported!(); + + #[cfg(feature = "module")] + return Ok(0); + } + } + ``` + +4. node/cli/src/service.rs + * Set the `MethodApi` bound in the `start_node_impl`, `start_node`, `start_dev_node` methods. + +5. node/rpc/src/lib.rs + * Add `MethodApi` bound to `create_full` method. + * Enable RPC in the `create_full` method by adding `io.merge(ModuleName::new(client.clone()).into_rpc())?;` + +6. Add a new crate (see point 1) into dependencies. + * client/rpc/Cargo.toml + * node/rpc/Cargo.toml + * runtime/opal/Cargo.toml + * runtime/quartz/Cargo.toml + * runtime/unique/Cargo.toml + +7. Create tests/src/interfaces/ModuleName/definitions.ts and describe the necessary methods in it. + ```ts + type RpcParam = { + name: string; + type: string; + isOptional?: true; + }; + + const CROSS_ACCOUNT_ID_TYPE = 'PalletEvmAccountBasicCrossAccountIdRepr'; + + const fun = (description: string, params: RpcParam[], type: string) => ({ + description, + params: [...params, atParam], + type, + }); + + export default { + types: {}, + rpc: { + methodName: fun( + 'Documentation for method', + [{name: 'user', type: CROSS_ACCOUNT_ID_TYPE, isOptional: true}], + 'u128', + ), + }, + }; + ``` + +8. Describe definitions from paragraph 7 in tests/src/interfaces/definitions.ts. + ```ts + export {default as ModuleName} from './module/definitions'; + ``` + +9. tests/src/substrate/substrate-api.ts + * Set the RPC interface in the `defaultApiOptions` function, add an entry in the `rpc` parameter + ```ts + module: defs.module.rpc, + ``` + +10. tests/src/util/playgrounds/unique.dev.ts + * Specify RPC interface in `connect` function, add entry in `rpc` parameter + ```ts + module: defs.module.rpc, + ``` \ No newline at end of file diff --git a/doc/sponsoring-flow.drawio.svg b/doc/sponsoring-flow.drawio.svg new file mode 100644 index 0000000000..2a8298df95 --- /dev/null +++ b/doc/sponsoring-flow.drawio.svg @@ -0,0 +1,305 @@ + + + + + + + + + + + + + + + +
+ [drawio SVG body omitted — sponsoring flow diagram. Recoverable node labels: User; EVM calls; Extrinsics; Ethereum; Substrate;
+ pallet_evm::TransactionValidityHack; pallet_charge_evm_transaction::Config::EvmSponsorshipHandler;
+ pallet_charge_evm_transaction::BridgeSponsorshipHandler; pallet_sponsoring::ChargeTransactionPayment;
+ pallet_sponsoring::Config::SponsorshipHandler; tx.evm.call; tx.others;
+ SponsorshipHandler<CrossAccountId, (H160, Vec<u8>), CallContext>; SponsorshipHandler<AccountId, Call, ()>;
+ fallback text: "Viewer does not support full SVG 1.1".]
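The diagram above (and `doc/sponsoring.md`, added in the next file) routes fee payment through two instantiations of a `SponsorshipHandler` trait. The following is a minimal, hypothetical Rust sketch of what such a handler boils down to: the trait shape mirrors the diagram labels (`SponsorshipHandler<AccountId, Call, Context>`), but the method name `get_sponsor`, the toy `SmallCallSponsor` type, and the sponsor account `42` are invented for illustration and are not taken from this diff.

```rust
use core::marker::PhantomData;

/// Hypothetical sketch: decides whether a call made by `who` should be
/// paid for by a sponsor instead of the caller.
pub trait SponsorshipHandler<AccountId, Call, Context = ()> {
    /// Return `Some(sponsor)` to charge the fee to `sponsor`,
    /// or `None` to fall back to the regular payer.
    fn get_sponsor(who: &AccountId, call: &Call, ctx: &Context) -> Option<AccountId>;
}

/// Toy example: sponsor every call whose payload is shorter than 32 bytes.
pub struct SmallCallSponsor<AccountId>(PhantomData<AccountId>);

impl SponsorshipHandler<u64, Vec<u8>> for SmallCallSponsor<u64> {
    fn get_sponsor(_who: &u64, call: &Vec<u8>, _ctx: &()) -> Option<u64> {
        const SPONSOR: u64 = 42; // hypothetical sponsor account
        (call.len() < 32).then_some(SPONSOR)
    }
}

fn main() {
    // A short payload is sponsored, a long one is not.
    assert_eq!(
        SmallCallSponsor::<u64>::get_sponsor(&1, &vec![0u8; 8], &()),
        Some(42)
    );
    assert_eq!(
        SmallCallSponsor::<u64>::get_sponsor(&1, &vec![0u8; 64], &()),
        None
    );
    println!("sponsorship sketch ok");
}
```

In the real runtime the decision would typically depend on collection or contract sponsorship settings rather than payload size; the point of the sketch is only the shape of the hook: per-call, returning an optional alternative fee payer.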
diff --git a/doc/sponsoring.md b/doc/sponsoring.md new file mode 100644 index 0000000000..9910d19f48 --- /dev/null +++ b/doc/sponsoring.md @@ -0,0 +1,13 @@ +# Sponsoring + +![Sponsoring flow](./sponsoring-flow.drawio.svg) + +## Implementation + +If you need to add sponsoring for pallet call, you should implement `SponsorshipHandler`, see `UniqueSponsorshipHandler` for example. + +If you need to add sponsoring for EVM contract call, you should implement `SponsorshipHandler), CallContext>`, see `UniqueEthSponsorshipHandler` for example. + +## EVM bridging + +In case if Ethereum call is being called using substrate `evm.call` extrinsic, `BridgeSponsorshipHandler` is used to convert between two different `SponsorshipHandler` types diff --git a/docker-compose-tests-parachain.yml b/docker-compose-tests-parachain.yml deleted file mode 100644 index ad91a80b5c..0000000000 --- a/docker-compose-tests-parachain.yml +++ /dev/null @@ -1,28 +0,0 @@ -version: "3.5" - -services: - blockchain_nodes: - build: - context: ./ - dockerfile: Dockerfile-parachain - args: - - RUST_TOOLCHAIN=${RUST_TOOLCHAIN:?err} - - RUST_C=${RUST_C:?err} - - POLKA_VERSION=${POLKA_VERSION:?err} - - UNIQUE_BRANCH=${UNIQUE_BRANCH:?err} - volumes: - - ./launch-config.json:/polkadot-launch/launch-config.json - env_file: - - ./.env - - integration_tests: - build: - context: tests/ - dockerfile: Dockerfile-tests - environment: - RPC_URL: http://blockchain_nodes:9933/ - volumes: - - ./tests/src/config_docker.ts:/unique_parachain/src/config.ts - - /home/ubuntu/mochawesome-report:/unique_parachain/mochawesome-report - depends_on: - - blockchain_nodes diff --git a/migrations.md b/migrations.md new file mode 100644 index 0000000000..70aeeb7fd2 --- /dev/null +++ b/migrations.md @@ -0,0 +1,35 @@ +# **930032 < 924010** + +### **pallet-common:** + +* Removed the previous migration of: + * if the storage version is below 1, all collections from storage **CollectionById** of struct **Collection** version 1 to version 2, consisting of: + * displacing _offchain_schema, variable_on_chain_schema, const_on_chain_schema, schema_version_ into _properties_ + * displacing _acccess, mint_mode_ into _permissions.access, permissions.mint_mode_ + * adding _external_collection_ flag +* Added unconditional bump of the storage version to 1 +* Replaced returned weight `0` with `Weight::zero()` + +### **pallet-nonfungible:** + +* Removed the previous migration of: + * if the storage version is below 1, all items from storage **TokenData** of struct **ItemData** version 1 to version 2, consisting of: + * displacing _const_data, variable_data_ into _properties_ + * adding permission for the collection admin to set the immutable __old_constData_ property +* Added unconditional bump of the storage version to 1 +* Replaced returned weight `0` with `Weight::zero()` + +### **pallet-refungible:** + +* Removed the previous migration of: + * if the storage version is below 1, all items from storage **TokenData** of struct **ItemData** version 1 to version 2, consisting of: + * removing _variable_data_ +* Added: + * if the storage version is below 2, cleaning of all storage of now-redundant **TokenData** + * unconditional bump of the storage version to 2 +* Replaced returned weight `0` with `Weight::zero()` + +### **pallet-unique:** + +* Removed the previous migration of: + * unconditional cleaning of all storage of **VariableMetaDataBasket** (cache for sponosoring setting deprecated variable metadata) diff --git a/node/cli/CHANGELOG.md b/node/cli/CHANGELOG.md new file mode 
100644 index 0000000000..b2c7754574 --- /dev/null +++ b/node/cli/CHANGELOG.md @@ -0,0 +1,21 @@ + + +## [v0.9.27] 2022-09-08 + +### Added +- Support RPC for `AppPromotion` pallet. + +## [v0.9.27] 2022-08-16 + +### Other changes + +- build: Upgrade polkadot to v0.9.27 2c498572636f2b34d53b1c51b7283a761a7dc90a + +- build: Upgrade polkadot to v0.9.26 85515e54c4ca1b82a2630034e55dcc804c643bf8 + +- refactor: Use export-genesis state from cumulus e1980179e647db8b299cca32cdc9e2b3bf5e51b2 + +We had our implementation for some reason, however it is now broken, and +I see no reason to keep it, as upstream implements exact same options + +- build: Upgrade polkadot to v0.9.25 cdfb9bdc7b205ff1b5134f034ef9973d769e5e6b diff --git a/node/cli/Cargo.toml b/node/cli/Cargo.toml index 5b860175f2..3c32b00684 100644 --- a/node/cli/Cargo.toml +++ b/node/cli/Cargo.toml @@ -3,7 +3,7 @@ [build-dependencies.substrate-build-script-utils] git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" ################################################################################ # Substrate Dependecies @@ -16,158 +16,158 @@ version = '3.1.2' [dependencies.frame-benchmarking] git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.frame-benchmarking-cli] git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.try-runtime-cli] -git = 'https://github.com/paritytech/substrate' -branch = 'polkadot-v0.9.24' +git = "https://github.com/paritytech/substrate" +branch = "polkadot-v0.9.30" [dependencies.pallet-transaction-payment-rpc] git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.substrate-prometheus-endpoint] git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sc-basic-authorship] git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sc-chain-spec] git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sc-cli] features = ['wasmtime'] git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sc-client-api] git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sc-consensus] git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sc-consensus-aura] git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sc-executor] features = ['wasmtime'] git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sc-finality-grandpa] git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sc-keystore] git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sc-rpc] git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sc-rpc-api] git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sc-service] features = ['wasmtime'] git = 
"https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sc-telemetry] git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sc-transaction-pool] git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sc-tracing] git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sc-sysinfo] git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-block-builder] git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-api] git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-blockchain] git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-consensus] git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-consensus-aura] git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-core] git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-finality-grandpa] git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-inherents] git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-keystore] git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-offchain] git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-runtime] git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-session] git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-timestamp] git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-transaction-pool] git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-trie] git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.substrate-frame-rpc-system] git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sc-network] git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.serde] features = ['derive'] @@ -178,84 +178,83 @@ version = '1.0.68' [dependencies.sc-consensus-manual-seal] git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" ################################################################################ # Cumulus dependencies [dependencies.cumulus-client-consensus-aura] git = "https://github.com/paritytech/cumulus" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.cumulus-client-consensus-common] git = "https://github.com/paritytech/cumulus" -branch = "polkadot-v0.9.24" +branch = 
"polkadot-v0.9.30" [dependencies.cumulus-client-collator] git = "https://github.com/paritytech/cumulus" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.cumulus-client-cli] git = "https://github.com/paritytech/cumulus" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.cumulus-client-network] git = "https://github.com/paritytech/cumulus" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.cumulus-primitives-core] git = "https://github.com/paritytech/cumulus" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.cumulus-primitives-parachain-inherent] git = "https://github.com/paritytech/cumulus" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.cumulus-client-service] git = "https://github.com/paritytech/cumulus" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.cumulus-relay-chain-interface] git = "https://github.com/paritytech/cumulus" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.cumulus-relay-chain-inprocess-interface] git = "https://github.com/paritytech/cumulus" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.cumulus-relay-chain-rpc-interface] git = "https://github.com/paritytech/cumulus" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" ################################################################################ # Polkadot dependencies [dependencies.polkadot-primitives] git = "https://github.com/paritytech/polkadot" -branch = "release-v0.9.24" +branch = "release-v0.9.30" [dependencies.polkadot-service] git = "https://github.com/paritytech/polkadot" -branch = "release-v0.9.24" +branch = "release-v0.9.30" [dependencies.polkadot-cli] git = "https://github.com/paritytech/polkadot" -branch = "release-v0.9.24" +branch = "release-v0.9.30" [dependencies.polkadot-test-service] git = "https://github.com/paritytech/polkadot" -branch = "release-v0.9.24" +branch = "release-v0.9.30" [dependencies.polkadot-parachain] git = "https://github.com/paritytech/polkadot" -branch = "release-v0.9.24" +branch = "release-v0.9.30" ################################################################################ # Local dependencies -[dependencies.unique-runtime-common] -default-features = false -path = "../../runtime/common" +[dependencies.up-common] +path = "../../primitives/common" [dependencies.unique-runtime] path = '../../runtime/unique' @@ -267,6 +266,7 @@ optional = true [dependencies.opal-runtime] path = '../../runtime/opal' +optional = true [dependencies.up-data-structs] path = "../../primitives/data-structs" @@ -277,7 +277,7 @@ path = "../../primitives/rpc" [dependencies.pallet-transaction-payment-rpc-runtime-api] git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" ################################################################################ # Package @@ -291,7 +291,7 @@ homepage = 'https://unique.network' license = 'GPLv3' name = 'unique-node' repository = 'https://github.com/UniqueNetwork/unique-chain' -version = '0.9.24' +version = "0.9.30" [[bin]] name = 'unique-collator' @@ -302,30 +302,36 @@ targets = ['x86_64-unknown-linux-gnu'] [dependencies] futures = '0.3.17' -log = '0.4.14' -flexi_logger = "0.15.7" -parking_lot = '0.11.2' +log = '0.4.16' +flexi_logger = "0.22.5" +parking_lot = '0.12.1' clap = "3.1.2" -jsonrpsee = { version = "0.13.0", features = ["server", "macros"] } -tokio = { version = "1.17.0", features = ["time"] } +jsonrpsee = { version = 
"0.15.1", features = ["server", "macros"] } +tokio = { version = "1.19.2", features = ["time"] } -fc-rpc-core = { git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } -fc-consensus = { git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } -fc-mapping-sync = { git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } -fc-rpc = { git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } -fc-db = { git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } -fp-rpc = { git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } -pallet-ethereum = { git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } +fc-rpc-core = { git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } +fc-consensus = { git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } +fc-mapping-sync = { git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } +fc-rpc = { git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } +fc-db = { git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } +fp-rpc = { git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } +pallet-ethereum = { git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } unique-rpc = { default-features = false, path = "../rpc" } +app-promotion-rpc = { path = "../../primitives/app_promotion_rpc", default-features = false } rmrk-rpc = { path = "../../primitives/rmrk-rpc" } [features] -default = ['opal-runtime/opal-runtime', 'quartz-runtime?/quartz-runtime'] +default = ["opal-runtime"] runtime-benchmarks = [ 'unique-runtime?/runtime-benchmarks', 'quartz-runtime?/runtime-benchmarks', 'opal-runtime/runtime-benchmarks', 'polkadot-service/runtime-benchmarks', + 'sc-service/runtime-benchmarks', +] +try-runtime = [ + 'unique-runtime?/try-runtime', + 'quartz-runtime?/try-runtime', + 'opal-runtime?/try-runtime', ] -try-runtime = [] diff --git a/node/cli/src/chain_spec.rs b/node/cli/src/chain_spec.rs index a4bb925fe2..834f7c8b3d 100644 --- a/node/cli/src/chain_spec.rs +++ b/node/cli/src/chain_spec.rs @@ -23,7 +23,7 @@ use std::collections::BTreeMap; use serde::{Deserialize, Serialize}; use serde_json::map::Map; -use unique_runtime_common::types::*; +use up_common::types::opaque::*; #[cfg(feature = "unique-runtime")] pub use unique_runtime as default_runtime; @@ -65,6 +65,14 @@ pub enum RuntimeId { Unknown(String), } +#[cfg(not(feature = "unique-runtime"))] +/// PARA_ID for Opal/Quartz +const PARA_ID: u32 = 2095; + +#[cfg(feature = "unique-runtime")] +/// PARA_ID for Unique +const PARA_ID: u32 = 2037; + pub trait RuntimeIdentification { fn runtime_id(&self) -> RuntimeId; } @@ -115,7 +123,7 @@ pub fn get_from_seed(seed: &str) -> ::Pu .public() } -/// The extensions for the [`ChainSpec`]. +/// The extensions for the [`DefaultChainSpec`]. #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, ChainSpecGroup, ChainSpecExtension)] #[serde(deny_unknown_fields)] pub struct Extensions { @@ -167,6 +175,7 @@ macro_rules! 
testnet_genesis { .collect(), }, treasury: Default::default(), + tokens: TokensConfig { balances: vec![] }, sudo: SudoConfig { key: Some($root_key), }, @@ -235,7 +244,7 @@ pub fn development_config() -> DefaultChainSpec { get_account_id_from_seed::("Eve//stash"), get_account_id_from_seed::("Ferdie//stash"), ], - 1000 + PARA_ID ) }, // Bootnodes @@ -250,7 +259,7 @@ pub fn development_config() -> DefaultChainSpec { // Extensions Extensions { relay_chain: "rococo-dev".into(), - para_id: 1000, + para_id: PARA_ID, }, ) } @@ -303,7 +312,7 @@ pub fn local_testnet_config() -> DefaultChainSpec { get_account_id_from_seed::("Eve//stash"), get_account_id_from_seed::("Ferdie//stash"), ], - 1000 + PARA_ID ) }, // Bootnodes @@ -318,7 +327,7 @@ pub fn local_testnet_config() -> DefaultChainSpec { // Extensions Extensions { relay_chain: "westend-local".into(), - para_id: 1000, + para_id: PARA_ID, }, ) } diff --git a/node/cli/src/cli.rs b/node/cli/src/cli.rs index e1080c1e7c..62bab20472 100644 --- a/node/cli/src/cli.rs +++ b/node/cli/src/cli.rs @@ -24,12 +24,10 @@ const NODE_NAME_ENV: &str = "UNIQUE_NODE_NAME"; #[derive(Debug, Parser)] pub enum Subcommand { /// Export the genesis state of the parachain. - #[clap(name = "export-genesis-state")] - ExportGenesisState(ExportGenesisStateCommand), + ExportGenesisState(cumulus_client_cli::ExportGenesisStateCommand), /// Export the genesis wasm of the parachain. - #[clap(name = "export-genesis-wasm")] - ExportGenesisWasm(ExportGenesisWasmCommand), + ExportGenesisWasm(cumulus_client_cli::ExportGenesisWasmCommand), /// Build a chain specification. BuildSpec(sc_cli::BuildSpecCmd), @@ -54,50 +52,13 @@ pub enum Subcommand { /// The custom benchmark subcommmand benchmarking runtime pallets. #[clap(subcommand)] + #[cfg(feature = "runtime-benchmarks")] Benchmark(frame_benchmarking_cli::BenchmarkCmd), /// Try runtime TryRuntime(try_runtime_cli::TryRuntimeCmd), } -/// Command for exporting the genesis state of the parachain -#[derive(Debug, Parser)] -pub struct ExportGenesisStateCommand { - /// Output file name or stdout if unspecified. - #[clap(parse(from_os_str))] - pub output: Option, - - /// Id of the parachain this state is for. - /// - /// Default: 100 - #[clap(long, conflicts_with = "chain")] - pub parachain_id: Option, - - /// Write output in binary. Default is to write in hex. - #[clap(short, long)] - pub raw: bool, - - /// The name of the chain for that the genesis state should be exported. - #[clap(long, conflicts_with = "parachain-id")] - pub chain: Option, -} - -/// Command for exporting the genesis wasm file. -#[derive(Debug, Parser)] -pub struct ExportGenesisWasmCommand { - /// Output file name or stdout if unspecified. - #[clap(parse(from_os_str))] - pub output: Option, - - /// Write output in binary. Default is to write in hex. - #[clap(short, long)] - pub raw: bool, - - /// The name of the chain for that the genesis wasm file should be exported. 
- #[clap(long)] - pub chain: Option, -} - #[derive(Debug, Parser)] #[clap(args_conflicts_with_subcommands = true, subcommand_negates_reqs = true)] pub struct Cli { diff --git a/node/cli/src/command.rs b/node/cli/src/command.rs index 3b494ef4e0..211e6b6b75 100644 --- a/node/cli/src/command.rs +++ b/node/cli/src/command.rs @@ -50,7 +50,7 @@ use crate::service::{OpalRuntimeExecutor, DefaultRuntimeExecutor}; use codec::Encode; use cumulus_primitives_core::ParaId; -use cumulus_client_service::genesis::generate_genesis_block; +use cumulus_client_cli::generate_genesis_block; use std::{future::Future, pin::Pin}; use log::info; use sc_cli::{ @@ -62,9 +62,9 @@ use sc_service::{ }; use sp_core::hexdisplay::HexDisplay; use sp_runtime::traits::{AccountIdConversion, Block as BlockT}; -use std::{io::Write, net::SocketAddr, time::Duration}; +use std::{net::SocketAddr, time::Duration}; -use unique_runtime_common::types::Block; +use up_common::types::opaque::Block; macro_rules! no_runtime_err { ($chain_name:expr) => { @@ -191,16 +191,6 @@ impl SubstrateCli for RelayChainCli { } } -#[allow(clippy::borrowed_box)] -fn extract_genesis_wasm(chain_spec: &Box) -> Result> { - let mut storage = chain_spec.build_storage()?; - - storage - .top - .remove(sp_core::storage::well_known_keys::CODE) - .ok_or_else(|| "Could not find wasm file in genesis state!".into()) -} - macro_rules! async_run_with_runtime { ( $runtime_api:path, $executor:path, @@ -248,6 +238,45 @@ macro_rules! construct_async_run { }} } +macro_rules! sync_run_with_runtime { + ( + $runtime_api:path, $executor:path, + $runner:ident, $components:ident, $cli:ident, $cmd:ident, $config:ident, + $( $code:tt )* + ) => { + $runner.sync_run(|$config| { + $( $code )* + }) + }; +} + +macro_rules! construct_sync_run { + (|$components:ident, $cli:ident, $cmd:ident, $config:ident| $( $code:tt )* ) => {{ + let runner = $cli.create_runner($cmd)?; + + match runner.config().chain_spec.runtime_id() { + #[cfg(feature = "unique-runtime")] + RuntimeId::Unique => sync_run_with_runtime!( + unique_runtime::RuntimeApi, UniqueRuntimeExecutor, + runner, $components, $cli, $cmd, $config, $( $code )* + ), + + #[cfg(feature = "quartz-runtime")] + RuntimeId::Quartz => sync_run_with_runtime!( + quartz_runtime::RuntimeApi, QuartzRuntimeExecutor, + runner, $components, $cli, $cmd, $config, $( $code )* + ), + + RuntimeId::Opal => sync_run_with_runtime!( + opal_runtime::RuntimeApi, OpalRuntimeExecutor, + runner, $components, $cli, $cmd, $config, $( $code )* + ), + + RuntimeId::Unknown(chain) => Err(no_runtime_err!(chain).into()) + } + }} +} + macro_rules! start_node_using_chain_runtime { ($start_node_fn:ident($config:expr $(, $($args:expr),+)?) 
$($code:tt)*) => { match $config.chain_spec.runtime_id() { @@ -329,63 +358,27 @@ pub fn run() -> Result<()> { Some(Subcommand::Revert(cmd)) => construct_async_run!(|components, cli, cmd, config| { Ok(cmd.run(components.client, components.backend, None)) }), - Some(Subcommand::ExportGenesisState(params)) => { - let mut builder = sc_cli::LoggerBuilder::new(""); - builder.with_profiling(sc_tracing::TracingReceiver::Log, ""); - let _ = builder.init(); - - let spec = load_spec(¶ms.chain.clone().unwrap_or_default())?; - let state_version = Cli::native_runtime_version(&spec).state_version(); - let block: Block = generate_genesis_block(&spec, state_version)?; - let raw_header = block.header().encode(); - let output_buf = if params.raw { - raw_header - } else { - format!("0x{:?}", HexDisplay::from(&block.header().encode())).into_bytes() - }; - - if let Some(output) = ¶ms.output { - std::fs::write(output, output_buf)?; - } else { - std::io::stdout().write_all(&output_buf)?; - } - - Ok(()) + Some(Subcommand::ExportGenesisState(cmd)) => { + construct_sync_run!(|components, cli, cmd, _config| { + let spec = cli.load_spec(&cmd.shared_params.chain.clone().unwrap_or_default())?; + let state_version = Cli::native_runtime_version(&spec).state_version(); + cmd.run::(&*spec, state_version) + }) } - Some(Subcommand::ExportGenesisWasm(params)) => { - let mut builder = sc_cli::LoggerBuilder::new(""); - builder.with_profiling(sc_tracing::TracingReceiver::Log, ""); - let _ = builder.init(); - - let raw_wasm_blob = - extract_genesis_wasm(&cli.load_spec(¶ms.chain.clone().unwrap_or_default())?)?; - let output_buf = if params.raw { - raw_wasm_blob - } else { - format!("0x{:?}", HexDisplay::from(&raw_wasm_blob)).into_bytes() - }; - - if let Some(output) = ¶ms.output { - std::fs::write(output, output_buf)?; - } else { - std::io::stdout().write_all(&output_buf)?; - } - - Ok(()) + Some(Subcommand::ExportGenesisWasm(cmd)) => { + construct_sync_run!(|components, cli, cmd, _config| { + let spec = cli.load_spec(&cmd.shared_params.chain.clone().unwrap_or_default())?; + cmd.run(&*spec) + }) } + #[cfg(feature = "runtime-benchmarks")] Some(Subcommand::Benchmark(cmd)) => { use frame_benchmarking_cli::{BenchmarkCmd, SUBSTRATE_REFERENCE_HARDWARE}; let runner = cli.create_runner(cmd)?; // Switch on the concrete benchmark sub-command- match cmd { BenchmarkCmd::Pallet(cmd) => { - if cfg!(feature = "runtime-benchmarks") { - runner.sync_run(|config| cmd.run::(config)) - } else { - Err("Benchmarking wasn't enabled when building the node. \ - You can enable it with `--features runtime-benchmarks`." 
- .into()) - } + runner.sync_run(|config| cmd.run::(config)) } BenchmarkCmd::Block(cmd) => runner.sync_run(|config| { let partials = new_partial::< @@ -409,7 +402,9 @@ pub fn run() -> Result<()> { BenchmarkCmd::Machine(cmd) => { runner.sync_run(|config| cmd.run(&config, SUBSTRATE_REFERENCE_HARDWARE.clone())) } - BenchmarkCmd::Overhead(_) => Err("Unsupported benchmarking command".into()), + BenchmarkCmd::Overhead(_) | BenchmarkCmd::Extrinsic(_) => { + Err("Unsupported benchmarking command".into()) + } } } Some(Subcommand::TryRuntime(cmd)) => { @@ -500,7 +495,7 @@ pub fn run() -> Result<()> { let state_version = RelayChainCli::native_runtime_version(&config.chain_spec).state_version(); - let block: Block = generate_genesis_block(&config.chain_spec, state_version) + let block: Block = generate_genesis_block(&*config.chain_spec, state_version) .map_err(|e| format!("{:?}", e))?; let genesis_state = format!("0x{:?}", HexDisplay::from(&block.header().encode())); let genesis_hash = format!("0x{:?}", HexDisplay::from(&block.header().hash().0)); @@ -574,7 +569,7 @@ impl CliConfiguration for RelayChainCli { fn base_path(&self) -> Result> { Ok(self .shared_params() - .base_path() + .base_path()? .or_else(|| self.base_path.clone().map(Into::into))) } @@ -624,12 +619,8 @@ impl CliConfiguration for RelayChainCli { self.base.base.role(is_dev) } - fn transaction_pool(&self) -> Result { - self.base.base.transaction_pool() - } - - fn state_cache_child_ratio(&self) -> Result> { - self.base.base.state_cache_child_ratio() + fn transaction_pool(&self, is_dev: bool) -> Result { + self.base.base.transaction_pool(is_dev) } fn rpc_methods(&self) -> Result { diff --git a/node/cli/src/service.rs b/node/cli/src/service.rs index 708e749fe5..6972adcd1d 100644 --- a/node/cli/src/service.rs +++ b/node/cli/src/service.rs @@ -43,14 +43,14 @@ use cumulus_client_network::BlockAnnounceValidator; use cumulus_primitives_core::ParaId; use cumulus_relay_chain_inprocess_interface::build_inprocess_relay_chain; use cumulus_relay_chain_interface::{RelayChainError, RelayChainInterface, RelayChainResult}; -use cumulus_relay_chain_rpc_interface::RelayChainRPCInterface; +use cumulus_relay_chain_rpc_interface::{RelayChainRpcInterface, create_client_and_start_worker}; // Substrate Imports use sc_client_api::ExecutorProvider; use sc_executor::NativeElseWasmExecutor; use sc_executor::NativeExecutionDispatch; -use sc_network::NetworkService; -use sc_service::{BasePath, Configuration, PartialComponents, Role, TaskManager}; +use sc_network::{NetworkService, NetworkBlock}; +use sc_service::{BasePath, Configuration, PartialComponents, TaskManager}; use sc_telemetry::{Telemetry, TelemetryHandle, TelemetryWorker, TelemetryWorkerHandle}; use sp_keystore::SyncCryptoStorePtr; use sp_runtime::traits::BlakeTwo256; @@ -63,7 +63,9 @@ use polkadot_service::CollatorPair; use fc_rpc_core::types::FilterPool; use fc_mapping_sync::{MappingSyncWorker, SyncStrategy}; -use unique_runtime_common::types::{AuraId, RuntimeInstance, AccountId, Balance, Index, Hash, Block}; +use up_common::types::opaque::{ + AuraId, RuntimeInstance, AccountId, Balance, Index, Hash, Block, BlockNumber, +}; // RMRK use up_data_structs::{ @@ -318,10 +320,14 @@ async fn build_relay_chain_interface( Option, )> { match collator_options.relay_chain_rpc_url { - Some(relay_chain_url) => Ok(( - Arc::new(RelayChainRPCInterface::new(relay_chain_url).await?) 
as Arc<_>, - None, - )), + Some(relay_chain_url) => { + let rpc_client = create_client_and_start_worker(relay_chain_url, task_manager).await?; + + Ok(( + Arc::new(RelayChainRpcInterface::new(rpc_client)) as Arc<_>, + None, + )) + } None => build_inprocess_relay_chain( polkadot_config, parachain_config, @@ -362,6 +368,7 @@ where + pallet_transaction_payment_rpc_runtime_api::TransactionPaymentApi + sp_api::ApiExt> + up_rpc::UniqueApi + + app_promotion_rpc::AppPromotionApi + rmrk_rpc::RmrkApi< Block, AccountId, @@ -398,10 +405,6 @@ where bool, ) -> Result>, sc_service::Error>, { - if matches!(parachain_config.role, Role::Light) { - return Err("Light client not supported!".into()); - } - let parachain_config = prepare_node_config(parachain_config); let params = @@ -435,7 +438,7 @@ where let transaction_pool = params.transaction_pool.clone(); let import_queue = cumulus_client_service::SharedImportQueue::new(params.import_queue); - let (network, system_rpc_tx, start_network) = + let (network, system_rpc_tx, tx_handler_controller, start_network) = sc_service::build_network(sc_service::BuildNetworkParams { config: ¶chain_config, client: client.clone(), @@ -518,6 +521,7 @@ where network: network.clone(), system_rpc_tx, telemetry: telemetry.as_mut(), + tx_handler_controller, })?; if let Some(hwbench) = hwbench { @@ -535,7 +539,9 @@ where let announce_block = { let network = network.clone(); - Arc::new(move |hash, data| network.announce_block(hash, data)) + Arc::new(Box::new(move |hash, data| { + network.announce_block(hash, data) + })) }; let relay_chain_slot_duration = Duration::from_secs(6); @@ -620,7 +626,6 @@ where _, _, _, - _, >(cumulus_client_consensus_aura::ImportQueueParams { block_import: client.clone(), client: client.clone(), @@ -633,10 +638,9 @@ where slot_duration, ); - Ok((time, slot)) + Ok((slot, time)) }, registry: config.prometheus_registry(), - can_author_with: sp_consensus::CanAuthorWithNativeVersion::new(client.executor().clone()), spawner: &task_manager.spawn_essential_handle(), telemetry, }) @@ -667,6 +671,7 @@ where + pallet_transaction_payment_rpc_runtime_api::TransactionPaymentApi + sp_api::ApiExt> + up_rpc::UniqueApi + + app_promotion_rpc::AppPromotionApi + rmrk_rpc::RmrkApi< Block, AccountId, @@ -743,7 +748,7 @@ where "Failed to create parachain inherent", ) })?; - Ok((time, slot, parachain_inherent)) + Ok((slot, time, parachain_inherent)) } }, block_import: client.clone(), @@ -811,6 +816,7 @@ where + pallet_transaction_payment_rpc_runtime_api::TransactionPaymentApi + sp_api::ApiExt> + up_rpc::UniqueApi + + app_promotion_rpc::AppPromotionApi + rmrk_rpc::RmrkApi< Block, AccountId, @@ -856,7 +862,7 @@ where prometheus_registry.clone(), )); - let (network, system_rpc_tx, network_starter) = + let (network, system_rpc_tx, tx_handler_controller, network_starter) = sc_service::build_network(sc_service::BuildNetworkParams { config: &config, client: client.clone(), @@ -947,12 +953,14 @@ where current_para_block, relay_offset: 1000, relay_blocks_per_para_block: 2, + para_blocks_per_relay_epoch: 0, xcm_config: cumulus_primitives_parachain_inherent::MockXcmConfig::new( &*client_for_xcm, block, Default::default(), Default::default(), ), + relay_randomness_config: (), raw_downward_messages: vec![], raw_horizontal_messages: vec![], }; @@ -1029,6 +1037,7 @@ where system_rpc_tx, config, telemetry: None, + tx_handler_controller, })?; network_starter.start_network(); diff --git a/node/rpc/CHANGELOG.md b/node/rpc/CHANGELOG.md new file mode 100644 index 0000000000..3bdb41378c --- /dev/null 
+++ b/node/rpc/CHANGELOG.md @@ -0,0 +1,15 @@ + +## [v0.1.2] 2022-09-08 + +### Added +- Support RPC for `AppPromotion` pallet. + +## [v0.1.1] 2022-08-16 + +### Other changes + +- build: Upgrade polkadot to v0.9.27 2c498572636f2b34d53b1c51b7283a761a7dc90a + +- build: Upgrade polkadot to v0.9.26 85515e54c4ca1b82a2630034e55dcc804c643bf8 + +- build: Upgrade polkadot to v0.9.25 cdfb9bdc7b205ff1b5134f034ef9973d769e5e6b \ No newline at end of file diff --git a/node/rpc/Cargo.toml b/node/rpc/Cargo.toml index f567c6b070..1bfa18bf3e 100644 --- a/node/rpc/Cargo.toml +++ b/node/rpc/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "unique-rpc" -version = "0.1.0" +version = "0.1.2" authors = ['Unique Network '] license = 'GPLv3' edition = "2021" @@ -11,48 +11,49 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] futures = { version = "0.3.17", features = ["compat"] } -jsonrpsee = { version = "0.13.0", features = ["server", "macros"] } +jsonrpsee = { version = "0.15.1", features = ["server", "macros"] } # pallet-contracts-rpc = { git = 'https://github.com/paritytech/substrate', branch = 'master' } -pallet-transaction-payment-rpc = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -pallet-transaction-payment-rpc-runtime-api = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sc-client-api = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sc-consensus-aura = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sc-consensus-epochs = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sc-finality-grandpa = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sc-finality-grandpa-rpc = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sc-keystore = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sc-network = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sc-rpc-api = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sc-rpc = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sc-service = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-api = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-block-builder = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-blockchain = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-consensus = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-consensus-aura = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-core = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-offchain = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-runtime = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-storage = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-session = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-transaction-pool = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sc-transaction-pool = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -substrate-frame-rpc-system = { git = 
"https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -tokio = { version = "0.2.25", features = ["macros", "sync"] } +pallet-transaction-payment-rpc = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +pallet-transaction-payment-rpc-runtime-api = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sc-client-api = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sc-consensus-aura = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sc-consensus-epochs = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sc-finality-grandpa = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sc-finality-grandpa-rpc = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sc-keystore = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sc-network = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sc-rpc-api = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sc-rpc = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sc-service = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-api = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-block-builder = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-blockchain = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-consensus = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-consensus-aura = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-core = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-offchain = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-runtime = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-storage = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-session = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-transaction-pool = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sc-transaction-pool = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +substrate-frame-rpc-system = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +tokio = { version = "1.19.2", features = ["macros", "sync"] } -pallet-ethereum = { git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } -fc-rpc = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } -fp-rpc = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } -fp-storage = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } -fc-rpc-core = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } -fc-db = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } -fc-mapping-sync = { default-features = false, git = "https://github.com/uniquenetwork/frontier", 
branch = "unique-polkadot-v0.9.24" } +pallet-ethereum = { git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } +fc-rpc = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } +fp-rpc = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } +fp-storage = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } +fc-rpc-core = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } +fc-db = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } +fc-mapping-sync = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } pallet-common = { default-features = false, path = "../../pallets/common" } -unique-runtime-common = { default-features = false, path = "../../runtime/common" } +up-common = { path = "../../primitives/common" } pallet-unique = { path = "../../pallets/unique" } uc-rpc = { path = "../../client/rpc" } up-rpc = { path = "../../primitives/rpc" } +app-promotion-rpc = { path = "../../primitives/app_promotion_rpc"} rmrk-rpc = { path = "../../primitives/rmrk-rpc" } up-data-structs = { default-features = false, path = "../../primitives/data-structs" } diff --git a/node/rpc/src/lib.rs b/node/rpc/src/lib.rs index 50ec266198..3c3b46df6f 100644 --- a/node/rpc/src/lib.rs +++ b/node/rpc/src/lib.rs @@ -40,9 +40,8 @@ use sp_blockchain::{Error as BlockChainError, HeaderBackend, HeaderMetadata}; use sc_service::TransactionPool; use std::{collections::BTreeMap, sync::Arc}; -use unique_runtime_common::types::{ - Hash, AccountId, RuntimeInstance, Index, Block, BlockNumber, Balance, -}; +use up_common::types::opaque::{Hash, AccountId, RuntimeInstance, Index, Block, BlockNumber, Balance}; + // RMRK use up_data_structs::{ RmrkCollectionInfo, RmrkInstanceInfo, RmrkResourceInfo, RmrkPropertyInfo, RmrkBaseInfo, @@ -146,6 +145,12 @@ where C::Api: fp_rpc::EthereumRuntimeRPCApi, C::Api: fp_rpc::ConvertTransactionRuntimeApi, C::Api: up_rpc::UniqueApi::CrossAccountId, AccountId>, + C::Api: app_promotion_rpc::AppPromotionApi< + Block, + BlockNumber, + ::CrossAccountId, + AccountId, + >, C::Api: rmrk_rpc::RmrkApi< Block, AccountId, @@ -171,6 +176,9 @@ where }; use uc_rpc::{UniqueApiServer, Unique}; + #[cfg(not(any(feature = "unique-runtime", feature = "quartz-runtime")))] + use uc_rpc::{AppPromotionApiServer, AppPromotion}; + #[cfg(not(feature = "unique-runtime"))] use uc_rpc::{RmrkApiServer, Rmrk}; @@ -228,6 +236,9 @@ where io.merge(Unique::new(client.clone()).into_rpc())?; + #[cfg(not(any(feature = "unique-runtime", feature = "quartz-runtime")))] + io.merge(AppPromotion::new(client.clone()).into_rpc())?; + #[cfg(not(feature = "unique-runtime"))] io.merge(Rmrk::new(client.clone()).into_rpc())?; diff --git a/pallets/app-promotion/Cargo.toml b/pallets/app-promotion/Cargo.toml new file mode 100644 index 0000000000..6e42ebc522 --- /dev/null +++ b/pallets/app-promotion/Cargo.toml @@ -0,0 +1,75 @@ +################################################################################ +# Package + +[package] +authors = ['Unique Network '] +description = 'Unique App Promotion Pallet' +edition = '2021' +homepage = 'https://unique.network' +license = 'GPLv3' +name = 'pallet-app-promotion' +repository = 
'https://github.com/UniqueNetwork/unique-chain' +version = '0.1.0' + +[package.metadata.docs.rs] +targets = ['x86_64-unknown-linux-gnu'] + +[features] +default = ['std'] +runtime-benchmarks = [ + 'frame-benchmarking', + 'frame-support/runtime-benchmarks', + 'frame-system/runtime-benchmarks', + # 'pallet-unique/runtime-benchmarks', +] +std = [ + 'codec/std', + 'frame-benchmarking/std', + 'frame-support/std', + 'frame-system/std', + 'pallet-balances/std', + 'pallet-timestamp/std', + 'pallet-randomness-collective-flip/std', + 'pallet-evm/std', + 'sp-io/std', + 'sp-std/std', + 'sp-runtime/std', + 'sp-core/std', + 'serde/std', + +] +try-runtime = ["frame-support/try-runtime"] + +[dependencies] +scale-info = { version = "2.0.1", default-features = false, features = [ + "derive", +] } +################################################################################ +# Substrate Dependencies + +codec = { default-features = false, features = ['derive'], package = 'parity-scale-codec', version = '3.1.2' } +frame-benchmarking = {default-features = false, optional = true, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +frame-support = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +frame-system ={ default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +pallet-balances ={ default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +pallet-timestamp ={ default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +pallet-randomness-collective-flip ={ default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +pallet-evm ={ default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } +sp-std ={ default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-runtime = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-core = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-io ={ default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +serde = { default-features = false, features = ['derive'], version = '1.0.130' } + +################################################################################ +# local dependencies + +up-data-structs ={ default-features = false, path = "../../primitives/data-structs" } +pallet-common ={ default-features = false, path = "../common" } +pallet-unique ={ default-features = false, path = "../unique" } +pallet-evm-contract-helpers ={ default-features = false, path = "../evm-contract-helpers" } +pallet-evm-migration ={ default-features = false, path = "../evm-migration" } + +# [dev-dependencies] + +################################################################################ diff --git a/pallets/app-promotion/src/benchmarking.rs b/pallets/app-promotion/src/benchmarking.rs new file mode 100644 index 0000000000..b9e9c7f0d8 --- /dev/null +++ b/pallets/app-promotion/src/benchmarking.rs @@ -0,0 +1,162 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. 
+ +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +#![cfg(feature = "runtime-benchmarks")] + +use super::*; +use crate::Pallet as PromototionPallet; + +use sp_runtime::traits::Bounded; + +use frame_benchmarking::{benchmarks, account}; +use frame_support::traits::OnInitialize; +use frame_system::RawOrigin; +use pallet_unique::benchmarking::create_nft_collection; +use pallet_evm_migration::Pallet as EvmMigrationPallet; + +const SEED: u32 = 0; + +fn set_admin() -> Result +where + T: Config + pallet_unique::Config + pallet_evm_migration::Config, + T::BlockNumber: From + Into, + <::Currency as Currency>::Balance: Sum + From, +{ + let pallet_admin = account::("admin", 0, SEED); + + ::Currency::make_free_balance_be( + &pallet_admin, + Perbill::from_rational(1u32, 2) * BalanceOf::::max_value(), + ); + + PromototionPallet::::set_admin_address( + RawOrigin::Root.into(), + T::CrossAccountId::from_sub(pallet_admin.clone()), + )?; + + Ok(pallet_admin) +} + +benchmarks! { + where_clause{ + where T: Config + pallet_unique::Config + pallet_evm_migration::Config , + T::BlockNumber: From + Into, + <::Currency as Currency>::Balance: Sum + From + } + + on_initialize { + let b in 0..PENDING_LIMIT_PER_BLOCK; + set_admin::()?; + + (0..b).try_for_each(|index| { + let staker = account::("staker", index, SEED); + ::Currency::make_free_balance_be(&staker, Perbill::from_rational(1u32, 2) * BalanceOf::::max_value()); + PromototionPallet::::stake(RawOrigin::Signed(staker.clone()).into(), Into::>::into(100u128) * T::Nominal::get())?; + PromototionPallet::::unstake(RawOrigin::Signed(staker.clone()).into()).map_err(|e| e.error)?; + Result::<(), sp_runtime::DispatchError>::Ok(()) + })?; + let block_number = >::current_block_number() + T::PendingInterval::get(); + }: {PromototionPallet::::on_initialize(block_number)} + + set_admin_address { + let pallet_admin = account::("admin", 0, SEED); + let _ = ::Currency::make_free_balance_be(&pallet_admin, Perbill::from_rational(1u32, 2) * BalanceOf::::max_value()); + } : _(RawOrigin::Root, T::CrossAccountId::from_sub(pallet_admin)) + + payout_stakers{ + let b in 1..101; + + let pallet_admin = account::("admin", 1, SEED); + let share = Perbill::from_rational(1u32, 20); + PromototionPallet::::set_admin_address(RawOrigin::Root.into(), T::CrossAccountId::from_sub(pallet_admin.clone()))?; + ::Currency::make_free_balance_be(&pallet_admin, Perbill::from_rational(1u32, 2) * BalanceOf::::max_value()); + + let staker: T::AccountId = account("caller", 0, SEED); + ::Currency::make_free_balance_be(&staker, Perbill::from_rational(1u32, 2) * BalanceOf::::max_value()); + let stakers: Vec = (0..b).map(|index| account("staker", index, SEED)).collect(); + stakers.iter().for_each(|staker| { + ::Currency::make_free_balance_be(&staker, Perbill::from_rational(1u32, 2) * BalanceOf::::max_value()); + }); + (0..10).try_for_each(|_| { + stakers.iter() + .map(|staker| { + PromototionPallet::::stake(RawOrigin::Signed(staker.clone()).into(), 
Into::>::into(100u128) * T::Nominal::get()) + }).collect::, _>>()?; + >::finalize(); + Result::<(), sp_runtime::DispatchError>::Ok(()) + })?; + } : _(RawOrigin::Signed(pallet_admin.clone()), Some(b as u8)) + + stake { + let caller = account::("caller", 0, SEED); + let share = Perbill::from_rational(1u32, 10); + let _ = ::Currency::make_free_balance_be(&caller, Perbill::from_rational(1u32, 2) * BalanceOf::::max_value()); + } : _(RawOrigin::Signed(caller.clone()), share * ::Currency::total_balance(&caller)) + + unstake { + let caller = account::("caller", 0, SEED); + let share = Perbill::from_rational(1u32, 20); + let _ = ::Currency::make_free_balance_be(&caller, Perbill::from_rational(1u32, 2) * BalanceOf::::max_value()); + (0..10).map(|_| { + // used to change block number + >::finalize(); + PromototionPallet::::stake(RawOrigin::Signed(caller.clone()).into(), share * ::Currency::total_balance(&caller)) + }).collect::, _>>()?; + + } : _(RawOrigin::Signed(caller.clone())) + + sponsor_collection { + let pallet_admin = account::("admin", 0, SEED); + PromototionPallet::::set_admin_address(RawOrigin::Root.into(), T::CrossAccountId::from_sub(pallet_admin.clone()))?; + let _ = ::Currency::make_free_balance_be(&pallet_admin, Perbill::from_rational(1u32, 2) * BalanceOf::::max_value()); + let caller: T::AccountId = account("caller", 0, SEED); + let _ = ::Currency::make_free_balance_be(&caller, Perbill::from_rational(1u32, 2) * BalanceOf::::max_value()); + let collection = create_nft_collection::(caller.clone())?; + } : _(RawOrigin::Signed(pallet_admin.clone()), collection) + + stop_sponsoring_collection { + let pallet_admin = account::("admin", 0, SEED); + PromototionPallet::::set_admin_address(RawOrigin::Root.into(), T::CrossAccountId::from_sub(pallet_admin.clone()))?; + let _ = ::Currency::make_free_balance_be(&pallet_admin, Perbill::from_rational(1u32, 2) * BalanceOf::::max_value()); + let caller: T::AccountId = account("caller", 0, SEED); + let _ = ::Currency::make_free_balance_be(&caller, Perbill::from_rational(1u32, 2) * BalanceOf::::max_value()); + let collection = create_nft_collection::(caller.clone())?; + PromototionPallet::::sponsor_collection(RawOrigin::Signed(pallet_admin.clone()).into(), collection)?; + } : _(RawOrigin::Signed(pallet_admin.clone()), collection) + + sponsor_contract { + let pallet_admin = account::("admin", 0, SEED); + PromototionPallet::::set_admin_address(RawOrigin::Root.into(), T::CrossAccountId::from_sub(pallet_admin.clone()))?; + + let _ = ::Currency::make_free_balance_be(&pallet_admin, Perbill::from_rational(1u32, 2) * BalanceOf::::max_value()); + let address = H160::from_low_u64_be(SEED as u64); + let data: Vec = (0..20 as u8).collect(); + >::begin(RawOrigin::Root.into(), address)?; + >::finish(RawOrigin::Root.into(), address, data)?; + } : _(RawOrigin::Signed(pallet_admin.clone()), address) + + stop_sponsoring_contract { + let pallet_admin = account::("admin", 0, SEED); + PromototionPallet::::set_admin_address(RawOrigin::Root.into(), T::CrossAccountId::from_sub(pallet_admin.clone()))?; + + let _ = ::Currency::make_free_balance_be(&pallet_admin, Perbill::from_rational(1u32, 2) * BalanceOf::::max_value()); + let address = H160::from_low_u64_be(SEED as u64); + let data: Vec = (0..20 as u8).collect(); + >::begin(RawOrigin::Root.into(), address)?; + >::finish(RawOrigin::Root.into(), address, data)?; + PromototionPallet::::sponsor_contract(RawOrigin::Signed(pallet_admin.clone()).into(), address)?; + } : _(RawOrigin::Signed(pallet_admin.clone()), address) +} diff --git 
a/pallets/app-promotion/src/lib.rs b/pallets/app-promotion/src/lib.rs new file mode 100644 index 0000000000..d87ad33e0b --- /dev/null +++ b/pallets/app-promotion/src/lib.rs @@ -0,0 +1,872 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +//! # App Promotion pallet +//! +//! The pallet implements the mechanics of staking and sponsoring collections/contracts. +//! +//! - [`Config`] +//! - [`Pallet`] +//! - [`Error`] +//! - [`Event`] +//! +//! ## Overview +//! The App Promotion pallet allows fund holders to stake at a certain daily rate of return. +//! The mechanics implemented in the pallet allow it to act as a sponsor for collections / contracts, +//! the list of which is set by the pallet administrator. +//! +//! +//! ## Interface +//! The pallet provides interfaces for funds, collection/contract operations (see [types] module). + +//! +//! ### Dispatchable Functions +//! - [`set_admin_address`][`Pallet::set_admin_address`] - sets an address as the the admin. +//! - [`stake`][`Pallet::stake`] - stakes the amount of native tokens. +//! - [`unstake`][`Pallet::unstake`] - unstakes all stakes. +//! - [`sponsor_collection`][`Pallet::sponsor_collection`] - sets the pallet to be the sponsor for the collection. +//! - [`stop_sponsoring_collection`][`Pallet::stop_sponsoring_collection`] - removes the pallet as the sponsor for the collection. +//! - [`sponsor_contract`][`Pallet::sponsor_contract`] - sets the pallet to be the sponsor for the contract. +//! - [`stop_sponsoring_contract`][`Pallet::stop_sponsoring_contract`] - removes the pallet as the sponsor for the contract. +//! - [`payout_stakers`][`Pallet::payout_stakers`] - recalculates interest for the specified number of stakers. +//! 
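To make the payout mechanics described above concrete, the following standalone sketch mirrors the per-interval compounding performed by the pallet's internal `calculate_income` helper shown further below. The function name `accrued_income`, the `u128` balance type and the example rate/decimals are illustrative assumptions only; in the pallet the rate comes from the `IntervalIncome` constant and the period from `RecalculationInterval`.

use sp_runtime::Perbill;

/// Illustrative sketch: income accrued on `base` after `iters` recalculation
/// intervals, compounding at `rate` per interval (same loop shape as the
/// pallet's `calculate_income`).
fn accrued_income(base: u128, rate: Perbill, iters: u32) -> u128 {
	let mut income = base;
	for _ in 0..iters {
		// Each interval adds `rate * income` on top of the running total.
		income += rate * income;
	}
	income - base
}

fn main() {
	// Assumed example values: a 100-token stake (18 decimals) at 0.05% per interval, over 10 intervals.
	let base = 100u128 * 10u128.pow(18);
	let rate = Perbill::from_rational(5u32, 10_000u32);
	println!("accrued income: {}", accrued_income(base, rate, 10));
}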
+ +// #![recursion_limit = "1024"] +#![cfg_attr(not(feature = "std"), no_std)] + +#[cfg(feature = "runtime-benchmarks")] +mod benchmarking; + +pub mod types; +pub mod weights; + +use sp_std::{ + vec::{Vec}, + vec, + iter::Sum, + borrow::ToOwned, + cell::RefCell, +}; +use sp_core::H160; +use codec::EncodeLike; +use pallet_balances::BalanceLock; +pub use types::*; + +use up_data_structs::CollectionId; + +use frame_support::{ + dispatch::{DispatchResult}, + traits::{ + Currency, Get, LockableCurrency, WithdrawReasons, tokens::Balance, ExistenceRequirement, + }, + ensure, +}; + +use weights::WeightInfo; + +pub use pallet::*; +use pallet_evm::account::CrossAccountId; +use sp_runtime::{ + Perbill, + traits::{BlockNumberProvider, CheckedAdd, CheckedSub, AccountIdConversion, Zero}, + ArithmeticError, +}; + +pub const LOCK_IDENTIFIER: [u8; 8] = *b"appstake"; + +const PENDING_LIMIT_PER_BLOCK: u32 = 3; + +type BalanceOf = + <::Currency as Currency<::AccountId>>::Balance; + +#[frame_support::pallet] +pub mod pallet { + use super::*; + use frame_support::{ + Blake2_128Concat, Twox64Concat, pallet_prelude::*, storage::Key, PalletId, + traits::ReservableCurrency, + }; + use frame_system::pallet_prelude::*; + + #[pallet::config] + pub trait Config: frame_system::Config + pallet_evm::account::Config { + /// Type to interact with the native token + type Currency: ExtendedLockableCurrency + + ReservableCurrency; + + /// Type for interacting with collections + type CollectionHandler: CollectionHandler< + AccountId = Self::AccountId, + CollectionId = CollectionId, + >; + + /// Type for interacting with conrtacts + type ContractHandler: ContractHandler; + + /// `AccountId` for treasury + type TreasuryAccountId: Get; + + /// The app's pallet id, used for deriving its sovereign account address. + #[pallet::constant] + type PalletId: Get; + + /// In relay blocks. + #[pallet::constant] + type RecalculationInterval: Get; + + /// In parachain blocks. + #[pallet::constant] + type PendingInterval: Get; + + /// Rate of return for interval in blocks defined in `RecalculationInterval`. + #[pallet::constant] + type IntervalIncome: Get; + + /// Decimals for the `Currency`. + #[pallet::constant] + type Nominal: Get>; + + /// Weight information for extrinsics in this pallet. + type WeightInfo: WeightInfo; + + // The relay block number provider + type RelayBlockNumberProvider: BlockNumberProvider; + + /// Events compatible with [`frame_system::Config::Event`]. + type RuntimeEvent: IsType<::RuntimeEvent> + From>; + } + + #[pallet::pallet] + #[pallet::generate_store(pub(super) trait Store)] + pub struct Pallet(_); + + #[pallet::event] + #[pallet::generate_deposit(fn deposit_event)] + pub enum Event { + /// Staking recalculation was performed + /// + /// # Arguments + /// * AccountId: account of the staker. 
+ /// * Balance : recalculation base + /// * Balance : total income + StakingRecalculation( + /// A recalculated staker + T::AccountId, + /// Base on which interest is calculated + BalanceOf, + /// Amount of accrued interest + BalanceOf, + ), + + /// Staking was performed + /// + /// # Arguments + /// * AccountId: account of the staker + /// * Balance : staking amount + Stake(T::AccountId, BalanceOf), + + /// Unstaking was performed + /// + /// # Arguments + /// * AccountId: account of the staker + /// * Balance : unstaking amount + Unstake(T::AccountId, BalanceOf), + + /// The admin was set + /// + /// # Arguments + /// * AccountId: account address of the admin + SetAdmin(T::AccountId), + } + + #[pallet::error] + pub enum Error { + /// Error due to action requiring admin to be set. + AdminNotSet, + /// No permission to perform an action. + NoPermission, + /// Insufficient funds to perform an action. + NotSufficientFunds, + /// Occurs when a pending unstake cannot be added in this block: the `PENDING_LIMIT_PER_BLOCK` limit is exceeded. + PendingForBlockOverflow, + /// The collection/contract must already be sponsored in order to perform the action. + SponsorNotSet, + /// Errors caused by incorrect actions with a locked balance. + IncorrectLockedBalanceOperation, + } + + /// Stores the total staked amount. + #[pallet::storage] + pub type TotalStaked = StorageValue, QueryKind = ValueQuery>; + + /// Stores the `admin` account. Some extrinsics can only be executed if they were signed by `admin`. + #[pallet::storage] + pub type Admin = StorageValue; + + /// Stores the amount of tokens staked by an account at a given relay block number. + /// + /// * **Key1** - Staker account. + /// * **Key2** - Relay block number when the stake was made. + /// * **(Balance, BlockNumber)** - Balance of the stake. + /// The number of the relay block in which we must perform the interest recalculation + #[pallet::storage] + pub type Staked = StorageNMap< + Key = ( + Key, + Key, + ), + Value = (BalanceOf, T::BlockNumber), + QueryKind = ValueQuery, + >; + + /// Stores the number of stakes for an `Account`. + /// + /// * **Key** - Staker account. + /// * **Value** - Amount of stakes. + #[pallet::storage] + pub type StakesPerAccount = + StorageMap<_, Blake2_128Concat, T::AccountId, u8, ValueQuery>; + + /// Stores pending unstakes scheduled for release. + /// + /// * **Key** - Block number at which the unstaked funds become free. + /// * **Value** - Stakers and amounts pending unstake at that block. + #[pallet::storage] + pub type PendingUnstake = StorageMap< + _, + Twox64Concat, + T::BlockNumber, + BoundedVec<(T::AccountId, BalanceOf), ConstU32>, + ValueQuery, + >; + + /// Stores the key of the record for which the next revenue recalculation will be performed. + /// If `None`, then recalculation has not yet been performed or calculations have been completed for all stakers. + #[pallet::storage] + #[pallet::getter(fn get_next_calculated_record)] + pub type NextCalculatedRecord = + StorageValue; + + #[pallet::hooks] + impl Hooks> for Pallet { + /// Block overflow is impossible because the unstake algorithm in `on_initialize` + /// performs only a strictly limited number of relatively lightweight operations. + /// A separate benchmark has been implemented to scale the weight depending on the number of pending unstakes.
+ fn on_initialize(current_block_number: T::BlockNumber) -> Weight + where + ::BlockNumber: From, + { + let block_pending = PendingUnstake::::take(current_block_number); + let counter = block_pending.len() as u32; + + if !block_pending.is_empty() { + block_pending.into_iter().for_each(|(staker, amount)| { + >::unreserve(&staker, amount); + }); + } + + T::WeightInfo::on_initialize(counter) + } + } + + #[pallet::call] + impl Pallet + where + T::BlockNumber: From + Into, + <::Currency as Currency>::Balance: Sum + From, + { + /// Sets an address as the admin. + /// + /// # Permissions + /// + /// * Sudo + /// + /// # Arguments + /// + /// * `admin`: account of the new admin. + #[pallet::weight(T::WeightInfo::set_admin_address())] + pub fn set_admin_address(origin: OriginFor, admin: T::CrossAccountId) -> DispatchResult { + ensure_root(origin)?; + + >::set(Some(admin.as_sub().to_owned())); + + Self::deposit_event(Event::SetAdmin(admin.as_sub().to_owned())); + + Ok(()) + } + + /// Stakes the amount of native tokens. + /// Sets `amount` to the locked state. + /// The maximum number of stakes for a staker is 10. + /// + /// # Arguments + /// + /// * `amount`: in native tokens. + #[pallet::weight(T::WeightInfo::stake())] + pub fn stake(staker: OriginFor, amount: BalanceOf) -> DispatchResult { + let staker_id = ensure_signed(staker)?; + + ensure!( + StakesPerAccount::::get(&staker_id) < 10, + Error::::NoPermission + ); + + ensure!( + amount >= >::from(100u128) * T::Nominal::get(), + ArithmeticError::Underflow + ); + + let balance = + <::Currency as Currency>::free_balance(&staker_id); + + // checks that we can lock `amount` on the `staker` account. + <::Currency as Currency>::ensure_can_withdraw( + &staker_id, + amount, + WithdrawReasons::all(), + balance + .checked_sub(&amount) + .ok_or(ArithmeticError::Underflow)?, + )?; + + Self::add_lock_balance(&staker_id, amount)?; + + let block_number = T::RelayBlockNumberProvider::current_block_number(); + + // Calculate the number of recalculation periods + // after which the first interest calculation should be performed for the stake + let recalculate_after_interval: T::BlockNumber = + if block_number % T::RecalculationInterval::get() == 0u32.into() { + 1u32.into() + } else { + 2u32.into() + }; + + // Calculate the number of the relay block + // at which interest must be accrued for the stake. + let recalc_block = (block_number / T::RecalculationInterval::get() + + recalculate_after_interval) + * T::RecalculationInterval::get(); + + >::insert((&staker_id, block_number), { + let mut balance_and_recalc_block = >::get((&staker_id, block_number)); + balance_and_recalc_block.0 = balance_and_recalc_block + .0 + .checked_add(&amount) + .ok_or(ArithmeticError::Overflow)?; + balance_and_recalc_block.1 = recalc_block; + balance_and_recalc_block + }); + + >::set( + >::get() + .checked_add(&amount) + .ok_or(ArithmeticError::Overflow)?, + ); + + StakesPerAccount::::mutate(&staker_id, |stakes| *stakes += 1); + + Self::deposit_event(Event::Stake(staker_id, amount)); + + Ok(()) + } + + /// Unstakes all stakes. + /// Moves the sum of all stakes to the `reserved` state. + /// After the end of `PendingInterval` this sum becomes completely + /// free for further use.
+ #[pallet::weight(T::WeightInfo::unstake())] + pub fn unstake(staker: OriginFor) -> DispatchResultWithPostInfo { + let staker_id = ensure_signed(staker)?; + + // calculate block number where the sum would be free + let block = >::block_number() + T::PendingInterval::get(); + + let mut pendings = >::get(block); + + // checks that we can do unreserve stakes in the block + ensure!(!pendings.is_full(), Error::::PendingForBlockOverflow); + + let mut total_stakes = 0u64; + + let total_staked: BalanceOf = Staked::::drain_prefix((&staker_id,)) + .map(|(_, (amount, _))| { + total_stakes += 1; + amount + }) + .sum(); + + if total_staked.is_zero() { + return Ok(None::.into()); // TO-DO + } + + pendings + .try_push((staker_id.clone(), total_staked)) + .map_err(|_| Error::::PendingForBlockOverflow)?; + + >::insert(block, pendings); + + Self::unlock_balance(&staker_id, total_staked)?; + + >::reserve(&staker_id, total_staked)?; + + TotalStaked::::set( + TotalStaked::::get() + .checked_sub(&total_staked) + .ok_or(ArithmeticError::Underflow)?, + ); + + StakesPerAccount::::remove(&staker_id); + + Self::deposit_event(Event::Unstake(staker_id, total_staked)); + + Ok(None::.into()) + } + + /// Sets the pallet to be the sponsor for the collection. + /// + /// # Permissions + /// + /// * Pallet admin + /// + /// # Arguments + /// + /// * `collection_id`: ID of the collection that will be sponsored by `pallet_id` + #[pallet::weight(T::WeightInfo::sponsor_collection())] + pub fn sponsor_collection( + admin: OriginFor, + collection_id: CollectionId, + ) -> DispatchResult { + let admin_id = ensure_signed(admin)?; + ensure!( + admin_id == Admin::::get().ok_or(Error::::AdminNotSet)?, + Error::::NoPermission + ); + + T::CollectionHandler::set_sponsor(Self::account_id(), collection_id) + } + + /// Removes the pallet as the sponsor for the collection. + /// Returns [`NoPermission`][`Error::NoPermission`] + /// if the pallet wasn't the sponsor. + /// + /// # Permissions + /// + /// * Pallet admin + /// + /// # Arguments + /// + /// * `collection_id`: ID of the collection that is sponsored by `pallet_id` + #[pallet::weight(T::WeightInfo::stop_sponsoring_collection())] + pub fn stop_sponsoring_collection( + admin: OriginFor, + collection_id: CollectionId, + ) -> DispatchResult { + let admin_id = ensure_signed(admin)?; + + ensure!( + admin_id == Admin::::get().ok_or(Error::::AdminNotSet)?, + Error::::NoPermission + ); + + ensure!( + T::CollectionHandler::sponsor(collection_id)?.ok_or(>::SponsorNotSet)? + == Self::account_id(), + >::NoPermission + ); + T::CollectionHandler::remove_collection_sponsor(collection_id) + } + + /// Sets the pallet to be the sponsor for the contract. + /// + /// # Permissions + /// + /// * Pallet admin + /// + /// # Arguments + /// + /// * `contract_id`: the contract address that will be sponsored by `pallet_id` + #[pallet::weight(T::WeightInfo::sponsor_contract())] + pub fn sponsor_contract(admin: OriginFor, contract_id: H160) -> DispatchResult { + let admin_id = ensure_signed(admin)?; + + ensure!( + admin_id == Admin::::get().ok_or(Error::::AdminNotSet)?, + Error::::NoPermission + ); + + T::ContractHandler::set_sponsor( + T::CrossAccountId::from_sub(Self::account_id()), + contract_id, + ) + } + + /// Removes the pallet as the sponsor for the contract. + /// Returns [`NoPermission`][`Error::NoPermission`] + /// if the pallet wasn't the sponsor. 
+ /// + /// # Permissions + /// + /// * Pallet admin + /// + /// # Arguments + /// + /// * `contract_id`: the contract address that is sponsored by `pallet_id` + #[pallet::weight(T::WeightInfo::stop_sponsoring_contract())] + pub fn stop_sponsoring_contract(admin: OriginFor, contract_id: H160) -> DispatchResult { + let admin_id = ensure_signed(admin)?; + + ensure!( + admin_id == Admin::::get().ok_or(Error::::AdminNotSet)?, + Error::::NoPermission + ); + + ensure!( + T::ContractHandler::sponsor(contract_id)? + .ok_or(>::SponsorNotSet)? + .as_sub() == &Self::account_id(), + >::NoPermission + ); + T::ContractHandler::remove_contract_sponsor(contract_id) + } + + /// Recalculates interest for the specified number of stakers. + /// If not all stakers are recalculated, the next call of the extrinsic + /// will continue the recalculation from those stakers for whom it + /// was not performed in the last call. + /// + /// # Permissions + /// + /// * Pallet admin + /// + /// # Arguments + /// + /// * `stakers_number`: the number of stakers for which recalculation will be performed + #[pallet::weight(T::WeightInfo::payout_stakers(stakers_number.unwrap_or(20) as u32))] + pub fn payout_stakers(admin: OriginFor, stakers_number: Option) -> DispatchResult { + let admin_id = ensure_signed(admin)?; + + ensure!( + admin_id == Admin::::get().ok_or(Error::::AdminNotSet)?, + Error::::NoPermission + ); + + // calculate the number of the current recalculation block; + // this is necessary in order to understand for which stakers we should calculate interest + let current_recalc_block = + Self::get_current_recalc_block(T::RelayBlockNumberProvider::current_block_number()); + + // calculate the number of the next recalculation block; + // this value is set for the stakers for whom the recalculation will be performed + let next_recalc_block = current_recalc_block + T::RecalculationInterval::get(); + + let mut storage_iterator = Self::get_next_calculated_key() + .map_or(Staked::::iter(), |key| Staked::::iter_from(key)); + + NextCalculatedRecord::::set(None); + + { + let mut stakers_number = stakers_number.unwrap_or(20); + let last_id = RefCell::new(None); + let income_acc = RefCell::new(BalanceOf::::default()); + let amount_acc = RefCell::new(BalanceOf::::default()); + + // this closure is used to flush the accumulated payout when we break out of the loop because we reached the requested number of stakers + // while unrecalculated records remain in the storage.
+ let flush_stake = || -> DispatchResult { + if let Some(last_id) = &*last_id.borrow() { + if !income_acc.borrow().is_zero() { + >::transfer( + &T::TreasuryAccountId::get(), + last_id, + *income_acc.borrow(), + ExistenceRequirement::KeepAlive, + )?; + + Self::add_lock_balance(last_id, *income_acc.borrow())?; + >::try_mutate(|staked| -> DispatchResult { + *staked = staked + .checked_add(&*income_acc.borrow()) + .ok_or(ArithmeticError::Overflow)?; + Ok(()) + })?; + + Self::deposit_event(Event::StakingRecalculation( + last_id.clone(), + *amount_acc.borrow(), + *income_acc.borrow(), + )); + } + + *income_acc.borrow_mut() = BalanceOf::::default(); + *amount_acc.borrow_mut() = BalanceOf::::default(); + } + Ok(()) + }; + + while let Some(( + (current_id, staked_block), + (amount, next_recalc_block_for_stake), + )) = storage_iterator.next() + { + if stakers_number == 0 { + NextCalculatedRecord::::set(Some((current_id, staked_block))); + break; + } + if last_id.borrow().as_ref() != Some(¤t_id) { + flush_stake()?; + *last_id.borrow_mut() = Some(current_id.clone()); + stakers_number -= 1; + }; + if current_recalc_block >= next_recalc_block_for_stake { + *amount_acc.borrow_mut() += amount; + Self::recalculate_and_insert_stake( + ¤t_id, + staked_block, + next_recalc_block, + amount, + ((current_recalc_block - next_recalc_block_for_stake) + / T::RecalculationInterval::get()) + .into() + 1, + &mut *income_acc.borrow_mut(), + ); + } + } + flush_stake()?; + } + + Ok(()) + } + } +} + +impl Pallet { + /// The account address of the app promotion pot. + /// + /// This actually does computation. If you need to keep using it, then make sure you cache the + /// value and only call this once. + pub fn account_id() -> T::AccountId { + T::PalletId::get().into_account_truncating() + } + + /// Unlocks the balance that was locked by the pallet. + /// + /// - `staker`: staker account. + /// - `amount`: amount of unlocked funds. + fn unlock_balance(staker: &T::AccountId, amount: BalanceOf) -> DispatchResult { + let locked_balance = Self::get_locked_balance(staker) + .map(|l| l.amount) + .ok_or(>::IncorrectLockedBalanceOperation)?; + + // It is understood that we cannot unlock more funds than were locked by staking. + // Therefore, if implemented correctly, this error should not occur. + Self::set_lock_unchecked( + staker, + locked_balance + .checked_sub(&amount) + .ok_or(ArithmeticError::Underflow)?, + ); + Ok(()) + } + + /// Adds the balance to locked by the pallet. + /// + /// - `staker`: staker account. + /// - `amount`: amount of added locked funds. + fn add_lock_balance(staker: &T::AccountId, amount: BalanceOf) -> DispatchResult { + Self::get_locked_balance(staker) + .map_or(>::default(), |l| l.amount) + .checked_add(&amount) + .map(|new_lock| Self::set_lock_unchecked(staker, new_lock)) + .ok_or(ArithmeticError::Overflow.into()) + } + + /// Sets the new state of a balance locked by the pallet. + /// + /// - `staker`: staker account. + /// - `amount`: amount of locked funds. + fn set_lock_unchecked(staker: &T::AccountId, amount: BalanceOf) { + if amount.is_zero() { + >::remove_lock(LOCK_IDENTIFIER, &staker); + } else { + >::set_lock( + LOCK_IDENTIFIER, + staker, + amount, + WithdrawReasons::all(), + ) + } + } + + /// Returns the balance locked by the pallet for the staker. + /// + /// - `staker`: staker account. + pub fn get_locked_balance( + staker: impl EncodeLike, + ) -> Option>> { + >::locks(staker) + .into_iter() + .find(|l| l.id == LOCK_IDENTIFIER) + } + + /// Returns the total staked balance for the staker. 
+ /// + /// - `staker`: staker account. + pub fn total_staked_by_id(staker: impl EncodeLike) -> Option> { + let staked = Staked::::iter_prefix((staker,)) + .into_iter() + .fold(>::default(), |acc, (_, (amount, _))| { + acc + amount + }); + if staked != >::default() { + Some(staked) + } else { + None + } + } + + /// Returns all relay block numbers when stake was made, + /// the amount of the stake. + /// + /// - `staker`: staker account. + pub fn total_staked_by_id_per_block( + staker: impl EncodeLike, + ) -> Option)>> { + let mut staked = Staked::::iter_prefix((staker,)) + .into_iter() + .map(|(block, (amount, _))| (block, amount)) + .collect::>(); + staked.sort_by_key(|(block, _)| *block); + if !staked.is_empty() { + Some(staked) + } else { + None + } + } + + /// Returns the total staked balance for the staker. + /// If `staker` is `None`, returns the total amount staked. + /// - `staker`: staker account. + pub fn cross_id_total_staked(staker: Option) -> Option> { + staker.map_or(Some(>::get()), |s| { + Self::total_staked_by_id(s.as_sub()) + }) + } + + // pub fn cross_id_locked_balance(staker: T::CrossAccountId) -> BalanceOf { + // Self::get_locked_balance(staker.as_sub()) + // .map(|l| l.amount) + // .unwrap_or_default() + // } + + /// Returns all relay block numbers when stake was made, + /// the amount of the stake. + /// + /// - `staker`: staker account. + pub fn cross_id_total_staked_per_block( + staker: T::CrossAccountId, + ) -> Vec<(T::BlockNumber, BalanceOf)> { + Self::total_staked_by_id_per_block(staker.as_sub()).unwrap_or_default() + } + + fn recalculate_and_insert_stake( + staker: &T::AccountId, + staked_block: T::BlockNumber, + next_recalc_block: T::BlockNumber, + base: BalanceOf, + iters: u32, + income_acc: &mut BalanceOf, + ) { + let income = Self::calculate_income(base, iters); + + base.checked_add(&income).map(|res| { + >::insert((staker, staked_block), (res, next_recalc_block)); + *income_acc += income; + }); + } + + fn calculate_income(base: I, iters: u32) -> I + where + I: EncodeLike> + Balance, + { + let mut income = base; + + (0..iters).for_each(|_| income += T::IntervalIncome::get() * income); + + income - base + } + + fn get_current_recalc_block(current_relay_block: T::BlockNumber) -> T::BlockNumber { + (current_relay_block / T::RecalculationInterval::get()) * T::RecalculationInterval::get() + } + + fn get_next_calculated_key() -> Option> { + Self::get_next_calculated_record().map(|key| Staked::::hashed_key_for(key)) + } +} + +impl Pallet +where + <::Currency as Currency>::Balance: Sum, +{ + /// Returns the amount reserved by the pending. + /// If `staker` is `None`, returns the total pending. + /// + /// -`staker`: staker account. + /// + /// Since user funds are not transferred anywhere by staking, overflow protection is provided + /// at the level of the associated type `Balance` of `Currency` trait. In order to overflow, + /// the staker must have more funds on his account than the maximum set for `Balance` type. + pub fn cross_id_pending_unstake(staker: Option) -> BalanceOf { + staker.map_or( + PendingUnstake::::iter_values() + .flat_map(|pendings| pendings.into_iter().map(|(_, amount)| amount)) + .sum(), + |s| { + PendingUnstake::::iter_values() + .flatten() + .filter_map(|(id, amount)| { + if id == *s.as_sub() { + Some(amount) + } else { + None + } + }) + .sum() + }, + ) + } + + /// Returns all parachain block numbers when unreserve is expected, + /// the amount of the unreserved funds. + /// + /// - `staker`: staker account. 
+ pub fn cross_id_pending_unstake_per_block( + staker: T::CrossAccountId, + ) -> Vec<(T::BlockNumber, BalanceOf)> { + let mut unsorted_res = vec![]; + PendingUnstake::::iter().for_each(|(block, pendings)| { + pendings.into_iter().for_each(|(id, amount)| { + if id == *staker.as_sub() { + unsorted_res.push((block, amount)); + }; + }) + }); + + unsorted_res.sort_by_key(|(block, _)| *block); + unsorted_res + } +} diff --git a/pallets/app-promotion/src/types.rs b/pallets/app-promotion/src/types.rs new file mode 100644 index 0000000000..37029d0988 --- /dev/null +++ b/pallets/app-promotion/src/types.rs @@ -0,0 +1,130 @@ +use codec::EncodeLike; +use frame_support::{traits::LockableCurrency, WeakBoundedVec, Parameter, dispatch::DispatchResult}; + +use pallet_balances::{BalanceLock, Config as BalancesConfig, Pallet as PalletBalances}; +use pallet_common::CollectionHandle; + +use sp_runtime::DispatchError; +use up_data_structs::{CollectionId}; +use sp_std::borrow::ToOwned; +use pallet_evm_contract_helpers::{Pallet as EvmHelpersPallet, Config as EvmHelpersConfig}; + +/// This trait was defined because `LockableCurrency` +/// has no way to know the state of the lock for an account. +pub trait ExtendedLockableCurrency: LockableCurrency { + /// Returns lock balance for an account. Allows to determine the cause of the lock. + fn locks(who: KArg) -> WeakBoundedVec, Self::MaxLocks> + where + KArg: EncodeLike; +} + +impl, I: 'static> ExtendedLockableCurrency + for PalletBalances +{ + fn locks(who: KArg) -> WeakBoundedVec, Self::MaxLocks> + where + KArg: EncodeLike, + { + Self::locks(who) + } +} +/// Trait for interacting with collections. +pub trait CollectionHandler { + type CollectionId; + type AccountId; + + /// Sets sponsor for a collection. + /// + /// - `sponsor_id`: the account of the sponsor-to-be. + /// - `collection_id`: ID of the modified collection. + fn set_sponsor( + sponsor_id: Self::AccountId, + collection_id: Self::CollectionId, + ) -> DispatchResult; + + /// Removes sponsor for a collection. + /// + /// - `collection_id`: ID of the modified collection. + fn remove_collection_sponsor(collection_id: Self::CollectionId) -> DispatchResult; + + /// Retuns the current sponsor for a collection if one is set. + /// + /// - `collection_id`: ID of the collection. + fn sponsor(collection_id: Self::CollectionId) + -> Result, DispatchError>; +} + +impl CollectionHandler for pallet_unique::Pallet { + type CollectionId = CollectionId; + + type AccountId = T::AccountId; + + fn set_sponsor( + sponsor_id: Self::AccountId, + collection_id: Self::CollectionId, + ) -> DispatchResult { + Self::force_set_sponsor(sponsor_id, collection_id) + } + + fn remove_collection_sponsor(collection_id: Self::CollectionId) -> DispatchResult { + Self::force_remove_collection_sponsor(collection_id) + } + + fn sponsor( + collection_id: Self::CollectionId, + ) -> Result, DispatchError> { + Ok(>::try_get(collection_id)? + .sponsorship + .sponsor() + .map(|acc| acc.to_owned())) + } +} +/// Trait for interacting with contracts. +pub trait ContractHandler { + type ContractId; + type AccountId; + + /// Sets sponsor for a contract. + /// + /// - `sponsor_id`: the account of the sponsor-to-be. + /// - `contract_address`: the address of the modified contract. + fn set_sponsor( + sponsor_id: Self::AccountId, + contract_address: Self::ContractId, + ) -> DispatchResult; + + /// Removes sponsor for a contract. + /// + /// - `contract_address`: the address of the modified contract. 
+ fn remove_contract_sponsor(contract_address: Self::ContractId) -> DispatchResult; + + /// Retuns the current sponsor for a contract if one is set. + /// + /// - `contract_address`: the contract address. + fn sponsor( + contract_address: Self::ContractId, + ) -> Result, DispatchError>; +} + +impl ContractHandler for EvmHelpersPallet { + type ContractId = sp_core::H160; + + type AccountId = T::CrossAccountId; + + fn set_sponsor( + sponsor_id: Self::AccountId, + contract_address: Self::ContractId, + ) -> DispatchResult { + Self::force_set_sponsor(contract_address, &sponsor_id) + } + + fn remove_contract_sponsor(contract_address: Self::ContractId) -> DispatchResult { + Self::force_remove_sponsor(contract_address) + } + + fn sponsor( + contract_address: Self::ContractId, + ) -> Result, DispatchError> { + Ok(Self::get_sponsor(contract_address)) + } +} diff --git a/pallets/app-promotion/src/weights.rs b/pallets/app-promotion/src/weights.rs new file mode 100644 index 0000000000..78ea346ce7 --- /dev/null +++ b/pallets/app-promotion/src/weights.rs @@ -0,0 +1,209 @@ +// Template adopted from https://github.com/paritytech/substrate/blob/master/.maintain/frame-weight-template.hbs + +//! Autogenerated weights for pallet_app_promotion +//! +//! THIS FILE WAS AUTO-GENERATED USING THE SUBSTRATE BENCHMARK CLI VERSION 4.0.0-dev +//! DATE: 2022-09-07, STEPS: `50`, REPEAT: 80, LOW RANGE: `[]`, HIGH RANGE: `[]` +//! EXECUTION: None, WASM-EXECUTION: Compiled, CHAIN: None, DB CACHE: 1024 + +// Executed Command: +// target/release/unique-collator +// benchmark +// pallet +// --pallet +// pallet-app-promotion +// --wasm-execution +// compiled +// --extrinsic +// * +// --template +// .maintain/frame-weight-template.hbs +// --steps=50 +// --repeat=80 +// --heap-pages=4096 +// --output=./pallets/app-promotion/src/weights.rs + +#![cfg_attr(rustfmt, rustfmt_skip)] +#![allow(unused_parens)] +#![allow(unused_imports)] +#![allow(missing_docs)] +#![allow(clippy::unnecessary_cast)] + +use frame_support::{traits::Get, weights::{Weight, constants::RocksDbWeight}}; +use sp_std::marker::PhantomData; + +/// Weight functions needed for pallet_app_promotion. +pub trait WeightInfo { + fn on_initialize(b: u32, ) -> Weight; + fn set_admin_address() -> Weight; + fn payout_stakers(b: u32, ) -> Weight; + fn stake() -> Weight; + fn unstake() -> Weight; + fn sponsor_collection() -> Weight; + fn stop_sponsoring_collection() -> Weight; + fn sponsor_contract() -> Weight; + fn stop_sponsoring_contract() -> Weight; +} + +/// Weights for pallet_app_promotion using the Substrate node and recommended hardware. 
+pub struct SubstrateWeight(PhantomData); +impl WeightInfo for SubstrateWeight { + // Storage: AppPromotion PendingUnstake (r:1 w:0) + // Storage: System Account (r:1 w:1) + fn on_initialize(b: u32, ) -> Weight { + Weight::from_ref_time(2_651_000) + // Standard Error: 103_000 + .saturating_add(Weight::from_ref_time(6_024_000).saturating_mul(b as u64)) + .saturating_add(T::DbWeight::get().reads(1 as u64)) + .saturating_add(T::DbWeight::get().reads((1 as u64).saturating_mul(b as u64))) + .saturating_add(T::DbWeight::get().writes((1 as u64).saturating_mul(b as u64))) + } + // Storage: AppPromotion Admin (r:0 w:1) + fn set_admin_address() -> Weight { + Weight::from_ref_time(7_117_000) + .saturating_add(T::DbWeight::get().writes(1 as u64)) + } + // Storage: AppPromotion Admin (r:1 w:0) + // Storage: ParachainSystem ValidationData (r:1 w:0) + // Storage: AppPromotion NextCalculatedRecord (r:1 w:1) + // Storage: AppPromotion Staked (r:2 w:0) + fn payout_stakers(b: u32, ) -> Weight { + Weight::from_ref_time(9_958_000) + // Standard Error: 8_000 + .saturating_add(Weight::from_ref_time(4_406_000).saturating_mul(b as u64)) + .saturating_add(T::DbWeight::get().reads(4 as u64)) + .saturating_add(T::DbWeight::get().reads((1 as u64).saturating_mul(b as u64))) + .saturating_add(T::DbWeight::get().writes(1 as u64)) + } + // Storage: AppPromotion StakesPerAccount (r:1 w:1) + // Storage: System Account (r:1 w:1) + // Storage: Balances Locks (r:1 w:1) + // Storage: ParachainSystem ValidationData (r:1 w:0) + // Storage: AppPromotion Staked (r:1 w:1) + // Storage: AppPromotion TotalStaked (r:1 w:1) + fn stake() -> Weight { + Weight::from_ref_time(20_574_000) + .saturating_add(T::DbWeight::get().reads(6 as u64)) + .saturating_add(T::DbWeight::get().writes(5 as u64)) + } + // Storage: AppPromotion PendingUnstake (r:1 w:1) + // Storage: AppPromotion Staked (r:2 w:1) + // Storage: Balances Locks (r:1 w:1) + // Storage: System Account (r:1 w:1) + // Storage: AppPromotion TotalStaked (r:1 w:1) + // Storage: AppPromotion StakesPerAccount (r:0 w:1) + fn unstake() -> Weight { + Weight::from_ref_time(31_703_000) + .saturating_add(T::DbWeight::get().reads(6 as u64)) + .saturating_add(T::DbWeight::get().writes(6 as u64)) + } + // Storage: AppPromotion Admin (r:1 w:0) + // Storage: Common CollectionById (r:1 w:1) + fn sponsor_collection() -> Weight { + Weight::from_ref_time(12_932_000) + .saturating_add(T::DbWeight::get().reads(2 as u64)) + .saturating_add(T::DbWeight::get().writes(1 as u64)) + } + // Storage: AppPromotion Admin (r:1 w:0) + // Storage: Common CollectionById (r:1 w:1) + fn stop_sponsoring_collection() -> Weight { + Weight::from_ref_time(12_453_000) + .saturating_add(T::DbWeight::get().reads(2 as u64)) + .saturating_add(T::DbWeight::get().writes(1 as u64)) + } + // Storage: AppPromotion Admin (r:1 w:0) + // Storage: EvmContractHelpers Sponsoring (r:0 w:1) + fn sponsor_contract() -> Weight { + Weight::from_ref_time(11_952_000) + .saturating_add(T::DbWeight::get().reads(1 as u64)) + .saturating_add(T::DbWeight::get().writes(1 as u64)) + } + // Storage: AppPromotion Admin (r:1 w:0) + // Storage: EvmContractHelpers Sponsoring (r:1 w:1) + fn stop_sponsoring_contract() -> Weight { + Weight::from_ref_time(12_538_000) + .saturating_add(T::DbWeight::get().reads(2 as u64)) + .saturating_add(T::DbWeight::get().writes(1 as u64)) + } +} + +// For backwards compatibility and tests +impl WeightInfo for () { + // Storage: AppPromotion PendingUnstake (r:1 w:0) + // Storage: System Account (r:1 w:1) + fn on_initialize(b: u32, 
) -> Weight { + Weight::from_ref_time(2_651_000) + // Standard Error: 103_000 + .saturating_add(Weight::from_ref_time(6_024_000).saturating_mul(b as u64)) + .saturating_add(RocksDbWeight::get().reads(1 as u64)) + .saturating_add(RocksDbWeight::get().reads((1 as u64).saturating_mul(b as u64))) + .saturating_add(RocksDbWeight::get().writes((1 as u64).saturating_mul(b as u64))) + } + // Storage: AppPromotion Admin (r:0 w:1) + fn set_admin_address() -> Weight { + Weight::from_ref_time(7_117_000) + .saturating_add(RocksDbWeight::get().writes(1 as u64)) + } + // Storage: AppPromotion Admin (r:1 w:0) + // Storage: ParachainSystem ValidationData (r:1 w:0) + // Storage: AppPromotion NextCalculatedRecord (r:1 w:1) + // Storage: AppPromotion Staked (r:2 w:0) + fn payout_stakers(b: u32, ) -> Weight { + Weight::from_ref_time(9_958_000) + // Standard Error: 8_000 + .saturating_add(Weight::from_ref_time(4_406_000).saturating_mul(b as u64)) + .saturating_add(RocksDbWeight::get().reads(4 as u64)) + .saturating_add(RocksDbWeight::get().reads((1 as u64).saturating_mul(b as u64))) + .saturating_add(RocksDbWeight::get().writes(1 as u64)) + } + // Storage: AppPromotion StakesPerAccount (r:1 w:1) + // Storage: System Account (r:1 w:1) + // Storage: Balances Locks (r:1 w:1) + // Storage: ParachainSystem ValidationData (r:1 w:0) + // Storage: AppPromotion Staked (r:1 w:1) + // Storage: AppPromotion TotalStaked (r:1 w:1) + fn stake() -> Weight { + Weight::from_ref_time(20_574_000) + .saturating_add(RocksDbWeight::get().reads(6 as u64)) + .saturating_add(RocksDbWeight::get().writes(5 as u64)) + } + // Storage: AppPromotion PendingUnstake (r:1 w:1) + // Storage: AppPromotion Staked (r:2 w:1) + // Storage: Balances Locks (r:1 w:1) + // Storage: System Account (r:1 w:1) + // Storage: AppPromotion TotalStaked (r:1 w:1) + // Storage: AppPromotion StakesPerAccount (r:0 w:1) + fn unstake() -> Weight { + Weight::from_ref_time(31_703_000) + .saturating_add(RocksDbWeight::get().reads(6 as u64)) + .saturating_add(RocksDbWeight::get().writes(6 as u64)) + } + // Storage: AppPromotion Admin (r:1 w:0) + // Storage: Common CollectionById (r:1 w:1) + fn sponsor_collection() -> Weight { + Weight::from_ref_time(12_932_000) + .saturating_add(RocksDbWeight::get().reads(2 as u64)) + .saturating_add(RocksDbWeight::get().writes(1 as u64)) + } + // Storage: AppPromotion Admin (r:1 w:0) + // Storage: Common CollectionById (r:1 w:1) + fn stop_sponsoring_collection() -> Weight { + Weight::from_ref_time(12_453_000) + .saturating_add(RocksDbWeight::get().reads(2 as u64)) + .saturating_add(RocksDbWeight::get().writes(1 as u64)) + } + // Storage: AppPromotion Admin (r:1 w:0) + // Storage: EvmContractHelpers Sponsoring (r:0 w:1) + fn sponsor_contract() -> Weight { + Weight::from_ref_time(11_952_000) + .saturating_add(RocksDbWeight::get().reads(1 as u64)) + .saturating_add(RocksDbWeight::get().writes(1 as u64)) + } + // Storage: AppPromotion Admin (r:1 w:0) + // Storage: EvmContractHelpers Sponsoring (r:1 w:1) + fn stop_sponsoring_contract() -> Weight { + Weight::from_ref_time(12_538_000) + .saturating_add(RocksDbWeight::get().reads(2 as u64)) + .saturating_add(RocksDbWeight::get().writes(1 as u64)) + } +} diff --git a/pallets/common/CHANGELOG.md b/pallets/common/CHANGELOG.md new file mode 100644 index 0000000000..a2edeaa3d5 --- /dev/null +++ b/pallets/common/CHANGELOG.md @@ -0,0 +1,63 @@ +# Change Log + +All notable changes to this project will be documented in this file. 
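The autogenerated weight functions in the hunk above all share one shape: a constant ref-time base, an optional per-item slope, and DB read/write terms taken from the runtime's `DbWeight`. A rough sketch of how such a formula evaluates, using plain integers rather than the real `Weight` type; the per-operation DB costs below are assumed RocksDB defaults, not values read from this runtime:

```rust
// Plain-integer sketch of the benchmark formula used by `on_initialize(b)` above:
//   weight(b) = base + slope * b + read_cost * (1 + b) + write_cost * b
// Values are picoseconds of ref-time; DB costs are assumed defaults.
const BASE: u64 = 2_651_000;
const SLOPE: u64 = 6_024_000;
const DB_READ: u64 = 25_000_000; // assumed per-read cost
const DB_WRITE: u64 = 100_000_000; // assumed per-write cost

fn on_initialize_ref_time(b: u64) -> u64 {
    BASE.saturating_add(SLOPE.saturating_mul(b))
        .saturating_add(DB_READ.saturating_mul(1 + b))
        .saturating_add(DB_WRITE.saturating_mul(b))
}

fn main() {
    // Cost grows linearly with the number of pending unstakes processed in the block.
    for b in [0u64, 1, 10] {
        println!("b = {b:>2}: {} ps", on_initialize_ref_time(b));
    }
}
```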
+ +## [0.1.8] - 2022-08-24 + +## Added + - Eth methods for collection + + set_collection_sponsor_substrate + + has_collection_pending_sponsor + + remove_collection_sponsor + + get_collection_sponsor +- Add convert function from `uint256` to `CrossAccountId`. + +## [0.1.7] - 2022-08-19 + +### Added + + - Add convert funtion from `CrossAccountId` to eth `uint256`. + + +## [0.1.6] - 2022-08-16 + +### Added +- New Ethereum API methods: changeOwner, changeOwner(Substrate) and verifyOwnerOrAdmin(Substrate). + + +## [v0.1.5] 2022-08-16 + +### Other changes + +- build: Upgrade polkadot to v0.9.27 2c498572636f2b34d53b1c51b7283a761a7dc90a + +- build: Upgrade polkadot to v0.9.26 85515e54c4ca1b82a2630034e55dcc804c643bf8 + +- refactor: Switch to new prefix removal methods 26734e9567589d75cdd99e404eabf11d5a97d975 + +New methods allows to call `remove_prefix` with limit multiple times +in the same block +However, we don't use prefix removal limits, so upgrade is +straightforward + +Upstream-Change: https://github.com/paritytech/substrate/pull/11490 + +- build: Upgrade polkadot to v0.9.25 cdfb9bdc7b205ff1b5134f034ef9973d769e5e6b + +## [0.1.3] - 2022-07-25 +### Add +- Some static property keys and values. + +## [0.1.2] - 2022-07-20 + +### Fixed + +- Some methods in `#[solidity_interface]` for `CollectionHandle` had invalid + mutability modifiers, causing invalid stub/abi generation. + +## [0.1.1] - 2022-07-14 + +### Added + + - Implementation of RPC method `token_owners` returning 10 owners in no particular order. + This was an internal request to improve the web interface and support fractionalization event. diff --git a/pallets/common/Cargo.toml b/pallets/common/Cargo.toml index 28f0e5774a..645e45bb75 100644 --- a/pallets/common/Cargo.toml +++ b/pallets/common/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "pallet-common" -version = "0.1.0" +version = "0.1.8" license = "GPLv3" edition = "2021" @@ -11,18 +11,18 @@ package = 'parity-scale-codec' version = '3.1.2' [dependencies] -frame-support = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -frame-system = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-runtime = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-std = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-core = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -frame-benchmarking = { default-features = false, optional = true, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -fp-evm-mapping = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } +frame-support = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +frame-system = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-runtime = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-std = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-core = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +frame-benchmarking = { default-features = false, optional = true, git = 
"https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +fp-evm-mapping = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } up-data-structs = { default-features = false, path = '../../primitives/data-structs' } pallet-evm-coder-substrate = { default-features = false, path = '../../pallets/evm-coder-substrate' } evm-coder = { default-features = false, path = '../../crates/evm-coder' } ethereum = { version = "0.12.0", default-features = false } -pallet-evm = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } +pallet-evm = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } serde = { version = "1.0.130", default-features = false } scale-info = { version = "2.0.1", default-features = false, features = [ @@ -41,5 +41,7 @@ std = [ "pallet-evm/std", ] runtime-benchmarks = [ - "frame-benchmarking" + "frame-benchmarking/runtime-benchmarks", + "up-data-structs/runtime-benchmarks", ] +try-runtime = ["frame-support/try-runtime"] diff --git a/pallets/common/src/benchmarking.rs b/pallets/common/src/benchmarking.rs index a07b3a9a69..6dd8994fae 100644 --- a/pallets/common/src/benchmarking.rs +++ b/pallets/common/src/benchmarking.rs @@ -19,8 +19,8 @@ use crate::{Config, CollectionHandle, Pallet}; use pallet_evm::account::CrossAccountId; use frame_benchmarking::{benchmarks, account}; use up_data_structs::{ - CollectionMode, CreateCollectionData, CollectionId, Property, PropertyKey, PropertyValue, - CollectionPermissions, NestingPermissions, MAX_COLLECTION_NAME_LENGTH, + CollectionMode, CollectionFlags, CreateCollectionData, CollectionId, Property, PropertyKey, + PropertyValue, CollectionPermissions, NestingPermissions, MAX_COLLECTION_NAME_LENGTH, MAX_COLLECTION_DESCRIPTION_LENGTH, MAX_TOKEN_PREFIX_LENGTH, MAX_PROPERTIES_PER_ITEM, }; use frame_support::{ @@ -116,7 +116,9 @@ fn create_collection( create_collection_raw( owner, CollectionMode::NFT, - |owner, data| >::init_collection(owner, data, true), + |owner: T::CrossAccountId, data| { + >::init_collection(owner.clone(), owner, data, CollectionFlags::default()) + }, |h| h, ) } diff --git a/pallets/common/src/dispatch.rs b/pallets/common/src/dispatch.rs index 4a5cdbc5e6..3a75e52599 100644 --- a/pallets/common/src/dispatch.rs +++ b/pallets/common/src/dispatch.rs @@ -1,12 +1,15 @@ +//! Module with interfaces for dispatching collections. + use frame_support::{ dispatch::{ DispatchResultWithPostInfo, PostDispatchInfo, Weight, DispatchErrorWithPostInfo, DispatchResult, }, - weights::Pays, + dispatch::Pays, traits::Get, }; -use up_data_structs::{CollectionId, CreateCollectionData}; +use sp_runtime::DispatchError; +use up_data_structs::{CollectionId, CreateCollectionData, CollectionFlags}; use crate::{pallet::Config, CommonCollectionOperations, CollectionHandle}; @@ -16,11 +19,14 @@ pub fn dispatch_weight() -> Weight { // Read collection ::DbWeight::get().reads(1) // Dynamic dispatch? - + 6_000_000 + + Weight::from_ref_time(6_000_000) // submit_logs is measured as part of collection pallets } -/// Helper function to implement substrate calls for common collection methods +/// Helper function to implement substrate calls for common collection methods. +/// +/// * `collection` - The collection on which to call the method. +/// * `call` - The function in which to call the corresponding method from [`CommonCollectionOperations`]. 
pub fn dispatch_tx< T: Config, C: FnOnce(&dyn CommonCollectionOperations) -> DispatchResultWithPostInfo, @@ -64,15 +70,33 @@ pub fn dispatch_tx< result } +/// Interface for working with different collections through the dispatcher. pub trait CollectionDispatch { + /// Create a collection. The collection will be created according to the value of [`data.mode`](CreateCollectionData::mode). + /// + /// * `sender` - The user who will become the owner of the collection. + /// * `data` - Description of the created collection. fn create( sender: T::CrossAccountId, + payer: T::CrossAccountId, data: CreateCollectionData, - ) -> DispatchResult; + flags: CollectionFlags, + ) -> Result; + + /// Delete the collection. + /// + /// * `sender` - The owner of the collection. + /// * `handle` - Collection handle. fn destroy(sender: T::CrossAccountId, handle: CollectionHandle) -> DispatchResult; + /// Get a specialized collection from the handle. + /// + /// * `handle` - Collection handle. fn dispatch(handle: CollectionHandle) -> Self; + + /// Get the collection handle for the corresponding implementation. fn into_inner(self) -> CollectionHandle; + /// Get the implementation of [`CommonCollectionOperations`]. fn as_dyn(&self) -> &dyn CommonCollectionOperations; } diff --git a/pallets/common/src/erc.rs b/pallets/common/src/erc.rs index 54f1de6100..5a5121a265 100644 --- a/pallets/common/src/erc.rs +++ b/pallets/common/src/erc.rs @@ -14,44 +14,67 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . +//! This module contains the implementation of pallet methods for evm. + use evm_coder::{ solidity_interface, solidity, ToLog, types::*, execution::{Result, Error}, + weight, }; pub use pallet_evm::{PrecompileOutput, PrecompileResult, PrecompileHandle, account::CrossAccountId}; use pallet_evm_coder_substrate::dispatch_to_evm; use sp_std::vec::Vec; use up_data_structs::{ - Property, SponsoringRateLimit, OwnerRestrictedSet, AccessMode, CollectionPermissions, + AccessMode, CollectionMode, CollectionPermissions, OwnerRestrictedSet, Property, + SponsoringRateLimit, SponsorshipState, }; use alloc::format; -use crate::{Pallet, CollectionHandle, Config, CollectionProperties}; +use crate::{ + Pallet, CollectionHandle, Config, CollectionProperties, SelfWeightOf, + eth::{ + convert_cross_account_to_uint256, convert_uint256_to_cross_account, + convert_cross_account_to_tuple, + }, + weights::WeightInfo, +}; +/// Events for ethereum collection helper. #[derive(ToLog)] pub enum CollectionHelpersEvents { + /// The collection has been created. CollectionCreated { + /// Collection owner. #[indexed] owner: address, + + /// Collection ID. #[indexed] collection_id: address, }, } /// Does not always represent a full collection, for RFT it is either -/// collection (Implementing ERC721), or specific collection token (Implementing ERC20) +/// collection (Implementing ERC721), or specific collection token (Implementing ERC20). pub trait CommonEvmHandler { const CODE: &'static [u8]; + /// Call precompiled handle. fn call(self, handle: &mut impl PrecompileHandle) -> Option; } -#[solidity_interface(name = "Collection")] +/// @title A contract that allows you to work with collections. +#[solidity_interface(name = Collection)] impl CollectionHandle where - T::AccountId: From<[u8; 32]>, + T::AccountId: From<[u8; 32]> + AsRef<[u8; 32]>, { + /// Set collection property. + /// + /// @param key Property key. + /// @param value Propery value. 
+ #[weight(>::set_collection_properties(1))] fn set_collection_property( &mut self, caller: caller, @@ -68,7 +91,13 @@ where .map_err(dispatch_to_evm::) } + /// Delete collection property. + /// + /// @param key Property key. + #[weight(>::delete_collection_properties(1))] fn delete_collection_property(&mut self, caller: caller, key: string) -> Result<()> { + self.consume_store_reads_and_writes(1, 1)?; + let caller = T::CrossAccountId::from_eth(caller); let key = >::from(key) .try_into() @@ -77,7 +106,12 @@ where >::delete_collection_property(self, &caller, key).map_err(dispatch_to_evm::) } - /// Throws error if key not found + /// Get collection property. + /// + /// @dev Throws error if key not found. + /// + /// @param key Property key. + /// @return bytes The property corresponding to the key. fn collection_property(&self, key: string) -> Result { let key = >::from(key) .try_into() @@ -89,7 +123,14 @@ where Ok(prop.to_vec()) } + /// Set the sponsor of the collection. + /// + /// @dev In order for sponsorship to work, it must be confirmed on behalf of the sponsor. + /// + /// @param sponsor Address of the sponsor from whose account funds will be debited for operations with the contract. fn set_collection_sponsor(&mut self, caller: caller, sponsor: address) -> Result { + self.consume_store_reads_and_writes(1, 1)?; + check_is_owner_or_admin(caller, self)?; let sponsor = T::CrossAccountId::from_eth(sponsor); @@ -98,7 +139,41 @@ where save(self) } + // TODO: Temprorary off. Need refactor + // /// Set the substrate sponsor of the collection. + // /// + // /// @dev In order for sponsorship to work, it must be confirmed on behalf of the sponsor. + // /// + // /// @param sponsor Substrate address of the sponsor from whose account funds will be debited for operations with the contract. + // fn set_collection_sponsor_substrate( + // &mut self, + // caller: caller, + // sponsor: uint256, + // ) -> Result { + // self.consume_store_reads_and_writes(1, 1)?; + + // check_is_owner_or_admin(caller, self)?; + + // let sponsor = convert_uint256_to_cross_account::(sponsor); + // self.set_sponsor(sponsor.as_sub().clone()) + // .map_err(dispatch_to_evm::)?; + // save(self) + // } + + /// Whether there is a pending sponsor. + fn has_collection_pending_sponsor(&self) -> Result { + Ok(matches!( + self.collection.sponsorship, + SponsorshipState::Unconfirmed(_) + )) + } + + /// Collection sponsorship confirmation. + /// + /// @dev After setting the sponsor for the collection, it must be confirmed with this function. fn confirm_collection_sponsorship(&mut self, caller: caller) -> Result { + self.consume_store_writes(1)?; + let caller = T::CrossAccountId::from_eth(caller); if !self .confirm_sponsorship(caller.as_sub()) @@ -109,8 +184,47 @@ where save(self) } + /// Remove collection sponsor. + fn remove_collection_sponsor(&mut self, caller: caller) -> Result { + self.consume_store_reads_and_writes(1, 1)?; + check_is_owner_or_admin(caller, self)?; + self.remove_sponsor().map_err(dispatch_to_evm::)?; + save(self) + } + + /// Get current sponsor. + /// + /// @return Tuble with sponsor address and his substrate mirror. If there is no confirmed sponsor error "Contract has no sponsor" throw. 
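`collection_sponsor`, documented just above and implemented below, encodes a cross account as a pair: an Ethereum address with a zero mirror, or a zero address with the 32-byte Substrate key packed big-endian into a `uint256`. A dependency-free sketch of that encoding using raw byte arrays in place of `address`/`uint256`:

```rust
// Either half of a cross account; exactly one side of the returned pair is non-zero.
enum CrossAccount {
    Eth([u8; 20]),
    Sub([u8; 32]), // 32-byte Substrate public key
}

// (address, uint256) represented here as raw big-endian byte arrays.
fn to_eth_tuple(acc: &CrossAccount) -> ([u8; 20], [u8; 32]) {
    match acc {
        CrossAccount::Eth(addr) => (*addr, [0u8; 32]),
        // A Substrate key is already 32 bytes, so its "uint256 mirror" is simply
        // the key read as a big-endian integer.
        CrossAccount::Sub(key) => ([0u8; 20], *key),
    }
}

fn main() {
    let (addr, mirror) = to_eth_tuple(&CrossAccount::Sub([0xAA; 32]));
    assert_eq!((addr, mirror), ([0u8; 20], [0xAA; 32]));

    let (addr, mirror) = to_eth_tuple(&CrossAccount::Eth([0x11; 20]));
    assert_eq!((addr, mirror), ([0x11; 20], [0u8; 32]));
}
```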
+ fn collection_sponsor(&self) -> Result<(address, uint256)> { + let sponsor = match self.collection.sponsorship.sponsor() { + Some(sponsor) => sponsor, + None => return Ok(Default::default()), + }; + let sponsor = T::CrossAccountId::from_sub(sponsor.clone()); + let result: (address, uint256) = if sponsor.is_canonical_substrate() { + let sponsor = convert_cross_account_to_uint256::(&sponsor); + (Default::default(), sponsor) + } else { + let sponsor = *sponsor.as_eth(); + (sponsor, Default::default()) + }; + Ok(result) + } + + /// Set limits for the collection. + /// @dev Throws error if limit not found. + /// @param limit Name of the limit. Valid names: + /// "accountTokenOwnershipLimit", + /// "sponsoredDataSize", + /// "sponsoredDataRateLimit", + /// "tokenLimit", + /// "sponsorTransferTimeout", + /// "sponsorApproveTimeout" + /// @param value Value of the limit. #[solidity(rename_selector = "setCollectionLimit")] fn set_int_limit(&mut self, caller: caller, limit: string, value: uint32) -> Result { + self.consume_store_reads_and_writes(1, 1)?; + check_is_owner_or_admin(caller, self)?; let mut limits = self.limits.clone(); @@ -145,8 +259,17 @@ where save(self) } + /// Set limits for the collection. + /// @dev Throws error if limit not found. + /// @param limit Name of the limit. Valid names: + /// "ownerCanTransfer", + /// "ownerCanDestroy", + /// "transfersEnabled" + /// @param value Value of the limit. #[solidity(rename_selector = "setCollectionLimit")] fn set_bool_limit(&mut self, caller: caller, limit: string, value: bool) -> Result { + self.consume_store_reads_and_writes(1, 1)?; + check_is_owner_or_admin(caller, self)?; let mut limits = self.limits.clone(); @@ -172,51 +295,73 @@ where save(self) } - fn contract_address(&self, _caller: caller) -> Result
<address>
{ + /// Get contract address. + fn contract_address(&self) -> Result
{ Ok(crate::eth::collection_id_to_address(self.id)) } - fn add_collection_admin_substrate(&self, caller: caller, new_admin: uint256) -> Result { - let caller = T::CrossAccountId::from_eth(caller); - let mut new_admin_arr: [u8; 32] = Default::default(); - new_admin.to_big_endian(&mut new_admin_arr); - let account_id = T::AccountId::from(new_admin_arr); - let new_admin = T::CrossAccountId::from_sub(account_id); - >::toggle_admin(self, &caller, &new_admin, true).map_err(dispatch_to_evm::)?; - Ok(()) - } + // TODO: Temprorary off. Need refactor + // /// Add collection admin by substrate address. + // /// @param newAdmin Substrate administrator address. + // fn add_collection_admin_substrate( + // &mut self, + // caller: caller, + // new_admin: uint256, + // ) -> Result { + // self.consume_store_writes(2)?; + + // let caller = T::CrossAccountId::from_eth(caller); + // let new_admin = convert_uint256_to_cross_account::(new_admin); + // >::toggle_admin(self, &caller, &new_admin, true).map_err(dispatch_to_evm::)?; + // Ok(()) + // } + + // TODO: Temprorary off. Need refactor + // /// Remove collection admin by substrate address. + // /// @param admin Substrate administrator address. + // fn remove_collection_admin_substrate( + // &mut self, + // caller: caller, + // admin: uint256, + // ) -> Result { + // self.consume_store_writes(2)?; + + // let caller = T::CrossAccountId::from_eth(caller); + // let admin = convert_uint256_to_cross_account::(admin); + // >::toggle_admin(self, &caller, &admin, false).map_err(dispatch_to_evm::)?; + // Ok(()) + // } + + /// Add collection admin. + /// @param newAdmin Address of the added administrator. + fn add_collection_admin(&mut self, caller: caller, new_admin: address) -> Result { + self.consume_store_writes(2)?; - fn remove_collection_admin_substrate( - &self, - caller: caller, - new_admin: uint256, - ) -> Result { - let caller = T::CrossAccountId::from_eth(caller); - let mut new_admin_arr: [u8; 32] = Default::default(); - new_admin.to_big_endian(&mut new_admin_arr); - let account_id = T::AccountId::from(new_admin_arr); - let new_admin = T::CrossAccountId::from_sub(account_id); - >::toggle_admin(self, &caller, &new_admin, false) - .map_err(dispatch_to_evm::)?; - Ok(()) - } - - fn add_collection_admin(&self, caller: caller, new_admin: address) -> Result { let caller = T::CrossAccountId::from_eth(caller); let new_admin = T::CrossAccountId::from_eth(new_admin); >::toggle_admin(self, &caller, &new_admin, true).map_err(dispatch_to_evm::)?; Ok(()) } - fn remove_collection_admin(&self, caller: caller, admin: address) -> Result { + /// Remove collection admin. + /// + /// @param admin Address of the removed administrator. + fn remove_collection_admin(&mut self, caller: caller, admin: address) -> Result { + self.consume_store_writes(2)?; + let caller = T::CrossAccountId::from_eth(caller); let admin = T::CrossAccountId::from_eth(admin); >::toggle_admin(self, &caller, &admin, false).map_err(dispatch_to_evm::)?; Ok(()) } + /// Toggle accessibility of collection nesting. + /// + /// @param enable If "true" degenerates to nesting: 'Owner' else to nesting: 'Disabled' #[solidity(rename_selector = "setCollectionNesting")] fn set_nesting_bool(&mut self, caller: caller, enable: bool) -> Result { + self.consume_store_reads_and_writes(1, 1)?; + check_is_owner_or_admin(caller, self)?; let mut permissions = self.collection.permissions.clone(); @@ -235,6 +380,10 @@ where save(self) } + /// Toggle accessibility of collection nesting. 
+ /// + /// @param enable If "true" degenerates to nesting: {OwnerRestricted: [1, 2, 3]} else to nesting: 'Disabled' + /// @param collections Addresses of collections that will be available for nesting. #[solidity(rename_selector = "setCollectionNesting")] fn set_nesting( &mut self, @@ -242,6 +391,8 @@ where enable: bool, collections: Vec
, ) -> Result { + self.consume_store_reads_and_writes(1, 1)?; + if collections.is_empty() { return Err("no addresses provided".into()); } @@ -258,9 +409,9 @@ where true => { let mut bv = OwnerRestrictedSet::new(); for i in collections { - bv.try_insert(crate::eth::map_eth_to_id(&i).ok_or(Error::Revert( - "Can't convert address into collection id".into(), - ))?) + bv.try_insert(crate::eth::map_eth_to_id(&i).ok_or_else(|| { + Error::Revert("Can't convert address into collection id".into()) + })?) .map_err(|_| "too many collections")?; } let mut nesting = permissions.nesting().clone(); @@ -280,7 +431,13 @@ where save(self) } + /// Set the collection access method. + /// @param mode Access mode + /// 0 for Normal + /// 1 for AllowList fn set_collection_access(&mut self, caller: caller, mode: uint8) -> Result { + self.consume_store_reads_and_writes(1, 1)?; + check_is_owner_or_admin(caller, self)?; let permissions = CollectionPermissions { access: Some(match mode { @@ -300,21 +457,80 @@ where save(self) } - fn add_to_collection_allow_list(&self, caller: caller, user: address) -> Result { + /// Checks that user allowed to operate with collection. + /// + /// @param user User address to check. + fn allowed(&self, user: address) -> Result { + Ok(Pallet::::allowed( + self.id, + T::CrossAccountId::from_eth(user), + )) + } + + /// Add the user to the allowed list. + /// + /// @param user Address of a trusted user. + fn add_to_collection_allow_list(&mut self, caller: caller, user: address) -> Result { + self.consume_store_writes(1)?; + let caller = T::CrossAccountId::from_eth(caller); let user = T::CrossAccountId::from_eth(user); >::toggle_allowlist(self, &caller, &user, true).map_err(dispatch_to_evm::)?; Ok(()) } - fn remove_from_collection_allow_list(&self, caller: caller, user: address) -> Result { + // TODO: Temprorary off. Need refactor + // /// Add substrate user to allowed list. + // /// + // /// @param user User substrate address. + // fn add_to_collection_allow_list_substrate( + // &mut self, + // caller: caller, + // user: uint256, + // ) -> Result { + // self.consume_store_writes(1)?; + + // let caller = T::CrossAccountId::from_eth(caller); + // let user = convert_uint256_to_cross_account::(user); + // Pallet::::toggle_allowlist(self, &caller, &user, true).map_err(dispatch_to_evm::)?; + // Ok(()) + // } + + /// Remove the user from the allowed list. + /// + /// @param user Address of a removed user. + fn remove_from_collection_allow_list(&mut self, caller: caller, user: address) -> Result { + self.consume_store_writes(1)?; + let caller = T::CrossAccountId::from_eth(caller); let user = T::CrossAccountId::from_eth(user); >::toggle_allowlist(self, &caller, &user, false).map_err(dispatch_to_evm::)?; Ok(()) } + // TODO: Temprorary off. Need refactor + // /// Remove substrate user from allowed list. + // /// + // /// @param user User substrate address. + // fn remove_from_collection_allow_list_substrate( + // &mut self, + // caller: caller, + // user: uint256, + // ) -> Result { + // self.consume_store_writes(1)?; + + // let caller = T::CrossAccountId::from_eth(caller); + // let user = convert_uint256_to_cross_account::(user); + // Pallet::::toggle_allowlist(self, &caller, &user, false).map_err(dispatch_to_evm::)?; + // Ok(()) + // } + + /// Switch permission for minting. + /// + /// @param mode Enable if "true". 
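The two `setCollectionNesting` overloads in the hunk above toggle between three states: disabled, owner-only, and owner-restricted to an explicit set of collections. A small sketch of that state model and the check it implies, with simplified types (the pallet's real types are `NestingPermissions` and a bounded `OwnerRestrictedSet`):

```rust
use std::collections::BTreeSet;

type CollectionId = u32;

// Simplified mirror of the three states the `setCollectionNesting` overloads switch between.
enum Nesting {
    Disabled,
    Owner,
    OwnerRestricted(BTreeSet<CollectionId>),
}

// Can a token from the `source` collection be nested under a token of this collection,
// given whether the caller owns the target token?
fn nesting_allowed(nesting: &Nesting, is_owner: bool, source: CollectionId) -> bool {
    match nesting {
        Nesting::Disabled => false,
        Nesting::Owner => is_owner,
        Nesting::OwnerRestricted(allowed) => is_owner && allowed.contains(&source),
    }
}

fn main() {
    let restricted = Nesting::OwnerRestricted([1, 2, 3].into_iter().collect());
    assert!(nesting_allowed(&restricted, true, 2));
    assert!(!nesting_allowed(&restricted, true, 7));
    assert!(!nesting_allowed(&Nesting::Disabled, true, 2));
}
```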
fn set_collection_mint_mode(&mut self, caller: caller, mode: bool) -> Result { + self.consume_store_reads_and_writes(1, 1)?; + check_is_owner_or_admin(caller, self)?; let permissions = CollectionPermissions { mint_mode: Some(mode), @@ -329,8 +545,88 @@ where save(self) } + + /// Check that account is the owner or admin of the collection + /// + /// @param user account to verify + /// @return "true" if account is the owner or admin + #[solidity(rename_selector = "isOwnerOrAdmin")] + fn is_owner_or_admin_eth(&self, user: address) -> Result { + let user = T::CrossAccountId::from_eth(user); + Ok(self.is_owner_or_admin(&user)) + } + + // TODO: Temprorary off. Need refactor + // /// Check that substrate account is the owner or admin of the collection + // /// + // /// @param user account to verify + // /// @return "true" if account is the owner or admin + // fn is_owner_or_admin_substrate(&self, user: uint256) -> Result { + // let user = convert_uint256_to_cross_account::(user); + // Ok(self.is_owner_or_admin(&user)) + // } + + /// Returns collection type + /// + /// @return `Fungible` or `NFT` or `ReFungible` + fn unique_collection_type(&self) -> Result { + let mode = match self.collection.mode { + CollectionMode::Fungible(_) => "Fungible", + CollectionMode::NFT => "NFT", + CollectionMode::ReFungible => "ReFungible", + }; + Ok(mode.into()) + } + + /// Get collection owner. + /// + /// @return Tuble with sponsor address and his substrate mirror. + /// If address is canonical then substrate mirror is zero and vice versa. + fn collection_owner(&self) -> Result<(address, uint256)> { + Ok(convert_cross_account_to_tuple::( + &T::CrossAccountId::from_sub(self.owner.clone()), + )) + } + + /// Changes collection owner to another account + /// + /// @dev Owner can be changed only by current owner + /// @param newOwner new owner account + #[solidity(rename_selector = "changeCollectionOwner")] + fn set_owner(&mut self, caller: caller, new_owner: address) -> Result { + self.consume_store_writes(1)?; + + let caller = T::CrossAccountId::from_eth(caller); + let new_owner = T::CrossAccountId::from_eth(new_owner); + self.set_owner_internal(caller, new_owner) + .map_err(dispatch_to_evm::) + } + + // TODO: Temprorary off. 
Need refactor + // /// Changes collection owner to another substrate account + // /// + // /// @dev Owner can be changed only by current owner + // /// @param newOwner new owner substrate account + // fn set_owner_substrate(&mut self, caller: caller, new_owner: uint256) -> Result { + // self.consume_store_writes(1)?; + + // let caller = T::CrossAccountId::from_eth(caller); + // let new_owner = convert_uint256_to_cross_account::(new_owner); + // self.set_owner_internal(caller, new_owner) + // .map_err(dispatch_to_evm::) + // } + + // TODO: need implement AbiWriter for &Vec + // fn collection_admins(&self) -> Result> { + // let result = pallet_common::IsAdmin::::iter_prefix((self.id,)) + // .map(|(admin, _)| pallet_common::eth::convert_cross_account_to_tuple::(&admin)) + // .collect(); + // Ok(result) + // } } +/// ### Note +/// Do not forget to add: `self.consume_store_reads(1)?;` fn check_is_owner_or_admin( caller: caller, collection: &CollectionHandle, @@ -338,22 +634,71 @@ fn check_is_owner_or_admin( let caller = T::CrossAccountId::from_eth(caller); collection .check_is_owner_or_admin(&caller) - .map_err(pallet_evm_coder_substrate::dispatch_to_evm::)?; + .map_err(dispatch_to_evm::)?; Ok(caller) } +/// ### Note +/// Do not forget to add: `self.consume_store_writes(1)?;` fn save(collection: &CollectionHandle) -> Result { - // TODO possibly delete for the lack of transaction collection .check_is_internal() .map_err(dispatch_to_evm::)?; - >::insert(collection.id, collection.collection.clone()); + collection.save().map_err(dispatch_to_evm::)?; Ok(()) } -pub fn token_uri_key() -> up_data_structs::PropertyKey { - b"tokenURI" - .to_vec() - .try_into() - .expect("length < limit; qed") +/// Contains static property keys and values. +pub mod static_property { + use evm_coder::{ + execution::{Result, Error}, + }; + use alloc::format; + + const EXPECT_CONVERT_ERROR: &str = "length < limit"; + + /// Keys. + pub mod key { + use super::*; + + /// Key "baseURI". + pub fn base_uri() -> up_data_structs::PropertyKey { + property_key_from_bytes(b"baseURI").expect(EXPECT_CONVERT_ERROR) + } + + /// Key "url". + pub fn url() -> up_data_structs::PropertyKey { + property_key_from_bytes(b"URI").expect(EXPECT_CONVERT_ERROR) + } + + /// Key "suffix". + pub fn suffix() -> up_data_structs::PropertyKey { + property_key_from_bytes(b"URISuffix").expect(EXPECT_CONVERT_ERROR) + } + + /// Key "parentNft". + pub fn parent_nft() -> up_data_structs::PropertyKey { + property_key_from_bytes(b"parentNft").expect(EXPECT_CONVERT_ERROR) + } + } + + /// Convert `byte` to [`PropertyKey`]. + pub fn property_key_from_bytes(bytes: &[u8]) -> Result { + bytes.to_vec().try_into().map_err(|_| { + Error::Revert(format!( + "Property key is too long. Max length is {}.", + up_data_structs::PropertyKey::bound() + )) + }) + } + + /// Convert `bytes` to [`PropertyValue`]. + pub fn property_value_from_bytes(bytes: &[u8]) -> Result { + bytes.to_vec().try_into().map_err(|_| { + Error::Revert(format!( + "Property key is too long. Max length is {}.", + up_data_structs::PropertyKey::bound() + )) + }) + } } diff --git a/pallets/common/src/eth.rs b/pallets/common/src/eth.rs index 3a49fb3d74..f25c65a6f4 100644 --- a/pallets/common/src/eth.rs +++ b/pallets/common/src/eth.rs @@ -14,8 +14,12 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . -use up_data_structs::CollectionId; +//! The module contains a number of functions for converting and checking ethereum identifiers. 
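The helpers added to this module below derive a collection's H160 address by fixing the first 16 bytes to a constant prefix and packing the collection id big-endian into the last 4 bytes. A self-contained roundtrip of that scheme with plain arrays; the prefix is the constant shown in the diff:

```rust
// 16-byte prefix from the diff; the remaining 4 bytes carry the collection id.
const PREFIX: [u8; 16] = [
    0x17, 0xc4, 0xe6, 0x45, 0x3c, 0xc4, 0x9a, 0xaa,
    0xae, 0xac, 0xa8, 0x94, 0xe6, 0xd9, 0x68, 0x3e,
];

fn collection_id_to_address(id: u32) -> [u8; 20] {
    let mut out = [0u8; 20];
    out[..16].copy_from_slice(&PREFIX);
    out[16..].copy_from_slice(&id.to_be_bytes());
    out
}

fn map_eth_to_id(addr: &[u8; 20]) -> Option<u32> {
    if addr[..16] != PREFIX {
        return None; // not a collection address
    }
    let mut id = [0u8; 4];
    id.copy_from_slice(&addr[16..]);
    Some(u32::from_be_bytes(id))
}

fn main() {
    // 0x17c4e6453cc49aaaaeaca894e6d9683e00000001 is collection 1, as noted in eth.rs.
    let addr = collection_id_to_address(1);
    assert_eq!(map_eth_to_id(&addr), Some(1));
    assert_eq!(map_eth_to_id(&[0u8; 20]), None);
}
```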
+ +use evm_coder::types::{uint256, address}; +pub use pallet_evm::account::{Config, CrossAccountId}; use sp_core::H160; +use up_data_structs::CollectionId; // 0x17c4e6453Cc49AAAaEACA894e6D9683e00000001 - collection 1 // TODO: Unhardcode prefix @@ -23,6 +27,7 @@ const ETH_COLLECTION_PREFIX: [u8; 16] = [ 0x17, 0xc4, 0xe6, 0x45, 0x3c, 0xc4, 0x9a, 0xaa, 0xae, 0xac, 0xa8, 0x94, 0xe6, 0xd9, 0x68, 0x3e, ]; +/// Maps the ethereum address of the collection in substrate. pub fn map_eth_to_id(eth: &H160) -> Option { if eth[0..16] != ETH_COLLECTION_PREFIX { return None; @@ -31,6 +36,8 @@ pub fn map_eth_to_id(eth: &H160) -> Option { id_bytes.copy_from_slice(ð[16..20]); Some(CollectionId(u32::from_be_bytes(id_bytes))) } + +/// Maps the substrate collection id in ethereum. pub fn collection_id_to_address(id: CollectionId) -> H160 { let mut out = [0; 20]; out[0..16].copy_from_slice(Ð_COLLECTION_PREFIX); @@ -38,6 +45,43 @@ pub fn collection_id_to_address(id: CollectionId) -> H160 { H160(out) } +/// Check if the ethereum address is a collection. pub fn is_collection(address: &H160) -> bool { address[0..16] == ETH_COLLECTION_PREFIX } + +/// Convert `CrossAccountId` to `uint256`. +pub fn convert_cross_account_to_uint256(from: &T::CrossAccountId) -> uint256 +where + T::AccountId: AsRef<[u8; 32]>, +{ + let slice = from.as_sub().as_ref(); + uint256::from_big_endian(slice) +} + +/// Convert `uint256` to `CrossAccountId`. +pub fn convert_uint256_to_cross_account(from: uint256) -> T::CrossAccountId +where + T::AccountId: From<[u8; 32]>, +{ + let mut new_admin_arr = [0_u8; 32]; + from.to_big_endian(&mut new_admin_arr); + let account_id = T::AccountId::from(new_admin_arr); + T::CrossAccountId::from_sub(account_id) +} + +/// Convert `CrossAccountId` to `(address, uint256)`. +pub fn convert_cross_account_to_tuple( + cross_account_id: &T::CrossAccountId, +) -> (address, uint256) +where + T::AccountId: AsRef<[u8; 32]>, +{ + if cross_account_id.is_canonical_substrate() { + let sub = convert_cross_account_to_uint256::(cross_account_id); + (Default::default(), sub) + } else { + let eth = *cross_account_id.as_eth(); + (eth, Default::default()) + } +} diff --git a/pallets/common/src/lib.rs b/pallets/common/src/lib.rs index 0525ebf78d..008a057d61 100644 --- a/pallets/common/src/lib.rs +++ b/pallets/common/src/lib.rs @@ -14,8 +14,43 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . +//! # Common pallet +//! +//! The Common pallet provides functionality for handling collections. +//! +//! ## Overview +//! +//! The Common pallet provides an interface for common collection operations for different collection types +//! (see [CommonCollectionOperations]), as well as a generic dispatcher for these, see [dispatch] module. +//! It also provides this functionality to EVM, see [erc] and [eth] modules. +//! +//! The Common pallet provides functions for: +//! +//! - Setting and approving collection sponsor. +//! - Get\set\delete allow list. +//! - Get\set\delete collection properties. +//! - Get\set\delete collection property permissions. +//! - Get\set\delete token property permissions. +//! - Get\set\delete collection administrators. +//! - Checking access permissions. +//! +//! ### Terminology +//! **Collection sponsor** - For the collection, you can set a sponsor, at whose expense it will +//! be possible to mint tokens. +//! +//! **Allow list** - List of users who have the right to minting tokens. +//! +//! 
**Collection properties** - Collection properties are simply key-value stores where various +//! metadata can be placed. +//! +//! **Permissions on token properties** - For each property in the token can be set permission +//! to change, see [`PropertyPermission`]. +//! +//! **Collection administrator** - For a collection, you can set administrators who have the right +//! to most actions on the collection. + +#![warn(missing_docs)] #![cfg_attr(not(feature = "std"), no_std)] - extern crate alloc; use core::ops::{Deref, DerefMut}; @@ -27,7 +62,7 @@ use frame_support::{ dispatch::{DispatchErrorWithPostInfo, DispatchResultWithPostInfo, Weight, PostDispatchInfo}, ensure, traits::{Imbalance, Get, Currency, WithdrawReasons, ExistenceRequirement}, - weights::Pays, + dispatch::Pays, transactional, }; use pallet_evm::GasWeightMapping; @@ -35,6 +70,8 @@ use up_data_structs::{ COLLECTION_NUMBER_LIMIT, Collection, RpcCollection, + CollectionFlags, + RpcCollectionFlags, CollectionId, CreateItemData, MAX_TOKEN_PREFIX_LENGTH, @@ -77,7 +114,6 @@ use up_data_structs::{ RmrkBoundedTheme, RmrkNftChild, CollectionPermissions, - SchemaVersion, }; pub use pallet::*; @@ -90,14 +126,24 @@ pub mod erc; pub mod eth; pub mod weights; +/// Weight info. pub type SelfWeightOf = ::WeightInfo; +/// Collection handle contains information about collection data and id. +/// Also provides functionality to count consumed gas. +/// +/// CollectionHandle is used as a generic wrapper for collections of all types. +/// It allows to perform common operations and queries on any collection type, +/// both completely general for all, as well as their respective implementations of [`CommonCollectionOperations`]. #[must_use = "Should call submit_logs or save, otherwise some data will be lost for evm side"] pub struct CollectionHandle { + /// Collection id pub id: CollectionId, collection: Collection, + /// Substrate recorder for counting consumed gas pub recorder: SubstrateRecorder, } + impl WithRecorder for CollectionHandle { fn recorder(&self) -> &SubstrateRecorder { &self.recorder @@ -106,7 +152,9 @@ impl WithRecorder for CollectionHandle { self.recorder } } + impl CollectionHandle { + /// Same as [CollectionHandle::new] but with an explicit gas limit. pub fn new_with_gas_limit(id: CollectionId, gas_limit: u64) -> Option { >::get(id).map(|collection| Self { id, @@ -115,6 +163,7 @@ impl CollectionHandle { }) } + /// Same as [CollectionHandle::new] but with an existed [`SubstrateRecorder`]. pub fn new_with_recorder(id: CollectionId, recorder: SubstrateRecorder) -> Option { >::get(id).map(|collection| Self { id, @@ -123,41 +172,72 @@ impl CollectionHandle { }) } + /// Retrives collection data from storage and creates collection handle with default parameters. + /// If collection not found return `None` pub fn new(id: CollectionId) -> Option { Self::new_with_gas_limit(id, u64::MAX) } + /// Same as [`CollectionHandle::new`] but if collection not found [CollectionNotFound](Error::CollectionNotFound) returned. pub fn try_get(id: CollectionId) -> Result { Ok(Self::new(id).ok_or(>::CollectionNotFound)?) } + /// Consume gas for reading. pub fn consume_store_reads(&self, reads: u64) -> evm_coder::execution::Result<()> { self.recorder - .consume_gas(T::GasWeightMapping::weight_to_gas( + .consume_gas(T::GasWeightMapping::weight_to_gas(Weight::from_ref_time( ::DbWeight::get() .read .saturating_mul(reads), - )) + ))) } + /// Consume gas for writing. 
pub fn consume_store_writes(&self, writes: u64) -> evm_coder::execution::Result<()> { self.recorder - .consume_gas(T::GasWeightMapping::weight_to_gas( + .consume_gas(T::GasWeightMapping::weight_to_gas(Weight::from_ref_time( ::DbWeight::get() .write .saturating_mul(writes), - )) + ))) + } + + /// Consume gas for reading and writing. + pub fn consume_store_reads_and_writes( + &self, + reads: u64, + writes: u64, + ) -> evm_coder::execution::Result<()> { + let weight = ::DbWeight::get(); + let reads = weight.read.saturating_mul(reads); + let writes = weight.read.saturating_mul(writes); + self.recorder + .consume_gas(T::GasWeightMapping::weight_to_gas(Weight::from_ref_time( + reads.saturating_add(writes), + ))) } - pub fn save(self) -> DispatchResult { - >::insert(self.id, self.collection); + + /// Save collection to storage. + pub fn save(&self) -> DispatchResult { + >::insert(self.id, &self.collection); Ok(()) } + /// Set collection sponsor. + /// + /// Unique collections allows sponsoring for certain actions. + /// This method allows you to set the sponsor of the collection. + /// In order for sponsorship to become active, it must be confirmed through [`Self::confirm_sponsorship`]. pub fn set_sponsor(&mut self, sponsor: T::AccountId) -> DispatchResult { self.collection.sponsorship = SponsorshipState::Unconfirmed(sponsor); Ok(()) } + /// Confirm sponsorship + /// + /// In order for the sponsorship to become active, the user set as the sponsor must confirm their participation. + /// Before confirming sponsorship, the user must be specified as the sponsor of the collection via [`Self::set_sponsor`]. pub fn confirm_sponsorship(&mut self, sender: &T::AccountId) -> Result { if self.collection.sponsorship.pending_sponsor() != Some(sender) { return Ok(false); @@ -167,10 +247,16 @@ impl CollectionHandle { Ok(true) } + /// Remove collection sponsor. + pub fn remove_sponsor(&mut self) -> DispatchResult { + self.collection.sponsorship = SponsorshipState::Disabled; + Ok(()) + } + /// Checks that the collection was created with, and must be operated upon through **Unique API**. - /// Now check only the `external_collection` flag and if it's **true**, then return `CollectionIsExternal` error. + /// Now check only the `external` flag and if it's **true**, then return [`Error::CollectionIsExternal`] error. pub fn check_is_internal(&self) -> DispatchResult { - if self.external_collection { + if self.flags.external { return Err(>::CollectionIsExternal)?; } @@ -178,9 +264,9 @@ impl CollectionHandle { } /// Checks that the collection was created with, and must be operated upon through an **assimilated API**. - /// Now check only the `external_collection` flag and if it's **false**, then return `CollectionIsInternal` error. + /// Now check only the `external` flag and if it's **false**, then return [`Error::CollectionIsInternal`] error. pub fn check_is_external(&self) -> DispatchResult { - if !self.external_collection { + if !self.flags.external { return Err(>::CollectionIsInternal)?; } @@ -203,23 +289,34 @@ impl DerefMut for CollectionHandle { } impl CollectionHandle { - pub fn check_is_owner(&self, subject: &T::CrossAccountId) -> DispatchResult { - ensure!(*subject.as_sub() == self.owner, >::NoPermission); + /// Checks if the `user` is the owner of the collection. 
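The `consume_store_*` helpers above turn a planned number of storage reads and writes into weight and then into EVM gas via `GasWeightMapping`, which is how the erc.rs methods pre-charge their storage cost before executing. A back-of-the-envelope version of that conversion; the per-operation costs and the weight-per-gas ratio are assumptions for illustration, not this runtime's configured values:

```rust
// Assumed costs (picoseconds of ref-time) and an assumed weight-to-gas ratio.
const READ_WEIGHT: u64 = 25_000_000;
const WRITE_WEIGHT: u64 = 100_000_000;
const WEIGHT_PER_GAS: u64 = 25_000; // ref-time per unit of EVM gas (illustrative)

/// Gas to pre-charge for `reads` storage reads and `writes` storage writes.
fn store_ops_to_gas(reads: u64, writes: u64) -> u64 {
    let weight = READ_WEIGHT
        .saturating_mul(reads)
        .saturating_add(WRITE_WEIGHT.saturating_mul(writes));
    weight / WEIGHT_PER_GAS
}

fn main() {
    // e.g. `set_collection_sponsor` consumes one read and one write up front.
    println!("1r + 1w => {} gas", store_ops_to_gas(1, 1));
}
```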
+ pub fn check_is_owner(&self, user: &T::CrossAccountId) -> DispatchResult { + ensure!(*user.as_sub() == self.owner, >::NoPermission); Ok(()) } - pub fn is_owner_or_admin(&self, subject: &T::CrossAccountId) -> bool { - *subject.as_sub() == self.owner || >::get((self.id, subject)) + + /// Returns **true** if the `user` is the owner or administrator of the collection. + pub fn is_owner_or_admin(&self, user: &T::CrossAccountId) -> bool { + *user.as_sub() == self.owner || >::get((self.id, user)) } - pub fn check_is_owner_or_admin(&self, subject: &T::CrossAccountId) -> DispatchResult { - ensure!(self.is_owner_or_admin(subject), >::NoPermission); + + /// Checks if the `user` is the owner or administrator of the collection. + pub fn check_is_owner_or_admin(&self, user: &T::CrossAccountId) -> DispatchResult { + ensure!(self.is_owner_or_admin(user), >::NoPermission); Ok(()) } + + /// Return **true** if `user` was not allowed to have tokens, and he can ignore such restrictions. pub fn ignores_allowance(&self, user: &T::CrossAccountId) -> bool { self.limits.owner_can_transfer() && self.is_owner_or_admin(user) } + + /// Return **true** if `user` does not have enough token parts, and he can ignore such restrictions. pub fn ignores_owned_amount(&self, user: &T::CrossAccountId) -> bool { self.limits.owner_can_transfer() && self.is_owner_or_admin(user) } + + /// Checks if the user is in the allow list. If not [Error::AddressNotInAllowlist] returns. pub fn check_allowlist(&self, user: &T::CrossAccountId) -> DispatchResult { ensure!( >::get((self.id, user)), @@ -227,6 +324,19 @@ impl CollectionHandle { ); Ok(()) } + + /// Changes collection owner to another account + /// #### Store read/writes + /// 1 writes + fn set_owner_internal( + &mut self, + caller: T::CrossAccountId, + new_owner: T::CrossAccountId, + ) -> DispatchResult { + self.check_is_owner(&caller)?; + self.collection.owner = new_owner.as_sub().clone(); + self.save() + } } #[frame_support::pallet] @@ -249,21 +359,34 @@ pub mod pallet { + TypeInfo + account::Config { + /// Weight information for functions of this pallet. type WeightInfo: WeightInfo; - type Event: IsType<::Event> + From>; + /// Events compatible with [`frame_system::Config::Event`]. + type RuntimeEvent: IsType<::RuntimeEvent> + From>; + + /// Handler of accounts and payment. type Currency: Currency; + /// Set price to create a collection. #[pallet::constant] type CollectionCreationPrice: Get< <::Currency as Currency>::Balance, >; + + /// Dispatcher of operations on collections. type CollectionDispatch: CollectionDispatch; + /// Account which holds the chain's treasury. type TreasuryAccountId: Get; + + /// Address under which the CollectionHelper contract would be available. type ContractAddress: Get; + /// Mapper for token addresses to Ethereum addresses. type EvmTokenAddressMapping: TokenAddressMapping; + + /// Mapper for token addresses to [`CrossAccountId`]. type CrossTokenAddressMapping: TokenAddressMapping; } @@ -276,6 +399,7 @@ pub mod pallet { #[pallet::extra_constants] impl Pallet { + /// Maximum admins per collection. pub fn collection_admins_limit() -> u32 { COLLECTION_ADMINS_LIMIT } @@ -285,94 +409,116 @@ pub mod pallet { #[pallet::generate_deposit(pub fn deposit_event)] pub enum Event { /// New collection was created - /// - /// # Arguments - /// - /// * collection_id: Globally unique identifier of newly created collection. - /// - /// * mode: [CollectionMode] converted into u8. - /// - /// * account_id: Collection owner. 
- CollectionCreated(CollectionId, u8, T::AccountId), + CollectionCreated( + /// Globally unique identifier of newly created collection. + CollectionId, + /// [`CollectionMode`] converted into _u8_. + u8, + /// Collection owner. + T::AccountId, + ), /// New collection was destroyed - /// - /// # Arguments - /// - /// * collection_id: Globally unique identifier of collection. - CollectionDestroyed(CollectionId), + CollectionDestroyed( + /// Globally unique identifier of collection. + CollectionId, + ), /// New item was created. - /// - /// # Arguments - /// - /// * collection_id: Id of the collection where item was created. - /// - /// * item_id: Id of an item. Unique within the collection. - /// - /// * recipient: Owner of newly created item - /// - /// * amount: Always 1 for NFT - ItemCreated(CollectionId, TokenId, T::CrossAccountId, u128), + ItemCreated( + /// Id of the collection where item was created. + CollectionId, + /// Id of an item. Unique within the collection. + TokenId, + /// Owner of newly created item + T::CrossAccountId, + /// Always 1 for NFT + u128, + ), /// Collection item was burned. - /// - /// # Arguments - /// - /// * collection_id. - /// - /// * item_id: Identifier of burned NFT. - /// - /// * owner: which user has destroyed its tokens - /// - /// * amount: Always 1 for NFT - ItemDestroyed(CollectionId, TokenId, T::CrossAccountId, u128), + ItemDestroyed( + /// Id of the collection where item was destroyed. + CollectionId, + /// Identifier of burned NFT. + TokenId, + /// Which user has destroyed its tokens. + T::CrossAccountId, + /// Amount of token pieces destroed. Always 1 for NFT. + u128, + ), /// Item was transferred - /// - /// * collection_id: Id of collection to which item is belong - /// - /// * item_id: Id of an item - /// - /// * sender: Original owner of item - /// - /// * recipient: New owner of item - /// - /// * amount: Always 1 for NFT Transfer( + /// Id of collection to which item is belong. CollectionId, + /// Id of an item. TokenId, + /// Original owner of item. T::CrossAccountId, + /// New owner of item. T::CrossAccountId, + /// Amount of token pieces transfered. Always 1 for NFT. u128, ), - /// * collection_id - /// - /// * item_id - /// - /// * sender - /// - /// * spender - /// - /// * amount + /// Amount pieces of token owned by `sender` was approved for `spender`. Approved( + /// Id of collection to which item is belong. CollectionId, + /// Id of an item. TokenId, + /// Original owner of item. T::CrossAccountId, + /// Id for which the approval was granted. T::CrossAccountId, + /// Amount of token pieces transfered. Always 1 for NFT. u128, ), - CollectionPropertySet(CollectionId, PropertyKey), + /// The colletion property has been added or edited. + CollectionPropertySet( + /// Id of collection to which property has been set. + CollectionId, + /// The property that was set. + PropertyKey, + ), - CollectionPropertyDeleted(CollectionId, PropertyKey), + /// The property has been deleted. + CollectionPropertyDeleted( + /// Id of collection to which property has been deleted. + CollectionId, + /// The property that was deleted. + PropertyKey, + ), - TokenPropertySet(CollectionId, TokenId, PropertyKey), + /// The token property has been added or edited. + TokenPropertySet( + /// Identifier of the collection whose token has the property set. + CollectionId, + /// The token for which the property was set. + TokenId, + /// The property that was set. 
+ PropertyKey, + ), - TokenPropertyDeleted(CollectionId, TokenId, PropertyKey), + /// The token property has been deleted. + TokenPropertyDeleted( + /// Identifier of the collection whose token has the property deleted. + CollectionId, + /// The token for which the property was deleted. + TokenId, + /// The property that was deleted. + PropertyKey, + ), - PropertyPermissionSet(CollectionId, PropertyKey), + /// The token property permission of a collection has been set. + PropertyPermissionSet( + /// ID of collection to which property permission has been set. + CollectionId, + /// The property permission that was set. + PropertyKey, + ), } #[pallet::error] @@ -413,26 +559,27 @@ pub mod pallet { /// Metadata flag frozen MetadataFlagFrozen, - /// Item not exists. + /// Item does not exist TokenNotFound, - /// Item balance not enough. + /// Item is balance not enough TokenValueTooLow, - /// Requested value more than approved. + /// Requested value is more than the approved ApprovedValueTooLow, /// Tried to approve more than owned CantApproveMoreThanOwned, /// Can't transfer tokens to ethereum zero address AddressIsZero, - /// Target collection doesn't supports this operation + + /// The operation is not supported UnsupportedOperation, - /// Not sufficient funds to perform action + /// Insufficient funds to perform an action NotSufficientFounds, - /// User not passed nesting rule + /// User does not satisfy the nesting rule UserIsNotAllowedToNest, - /// Only tokens from specific collections may nest tokens under this + /// Only tokens from specific collections may nest tokens under this one SourceCollectionIsNotAllowedToNest, /// Tried to store more data than allowed in collection field @@ -447,7 +594,7 @@ pub mod pallet { /// Property key is too long PropertyKeyIsTooLong, - /// Only ASCII letters, digits, and '_', '-' are allowed + /// Only ASCII letters, digits, and symbols `_`, `-`, and `.` are allowed InvalidCharacterInPropertyKey, /// Empty property keys are forbidden @@ -460,13 +607,16 @@ pub mod pallet { CollectionIsInternal, } + /// Storage of the count of created collections. Essentially contains the last collection ID. #[pallet::storage] pub type CreatedCollectionCount = StorageValue; + + /// Storage of the count of deleted collections. #[pallet::storage] pub type DestroyedCollectionCount = StorageValue; - /// Collection info + /// Storage of collection info. #[pallet::storage] pub type CollectionById = StorageMap< Hasher = Blake2_128Concat, @@ -475,7 +625,7 @@ pub mod pallet { QueryKind = OptionQuery, >; - /// Collection properties + /// Storage of collection properties. #[pallet::storage] #[pallet::getter(fn collection_properties)] pub type CollectionProperties = StorageMap< @@ -486,6 +636,7 @@ pub mod pallet { OnEmpty = up_data_structs::CollectionProperties, >; + /// Storage of token property permissions of a collection. #[pallet::storage] #[pallet::getter(fn property_permissions)] pub type CollectionPropertyPermissions = StorageMap< @@ -495,6 +646,7 @@ pub mod pallet { QueryKind = ValueQuery, >; + /// Storage of the amount of collection admins. #[pallet::storage] pub type AdminAmount = StorageMap< Hasher = Blake2_128Concat, @@ -503,7 +655,7 @@ pub mod pallet { QueryKind = ValueQuery, >; - /// List of collection admins + /// List of collection admins. #[pallet::storage] pub type IsAdmin = StorageNMap< Key = ( @@ -514,7 +666,7 @@ pub mod pallet { QueryKind = ValueQuery, >; - /// Allowlisted collection users + /// Allowlisted collection users. 
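`IsAdmin` above and the `Allowlist` that follows are N-keyed storage maps keyed by `(collection, account)`, queried either point-wise or by iterating a collection prefix, as `adminlist`/`allowlist` do further down. A plain-Rust sketch of that access pattern, with an ordered set standing in for the storage map:

```rust
use std::collections::BTreeSet;

type CollectionId = u32;
type Account = u64;

// Stand-in for the `IsAdmin` / `Allowlist` maps: membership keyed by
// (collection, account), with "everything under a collection" queries.
#[derive(Default)]
struct MembershipSet {
    inner: BTreeSet<(CollectionId, Account)>,
}

impl MembershipSet {
    fn toggle(&mut self, collection: CollectionId, who: Account, on: bool) {
        if on {
            self.inner.insert((collection, who));
        } else {
            self.inner.remove(&(collection, who));
        }
    }
    fn contains(&self, collection: CollectionId, who: Account) -> bool {
        self.inner.contains(&(collection, who))
    }
    // Analogous to iterating the `(collection,)` prefix.
    fn members(&self, collection: CollectionId) -> Vec<Account> {
        self.inner
            .range((collection, Account::MIN)..=(collection, Account::MAX))
            .map(|(_, a)| *a)
            .collect()
    }
}

fn main() {
    let mut allowlist = MembershipSet::default();
    allowlist.toggle(5, 100, true);
    allowlist.toggle(5, 200, true);
    allowlist.toggle(6, 300, true);
    assert!(allowlist.contains(5, 100));
    assert_eq!(allowlist.members(5), vec![100, 200]);
}
```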
#[pallet::storage] pub type Allowlist = StorageNMap< Key = ( @@ -525,7 +677,7 @@ pub mod pallet { QueryKind = ValueQuery, >; - /// Not used by code, exists only to provide some types to metadata + /// Not used by code, exists only to provide some types to metadata. #[pallet::storage] pub type DummyStorageValue = StorageValue< Value = ( @@ -555,13 +707,15 @@ pub mod pallet { fn on_runtime_upgrade() -> Weight { StorageVersion::new(1).put::>(); - 0 + Weight::zero() } } } impl Pallet { - /// Ethereum receiver 0x0000000000000000000000000000000000000000 is reserved, and shouldn't own tokens + /// Enshure that receiver address is correct. + /// + /// Ethereum receiver 0x0000000000000000000000000000000000000000 is reserved, and shouldn't own tokens. pub fn ensure_correct_receiver(receiver: &T::CrossAccountId) -> DispatchResult { ensure!( &T::CrossAccountId::from_eth(H160([0; 20])) != receiver, @@ -569,19 +723,27 @@ impl Pallet { ); Ok(()) } + + /// Get a vector of collection admins. pub fn adminlist(collection: CollectionId) -> Vec { >::iter_prefix((collection,)) .map(|(a, _)| a) .collect() } + + /// Get a vector of users allowed to mint tokens. pub fn allowlist(collection: CollectionId) -> Vec { >::iter_prefix((collection,)) .map(|(a, _)| a) .collect() } + + /// Is `user` allowed to mint token in `collection`. pub fn allowed(collection: CollectionId, user: T::CrossAccountId) -> bool { >::get((collection, user)) } + + /// Get statistics of collections. pub fn collection_stats() -> CollectionStats { let created = >::get(); let destroyed = >::get(); @@ -592,13 +754,9 @@ impl Pallet { } } + /// Get the effective limits for the collection. pub fn effective_collection_limits(collection: CollectionId) -> Option { - let collection = >::get(collection); - if collection.is_none() { - return None; - } - - let collection = collection.unwrap(); + let collection = >::get(collection)?; let limits = collection.limits; let effective_limits = CollectionLimits { account_token_ownership_limit: Some(limits.account_token_ownership_limit()), @@ -625,6 +783,7 @@ impl Pallet { Some(effective_limits) } + /// Returns information about the `collection` adapted for rpc. pub fn rpc_collection(collection: CollectionId) -> Option> { let Collection { name, @@ -635,7 +794,7 @@ impl Pallet { sponsorship, limits, permissions, - external_collection, + flags, } = >::get(collection)?; let token_property_permissions = >::get(collection) @@ -665,7 +824,12 @@ impl Pallet { permissions, token_property_permissions, properties, - read_only: external_collection, + read_only: flags.external, + + flags: RpcCollectionFlags { + foreign: flags.foreign, + erc721metadata: flags.erc721metadata, + }, }) } } @@ -700,10 +864,16 @@ macro_rules! limit_default_clone { } impl Pallet { + /// Create new collection. + /// + /// * `owner` - The owner of the collection. + /// * `data` - Description of the created collection. + /// * `flags` - Extra flags to store. 
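`init_collection`, whose new signature follows, now takes a separate `payer` and a `CollectionFlags` value instead of the old `external_collection: bool`, so several orthogonal markers (`external`, `foreign`, `erc721metadata`) travel together. A minimal sketch of that flags struct and the `check_is_internal` guard shown earlier, in plain Rust:

```rust
// Before: a single `external_collection: bool`. After: a small flags struct,
// so independent markers can be added without touching every call site.
#[derive(Clone, Copy, Default, Debug)]
struct CollectionFlags {
    external: bool,
    foreign: bool,
    erc721metadata: bool,
}

// Mirrors `check_is_internal` above: external collections must be operated
// through their own assimilated API, internal ones through the Unique API.
fn check_is_internal(flags: &CollectionFlags) -> Result<(), &'static str> {
    if flags.external {
        return Err("CollectionIsExternal");
    }
    Ok(())
}

fn main() {
    assert!(check_is_internal(&CollectionFlags::default()).is_ok());
    let external = CollectionFlags { external: true, ..Default::default() };
    assert_eq!(check_is_internal(&external), Err("CollectionIsExternal"));
}
```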
pub fn init_collection( owner: T::CrossAccountId, + payer: T::CrossAccountId, data: CreateCollectionData, - is_external: bool, + flags: CollectionFlags, ) -> Result { { ensure!( @@ -747,7 +917,7 @@ impl Pallet { Self::clamp_permissions(data.mode.clone(), &Default::default(), permissions) }) .unwrap_or_else(|| Ok(CollectionPermissions::default()))?, - external_collection: is_external, + flags, }; let mut collection_properties = up_data_structs::CollectionProperties::get(); @@ -775,7 +945,7 @@ impl Pallet { ), ); ::Currency::settle( - &owner.as_sub(), + payer.as_sub(), imbalance, WithdrawReasons::TRANSFER, ExistenceRequirement::KeepAlive, @@ -800,6 +970,10 @@ impl Pallet { Ok(id) } + /// Destroy collection. + /// + /// * `collection` - Collection handler. + /// * `sender` - The owner or administrator of the collection. pub fn destroy_collection( collection: CollectionHandle, sender: &T::CrossAccountId, @@ -820,14 +994,19 @@ impl Pallet { >::put(destroyed_collections); >::remove(collection.id); >::remove(collection.id); - >::remove_prefix((collection.id,), None); - >::remove_prefix((collection.id,), None); + let _ = >::clear_prefix((collection.id,), u32::MAX, None); + let _ = >::clear_prefix((collection.id,), u32::MAX, None); >::remove(collection.id); >::deposit_event(Event::CollectionDestroyed(collection.id)); Ok(()) } + /// Set collection property. + /// + /// * `collection` - Collection handler. + /// * `sender` - The owner or administrator of the collection. + /// * `property` - The property to set. pub fn set_collection_property( collection: &CollectionHandle, sender: &T::CrossAccountId, @@ -846,6 +1025,11 @@ impl Pallet { Ok(()) } + /// Set scoped collection property. + /// + /// * `collection_id` - ID of the collection for which the property is being set. + /// * `scope` - Property scope. + /// * `property` - The property to set. pub fn set_scoped_collection_property( collection_id: CollectionId, scope: PropertyScope, @@ -859,6 +1043,11 @@ impl Pallet { Ok(()) } + /// Set scoped collection properties. + /// + /// * `collection_id` - ID of the collection for which the properties are being set. + /// * `scope` - Property scope. + /// * `properties` - The properties to set. pub fn set_scoped_collection_properties( collection_id: CollectionId, scope: PropertyScope, @@ -872,6 +1061,11 @@ impl Pallet { Ok(()) } + /// Set collection properties. + /// + /// * `collection` - Collection handler. + /// * `sender` - The owner or administrator of the collection. + /// * `properties` - The properties to set. #[transactional] pub fn set_collection_properties( collection: &CollectionHandle, @@ -885,6 +1079,11 @@ impl Pallet { Ok(()) } + /// Delete collection property. + /// + /// * `collection` - Collection handler. + /// * `sender` - The owner or administrator of the collection. + /// * `property` - The property to delete. pub fn delete_collection_property( collection: &CollectionHandle, sender: &T::CrossAccountId, @@ -905,6 +1104,11 @@ impl Pallet { Ok(()) } + /// Delete collection properties. + /// + /// * `collection` - Collection handler. + /// * `sender` - The owner or administrator of the collection. + /// * `properties` - The properties to delete. #[transactional] pub fn delete_collection_properties( collection: &CollectionHandle, sender: &T::CrossAccountId, @@ -918,7 +1122,7 @@ impl Pallet { Ok(()) } - // For migrations + /// Set collection property permission without any checks. + /// + /// Used for migrations. + /// + /// * `collection` - Collection handler. + /// * `property_permissions` - Property permissions. 
pub fn set_property_permission_unchecked( collection: CollectionId, property_permission: PropertyKeyPermission, @@ -930,10 +1139,35 @@ impl Pallet { Ok(()) } + /// Set collection property permission. + /// + /// * `collection` - Collection handler. + /// * `sender` - The owner or administrator of the collection. + /// * `property_permission` - Property permission. pub fn set_property_permission( collection: &CollectionHandle, sender: &T::CrossAccountId, property_permission: PropertyKeyPermission, + ) -> DispatchResult { + Self::set_scoped_property_permission( + collection, + sender, + PropertyScope::None, + property_permission, + ) + } + + /// Set collection property permission with scope. + /// + /// * `collection` - Collection handler. + /// * `sender` - The owner or administrator of the collection. + /// * `scope` - Property scope. + /// * `property_permission` - Property permission. + pub fn set_scoped_property_permission( + collection: &CollectionHandle, + sender: &T::CrossAccountId, + scope: PropertyScope, + property_permission: PropertyKeyPermission, ) -> DispatchResult { collection.check_is_owner_or_admin(sender)?; @@ -948,7 +1182,11 @@ impl Pallet { CollectionPropertyPermissions::::try_mutate(collection.id, |permissions| { let property_permission = property_permission.clone(); - permissions.try_set(property_permission.key, property_permission.permission) + permissions.try_scoped_set( + scope, + property_permission.key, + property_permission.permission, + ) }) .map_err(>::from)?; @@ -960,19 +1198,46 @@ impl Pallet { Ok(()) } + /// Set token property permission. + /// + /// * `collection` - Collection handler. + /// * `sender` - The owner or administrator of the collection. + /// * `property_permissions` - Property permissions. #[transactional] pub fn set_token_property_permissions( collection: &CollectionHandle, sender: &T::CrossAccountId, property_permissions: Vec, + ) -> DispatchResult { + Self::set_scoped_token_property_permissions( + collection, + sender, + PropertyScope::None, + property_permissions, + ) + } + + /// Set token property permission with scope. + /// + /// * `collection` - Collection handler. + /// * `sender` - The owner or administrator of the collection. + /// * `scope` - Property scope. + /// * `property_permissions` - Property permissions. + #[transactional] + pub fn set_scoped_token_property_permissions( + collection: &CollectionHandle, + sender: &T::CrossAccountId, + scope: PropertyScope, + property_permissions: Vec, ) -> DispatchResult { for prop_pemission in property_permissions { - Self::set_property_permission(collection, sender, prop_pemission)?; + Self::set_scoped_property_permission(collection, sender, scope, prop_pemission)?; } Ok(()) } + /// Get collection property. pub fn get_collection_property( collection_id: CollectionId, key: &PropertyKey, @@ -980,6 +1245,7 @@ impl Pallet { Self::collection_properties(collection_id).get(key).cloned() } + /// Convert byte vector to property key vector. pub fn bytes_keys_to_property_keys( keys: Vec>, ) -> Result, DispatchError> { @@ -991,6 +1257,7 @@ impl Pallet { .collect::, DispatchError>>() } + /// Get properties according to given keys. pub fn filter_collection_properties( collection_id: CollectionId, keys: Option>, @@ -1018,6 +1285,7 @@ impl Pallet { Ok(properties) } + /// Get property permissions according to given keys. 
pub fn filter_property_permissions( collection_id: CollectionId, keys: Option>, @@ -1047,6 +1315,9 @@ impl Pallet { Ok(key_permissions) } + /// Toggle `user` participation in the `collection`'s allow list. + /// #### Store read/writes + /// 1 writes pub fn toggle_allowlist( collection: &CollectionHandle, sender: &T::CrossAccountId, @@ -1066,6 +1337,9 @@ impl Pallet { Ok(()) } + /// Toggle `user` participation in the `collection`'s admin list. + /// #### Store read/writes + /// 2 writes pub fn toggle_admin( collection: &CollectionHandle, sender: &T::CrossAccountId, @@ -1101,6 +1375,7 @@ impl Pallet { Ok(()) } + /// Merge set fields from `new_limit` to `old_limit`. pub fn clamp_limits( mode: CollectionMode, old_limit: &CollectionLimits, @@ -1146,65 +1421,122 @@ impl Pallet { Ok(new_limit) } + /// Merge set fields from `new_permission` to `old_permission`. pub fn clamp_permissions( _mode: CollectionMode, - old_limit: &CollectionPermissions, - mut new_limit: CollectionPermissions, + old_permission: &CollectionPermissions, + mut new_permission: CollectionPermissions, ) -> Result { - limit_default_clone!(old_limit, new_limit, + limit_default_clone!(old_permission, new_permission, access => {}, mint_mode => {}, nesting => { /* todo check for permissive, if only it gets out of benchmarks */ }, ); - Ok(new_limit) + Ok(new_permission) } } +/// Indicates unsupported methods by returning [Error::UnsupportedOperation]. #[macro_export] macro_rules! unsupported { - () => { - Err(>::UnsupportedOperation.into()) + ($runtime:path) => { + Err($crate::Error::<$runtime>::UnsupportedOperation.into()) }; } -/// Worst cases +/// Return weights for various worst-case operations. pub trait CommonWeightInfo { + /// Weight of item creation. fn create_item() -> Weight; + + /// Weight of items creation. fn create_multiple_items(amount: &[CreateItemData]) -> Weight; + + /// Weight of items creation. fn create_multiple_items_ex(cost: &CreateItemExData) -> Weight; + + /// The weight of the burning item. fn burn_item() -> Weight; + + /// Property setting weight. + /// + /// * `amount`- The number of properties to set. fn set_collection_properties(amount: u32) -> Weight; + + /// Collection property deletion weight. + /// + /// * `amount`- The number of properties to set. fn delete_collection_properties(amount: u32) -> Weight; + + /// Token property setting weight. + /// + /// * `amount`- The number of properties to set. fn set_token_properties(amount: u32) -> Weight; + + /// Token property deletion weight. + /// + /// * `amount`- The number of properties to delete. fn delete_token_properties(amount: u32) -> Weight; + + /// Token property permissions set weight. + /// + /// * `amount`- The number of property permissions to set. fn set_token_property_permissions(amount: u32) -> Weight; + + /// Transfer price of the token or its parts. fn transfer() -> Weight; + + /// The price of setting the permission of the operation from another user. fn approve() -> Weight; + + /// Transfer price from another user. fn transfer_from() -> Weight; + + /// The price of burning a token from another user. fn burn_from() -> Weight; /// Differs from burn_item in case of Fungible and Refungible, as it should burn - /// whole users's balance + /// whole users's balance. 
/// - /// This method shouldn't be used directly, as it doesn't count breadth price, use `burn_recursively` instead + /// This method shouldn't be used directly, as it doesn't count breadth price, use [burn_recursively](CommonWeightInfo::burn_recursively) instead fn burn_recursively_self_raw() -> Weight; - /// Cost of iterating over `amount` children while burning, without counting child burning itself + + /// Cost of iterating over `amount` children while burning, without counting child burning itself. /// - /// This method shouldn't be used directly, as it doesn't count depth price, use `burn_recursively` instead + /// This method shouldn't be used directly, as it doesn't count depth price, use [burn_recursively](CommonWeightInfo::burn_recursively) instead fn burn_recursively_breadth_raw(amount: u32) -> Weight; + /// The price of recursively burning a token. + /// + /// `max_selfs` - The maximum burning weight of the token itself. + /// `max_breadth` - The maximum number of nested tokens to burn. fn burn_recursively(max_selfs: u32, max_breadth: u32) -> Weight { Self::burn_recursively_self_raw() .saturating_mul(max_selfs.max(1) as u64) .saturating_add(Self::burn_recursively_breadth_raw(max_breadth)) } + + /// The price of retrieving the token owner + fn token_owner() -> Weight; } +/// Weight info extension trait for refungible pallet. pub trait RefungibleExtensionsWeightInfo { + /// Weight of token repartition. fn repartition() -> Weight; } +/// Common collection operations. +/// +/// It wraps methods in Fungible, Nonfungible and Refungible pallets +/// and adds weight info. pub trait CommonCollectionOperations { + /// Create token. + /// + /// * `sender` - The user who mints the token and pays for the transaction. + /// * `to` - The user who will own the token. + /// * `data` - Token data. + /// * `nesting_budget` - A budget that can be spent on nesting tokens. fn create_item( &self, sender: T::CrossAccountId, @@ -1212,6 +1544,13 @@ pub trait CommonCollectionOperations { data: CreateItemData, nesting_budget: &dyn Budget, ) -> DispatchResultWithPostInfo; + + /// Create multiple tokens. + /// + /// * `sender` - The user who mints the token and pays for the transaction. + /// * `to` - The user who will own the token. + /// * `data` - Token data. + /// * `nesting_budget` - A budget that can be spent on nesting tokens. fn create_multiple_items( &self, sender: T::CrossAccountId, @@ -1219,18 +1558,38 @@ pub trait CommonCollectionOperations { data: Vec, nesting_budget: &dyn Budget, ) -> DispatchResultWithPostInfo; + + /// Create multiple tokens. + /// + /// * `sender` - The user who mints the token and pays for the transaction. + /// * `to` - The user who will own the token. + /// * `data` - Token data. + /// * `nesting_budget` - A budget that can be spent on nesting tokens. fn create_multiple_items_ex( &self, sender: T::CrossAccountId, data: CreateItemExData, nesting_budget: &dyn Budget, ) -> DispatchResultWithPostInfo; + + /// Burn token. + /// + /// * `sender` - The user who owns the token. + /// * `token` - Token id that will be burned. + /// * `amount` - The number of parts of the token that will be burned. fn burn_item( &self, sender: T::CrossAccountId, token: TokenId, amount: u128, ) -> DispatchResultWithPostInfo; + + /// Burn token and all nested tokens recursively. + /// + /// * `sender` - The user who owns the token. + /// * `token` - Token id that will be burned. + /// * `self_budget` - The budget that can be spent on burning tokens. 
+ /// * `breadth_budget` - The budget that can be spent on burning nested tokens. fn burn_item_recursively( &self, sender: T::CrossAccountId, @@ -1238,43 +1597,95 @@ pub trait CommonCollectionOperations { self_budget: &dyn Budget, breadth_budget: &dyn Budget, ) -> DispatchResultWithPostInfo; + + /// Set collection properties. + /// + /// * `sender` - Must be either the owner of the collection or its admin. + /// * `properties` - Properties to be set. fn set_collection_properties( &self, sender: T::CrossAccountId, properties: Vec, ) -> DispatchResultWithPostInfo; + + /// Delete collection properties. + /// + /// * `sender` - Must be either the owner of the collection or its admin. + /// * `properties` - The properties to be removed. fn delete_collection_properties( &self, sender: &T::CrossAccountId, property_keys: Vec, ) -> DispatchResultWithPostInfo; + + /// Set token properties. + /// + /// The appropriate [`PropertyPermission`] for the token property + /// must be set with [`Self::set_token_property_permissions`]. + /// + /// * `sender` - Must be either the owner of the token or its admin. + /// * `token_id` - The token for which the properties are being set. + /// * `properties` - Properties to be set. + /// * `budget` - Budget for setting properties. fn set_token_properties( &self, sender: T::CrossAccountId, token_id: TokenId, - property: Vec, - nesting_budget: &dyn Budget, + properties: Vec, + budget: &dyn Budget, ) -> DispatchResultWithPostInfo; + + /// Remove token properties. + /// + /// The appropriate [`PropertyPermission`] for the token property + /// must be set with [`Self::set_token_property_permissions`]. + /// + /// * `sender` - Must be either the owner of the token or its admin. + /// * `token_id` - The token for which the properties are being removed. + /// * `property_keys` - Keys to remove corresponding properties. + /// * `budget` - Budget for removing properties. fn delete_token_properties( &self, sender: T::CrossAccountId, token_id: TokenId, property_keys: Vec, - nesting_budget: &dyn Budget, + budget: &dyn Budget, ) -> DispatchResultWithPostInfo; + + /// Set token property permissions. + /// + /// * `sender` - Must be either the owner of the token or its admin. + /// * `property_permissions` - Property permissions to be set. fn set_token_property_permissions( &self, sender: &T::CrossAccountId, property_permissions: Vec, ) -> DispatchResultWithPostInfo; + + /// Transfer amount of token pieces. + /// + /// * `sender` - Donor user. + /// * `to` - Recipient user. + /// * `token` - The token of which parts are being sent. + /// * `amount` - The number of parts of the token that will be transferred. + /// * `budget` - The maximum budget that can be spent on the transfer. fn transfer( &self, sender: T::CrossAccountId, to: T::CrossAccountId, token: TokenId, amount: u128, - nesting_budget: &dyn Budget, + budget: &dyn Budget, ) -> DispatchResultWithPostInfo; + + /// Grant access to another account to transfer parts of the token owned by the calling user via [Self::transfer_from]. + /// + /// * `sender` - The user who grants access to the token. + /// * `spender` - The user to whom the rights are granted. + /// * `token` - The token to which access is granted. + /// * `amount` - The amount of pieces that another user can dispose of. 
fn approve( &self, sender: T::CrossAccountId, @@ -1282,6 +1693,17 @@ pub trait CommonCollectionOperations { token: TokenId, amount: u128, ) -> DispatchResultWithPostInfo; + + /// Send parts of a token owned by another user. + /// + /// Before calling this method, you must grant rights to the calling user via [`Self::approve`]. + /// + /// * `sender` - The user who must have access to the token (see [`Self::approve`]). + /// * `from` - The user who owns the token. + /// * `to` - Recipient user. + /// * `token` - The token of which parts are being sent. + /// * `amount` - The number of parts of the token that will be transferred. + /// * `budget` - The maximum budget that can be spent on the transfer. fn transfer_from( &self, sender: T::CrossAccountId, @@ -1289,65 +1711,145 @@ pub trait CommonCollectionOperations { to: T::CrossAccountId, token: TokenId, amount: u128, - nesting_budget: &dyn Budget, + budget: &dyn Budget, ) -> DispatchResultWithPostInfo; + + /// Burn parts of a token owned by another user. + /// + /// Before calling this method, you must grant rights to the calling user via [`Self::approve`]. + /// + /// * `sender` - The user who must have access to the token (see [`Self::approve`]). + /// * `from` - The user who owns the token. + /// * `token` - The token of which parts are being burned. + /// * `amount` - The number of parts of the token that will be burned. + /// * `budget` - The maximum budget that can be spent on the burn. fn burn_from( &self, sender: T::CrossAccountId, from: T::CrossAccountId, token: TokenId, amount: u128, - nesting_budget: &dyn Budget, + budget: &dyn Budget, ) -> DispatchResultWithPostInfo; + /// Check permission to nest token. + /// + /// * `sender` - The user who initiated the check. + /// * `from` - The token that is checked for embedding. + /// * `under` - Token under which to check. + /// * `budget` - The maximum budget that can be spent on the check. fn check_nesting( &self, sender: T::CrossAccountId, from: (CollectionId, TokenId), under: TokenId, - nesting_budget: &dyn Budget, + budget: &dyn Budget, ) -> DispatchResult; + /// Nest one token into another. + /// + /// * `under` - Token holder. + /// * `to_nest` - Nested token. fn nest(&self, under: TokenId, to_nest: (CollectionId, TokenId)); + /// Unnest token. + /// + /// * `under` - Token holder. + /// * `to_nest` - Token to unnest. fn unnest(&self, under: TokenId, to_nest: (CollectionId, TokenId)); + /// Get all user tokens. + /// + /// * `account` - Account for which you need to get tokens. fn account_tokens(&self, account: T::CrossAccountId) -> Vec; + + /// Get all the tokens in the collection. fn collection_tokens(&self) -> Vec; + + /// Check if the token exists. + /// + /// * `token` - Token id to check. fn token_exists(&self, token: TokenId) -> bool; + + /// Get the id of the last minted token. fn last_token_id(&self) -> TokenId; + /// Get the owner of the token. + /// + /// * `token` - The token for which you need to find out the owner. fn token_owner(&self, token: TokenId) -> Option; + + /// Returns 10 token owners in no particular order. + /// + /// * `token` - The token for which you need to find out the owners. + fn token_owners(&self, token: TokenId) -> Vec; + + /// Get the value of the token property by key. + /// + /// * `token` - Token with the property to get. + /// * `key` - Property name. 
fn token_property(&self, token_id: TokenId, key: &PropertyKey) -> Option; - fn token_properties(&self, token_id: TokenId, keys: Option>) -> Vec; + + /// Get a set of token properties by key vector. + /// + /// * `token` - Token with the property to get. + /// * `keys` - Vector of property keys. If this parameter is [None](sp_std::result::Result), + /// then all properties are returned. + fn token_properties(&self, token: TokenId, keys: Option>) -> Vec; + /// Amount of unique collection tokens fn total_supply(&self) -> u32; - /// Amount of different tokens account has (Applicable to nonfungible/refungible) + + /// Amount of different tokens account has. + /// + /// * `account` - The account for which need to get the balance. fn account_balance(&self, account: T::CrossAccountId) -> u32; - /// Amount of specific token account have (Applicable to fungible/refungible) + + /// Amount of specific token account have. fn balance(&self, account: T::CrossAccountId, token: TokenId) -> u128; + + /// Amount of token pieces + fn total_pieces(&self, token: TokenId) -> Option; + + /// Get the number of parts of the token that a trusted user can manage. + /// + /// * `sender` - Trusted user. + /// * `spender` - Owner of the token. + /// * `token` - The token for which to get the value. fn allowance( &self, sender: T::CrossAccountId, spender: T::CrossAccountId, token: TokenId, ) -> u128; + + /// Get extension for RFT collection. fn refungible_extensions(&self) -> Option<&dyn RefungibleExtensions>; } +/// Extension for RFT collection. pub trait RefungibleExtensions where T: Config, { + /// Change the number of parts of the token. + /// + /// When the value changes down, this function is equivalent to burning parts of the token. + /// + /// * `sender` - The user calling the repartition operation. Must be the owner of the token. + /// * `token` - The token for which you want to change the number of parts. + /// * `amount` - The new value of the parts of the token. fn repartition( &self, - owner: &T::CrossAccountId, + sender: &T::CrossAccountId, token: TokenId, amount: u128, ) -> DispatchResultWithPostInfo; } -// Flexible enough for implementing CommonCollectionOperations +/// Merge [`DispatchResult`] with [`Weight`] into [`DispatchResultWithPostInfo`]. +/// +/// Used for [`CommonCollectionOperations`] implementations and flexible enough to do so. 
pub fn with_weight(res: DispatchResult, weight: Weight) -> DispatchResultWithPostInfo { let post_info = PostDispatchInfo { actual_weight: Some(weight), diff --git a/pallets/common/src/weights.rs b/pallets/common/src/weights.rs index 7d5417bdc2..756d94a9e6 100644 --- a/pallets/common/src/weights.rs +++ b/pallets/common/src/weights.rs @@ -42,19 +42,19 @@ pub struct SubstrateWeight(PhantomData); impl WeightInfo for SubstrateWeight { // Storage: Common CollectionProperties (r:1 w:1) fn set_collection_properties(b: u32, ) -> Weight { - (0 as Weight) + (Weight::from_ref_time(0)) // Standard Error: 142_818_000 - .saturating_add((2_786_252_000 as Weight).saturating_mul(b as Weight)) - .saturating_add(T::DbWeight::get().reads(1 as Weight)) - .saturating_add(T::DbWeight::get().writes(1 as Weight)) + .saturating_add(Weight::from_ref_time(2_786_252_000).saturating_mul(b as u64)) + .saturating_add(T::DbWeight::get().reads(1 as u64)) + .saturating_add(T::DbWeight::get().writes(1 as u64)) } // Storage: Common CollectionProperties (r:1 w:1) fn delete_collection_properties(b: u32, ) -> Weight { - (0 as Weight) + (Weight::from_ref_time(0)) // Standard Error: 101_087_000 - .saturating_add((2_739_521_000 as Weight).saturating_mul(b as Weight)) - .saturating_add(T::DbWeight::get().reads(1 as Weight)) - .saturating_add(T::DbWeight::get().writes(1 as Weight)) + .saturating_add(Weight::from_ref_time(2_739_521_000).saturating_mul(b as u64)) + .saturating_add(T::DbWeight::get().reads(1 as u64)) + .saturating_add(T::DbWeight::get().writes(1 as u64)) } } @@ -62,18 +62,18 @@ impl WeightInfo for SubstrateWeight { impl WeightInfo for () { // Storage: Common CollectionProperties (r:1 w:1) fn set_collection_properties(b: u32, ) -> Weight { - (0 as Weight) + (Weight::from_ref_time(0)) // Standard Error: 142_818_000 - .saturating_add((2_786_252_000 as Weight).saturating_mul(b as Weight)) - .saturating_add(RocksDbWeight::get().reads(1 as Weight)) - .saturating_add(RocksDbWeight::get().writes(1 as Weight)) + .saturating_add(Weight::from_ref_time(2_786_252_000).saturating_mul(b as u64)) + .saturating_add(RocksDbWeight::get().reads(1 as u64)) + .saturating_add(RocksDbWeight::get().writes(1 as u64)) } // Storage: Common CollectionProperties (r:1 w:1) fn delete_collection_properties(b: u32, ) -> Weight { - (0 as Weight) + (Weight::from_ref_time(0)) // Standard Error: 101_087_000 - .saturating_add((2_739_521_000 as Weight).saturating_mul(b as Weight)) - .saturating_add(RocksDbWeight::get().reads(1 as Weight)) - .saturating_add(RocksDbWeight::get().writes(1 as Weight)) + .saturating_add(Weight::from_ref_time(2_739_521_000).saturating_mul(b as u64)) + .saturating_add(RocksDbWeight::get().reads(1 as u64)) + .saturating_add(RocksDbWeight::get().writes(1 as u64)) } } diff --git a/pallets/configuration/CHANGELOG.md b/pallets/configuration/CHANGELOG.md new file mode 100644 index 0000000000..20e9e64fc7 --- /dev/null +++ b/pallets/configuration/CHANGELOG.md @@ -0,0 +1,6 @@ + +## [v0.1.1] 2022-08-16 + +### Other changes + +- build: Upgrade polkadot to v0.9.27 2c498572636f2b34d53b1c51b7283a761a7dc90a \ No newline at end of file diff --git a/pallets/configuration/Cargo.toml b/pallets/configuration/Cargo.toml new file mode 100644 index 0000000000..4065654fba --- /dev/null +++ b/pallets/configuration/Cargo.toml @@ -0,0 +1,34 @@ +[package] +name = "pallet-configuration" +version = "0.1.1" +edition = "2021" + +[dependencies] +parity-scale-codec = { version = "3.1.5", features = [ + "derive", +], default-features = false } +scale-info = { version 
= "2.0.1", default-features = false, features = [ + "derive", +] } +frame-support = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +frame-system = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-runtime = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-std = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-core = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-arithmetic = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +fp-evm = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } +smallvec = "1.6.1" + +[features] +default = ["std"] +std = [ + "parity-scale-codec/std", + "frame-support/std", + "frame-system/std", + "sp-runtime/std", + "sp-std/std", + "sp-core/std", + "sp-arithmetic/std", + "fp-evm/std", +] +try-runtime = ["frame-support/try-runtime"] diff --git a/pallets/configuration/src/lib.rs b/pallets/configuration/src/lib.rs new file mode 100644 index 0000000000..aefaf1c870 --- /dev/null +++ b/pallets/configuration/src/lib.rs @@ -0,0 +1,124 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . 
+ +#![cfg_attr(not(feature = "std"), no_std)] + +use core::marker::PhantomData; + +use frame_support::{ + pallet, + weights::{WeightToFeePolynomial, WeightToFeeCoefficients, WeightToFeeCoefficient, Weight}, + traits::Get, +}; +use sp_arithmetic::traits::{BaseArithmetic, Unsigned}; +use smallvec::smallvec; + +pub use pallet::*; +use sp_core::U256; +use sp_runtime::Perbill; + +#[pallet] +mod pallet { + use super::*; + use frame_support::{ + traits::Get, + pallet_prelude::{StorageValue, ValueQuery, DispatchResult}, + }; + use frame_system::{pallet_prelude::OriginFor, ensure_root}; + + #[pallet::config] + pub trait Config: frame_system::Config { + #[pallet::constant] + type DefaultWeightToFeeCoefficient: Get; + #[pallet::constant] + type DefaultMinGasPrice: Get; + } + + #[pallet::storage] + pub type WeightToFeeCoefficientOverride = StorageValue< + Value = u32, + QueryKind = ValueQuery, + OnEmpty = T::DefaultWeightToFeeCoefficient, + >; + + #[pallet::storage] + pub type MinGasPriceOverride = + StorageValue; + + #[pallet::call] + impl Pallet { + #[pallet::weight(T::DbWeight::get().writes(1))] + pub fn set_weight_to_fee_coefficient_override( + origin: OriginFor, + coeff: Option, + ) -> DispatchResult { + let _sender = ensure_root(origin)?; + if let Some(coeff) = coeff { + >::set(coeff); + } else { + >::kill(); + } + Ok(()) + } + + #[pallet::weight(T::DbWeight::get().writes(1))] + pub fn set_min_gas_price_override( + origin: OriginFor, + coeff: Option, + ) -> DispatchResult { + let _sender = ensure_root(origin)?; + if let Some(coeff) = coeff { + >::set(coeff); + } else { + >::kill(); + } + Ok(()) + } + } + + #[pallet::pallet] + #[pallet::generate_store(pub(super) trait Store)] + pub struct Pallet(_); +} + +pub struct WeightToFee(PhantomData<(T, B)>); + +impl WeightToFeePolynomial for WeightToFee +where + T: Config, + B: BaseArithmetic + From + Copy + Unsigned, +{ + type Balance = B; + + fn polynomial() -> WeightToFeeCoefficients { + smallvec!(WeightToFeeCoefficient { + coeff_integer: >::get().into(), + coeff_frac: Perbill::zero(), + negative: false, + degree: 1, + }) + } +} + +pub struct FeeCalculator(PhantomData); +impl fp_evm::FeeCalculator for FeeCalculator { + fn min_gas_price() -> (U256, Weight) { + ( + >::get().into(), + T::DbWeight::get().reads(1), + ) + } +} diff --git a/pallets/evm-coder-substrate/CHANGELOG.md b/pallets/evm-coder-substrate/CHANGELOG.md new file mode 100644 index 0000000000..ef5a3fde1e --- /dev/null +++ b/pallets/evm-coder-substrate/CHANGELOG.md @@ -0,0 +1,20 @@ +# Change Log + +All notable changes to this project will be documented in this file. 
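For context on how the new `pallet-configuration` code is meant to behave, here is a minimal, hypothetical sketch (the numbers and the helper name `fee_for` are illustrative, not part of this change): the `WeightToFee` polynomial above has a single degree-1 coefficient with no fractional part, so the fee charged for a weight is simply the stored coefficient (or its default) multiplied by the weight's ref-time, and `FeeCalculator` analogously reports the stored minimum gas price override to the EVM.

```rust
// Illustrative only: mirrors the degree-1, zero-fraction polynomial returned by
// `WeightToFee::polynomial()`. `coefficient` stands for the value kept in
// `WeightToFeeCoefficientOverride` (or its default), `ref_time` for the weight's ref-time.
fn fee_for(coefficient: u128, ref_time: u64) -> u128 {
    coefficient.saturating_mul(ref_time as u128)
}

fn main() {
    // With an (assumed) override of 1_000 and a 200_000 ref-time weight,
    // the charged fee is 1_000 * 200_000 = 200_000_000.
    assert_eq!(fee_for(1_000, 200_000), 200_000_000);
}
```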
+ + +## [v0.1.3] 2022-08-16 + +### Other changes + +- build: Upgrade polkadot to v0.9.27 2c498572636f2b34d53b1c51b7283a761a7dc90a + +- build: Upgrade polkadot to v0.9.26 85515e54c4ca1b82a2630034e55dcc804c643bf8 + +- build: Upgrade polkadot to v0.9.25 cdfb9bdc7b205ff1b5134f034ef9973d769e5e6b + +## [0.1.2] - 2022-08-12 + +### Fixed + + - Issue with error not being thrown when non existing function is called on collection contract diff --git a/pallets/evm-coder-substrate/Cargo.toml b/pallets/evm-coder-substrate/Cargo.toml index a5df5cf917..a24f275296 100644 --- a/pallets/evm-coder-substrate/Cargo.toml +++ b/pallets/evm-coder-substrate/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "pallet-evm-coder-substrate" -version = "0.1.0" +version = "0.1.3" license = "GPLv3" edition = "2021" @@ -8,15 +8,15 @@ edition = "2021" scale-info = { version = "2.0.1", default-features = false, features = [ "derive", ] } -sp-std = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-core = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } +sp-std = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-core = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } ethereum = { version = "0.12.0", default-features = false } evm-coder = { default-features = false, path = "../../crates/evm-coder" } -pallet-ethereum = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } -pallet-evm = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } -frame-support = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -frame-system = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -frame-benchmarking = { default-features = false, optional = true, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } +pallet-ethereum = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } +pallet-evm = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } +frame-support = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +frame-system = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +frame-benchmarking = { default-features = false, optional = true, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } up-data-structs = { default-features = false, path = "../../primitives/data-structs" } [dependencies.codec] @@ -39,3 +39,4 @@ std = [ 'frame-benchmarking/std', ] runtime-benchmarks = ['frame-benchmarking'] +try-runtime = ["frame-support/try-runtime"] diff --git a/pallets/evm-coder-substrate/src/lib.rs b/pallets/evm-coder-substrate/src/lib.rs index ca4cec60f5..6d9bc2644f 100644 --- a/pallets/evm-coder-substrate/src/lib.rs +++ b/pallets/evm-coder-substrate/src/lib.rs @@ -40,7 +40,7 @@ use sp_core::H160; use evm_coder::{ abi::{AbiReader, AbiWrite, AbiWriter}, - execution::{self, Result}, + execution, types::{Msg, value}, }; @@ -147,13 +147,13 @@ impl SubstrateRecorder { Ok(()) } - pub fn consume_sload(&self) -> 
Result<()> { + pub fn consume_sload(&self) -> execution::Result<()> { self.consume_gas(G_SLOAD_WORD) } - pub fn consume_sstore(&self) -> Result<()> { + pub fn consume_sstore(&self) -> execution::Result<()> { self.consume_gas(G_SSTORE_WORD) } - pub fn consume_gas(&self, gas: u64) -> Result<()> { + pub fn consume_gas(&self, gas: u64) -> execution::Result<()> { if gas == u64::MAX { return Err(execution::Error::Error(ExitError::OutOfGas)); } @@ -172,7 +172,7 @@ impl SubstrateRecorder { pub fn evm_to_precompile_output( self, handle: &mut impl PrecompileHandle, - result: evm_coder::execution::Result>, + result: execution::Result>, ) -> Option { use evm_coder::execution::Error; // We ignore error here, as it should not occur, as we have our own bookkeeping of gas @@ -198,7 +198,7 @@ impl SubstrateRecorder { } } -pub fn dispatch_to_evm(err: DispatchError) -> evm_coder::execution::Error { +pub fn dispatch_to_evm(err: DispatchError) -> execution::Error { use evm_coder::execution::Error as ExError; match err { DispatchError::Module(ModuleError { index, error, .. }) @@ -257,11 +257,12 @@ fn call_internal< e: &mut E, value: value, input: &[u8], -) -> evm_coder::execution::Result> { +) -> execution::Result> { let (selector, mut reader) = AbiReader::new_call(input)?; let call = C::parse(selector, &mut reader)?; if call.is_none() { - return Ok(None); + let selector = u32::from_be_bytes(selector); + return Err(format!("unrecognized selector: 0x{selector:0<8x}").into()); } let call = call.unwrap(); diff --git a/pallets/evm-contract-helpers/CHANGELOG.md b/pallets/evm-contract-helpers/CHANGELOG.md new file mode 100644 index 0000000000..2c86325fb0 --- /dev/null +++ b/pallets/evm-contract-helpers/CHANGELOG.md @@ -0,0 +1,36 @@ +# Change Log + +All notable changes to this project will be documented in this file. + +## [v0.3.0] 2022-09-05 + +### Added + +- Methods `force_set_sponsor` , `force_remove_sponsor` to be able to administer sponsorships with other pallets. Added to implement `AppPromotion` pallet logic. + +## [v0.2.0] - 2022-08-19 + +### Added + +- Set arbitrary evm address as contract sponsor. +- Ability to remove current sponsor. + +### Removed + +- Remove methods + - sponsoring_enabled + - toggle_sponsoring + +### Changed + +- Change `toggle_sponsoring` to `self_sponsored_enable`. 
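The sponsorship API described in the entries above is a two-step handshake: the contract owner nominates a sponsor, and the nominated account confirms. As a rough sketch at the pallet level (the runtime parameter `T`, the account values, and the helper name `sponsor_contract` are assumptions for illustration, not part of this diff):

```rust
use sp_core::H160;
use frame_support::dispatch::DispatchResult;
use pallet_evm_contract_helpers::{Config, Pallet as ContractHelpers};

/// Hypothetical helper showing the two-step sponsorship flow.
fn sponsor_contract<T: Config>(
    owner: &T::CrossAccountId,
    sponsor: &T::CrossAccountId,
    contract: H160,
) -> DispatchResult {
    // The owner nominates a sponsor; the state is stored as `Unconfirmed`.
    ContractHelpers::<T>::set_sponsor(owner, contract, sponsor)?;
    // The nominated account accepts, switching the state to `Confirmed`.
    ContractHelpers::<T>::confirm_sponsorship(sponsor, contract)?;
    Ok(())
}
```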
+ +## [v0.1.2] 2022-08-16 + +### Other changes + +- build: Upgrade polkadot to v0.9.27 2c498572636f2b34d53b1c51b7283a761a7dc90a + +- build: Upgrade polkadot to v0.9.26 85515e54c4ca1b82a2630034e55dcc804c643bf8 + +- build: Upgrade polkadot to v0.9.25 cdfb9bdc7b205ff1b5134f034ef9973d769e5e6b diff --git a/pallets/evm-contract-helpers/Cargo.toml b/pallets/evm-contract-helpers/Cargo.toml index ce3b25e880..287044a30e 100644 --- a/pallets/evm-contract-helpers/Cargo.toml +++ b/pallets/evm-contract-helpers/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "pallet-evm-contract-helpers" -version = "0.1.0" +version = "0.3.0" license = "GPLv3" edition = "2021" @@ -9,24 +9,28 @@ scale-info = { version = "2.0.1", default-features = false, features = [ "derive", ] } log = { default-features = false, version = "0.4.14" } +ethereum = { version = "0.12.0", default-features = false } # Substrate -frame-support = { default-features = false, git = 'https://github.com/paritytech/substrate', branch = 'polkadot-v0.9.24' } -frame-system = { default-features = false, git = 'https://github.com/paritytech/substrate', branch = 'polkadot-v0.9.24' } -sp-runtime = { default-features = false, git = 'https://github.com/paritytech/substrate', branch = 'polkadot-v0.9.24' } -sp-std = { default-features = false, git = 'https://github.com/paritytech/substrate', branch = 'polkadot-v0.9.24' } -sp-core = { default-features = false, git = 'https://github.com/paritytech/substrate', branch = 'polkadot-v0.9.24' } +frame-support = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +frame-system = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-runtime = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-std = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-core = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } # Unique -pallet-evm = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } -fp-evm-mapping = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } -up-sponsorship = { version = "0.1.0", default-features = false, git = "https://github.com/uniquenetwork/pallet-sponsoring", branch = "polkadot-v0.9.24" } +pallet-evm = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } +fp-evm-mapping = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } +up-sponsorship = { version = "0.1.0", default-features = false, git = "https://github.com/uniquenetwork/pallet-sponsoring", branch = "polkadot-v0.9.30" } # Locals evm-coder = { default-features = false, path = '../../crates/evm-coder' } pallet-common = { default-features = false, path = '../../pallets/common' } pallet-evm-coder-substrate = { default-features = false, path = '../../pallets/evm-coder-substrate' } -up-data-structs = { default-features = false, path = '../../primitives/data-structs', features = ['serde1'] } +pallet-evm-transaction-payment = { default-features = false, path = '../../pallets/evm-transaction-payment' } +up-data-structs = { default-features = false, path = '../../primitives/data-structs', features = [ + 'serde1', +] } 
[dependencies.codec] default-features = false @@ -47,3 +51,4 @@ std = [ "pallet-evm/std", "up-sponsorship/std", ] +try-runtime = ["frame-support/try-runtime"] diff --git a/pallets/evm-contract-helpers/README.md b/pallets/evm-contract-helpers/README.md new file mode 100644 index 0000000000..f320a12c2c --- /dev/null +++ b/pallets/evm-contract-helpers/README.md @@ -0,0 +1,13 @@ +# EVM Contract Helpers + +This pallet extends pallet-evm contracts with several new functions. + +## Overview + +The EVM contract helpers pallet provides: + +- Tracking and retrieving the user who deployed a contract +- Sponsoring EVM contract calls (making transaction calls free for users by paying for them from the contract address) +- Allowlist access mode + +As most of these functions are intended to be consumed by Ethereum users, the only API provided by this pallet is the [ContractHelpers magic contract](./src/stubs/ContractHelpers.sol) \ No newline at end of file diff --git a/pallets/evm-contract-helpers/src/eth.rs b/pallets/evm-contract-helpers/src/eth.rs index e4e6375f81..9d41395e42 100644 --- a/pallets/evm-contract-helpers/src/eth.rs +++ b/pallets/evm-contract-helpers/src/eth.rs @@ -14,22 +14,59 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . +//! Implementation of the magic contract + use core::marker::PhantomData; -use evm_coder::{abi::AbiWriter, execution::Result, generate_stubgen, solidity_interface, types::*}; -use pallet_evm_coder_substrate::{SubstrateRecorder, WithRecorder}; +use evm_coder::{ + abi::AbiWriter, execution::Result, generate_stubgen, solidity_interface, types::*, ToLog, +}; use pallet_evm::{ ExitRevert, OnCreate, OnMethodCall, PrecompileResult, PrecompileFailure, PrecompileHandle, account::CrossAccountId, }; -use sp_core::H160; +use pallet_evm_coder_substrate::{SubstrateRecorder, WithRecorder, dispatch_to_evm}; +use pallet_evm_transaction_payment::CallContext; +use sp_core::{H160, U256}; +use up_data_structs::SponsorshipState; use crate::{ - AllowlistEnabled, Config, Owner, Pallet, SponsorBasket, SponsoringRateLimit, SponsoringModeT, + AllowlistEnabled, Config, Owner, Pallet, SponsorBasket, SponsoringFeeLimit, + SponsoringRateLimit, SponsoringModeT, Sponsoring, }; use frame_support::traits::Get; use up_sponsorship::SponsorshipHandler; use sp_std::vec::Vec; -struct ContractHelpers(SubstrateRecorder); +/// Pallet events. +#[derive(ToLog)] +pub enum ContractHelpersEvents { + /// Contract sponsor was set. + ContractSponsorSet { + /// Address of the affected contract. + #[indexed] + contract_address: address, + /// New sponsor address. + sponsor: address, + }, + + /// New sponsor was confirmed. + ContractSponsorshipConfirmed { + /// Address of the affected contract. + #[indexed] + contract_address: address, + /// New sponsor address. + sponsor: address, + }, + + /// Contract sponsor was removed. + ContractSponsorRemoved { + /// Address of the affected contract. 
+ #[indexed] + contract_address: address, + }, +} + +/// See [`ContractHelpersCall`] +pub struct ContractHelpers(SubstrateRecorder); impl WithRecorder for ContractHelpers { fn recorder(&self) -> &SubstrateRecorder { &self.0 @@ -40,96 +77,274 @@ impl WithRecorder for ContractHelpers { } } -#[solidity_interface(name = "ContractHelpers")] -impl ContractHelpers { +/// @title Magic contract, which allows users to reconfigure other contracts +#[solidity_interface(name = ContractHelpers, events(ContractHelpersEvents))] +impl ContractHelpers +where + T::AccountId: AsRef<[u8; 32]>, +{ + /// Get the user who deployed the specified contract + /// @dev May return zero address if the contract was deployed + /// using the uniquenetwork evm-migration pallet, or by other means not + /// intended by pallet-evm + /// @dev Returns zero address if contract does not exist + /// @param contractAddress Contract to get owner of + /// @return address Owner of contract fn contract_owner(&self, contract_address: address) -> Result
{ Ok(>::get(contract_address)) } - fn sponsoring_enabled(&self, contract_address: address) -> Result { - Ok(>::sponsoring_mode(contract_address) != SponsoringModeT::Disabled) - } - - /// Deprecated - fn toggle_sponsoring( + /// Set sponsor. + /// @param contractAddress Contract for which a sponsor is being established. + /// @param sponsor User address to set as pending sponsor. + fn set_sponsor( &mut self, caller: caller, contract_address: address, - enabled: bool, + sponsor: address, ) -> Result { - >::ensure_owner(contract_address, caller)?; - >::toggle_sponsoring(contract_address, enabled); + self.recorder().consume_sload()?; + self.recorder().consume_sstore()?; + + Pallet::::set_sponsor( + &T::CrossAccountId::from_eth(caller), + contract_address, + &T::CrossAccountId::from_eth(sponsor), + ) + .map_err(dispatch_to_evm::)?; + + Ok(()) + } + + /// Set contract as self-sponsored. + /// + /// @param contractAddress Contract for which self-sponsoring is being enabled. + fn self_sponsored_enable(&mut self, caller: caller, contract_address: address) -> Result { + self.recorder().consume_sload()?; + self.recorder().consume_sstore()?; + + let caller = T::CrossAccountId::from_eth(caller); + + Pallet::::ensure_owner(contract_address, *caller.as_eth()) + .map_err(dispatch_to_evm::)?; + + Pallet::::force_set_sponsor( + contract_address, + &T::CrossAccountId::from_eth(contract_address), + ) + .map_err(dispatch_to_evm::)?; + + Ok(()) + } + + /// Remove sponsor. + /// + /// @param contractAddress Contract for which a sponsorship is being removed. + fn remove_sponsor(&mut self, caller: caller, contract_address: address) -> Result { + self.recorder().consume_sload()?; + self.recorder().consume_sstore()?; + + Pallet::::remove_sponsor(&T::CrossAccountId::from_eth(caller), contract_address) + .map_err(dispatch_to_evm::)?; + + Ok(()) + } + + /// Confirm sponsorship. + /// + /// @dev Caller must be the same as the one set via [`setSponsor`]. + /// + /// @param contractAddress Contract for which sponsorship needs to be confirmed. + fn confirm_sponsorship(&mut self, caller: caller, contract_address: address) -> Result { + self.recorder().consume_sload()?; + self.recorder().consume_sstore()?; + + Pallet::::confirm_sponsorship(&T::CrossAccountId::from_eth(caller), contract_address) + .map_err(dispatch_to_evm::)?; + + Ok(()) + } + + /// Get current sponsor. + /// + /// @param contractAddress The contract for which a sponsor is requested. + /// @return Tuple with the sponsor address and its substrate mirror. If there is no confirmed sponsor, the error "Contract has no sponsor" is thrown. + fn sponsor(&self, contract_address: address) -> Result<(address, uint256)> { + let sponsor = + Pallet::::get_sponsor(contract_address).ok_or("Contract has no sponsor")?; + Ok(pallet_common::eth::convert_cross_account_to_tuple::( + &sponsor, + )) + } + + /// Check that the contract has a confirmed sponsor. + /// + /// @param contractAddress The contract for which the presence of a confirmed sponsor is checked. + /// @return **true** if the contract has a confirmed sponsor. + fn has_sponsor(&self, contract_address: address) -> Result { + Ok(Pallet::::get_sponsor(contract_address).is_some()) + } + + /// Check that the contract has a pending sponsor. + /// + /// @param contractAddress The contract for which the presence of a pending sponsor is checked. + /// @return **true** if the contract has a pending sponsor. 
+ fn has_pending_sponsor(&self, contract_address: address) -> Result { + Ok(match Sponsoring::::get(contract_address) { + SponsorshipState::Disabled | SponsorshipState::Confirmed(_) => false, + SponsorshipState::Unconfirmed(_) => true, + }) + } + + fn sponsoring_enabled(&self, contract_address: address) -> Result { + Ok(>::sponsoring_mode(contract_address) != SponsoringModeT::Disabled) + } + fn set_sponsoring_mode( &mut self, caller: caller, contract_address: address, + // TODO: implement support for enums in evm-coder mode: uint8, ) -> Result { - >::ensure_owner(contract_address, caller)?; + self.recorder().consume_sload()?; + self.recorder().consume_sstore()?; + + >::ensure_owner(contract_address, caller).map_err(dispatch_to_evm::)?; let mode = SponsoringModeT::from_eth(mode).ok_or("unknown mode")?; >::set_sponsoring_mode(contract_address, mode); + Ok(()) } - fn sponsoring_mode(&self, contract_address: address) -> Result { - Ok(>::sponsoring_mode(contract_address).to_eth()) + /// Get current contract sponsoring rate limit + /// @param contractAddress Contract to get sponsoring rate limit of + /// @return uint32 Amount of blocks between two sponsored transactions + fn sponsoring_rate_limit(&self, contract_address: address) -> Result { + self.recorder().consume_sload()?; + + Ok(>::get(contract_address) + .try_into() + .map_err(|_| "rate limit > u32::MAX")?) } + /// Set contract sponsoring rate limit + /// @dev Sponsoring rate limit - is a minimum amount of blocks that should + /// pass between two sponsored transactions + /// @param contractAddress Contract to change sponsoring rate limit of + /// @param rateLimit Target rate limit + /// @dev Only contract owner can change this setting fn set_sponsoring_rate_limit( &mut self, caller: caller, contract_address: address, rate_limit: uint32, ) -> Result { - >::ensure_owner(contract_address, caller)?; + self.recorder().consume_sload()?; + self.recorder().consume_sstore()?; + + >::ensure_owner(contract_address, caller).map_err(dispatch_to_evm::)?; >::set_sponsoring_rate_limit(contract_address, rate_limit.into()); Ok(()) } - fn get_sponsoring_rate_limit(&self, contract_address: address) -> Result { - Ok(>::get(contract_address) - .try_into() - .map_err(|_| "rate limit > u32::MAX")?) 
+ /// Set contract sponsoring fee limit + /// @dev Sponsoring fee limit - is maximum fee that could be spent by + /// single transaction + /// @param contractAddress Contract to change sponsoring fee limit of + /// @param feeLimit Fee limit + /// @dev Only contract owner can change this setting + fn set_sponsoring_fee_limit( + &mut self, + caller: caller, + contract_address: address, + fee_limit: uint256, + ) -> Result { + self.recorder().consume_sload()?; + self.recorder().consume_sstore()?; + + >::ensure_owner(contract_address, caller).map_err(dispatch_to_evm::)?; + >::set_sponsoring_fee_limit(contract_address, fee_limit.into()) + .map_err(dispatch_to_evm::)?; + Ok(()) + } + + /// Get current contract sponsoring fee limit + /// @param contractAddress Contract to get sponsoring fee limit of + /// @return uint256 Maximum amount of fee that could be spent by single + /// transaction + fn sponsoring_fee_limit(&self, contract_address: address) -> Result { + self.recorder().consume_sload()?; + + Ok(get_sponsoring_fee_limit::(contract_address)) } + /// Is specified user present in contract allow list + /// @dev Contract owner always implicitly included + /// @param contractAddress Contract to check allowlist of + /// @param user User to check + /// @return bool Is specified users exists in contract allowlist fn allowed(&self, contract_address: address, user: address) -> Result { self.0.consume_sload()?; Ok(>::allowed(contract_address, user)) } - fn allowlist_enabled(&self, contract_address: address) -> Result { - Ok(>::get(contract_address)) - } - - fn toggle_allowlist( + /// Toggle user presence in contract allowlist + /// @param contractAddress Contract to change allowlist of + /// @param user Which user presence should be toggled + /// @param isAllowed `true` if user should be allowed to be sponsored + /// or call this contract, `false` otherwise + /// @dev Only contract owner can change this setting + fn toggle_allowed( &mut self, caller: caller, contract_address: address, - enabled: bool, + user: address, + is_allowed: bool, ) -> Result { - >::ensure_owner(contract_address, caller)?; - >::toggle_allowlist(contract_address, enabled); + self.recorder().consume_sload()?; + self.recorder().consume_sstore()?; + + >::ensure_owner(contract_address, caller).map_err(dispatch_to_evm::)?; + >::toggle_allowed(contract_address, user, is_allowed); + Ok(()) } - fn toggle_allowed( + /// Is this contract has allowlist access enabled + /// @dev Allowlist always can have users, and it is used for two purposes: + /// in case of allowlist sponsoring mode, users will be sponsored if they exist in allowlist + /// in case of allowlist access enabled, only users from allowlist may call this contract + /// @param contractAddress Contract to get allowlist access of + /// @return bool Is specified contract has allowlist access enabled + fn allowlist_enabled(&self, contract_address: address) -> Result { + Ok(>::get(contract_address)) + } + + /// Toggle contract allowlist access + /// @param contractAddress Contract to change allowlist access of + /// @param enabled Should allowlist access to be enabled? 
+ fn toggle_allowlist( &mut self, caller: caller, contract_address: address, - user: address, - allowed: bool, + enabled: bool, ) -> Result { - >::ensure_owner(contract_address, caller)?; - >::toggle_allowed(contract_address, user, allowed); + self.recorder().consume_sload()?; + self.recorder().consume_sstore()?; + + >::ensure_owner(contract_address, caller).map_err(dispatch_to_evm::)?; + >::toggle_allowlist(contract_address, enabled); Ok(()) } } +/// Implements [`OnMethodCall`], which delegates call to [`ContractHelpers`] pub struct HelpersOnMethodCall(PhantomData<*const T>); -impl OnMethodCall for HelpersOnMethodCall { +impl OnMethodCall for HelpersOnMethodCall +where + T::AccountId: AsRef<[u8; 32]>, +{ fn is_reserved(contract: &sp_core::H160) -> bool { contract == &T::ContractAddress::get() } @@ -167,6 +382,7 @@ impl OnMethodCall for HelpersOnMethodCall { } } +/// Hooks into contract creation, storing owner of newly deployed contract pub struct HelpersOnCreate(PhantomData<*const T>); impl OnCreate for HelpersOnCreate { fn on_create(owner: H160, contract: H160) { @@ -174,23 +390,35 @@ impl OnCreate for HelpersOnCreate { } } +/// Bridge to pallet-sponsoring pub struct HelpersContractSponsoring(PhantomData<*const T>); -impl SponsorshipHandler)> +impl SponsorshipHandler for HelpersContractSponsoring { - fn get_sponsor(who: &T::CrossAccountId, call: &(H160, Vec)) -> Option { - let mode = >::sponsoring_mode(call.0); + fn get_sponsor( + who: &T::CrossAccountId, + call_context: &CallContext, + ) -> Option { + let contract_address = call_context.contract_address; + let mode = >::sponsoring_mode(contract_address); if mode == SponsoringModeT::Disabled { return None; } - if mode == SponsoringModeT::Allowlisted && !>::allowed(call.0, *who.as_eth()) { + let sponsor = match >::get_sponsor(contract_address) { + Some(sponsor) => sponsor, + None => return None, + }; + + if mode == SponsoringModeT::Allowlisted + && !>::allowed(contract_address, *who.as_eth()) + { return None; } let block_number = >::block_number() as T::BlockNumber; - if let Some(last_tx_block) = >::get(&call.0, who.as_eth()) { - let limit = >::get(&call.0); + if let Some(last_tx_block) = >::get(contract_address, who.as_eth()) { + let limit = >::get(contract_address); let timeout = last_tx_block + limit; if block_number < timeout { @@ -198,12 +426,24 @@ impl SponsorshipHandler)> } } - >::insert(&call.0, who.as_eth(), block_number); + let sponsored_fee_limit = get_sponsoring_fee_limit::(contract_address); + + if call_context.max_fee > sponsored_fee_limit { + return None; + } + + >::insert(contract_address, who.as_eth(), block_number); - let sponsor = T::CrossAccountId::from_eth(call.0); Some(sponsor) } } +fn get_sponsoring_fee_limit(contract_address: address) -> uint256 { + >::get(contract_address) + .get(&0xffffffff) + .cloned() + .unwrap_or(U256::MAX) +} + generate_stubgen!(contract_helpers_impl, ContractHelpersCall<()>, true); generate_stubgen!(contract_helpers_iface, ContractHelpersCall<()>, false); diff --git a/pallets/evm-contract-helpers/src/lib.rs b/pallets/evm-contract-helpers/src/lib.rs index e18866cd9a..7cd363cc19 100644 --- a/pallets/evm-contract-helpers/src/lib.rs +++ b/pallets/evm-contract-helpers/src/lib.rs @@ -14,39 +14,66 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . 
+#![doc = include_str!("../README.md")] #![cfg_attr(not(feature = "std"), no_std)] +#![warn(missing_docs)] use codec::{Decode, Encode, MaxEncodedLen}; pub use pallet::*; pub use eth::*; use scale_info::TypeInfo; +use frame_support::storage::bounded_btree_map::BoundedBTreeMap; pub mod eth; +/// Maximum number of methods per contract that could have fee limit +pub const MAX_FEE_LIMITED_METHODS: u32 = 5; + #[frame_support::pallet] pub mod pallet { pub use super::*; - use evm_coder::execution::Result; + use crate::eth::ContractHelpersEvents; use frame_support::pallet_prelude::*; - use sp_core::H160; + use pallet_evm_coder_substrate::DispatchResult; + use sp_core::{H160, U256}; + use pallet_evm::{account::CrossAccountId, Pallet as PalletEvm}; + use up_data_structs::SponsorshipState; + use evm_coder::ToLog; #[pallet::config] pub trait Config: frame_system::Config + pallet_evm_coder_substrate::Config + pallet_evm::account::Config { + /// Overarching event type. + type RuntimeEvent: IsType<::RuntimeEvent> + From>; + + /// Address, under which magic contract will be available type ContractAddress: Get; + + /// In case of enabled sponsoring, but no sponsoring rate limit set, + /// this value will be used implicitly type DefaultSponsoringRateLimit: Get; } #[pallet::error] pub enum Error { - /// This method is only executable by owner + /// This method is only executable by contract owner NoPermission, + + /// No pending sponsor for contract. + NoPendingSponsor, + + /// Number of methods that sponsored limit is defined for exceeds maximum. + TooManyMethodsHaveSponsoredLimit, } #[pallet::pallet] #[pallet::generate_store(pub(super) trait Store)] pub struct Pallet(_); + /// Store owner for contract. + /// + /// * **Key** - contract address. + /// * **Value** - owner for contract. #[pallet::storage] pub(super) type Owner = StorageMap; @@ -56,10 +83,33 @@ pub mod pallet { pub(super) type SelfSponsoring = StorageMap; + /// Store for contract sponsorship state. + /// + /// * **Key** - contract address. + /// * **Value** - sponsorship state. + #[pallet::storage] + pub(super) type Sponsoring = StorageMap< + Hasher = Twox64Concat, + Key = H160, + Value = SponsorshipState, + QueryKind = ValueQuery, + >; + + /// Store for sponsoring mode. + /// + /// ### Usage + /// Prefer to delete collection from storage if mode chaged to [`Disabled`](SponsoringModeT::Disabled). + /// + /// * **Key** - contract address. + /// * **Value** - [`sponsoring mode`](SponsoringModeT). #[pallet::storage] pub(super) type SponsoringMode = StorageMap; + /// Storage for sponsoring rate limit in blocks. + /// + /// * **Key** - contract address. + /// * **Value** - amount of sponsored blocks. #[pallet::storage] pub(super) type SponsoringRateLimit = StorageMap< Hasher = Twox128, @@ -69,6 +119,19 @@ pub mod pallet { OnEmpty = T::DefaultSponsoringRateLimit, >; + /// Storage for last sponsored block. + /// + /// * **Key1** - contract address. + /// * **Key2** - sponsored user address. + /// * **Value** - last sponsored block number. + #[pallet::storage] + pub(super) type SponsoringFeeLimit = StorageMap< + Hasher = Twox128, + Key = H160, + Value = BoundedBTreeMap>, + QueryKind = ValueQuery, + >; + #[pallet::storage] pub(super) type SponsorBasket = StorageDoubleMap< Hasher1 = Twox128, @@ -79,10 +142,25 @@ pub mod pallet { QueryKind = OptionQuery, >; + /// Storege for contracts with [`Allowlisted`](SponsoringModeT::Allowlisted) sponsoring mode. 
+ /// + /// ### Usage + /// Prefer to delete the contract from storage if the mode is changed to a non-`Allowlisted` one, rather than setting **Value** to **false**. + /// + /// * **Key** - contract address. + /// * **Value** - whether the contract is in [`Allowlisted`](SponsoringModeT::Allowlisted) mode. #[pallet::storage] pub(super) type AllowlistEnabled = StorageMap; + /// Storage for users that are allowed for sponsorship. + /// + /// ### Usage + /// Prefer to delete the record from storage if the user is no longer allowed for sponsorship. + /// + /// * **Key1** - contract address. + /// * **Key2** - user allowed for sponsorship. + /// * **Value** - allowance for sponsorship. #[pallet::storage] pub(super) type Allowlist = StorageDoubleMap< Hasher1 = Twox128, @@ -93,7 +171,182 @@ pub mod pallet { QueryKind = ValueQuery, >; + #[pallet::event] + #[pallet::generate_deposit(pub fn deposit_event)] + pub enum Event { + /// Contract sponsor was set. + ContractSponsorSet( + /// Address of the affected contract. + H160, + /// New sponsor address. + T::AccountId, + ), + + /// New sponsor was confirmed. + ContractSponsorshipConfirmed( + /// Address of the affected contract. + H160, + /// New sponsor address. + T::AccountId, + ), + + /// Contract sponsor was removed. + ContractSponsorRemoved( + /// Address of the affected contract. + H160, + ), + } + impl Pallet { + /// Get contract owner. + pub fn contract_owner(contract: H160) -> H160 { + >::get(contract) + } + + /// Set `sponsor` for `contract`. + /// + /// `sender` must be the owner of the contract. + pub fn set_sponsor( + sender: &T::CrossAccountId, + contract: H160, + sponsor: &T::CrossAccountId, + ) -> DispatchResult { + Pallet::::ensure_owner(contract, *sender.as_eth())?; + Sponsoring::::insert( + contract, + SponsorshipState::::Unconfirmed(sponsor.clone()), + ); + + >::deposit_event(Event::::ContractSponsorSet( + contract, + sponsor.as_sub().clone(), + )); + >::deposit_log( + ContractHelpersEvents::ContractSponsorSet { + contract_address: contract, + sponsor: *sponsor.as_eth(), + } + .to_log(contract), + ); + Ok(()) + } + + /// Force set `sponsor` for `contract`. + /// + /// Differs from `set_sponsor` in that confirmation + /// from the sponsor is not required. + pub fn force_set_sponsor( + contract_address: H160, + sponsor: &T::CrossAccountId, + ) -> DispatchResult { + Sponsoring::::insert( + contract_address, + SponsorshipState::::Confirmed(sponsor.clone()), + ); + + let eth_sponsor = *sponsor.as_eth(); + let sub_sponsor = sponsor.as_sub().clone(); + + >::deposit_event(Event::::ContractSponsorSet( + contract_address, + sub_sponsor.clone(), + )); + >::deposit_log( + ContractHelpersEvents::ContractSponsorSet { + contract_address, + sponsor: eth_sponsor, + } + .to_log(contract_address), + ); + + >::deposit_event(Event::::ContractSponsorshipConfirmed( + contract_address, + sub_sponsor, + )); + >::deposit_log( + ContractHelpersEvents::ContractSponsorshipConfirmed { + contract_address, + sponsor: eth_sponsor, + } + .to_log(contract_address), + ); + + Ok(()) + } + + /// Remove sponsor for `contract`. + /// + /// `sender` must be the owner of the contract. + pub fn remove_sponsor( + sender: &T::CrossAccountId, + contract_address: H160, + ) -> DispatchResult { + Self::ensure_owner(contract_address, *sender.as_eth())?; + Self::force_remove_sponsor(contract_address) + } + + /// Force remove `sponsor` for `contract`. + /// + /// Differs from `remove_sponsor` in that + /// it doesn't require consent from the `owner` of the contract.
+ pub fn force_remove_sponsor(contract_address: H160) -> DispatchResult { + Sponsoring::::remove(contract_address); + + Self::deposit_event(Event::::ContractSponsorRemoved(contract_address)); + >::deposit_log( + ContractHelpersEvents::ContractSponsorRemoved { contract_address } + .to_log(contract_address), + ); + + Ok(()) + } + + /// Confirm sponsorship. + /// + /// `sender` must be the same as the one set via [`set_sponsor`]. + pub fn confirm_sponsorship( + sender: &T::CrossAccountId, + contract_address: H160, + ) -> DispatchResult { + match Sponsoring::::get(contract_address) { + SponsorshipState::Unconfirmed(sponsor) => { + ensure!(sponsor == *sender, Error::::NoPermission); + let eth_sponsor = *sponsor.as_eth(); + let sub_sponsor = sponsor.as_sub().clone(); + Sponsoring::::insert( + contract_address, + SponsorshipState::::Confirmed(sponsor), + ); + + >::deposit_event(Event::::ContractSponsorshipConfirmed( + contract_address, + sub_sponsor, + )); + >::deposit_log( + ContractHelpersEvents::ContractSponsorshipConfirmed { + contract_address, + sponsor: eth_sponsor, + } + .to_log(contract_address), + ); + + Ok(()) + } + SponsorshipState::Disabled | SponsorshipState::Confirmed(_) => { + Err(Error::::NoPendingSponsor.into()) + } + } + } + + /// Get the confirmed sponsor, if any. + pub fn get_sponsor(contract: H160) -> Option { + match Sponsoring::::get(contract) { + SponsorshipState::Disabled | SponsorshipState::Unconfirmed(_) => None, + SponsorshipState::Confirmed(sponsor) => Some(sponsor), + } + } + + /// Get current sponsoring mode, performing lazy migration from legacy storage pub fn sponsoring_mode(contract: H160) -> SponsoringModeT { >::get(contract) .or_else(|| { @@ -101,6 +354,8 @@ pub mod pallet { }) .unwrap_or_default() } + + /// Reconfigure contract sponsoring mode pub fn set_sponsoring_mode(contract: H160, mode: SponsoringModeT) { if mode == SponsoringModeT::Disabled { >::remove(contract); @@ -110,44 +365,53 @@ pub mod pallet { >::remove(contract) } - pub fn toggle_sponsoring(contract: H160, enabled: bool) { - Self::set_sponsoring_mode( - contract, - if enabled { - SponsoringModeT::Allowlisted - } else { - SponsoringModeT::Disabled - }, - ) - } - + /// Set the minimum duration (in blocks) between two sponsored contract calls pub fn set_sponsoring_rate_limit(contract: H160, rate_limit: T::BlockNumber) { + >::insert(contract, rate_limit); } + /// Set the maximum fee that can be sponsored for a single transaction + pub fn set_sponsoring_fee_limit(contract: H160, fee_limit: U256) -> DispatchResult { + >::try_mutate(contract, |limits_map| { + limits_map + .try_insert(0xffffffff, fee_limit) + .map_err(|_| >::TooManyMethodsHaveSponsoredLimit) + })?; + Ok(()) + } + + /// Check whether the user is in the allowlist or is the owner of the specified contract pub fn allowed(contract: H160, user: H160) -> bool { + >::get(&contract, &user) || >::get(&contract) == user } + /// Toggle contract allowlist access pub fn toggle_allowlist(contract: H160, enabled: bool) { + >::insert(contract, enabled) } + /// Toggle user presence in contract's allowlist pub fn toggle_allowed(contract: H160, user: H160, allowed: bool) { + >::insert(contract, user, allowed); } - pub fn ensure_owner(contract: H160, user: H160) -> Result<()> { - ensure!(>::get(&contract) == user, "no permission"); + /// Throw an error if the user is not allowed to reconfigure the target contract + pub fn ensure_owner(contract: H160, user: H160) -> DispatchResult { + ensure!(>::get(&contract) == user, Error::::NoPermission); Ok(()) } } } -#[derive(Encode, Decode, PartialEq, TypeInfo, MaxEncodedLen)] +/// Available contract sponsoring modes +#[derive(Encode,
Decode, PartialEq, TypeInfo, MaxEncodedLen, Default)] pub enum SponsoringModeT { + /// Sponsoring is disabled + #[default] Disabled, + /// Only users from allowlist will be sponsored Allowlisted, + /// All users will be sponsored Generous, } @@ -168,9 +432,3 @@ impl SponsoringModeT { } } } - -impl Default for SponsoringModeT { - fn default() -> Self { - Self::Disabled - } -} diff --git a/pallets/evm-contract-helpers/src/stubs/ContractHelpers.raw b/pallets/evm-contract-helpers/src/stubs/ContractHelpers.raw index 285ebeb3a6..23965de275 100644 Binary files a/pallets/evm-contract-helpers/src/stubs/ContractHelpers.raw and b/pallets/evm-contract-helpers/src/stubs/ContractHelpers.raw differ diff --git a/pallets/evm-contract-helpers/src/stubs/ContractHelpers.sol b/pallets/evm-contract-helpers/src/stubs/ContractHelpers.sol index 3de92cd40e..78098a6478 100644 --- a/pallets/evm-contract-helpers/src/stubs/ContractHelpers.sol +++ b/pallets/evm-contract-helpers/src/stubs/ContractHelpers.sol @@ -3,61 +3,143 @@ pragma solidity >=0.8.0 <0.9.0; -// Common stubs holder +/// @dev common stubs holder contract Dummy { uint8 dummy; string stub_error = "this contract is implemented in native"; } contract ERC165 is Dummy { - function supportsInterface(bytes4 interfaceID) - external - view - returns (bool) - { + function supportsInterface(bytes4 interfaceID) external view returns (bool) { require(false, stub_error); interfaceID; return true; } } -// Selector: 7b4866f9 -contract ContractHelpers is Dummy, ERC165 { - // Selector: contractOwner(address) 5152b14c - function contractOwner(address contractAddress) - public - view - returns (address) - { +/// @dev inlined interface +contract ContractHelpersEvents { + event ContractSponsorSet(address indexed contractAddress, address sponsor); + event ContractSponsorshipConfirmed(address indexed contractAddress, address sponsor); + event ContractSponsorRemoved(address indexed contractAddress); +} + +/// @title Magic contract, which allows users to reconfigure other contracts +/// @dev the ERC-165 identifier for this interface is 0x30afad04 +contract ContractHelpers is Dummy, ERC165, ContractHelpersEvents { + /// Get the user which deployed the specified contract + /// @dev May return the zero address if the contract was deployed + /// using the uniquenetwork evm-migration pallet, or by other means not + /// intended by pallet-evm + /// @dev Returns the zero address if the contract does not exist + /// @param contractAddress Contract to get owner of + /// @return address Owner of contract + /// @dev EVM selector for this function is: 0x5152b14c, + /// or in textual repr: contractOwner(address) + function contractOwner(address contractAddress) public view returns (address) { require(false, stub_error); contractAddress; dummy; return 0x0000000000000000000000000000000000000000; } - // Selector: sponsoringEnabled(address) 6027dc61 - function sponsoringEnabled(address contractAddress) - public - view - returns (bool) - { + /// Set sponsor. + /// @param contractAddress Contract for which a sponsor is being established. + /// @param sponsor User address to set as pending sponsor. + /// @dev EVM selector for this function is: 0xf01fba93, + /// or in textual repr: setSponsor(address,address) + function setSponsor(address contractAddress, address sponsor) public { + require(false, stub_error); + contractAddress; + sponsor; + dummy = 0; + } + + /// Set contract as self sponsored. + /// + /// @param contractAddress Contract for which self sponsoring is being enabled.
+ /// @dev EVM selector for this function is: 0x89f7d9ae, + /// or in textual repr: selfSponsoredEnable(address) + function selfSponsoredEnable(address contractAddress) public { + require(false, stub_error); + contractAddress; + dummy = 0; + } + + /// Remove sponsor. + /// + /// @param contractAddress Contract for which a sponsorship is being removed. + /// @dev EVM selector for this function is: 0xef784250, + /// or in textual repr: removeSponsor(address) + function removeSponsor(address contractAddress) public { + require(false, stub_error); + contractAddress; + dummy = 0; + } + + /// Confirm sponsorship. + /// + /// @dev Caller must be the same as the one set via [`setSponsor`]. + /// + /// @param contractAddress Contract for which sponsorship needs to be confirmed. + /// @dev EVM selector for this function is: 0xabc00001, + /// or in textual repr: confirmSponsorship(address) + function confirmSponsorship(address contractAddress) public { + require(false, stub_error); + contractAddress; + dummy = 0; + } + + /// Get current sponsor. + /// + /// @param contractAddress The contract for which a sponsor is requested. + /// @return Tuple with the sponsor address and its substrate mirror. If there is no confirmed sponsor, the error "Contract has no sponsor" is thrown. + /// @dev EVM selector for this function is: 0x766c4f37, + /// or in textual repr: sponsor(address) + function sponsor(address contractAddress) public view returns (Tuple0 memory) { + require(false, stub_error); + contractAddress; + dummy; + return Tuple0(0x0000000000000000000000000000000000000000, 0); + } + + /// Check whether the contract has a confirmed sponsor. + /// + /// @param contractAddress The contract for which the presence of a confirmed sponsor is checked. + /// @return **true** if the contract has a confirmed sponsor. + /// @dev EVM selector for this function is: 0x97418603, + /// or in textual repr: hasSponsor(address) + function hasSponsor(address contractAddress) public view returns (bool) { require(false, stub_error); contractAddress; dummy; return false; } - // Deprecated - // - // Selector: toggleSponsoring(address,bool) fcac6d86 - function toggleSponsoring(address contractAddress, bool enabled) public { + /// Check whether the contract has a pending sponsor. + /// + /// @param contractAddress The contract for which the presence of a pending sponsor is checked. + /// @return **true** if the contract has a pending sponsor.
+ /// @dev EVM selector for this function is: 0x39b9b242, + /// or in textual repr: hasPendingSponsor(address) + function hasPendingSponsor(address contractAddress) public view returns (bool) { require(false, stub_error); contractAddress; - enabled; - dummy = 0; + dummy; + return false; } - // Selector: setSponsoringMode(address,uint8) fde8a560 + /// @dev EVM selector for this function is: 0x6027dc61, + /// or in textual repr: sponsoringEnabled(address) + function sponsoringEnabled(address contractAddress) public view returns (bool) { + require(false, stub_error); + contractAddress; + dummy; + return false; + } + + /// @dev EVM selector for this function is: 0xfde8a560, + /// or in textual repr: setSponsoringMode(address,uint8) function setSponsoringMode(address contractAddress, uint8 mode) public { require(false, stub_error); contractAddress; @@ -65,46 +147,69 @@ contract ContractHelpers is Dummy, ERC165 { dummy = 0; } - // Selector: sponsoringMode(address) b70c7267 - function sponsoringMode(address contractAddress) - public - view - returns (uint8) - { + /// Get current contract sponsoring rate limit + /// @param contractAddress Contract to get sponsoring rate limit of + /// @return uint32 Amount of blocks between two sponsored transactions + /// @dev EVM selector for this function is: 0xf29694d8, + /// or in textual repr: sponsoringRateLimit(address) + function sponsoringRateLimit(address contractAddress) public view returns (uint32) { require(false, stub_error); contractAddress; dummy; return 0; } - // Selector: setSponsoringRateLimit(address,uint32) 77b6c908 - function setSponsoringRateLimit(address contractAddress, uint32 rateLimit) - public - { + /// Set contract sponsoring rate limit + /// @dev Sponsoring rate limit - is a minimum amount of blocks that should + /// pass between two sponsored transactions + /// @param contractAddress Contract to change sponsoring rate limit of + /// @param rateLimit Target rate limit + /// @dev Only contract owner can change this setting + /// @dev EVM selector for this function is: 0x77b6c908, + /// or in textual repr: setSponsoringRateLimit(address,uint32) + function setSponsoringRateLimit(address contractAddress, uint32 rateLimit) public { require(false, stub_error); contractAddress; rateLimit; dummy = 0; } - // Selector: getSponsoringRateLimit(address) 610cfabd - function getSponsoringRateLimit(address contractAddress) - public - view - returns (uint32) - { + /// Set contract sponsoring fee limit + /// @dev Sponsoring fee limit - is maximum fee that could be spent by + /// single transaction + /// @param contractAddress Contract to change sponsoring fee limit of + /// @param feeLimit Fee limit + /// @dev Only contract owner can change this setting + /// @dev EVM selector for this function is: 0x03aed665, + /// or in textual repr: setSponsoringFeeLimit(address,uint256) + function setSponsoringFeeLimit(address contractAddress, uint256 feeLimit) public { + require(false, stub_error); + contractAddress; + feeLimit; + dummy = 0; + } + + /// Get current contract sponsoring fee limit + /// @param contractAddress Contract to get sponsoring fee limit of + /// @return uint256 Maximum amount of fee that could be spent by single + /// transaction + /// @dev EVM selector for this function is: 0x75b73606, + /// or in textual repr: sponsoringFeeLimit(address) + function sponsoringFeeLimit(address contractAddress) public view returns (uint256) { require(false, stub_error); contractAddress; dummy; return 0; } - // Selector: allowed(address,address) 5c658165 - 
function allowed(address contractAddress, address user) - public - view - returns (bool) - { + /// Is the specified user present in the contract allowlist + /// @dev The contract owner is always implicitly included + /// @param contractAddress Contract to check allowlist of + /// @param user User to check + /// @return bool Whether the specified user exists in the contract allowlist + /// @dev EVM selector for this function is: 0x5c658165, + /// or in textual repr: allowed(address,address) + function allowed(address contractAddress, address user) public view returns (bool) { require(false, stub_error); contractAddress; user; @@ -112,36 +217,56 @@ contract ContractHelpers is Dummy, ERC165 { return false; } - // Selector: allowlistEnabled(address) c772ef6c - function allowlistEnabled(address contractAddress) - public - view - returns (bool) - { + /// Toggle user presence in contract allowlist + /// @param contractAddress Contract to change allowlist of + /// @param user The user whose presence should be toggled + /// @param isAllowed `true` if the user should be allowed to be sponsored + /// or to call this contract, `false` otherwise + /// @dev Only the contract owner can change this setting + /// @dev EVM selector for this function is: 0x4706cc1c, + /// or in textual repr: toggleAllowed(address,address,bool) + function toggleAllowed( + address contractAddress, + address user, + bool isAllowed + ) public { + require(false, stub_error); + contractAddress; + user; + isAllowed; + dummy = 0; + } + + /// Does this contract have allowlist access enabled + /// @dev The allowlist can always contain users, and it is used for two purposes: + /// in allowlist sponsoring mode, users are sponsored if they are in the allowlist; + /// with allowlist access enabled, only users from the allowlist may call this contract + /// @param contractAddress Contract to get allowlist access of + /// @return bool Whether the specified contract has allowlist access enabled + /// @dev EVM selector for this function is: 0xc772ef6c, + /// or in textual repr: allowlistEnabled(address) + function allowlistEnabled(address contractAddress) public view returns (bool) { require(false, stub_error); contractAddress; dummy; return false; } - // Selector: toggleAllowlist(address,bool) 36de20f5 + /// Toggle contract allowlist access + /// @param contractAddress Contract to change allowlist access of + /// @param enabled Should allowlist access be enabled?
+ /// @dev EVM selector for this function is: 0x36de20f5, + /// or in textual repr: toggleAllowlist(address,bool) function toggleAllowlist(address contractAddress, bool enabled) public { require(false, stub_error); contractAddress; enabled; dummy = 0; } +} - // Selector: toggleAllowed(address,address,bool) 4706cc1c - function toggleAllowed( - address contractAddress, - address user, - bool allowed - ) public { - require(false, stub_error); - contractAddress; - user; - allowed; - dummy = 0; - } +/// @dev anonymous struct +struct Tuple0 { + address field_0; + uint256 field_1; } diff --git a/pallets/evm-migration/CHANGELOG.md b/pallets/evm-migration/CHANGELOG.md new file mode 100644 index 0000000000..1e324e9a52 --- /dev/null +++ b/pallets/evm-migration/CHANGELOG.md @@ -0,0 +1,17 @@ + +## [v0.1.1] 2022-08-16 + +### Other changes + +- build: Upgrade polkadot to v0.9.27 2c498572636f2b34d53b1c51b7283a761a7dc90a + +- build: Upgrade polkadot to v0.9.26 85515e54c4ca1b82a2630034e55dcc804c643bf8 + +- refactor: Remove `#[transactional]` from extrinsics 7fd36cea2f6e00c02c67ccc1de9649ae404efd31 + +Every extrinsic now runs in transaction implicitly, and +`#[transactional]` on pallet dispatchable is now meaningless + +Upstream-Change: https://github.com/paritytech/substrate/issues/10806 + +- build: Upgrade polkadot to v0.9.25 cdfb9bdc7b205ff1b5134f034ef9973d769e5e6b \ No newline at end of file diff --git a/pallets/evm-migration/Cargo.toml b/pallets/evm-migration/Cargo.toml index 7fc998e3bd..9419937983 100644 --- a/pallets/evm-migration/Cargo.toml +++ b/pallets/evm-migration/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "pallet-evm-migration" -version = "0.1.0" +version = "0.1.1" license = "GPLv3" edition = "2021" @@ -8,15 +8,15 @@ edition = "2021" scale-info = { version = "2.0.1", default-features = false, features = [ "derive", ] } -frame-support = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -frame-system = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -frame-benchmarking = { default-features = false, optional = true, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-runtime = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-std = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-io = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-core = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -pallet-evm = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } -fp-evm = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } +frame-support = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +frame-system = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +frame-benchmarking = { default-features = false, optional = true, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-runtime = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-std = { default-features = false, git = 
"https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-io = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-core = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +pallet-evm = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } +fp-evm = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } [dependencies.codec] default-features = false @@ -38,3 +38,4 @@ std = [ "fp-evm/std", ] runtime-benchmarks = ["frame-benchmarking"] +try-runtime = ["frame-support/try-runtime"] diff --git a/pallets/evm-migration/README.md b/pallets/evm-migration/README.md new file mode 100644 index 0000000000..ace6de8f0f --- /dev/null +++ b/pallets/evm-migration/README.md @@ -0,0 +1,18 @@ +# EVM contract migration pallet + +This pallet is only callable by root, it has functionality to migrate contract +from other ethereum chain to pallet-evm + +Contract data includes contract code, and contract storage, +where contract storage is a mapping from evm word to evm word (evm word = 32 byte) + +To import contract data into pallet-evm admin should call this pallet multiple times: +1. Start migration via `begin` +2. Insert all contract data using single or + multiple (If data can't be fit into single extrinsic) calls + to `set_data` +3. Finish migration using `finish`, providing contract code + +During migration no one can insert code at address of this contract, +as [`pallet::OnMethodCall`] prevents this, and no one can call this contract, +as code is only supplied at final stage of contract deployment \ No newline at end of file diff --git a/pallets/evm-migration/src/benchmarking.rs b/pallets/evm-migration/src/benchmarking.rs index cfe44d5cc1..247953105e 100644 --- a/pallets/evm-migration/src/benchmarking.rs +++ b/pallets/evm-migration/src/benchmarking.rs @@ -14,6 +14,8 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . +#![allow(missing_docs)] + use super::{Call, Config, Pallet}; use frame_benchmarking::benchmarks; use frame_system::RawOrigin; diff --git a/pallets/evm-migration/src/lib.rs b/pallets/evm-migration/src/lib.rs index e453a91dbd..795f23f44a 100644 --- a/pallets/evm-migration/src/lib.rs +++ b/pallets/evm-migration/src/lib.rs @@ -14,7 +14,9 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . +#![doc = include_str!("../README.md")] #![cfg_attr(not(feature = "std"), no_std)] +#![deny(missing_docs)] pub use pallet::*; #[cfg(feature = "runtime-benchmarks")] @@ -23,7 +25,7 @@ pub mod weights; #[frame_support::pallet] pub mod pallet { - use frame_support::{pallet_prelude::*, transactional}; + use frame_support::pallet_prelude::*; use frame_system::pallet_prelude::*; use sp_core::{H160, H256}; use sp_std::vec::Vec; @@ -32,6 +34,7 @@ pub mod pallet { #[pallet::config] pub trait Config: frame_system::Config + pallet_evm::Config { + /// Weights type WeightInfo: WeightInfo; } @@ -43,7 +46,9 @@ pub mod pallet { #[pallet::error] pub enum Error { + /// Can only migrate to empty address. AccountNotEmpty, + /// Migration of this account is not yet started, or already finished. 
AccountIsNotMigrating, } @@ -53,6 +58,8 @@ pub mod pallet { #[pallet::call] impl Pallet { + /// Start contract migration, inserts contract stub at target address, + /// and marks account as pending, allowing to insert storage #[pallet::weight(>::begin())] pub fn begin(origin: OriginFor, address: H160) -> DispatchResult { ensure_root(origin)?; @@ -65,6 +72,8 @@ pub mod pallet { Ok(()) } + /// Insert items into contract storage, this method can be called + /// multiple times #[pallet::weight(>::set_data(data.len() as u32))] pub fn set_data( origin: OriginFor, @@ -83,8 +92,10 @@ pub mod pallet { Ok(()) } + /// Finish contract migration, allows it to be called. + /// It is not possible to alter contract storage via [`Self::set_data`] + /// after this call. #[pallet::weight(>::finish(code.len() as u32))] - #[transactional] pub fn finish(origin: OriginFor, address: H160, code: Vec) -> DispatchResult { ensure_root(origin)?; ensure!( @@ -98,6 +109,7 @@ pub mod pallet { } } + /// Implements [`pallet_evm::OnMethodCall`], which reserves accounts with pending migration pub struct OnMethodCall(PhantomData); impl pallet_evm::OnMethodCall for OnMethodCall { fn is_reserved(contract: &H160) -> bool { diff --git a/pallets/evm-migration/src/weights.rs b/pallets/evm-migration/src/weights.rs index af58744a75..5002d29546 100644 --- a/pallets/evm-migration/src/weights.rs +++ b/pallets/evm-migration/src/weights.rs @@ -3,7 +3,7 @@ //! Autogenerated weights for pallet_evm_migration //! //! THIS FILE WAS AUTO-GENERATED USING THE SUBSTRATE BENCHMARK CLI VERSION 4.0.0-dev -//! DATE: 2022-06-15, STEPS: `50`, REPEAT: 200, LOW RANGE: `[]`, HIGH RANGE: `[]` +//! DATE: 2022-08-15, STEPS: `50`, REPEAT: 80, LOW RANGE: `[]`, HIGH RANGE: `[]` //! EXECUTION: None, WASM-EXECUTION: Compiled, CHAIN: None, DB CACHE: 1024 // Executed Command: @@ -19,13 +19,14 @@ // --template // .maintain/frame-weight-template.hbs // --steps=50 -// --repeat=200 +// --repeat=80 // --heap-pages=4096 // --output=./pallets/evm-migration/src/weights.rs #![cfg_attr(rustfmt, rustfmt_skip)] #![allow(unused_parens)] #![allow(unused_imports)] +#![allow(missing_docs)] #![allow(clippy::unnecessary_cast)] use frame_support::{traits::Get, weights::{Weight, constants::RocksDbWeight}}; @@ -45,27 +46,25 @@ impl WeightInfo for SubstrateWeight { // Storage: System Account (r:1 w:0) // Storage: EVM AccountCodes (r:1 w:0) fn begin() -> Weight { - (6_914_000 as Weight) - .saturating_add(T::DbWeight::get().reads(3 as Weight)) - .saturating_add(T::DbWeight::get().writes(1 as Weight)) + Weight::from_ref_time(8_035_000) + .saturating_add(T::DbWeight::get().reads(3 as u64)) + .saturating_add(T::DbWeight::get().writes(1 as u64)) } // Storage: EvmMigration MigrationPending (r:1 w:0) // Storage: EVM AccountStorages (r:0 w:1) fn set_data(b: u32, ) -> Weight { - (2_875_000 as Weight) + Weight::from_ref_time(3_076_000) // Standard Error: 0 - .saturating_add((794_000 as Weight).saturating_mul(b as Weight)) - .saturating_add(T::DbWeight::get().reads(1 as Weight)) - .saturating_add(T::DbWeight::get().writes((1 as Weight).saturating_mul(b as Weight))) + .saturating_add(Weight::from_ref_time(828_000).saturating_mul(b as u64)) + .saturating_add(T::DbWeight::get().reads(1 as u64)) + .saturating_add(T::DbWeight::get().writes((1 as u64).saturating_mul(b as u64))) } // Storage: EvmMigration MigrationPending (r:1 w:1) // Storage: EVM AccountCodes (r:0 w:1) - fn finish(b: u32, ) -> Weight { - (6_320_000 as Weight) - // Standard Error: 0 - .saturating_add((2_000 as Weight).saturating_mul(b as 
Weight)) - .saturating_add(T::DbWeight::get().reads(1 as Weight)) - .saturating_add(T::DbWeight::get().writes(2 as Weight)) + fn finish(_b: u32, ) -> Weight { + Weight::from_ref_time(6_591_000) + .saturating_add(T::DbWeight::get().reads(1 as u64)) + .saturating_add(T::DbWeight::get().writes(2 as u64)) } } @@ -75,26 +74,24 @@ impl WeightInfo for () { // Storage: System Account (r:1 w:0) // Storage: EVM AccountCodes (r:1 w:0) fn begin() -> Weight { - (6_914_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(3 as Weight)) - .saturating_add(RocksDbWeight::get().writes(1 as Weight)) + Weight::from_ref_time(8_035_000) + .saturating_add(RocksDbWeight::get().reads(3 as u64)) + .saturating_add(RocksDbWeight::get().writes(1 as u64)) } // Storage: EvmMigration MigrationPending (r:1 w:0) // Storage: EVM AccountStorages (r:0 w:1) fn set_data(b: u32, ) -> Weight { - (2_875_000 as Weight) + Weight::from_ref_time(3_076_000) // Standard Error: 0 - .saturating_add((794_000 as Weight).saturating_mul(b as Weight)) - .saturating_add(RocksDbWeight::get().reads(1 as Weight)) - .saturating_add(RocksDbWeight::get().writes((1 as Weight).saturating_mul(b as Weight))) + .saturating_add(Weight::from_ref_time(828_000).saturating_mul(b as u64)) + .saturating_add(RocksDbWeight::get().reads(1 as u64)) + .saturating_add(RocksDbWeight::get().writes((1 as u64).saturating_mul(b as u64))) } // Storage: EvmMigration MigrationPending (r:1 w:1) // Storage: EVM AccountCodes (r:0 w:1) - fn finish(b: u32, ) -> Weight { - (6_320_000 as Weight) - // Standard Error: 0 - .saturating_add((2_000 as Weight).saturating_mul(b as Weight)) - .saturating_add(RocksDbWeight::get().reads(1 as Weight)) - .saturating_add(RocksDbWeight::get().writes(2 as Weight)) + fn finish(_b: u32, ) -> Weight { + Weight::from_ref_time(6_591_000) + .saturating_add(RocksDbWeight::get().reads(1 as u64)) + .saturating_add(RocksDbWeight::get().writes(2 as u64)) } } diff --git a/pallets/evm-transaction-payment/CHANGELOG.md b/pallets/evm-transaction-payment/CHANGELOG.md new file mode 100644 index 0000000000..732ab15dc7 --- /dev/null +++ b/pallets/evm-transaction-payment/CHANGELOG.md @@ -0,0 +1,10 @@ + +## [v0.1.1] 2022-08-16 + +### Other changes + +- build: Upgrade polkadot to v0.9.27 2c498572636f2b34d53b1c51b7283a761a7dc90a + +- build: Upgrade polkadot to v0.9.26 85515e54c4ca1b82a2630034e55dcc804c643bf8 + +- build: Upgrade polkadot to v0.9.25 cdfb9bdc7b205ff1b5134f034ef9973d769e5e6b \ No newline at end of file diff --git a/pallets/evm-transaction-payment/Cargo.toml b/pallets/evm-transaction-payment/Cargo.toml index 7f352bb20e..7f7bae15ce 100644 --- a/pallets/evm-transaction-payment/Cargo.toml +++ b/pallets/evm-transaction-payment/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "pallet-evm-transaction-payment" -version = "0.1.0" +version = "0.1.1" license = "GPLv3" edition = "2021" @@ -8,17 +8,17 @@ edition = "2021" scale-info = { version = "2.0.1", default-features = false, features = [ "derive", ] } -frame-support = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -frame-system = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-runtime = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-std = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-io = { default-features = false, git = "https://github.com/paritytech/substrate", 
branch = "polkadot-v0.9.24" } -sp-core = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -pallet-evm = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } -fp-evm = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } -pallet-ethereum = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } -up-sponsorship = { version = "0.1.0", default-features = false, git = "https://github.com/uniquenetwork/pallet-sponsoring", branch = "polkadot-v0.9.24" } -fp-evm-mapping = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } +frame-support = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +frame-system = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-runtime = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-std = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-io = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-core = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +pallet-evm = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } +fp-evm = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } +pallet-ethereum = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } +up-sponsorship = { version = "0.1.0", default-features = false, git = "https://github.com/uniquenetwork/pallet-sponsoring", branch = "polkadot-v0.9.30" } +fp-evm-mapping = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } [dependencies.codec] default-features = false @@ -41,3 +41,4 @@ std = [ "up-sponsorship/std", "fp-evm-mapping/std", ] +try-runtime = ["frame-support/try-runtime"] diff --git a/pallets/evm-transaction-payment/README.md b/pallets/evm-transaction-payment/README.md new file mode 100644 index 0000000000..6ecc640857 --- /dev/null +++ b/pallets/evm-transaction-payment/README.md @@ -0,0 +1,5 @@ +# EVM transaction payment pallet + +pallet-evm-transaction-payment is a bridge between pallet-evm substrate calls and pallet-sponsoring. +It doesn't provide any sponsoring logic by itself; instead, all sponsoring handlers +are loosely coupled via the [`Config::EvmSponsorshipHandler`] trait. \ No newline at end of file diff --git a/pallets/evm-transaction-payment/src/lib.rs b/pallets/evm-transaction-payment/src/lib.rs index 1343a0948a..5aca441ba2 100644 --- a/pallets/evm-transaction-payment/src/lib.rs +++ b/pallets/evm-transaction-payment/src/lib.rs @@ -14,14 +14,16 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see .
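As the README above notes, sponsoring logic is loosely coupled through `Config::EvmSponsorshipHandler`. The sketch below shows the shape of such a handler against simplified, self-contained stand-ins for the `CallContext` type introduced in this lib.rs and the `SponsorshipHandler` trait from `up-sponsorship`; the real versions are generic over the runtime's CrossAccountId and use H160/U256, and the fixed addresses and fee cap here are illustrative assumptions, not values from the codebase.

```rust
// Simplified stand-ins for CallContext and the up-sponsorship trait.
struct CallContext {
    contract_address: [u8; 20],
    input: Vec<u8>,
    max_fee: u128, // gas_limit * max_fee_per_gas
}

trait SponsorshipHandler<AccountId> {
    /// Return the account that should pay the fee, or None to charge the caller.
    fn get_sponsor(who: &AccountId, call: &CallContext) -> Option<AccountId>;
}

// Illustrative handler: sponsor every call to one fixed contract while the
// projected fee stays under a hard cap. The addresses and cap are made up.
struct FixedContractSponsor;

const SPONSORED_CONTRACT: [u8; 20] = [0x11; 20];
const SPONSOR_ACCOUNT: [u8; 20] = [0x22; 20];
const FEE_CAP: u128 = 1_000_000_000_000;

impl SponsorshipHandler<[u8; 20]> for FixedContractSponsor {
    fn get_sponsor(_who: &[u8; 20], call: &CallContext) -> Option<[u8; 20]> {
        if call.contract_address == SPONSORED_CONTRACT && call.max_fee <= FEE_CAP {
            Some(SPONSOR_ACCOUNT)
        } else {
            None
        }
    }
}
```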
+#![doc = include_str!("../README.md")] #![cfg_attr(not(feature = "std"), no_std)] +#![deny(missing_docs)] use core::marker::PhantomData; use fp_evm::WithdrawReason; use frame_support::traits::IsSubType; pub use pallet::*; -use pallet_evm::{EnsureAddressOrigin, account::CrossAccountId}; -use sp_core::H160; +use pallet_evm::{account::CrossAccountId, EnsureAddressOrigin}; +use sp_core::{H160, U256}; use sp_runtime::{TransactionOutcome, DispatchError}; use up_sponsorship::SponsorshipHandler; @@ -29,13 +31,22 @@ use up_sponsorship::SponsorshipHandler; pub mod pallet { use super::*; - use frame_support::traits::Currency; use sp_std::vec::Vec; + /// Contains call data + pub struct CallContext { + /// Contract address + pub contract_address: H160, + /// Transaction data + pub input: Vec, + /// Max fee for transaction - gasLimit * gasPrice + pub max_fee: U256, + } + #[pallet::config] pub trait Config: frame_system::Config + pallet_evm::account::Config { - type EvmSponsorshipHandler: SponsorshipHandler)>; - type Currency: Currency; + /// Loosly-coupled handlers for evm call sponsoring + type EvmSponsorshipHandler: SponsorshipHandler; } #[pallet::pallet] @@ -43,13 +54,23 @@ pub mod pallet { pub struct Pallet(_); } +/// Implements [`fp_evm::TransactionValidityHack`], which provides sponsor address to pallet-evm pub struct TransactionValidityHack(PhantomData<*const T>); impl fp_evm::TransactionValidityHack for TransactionValidityHack { - fn who_pays_fee(origin: H160, reason: &WithdrawReason) -> Option { + fn who_pays_fee( + origin: H160, + max_fee: U256, + reason: &WithdrawReason, + ) -> Option { match reason { WithdrawReason::Call { target, input } => { let origin_sub = T::CrossAccountId::from_eth(origin); - T::EvmSponsorshipHandler::get_sponsor(&origin_sub, &(*target, input.clone())) + let call_context = CallContext { + contract_address: *target, + input: input.clone(), + max_fee, + }; + T::EvmSponsorshipHandler::get_sponsor(&origin_sub, &call_context) } _ => None, } @@ -69,6 +90,8 @@ where source, target, input, + gas_limit, + max_fee_per_gas, .. } => { let _ = T::CallOrigin::ensure_address_origin( @@ -77,11 +100,17 @@ where ) .ok()?; let who = T::CrossAccountId::from_sub(who.clone()); + let max_fee = max_fee_per_gas.saturating_mul((*gas_limit).into()); + let call_context = CallContext { + contract_address: *target, + input: input.clone(), + max_fee, + }; // Effects from EvmSponsorshipHandler are applied by pallet_evm::runner // TODO: Should we implement simulation mode (test, but do not apply effects) in `up-sponsorship`? 
let sponsor = frame_support::storage::with_transaction(|| { TransactionOutcome::Rollback(Ok::<_, DispatchError>( - T::EvmSponsorshipHandler::get_sponsor(&who, &(*target, input.clone())), + T::EvmSponsorshipHandler::get_sponsor(&who, &call_context), )) }) // FIXME: it may fail with DispatchError in case of depth limit diff --git a/pallets/foreign-assets/Cargo.toml b/pallets/foreign-assets/Cargo.toml new file mode 100644 index 0000000000..a7362bc5d3 --- /dev/null +++ b/pallets/foreign-assets/Cargo.toml @@ -0,0 +1,56 @@ +cargo-features = ["workspace-inheritance"] + +[package] +name = "pallet-foreign-assets" +version = "0.1.0" +license = "GPLv3" +edition = "2021" + +[dependencies] +log = { version = "0.4.16", default-features = false } +serde = { version = "1.0.136", optional = true } +scale-info = { version = "2.0.1", default-features = false, features = [ + "derive", +] } +codec = { package = "parity-scale-codec", version = "3.0.0", default-features = false } +sp-runtime = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30", default-features = false } +sp-std = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30", default-features = false } +frame-support = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30", default-features = false } +frame-system = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30", default-features = false } +up-data-structs = { default-features = false, path = '../../primitives/data-structs' } +pallet-balances = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30", default-features = false } +pallet-common = { default-features = false, path = '../common' } +pallet-fungible = { default-features = false, path = '../fungible' } +xcm = { git = "https://github.com/paritytech/polkadot", branch = "release-v0.9.30", default-features = false } +xcm-builder = { git = "https://github.com/paritytech/polkadot", branch = "release-v0.9.30", default-features = false } +xcm-executor = { git = "https://github.com/paritytech/polkadot", branch = "release-v0.9.30", default-features = false } +orml-tokens.workspace = true +frame-benchmarking = { default-features = false, optional = true, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } + +[dev-dependencies] +serde_json = "1.0.68" +hex = { version = "0.4" } +sp-core = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-io = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +pallet-timestamp = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +pallet-balances = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } + +[features] +default = ["std"] +std = [ + "serde", + "log/std", + "codec/std", + "scale-info/std", + "sp-runtime/std", + "sp-std/std", + "frame-support/std", + "frame-system/std", + "up-data-structs/std", + "pallet-common/std", + "pallet-balances/std", + "pallet-fungible/std", + "orml-tokens/std", +] +try-runtime = ["frame-support/try-runtime"] +runtime-benchmarks = ['frame-benchmarking', 'pallet-common/runtime-benchmarks'] diff --git a/pallets/foreign-assets/src/benchmarking.rs b/pallets/foreign-assets/src/benchmarking.rs new file mode 100644 index 0000000000..1317ed2f0d --- /dev/null +++ b/pallets/foreign-assets/src/benchmarking.rs @@ -0,0 +1,68 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. 
+ +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +#![allow(missing_docs)] + +use super::{Config, Pallet, Call}; +use frame_benchmarking::{benchmarks, account}; +use frame_system::RawOrigin; +use crate::AssetMetadata; +use xcm::opaque::latest::Junction::Parachain; +use xcm::VersionedMultiLocation; +use frame_support::{ + traits::{Currency}, +}; +use sp_std::boxed::Box; + +benchmarks! { + register_foreign_asset { + let owner: T::AccountId = account("user", 0, 1); + let location: VersionedMultiLocation = VersionedMultiLocation::from(Parachain(1000).into()); + let metadata: AssetMetadata<<::Currency as Currency<::AccountId>>::Balance> = AssetMetadata{ + name: "name".into(), + symbol: "symbol".into(), + decimals: 18, + minimal_balance: 1u32.into() + }; + let mut balance: <::Currency as Currency<::AccountId>>::Balance = + 4_000_000_000u32.into(); + balance = balance * balance; + ::Currency::make_free_balance_be(&owner, + balance); + }: _(RawOrigin::Root, owner, Box::new(location), Box::new(metadata)) + + update_foreign_asset { + let owner: T::AccountId = account("user", 0, 1); + let location: VersionedMultiLocation = VersionedMultiLocation::from(Parachain(2000).into()); + let metadata: AssetMetadata<<::Currency as Currency<::AccountId>>::Balance> = AssetMetadata{ + name: "name".into(), + symbol: "symbol".into(), + decimals: 18, + minimal_balance: 1u32.into() + }; + let metadata2: AssetMetadata<<::Currency as Currency<::AccountId>>::Balance> = AssetMetadata{ + name: "name2".into(), + symbol: "symbol2".into(), + decimals: 18, + minimal_balance: 1u32.into() + }; + let mut balance: <::Currency as Currency<::AccountId>>::Balance = + 4_000_000_000u32.into(); + balance = balance * balance; + ::Currency::make_free_balance_be(&owner, balance); + Pallet::::register_foreign_asset(RawOrigin::Root.into(), owner, Box::new(location.clone()), Box::new(metadata))?; + }: _(RawOrigin::Root, 0, Box::new(location), Box::new(metadata2)) +} diff --git a/pallets/foreign-assets/src/impl_fungibles.rs b/pallets/foreign-assets/src/impl_fungibles.rs new file mode 100644 index 0000000000..6980ff1f61 --- /dev/null +++ b/pallets/foreign-assets/src/impl_fungibles.rs @@ -0,0 +1,454 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +//! Implementations for fungibles trait. 
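Every implementation in this file follows the same dispatch shape: match on the asset id, then route native `Here` balances to the balances pallet, native `Parent` balances to `orml-tokens`, and foreign assets to the bound pallet-fungible collection. A minimal stand-alone sketch of that pattern follows, with hypothetical placeholder backends standing in for the real storage reads.

```rust
// Hypothetical stand-ins for AssetIds/NativeCurrency and the three backends.
enum AssetId {
    Native(NativeCurrency),
    Foreign(u32), // ForeignAssetId
}

enum NativeCurrency {
    Here,   // the chain's own token, served by the balances pallet
    Parent, // the relay-chain token, served by orml-tokens
}

fn balance_of(asset: &AssetId, who: &str) -> u128 {
    match asset {
        // Native "here" balances come from the balances backend.
        AssetId::Native(NativeCurrency::Here) => native_balance(who),
        // Relay-chain balances come from the orml-tokens backend.
        AssetId::Native(NativeCurrency::Parent) => relay_balance(who),
        // Foreign assets are backed by a bound fungible collection; an unknown
        // or unbound asset id reads as zero, mirroring the implementations below.
        AssetId::Foreign(fid) => foreign_collection_balance(*fid, who).unwrap_or(0),
    }
}

// Placeholder backends standing in for the real storage reads.
fn native_balance(_who: &str) -> u128 {
    0
}
fn relay_balance(_who: &str) -> u128 {
    0
}
fn foreign_collection_balance(_fid: u32, _who: &str) -> Option<u128> {
    None
}
```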
+ +use super::*; +use frame_system::Config as SystemConfig; + +use frame_support::traits::tokens::{DepositConsequence, WithdrawConsequence}; +use pallet_common::CollectionHandle; +use pallet_fungible::FungibleHandle; +use pallet_common::CommonCollectionOperations; +use up_data_structs::budget::Value; +use sp_runtime::traits::{CheckedAdd, CheckedSub}; + +impl fungibles::Inspect<::AccountId> for Pallet +where + T: orml_tokens::Config, + BalanceOf: From<::Balance>, + BalanceOf: From<::Balance>, + ::Balance: From>, + ::Balance: From>, +{ + type AssetId = AssetIds; + type Balance = BalanceOf; + + fn total_issuance(asset: Self::AssetId) -> Self::Balance { + log::trace!(target: "fassets::impl_foreign_assets", "impl_fungible total_issuance"); + + match asset { + AssetIds::NativeAssetId(NativeCurrency::Here) => { + as fungible::Inspect>::total_issuance() + .into() + } + AssetIds::NativeAssetId(NativeCurrency::Parent) => { + as fungibles::Inspect>::total_issuance( + AssetIds::NativeAssetId(NativeCurrency::Parent), + ) + .into() + } + AssetIds::ForeignAssetId(fid) => { + let target_collection_id = match >::get(fid) { + Some(v) => v, + None => return Zero::zero(), + }; + let collection_handle = match >::try_get(target_collection_id) { + Ok(v) => v, + Err(_) => return Zero::zero(), + }; + let collection = FungibleHandle::cast(collection_handle); + Self::Balance::try_from(collection.total_supply()).unwrap_or(Zero::zero()) + } + } + } + + fn minimum_balance(asset: Self::AssetId) -> Self::Balance { + log::trace!(target: "fassets::impl_foreign_assets", "impl_fungible minimum_balance"); + match asset { + AssetIds::NativeAssetId(NativeCurrency::Here) => { + as fungible::Inspect>::minimum_balance() + .into() + } + AssetIds::NativeAssetId(NativeCurrency::Parent) => { + as fungibles::Inspect>::minimum_balance( + AssetIds::NativeAssetId(NativeCurrency::Parent), + ) + .into() + } + AssetIds::ForeignAssetId(fid) => { + AssetMetadatas::::get(AssetIds::ForeignAssetId(fid)) + .map(|x| x.minimal_balance) + .unwrap_or_else(Zero::zero) + } + } + } + + fn balance(asset: Self::AssetId, who: &::AccountId) -> Self::Balance { + log::trace!(target: "fassets::impl_foreign_assets", "impl_fungible balance"); + match asset { + AssetIds::NativeAssetId(NativeCurrency::Here) => { + as fungible::Inspect>::balance(who).into() + } + AssetIds::NativeAssetId(NativeCurrency::Parent) => { + as fungibles::Inspect>::balance( + AssetIds::NativeAssetId(NativeCurrency::Parent), + who, + ) + .into() + } + AssetIds::ForeignAssetId(fid) => { + let target_collection_id = match >::get(fid) { + Some(v) => v, + None => return Zero::zero(), + }; + let collection_handle = match >::try_get(target_collection_id) { + Ok(v) => v, + Err(_) => return Zero::zero(), + }; + let collection = FungibleHandle::cast(collection_handle); + Self::Balance::try_from( + collection.balance(T::CrossAccountId::from_sub(who.clone()), TokenId(0)), + ) + .unwrap_or(Zero::zero()) + } + } + } + + fn reducible_balance( + asset: Self::AssetId, + who: &::AccountId, + keep_alive: bool, + ) -> Self::Balance { + log::trace!(target: "fassets::impl_foreign_assets", "impl_fungible reducible_balance"); + + match asset { + AssetIds::NativeAssetId(NativeCurrency::Here) => { + as fungible::Inspect>::reducible_balance( + who, keep_alive, + ) + .into() + } + AssetIds::NativeAssetId(NativeCurrency::Parent) => { + as fungibles::Inspect>::reducible_balance( + AssetIds::NativeAssetId(NativeCurrency::Parent), + who, + keep_alive, + ) + .into() + } + _ => Self::balance(asset, who), + } + } + + fn 
can_deposit( + asset: Self::AssetId, + who: &::AccountId, + amount: Self::Balance, + mint: bool, + ) -> DepositConsequence { + log::trace!(target: "fassets::impl_foreign_assets", "impl_fungible can_deposit"); + + match asset { + AssetIds::NativeAssetId(NativeCurrency::Here) => { + as fungible::Inspect>::can_deposit( + who, + amount.into(), + mint, + ) + } + AssetIds::NativeAssetId(NativeCurrency::Parent) => { + as fungibles::Inspect>::can_deposit( + AssetIds::NativeAssetId(NativeCurrency::Parent), + who, + amount.into(), + mint, + ) + } + _ => { + if amount.is_zero() { + return DepositConsequence::Success; + } + + let extential_deposit_value = T::ExistentialDeposit::get(); + let ed_value: u128 = match extential_deposit_value.try_into() { + Ok(val) => val, + Err(_) => return DepositConsequence::CannotCreate, + }; + let extential_deposit: Self::Balance = match ed_value.try_into() { + Ok(val) => val, + Err(_) => return DepositConsequence::CannotCreate, + }; + + let new_total_balance = match Self::balance(asset, who).checked_add(&amount) { + Some(x) => x, + None => return DepositConsequence::Overflow, + }; + + if new_total_balance < extential_deposit { + return DepositConsequence::BelowMinimum; + } + + DepositConsequence::Success + } + } + } + + fn can_withdraw( + asset: Self::AssetId, + who: &::AccountId, + amount: Self::Balance, + ) -> WithdrawConsequence { + log::trace!(target: "fassets::impl_foreign_assets", "impl_fungible can_withdraw"); + let value: u128 = match amount.try_into() { + Ok(val) => val, + Err(_) => return WithdrawConsequence::UnknownAsset, + }; + + match asset { + AssetIds::NativeAssetId(NativeCurrency::Here) => { + let this_amount: ::Balance = match value.try_into() { + Ok(val) => val, + Err(_) => { + return WithdrawConsequence::UnknownAsset; + } + }; + match as fungible::Inspect>::can_withdraw( + who, + this_amount, + ) { + WithdrawConsequence::NoFunds => WithdrawConsequence::NoFunds, + WithdrawConsequence::WouldDie => WithdrawConsequence::WouldDie, + WithdrawConsequence::UnknownAsset => WithdrawConsequence::UnknownAsset, + WithdrawConsequence::Underflow => WithdrawConsequence::Underflow, + WithdrawConsequence::Overflow => WithdrawConsequence::Overflow, + WithdrawConsequence::Frozen => WithdrawConsequence::Frozen, + WithdrawConsequence::Success => WithdrawConsequence::Success, + _ => WithdrawConsequence::NoFunds, + } + } + AssetIds::NativeAssetId(NativeCurrency::Parent) => { + let parent_amount: ::Balance = match value.try_into() { + Ok(val) => val, + Err(_) => { + return WithdrawConsequence::UnknownAsset; + } + }; + match as fungibles::Inspect>::can_withdraw( + AssetIds::NativeAssetId(NativeCurrency::Parent), + who, + parent_amount, + ) { + WithdrawConsequence::NoFunds => WithdrawConsequence::NoFunds, + WithdrawConsequence::WouldDie => WithdrawConsequence::WouldDie, + WithdrawConsequence::UnknownAsset => WithdrawConsequence::UnknownAsset, + WithdrawConsequence::Underflow => WithdrawConsequence::Underflow, + WithdrawConsequence::Overflow => WithdrawConsequence::Overflow, + WithdrawConsequence::Frozen => WithdrawConsequence::Frozen, + WithdrawConsequence::Success => WithdrawConsequence::Success, + _ => WithdrawConsequence::NoFunds, + } + } + _ => match Self::balance(asset, who).checked_sub(&amount) { + Some(_) => WithdrawConsequence::Success, + None => WithdrawConsequence::NoFunds, + }, + } + } +} + +impl fungibles::Mutate<::AccountId> for Pallet +where + T: orml_tokens::Config, + BalanceOf: From<::Balance>, + BalanceOf: From<::Balance>, + ::Balance: From>, + ::Balance: 
From>, + u128: From>, +{ + fn mint_into( + asset: Self::AssetId, + who: &::AccountId, + amount: Self::Balance, + ) -> DispatchResult { + //Self::do_mint(asset, who, amount, None) + log::trace!(target: "fassets::impl_foreign_assets", "impl_fungible mint_into {:?}", asset); + + match asset { + AssetIds::NativeAssetId(NativeCurrency::Here) => { + as fungible::Mutate>::mint_into( + who, + amount.into(), + ) + .into() + } + AssetIds::NativeAssetId(NativeCurrency::Parent) => { + as fungibles::Mutate>::mint_into( + AssetIds::NativeAssetId(NativeCurrency::Parent), + who, + amount.into(), + ) + .into() + } + AssetIds::ForeignAssetId(fid) => { + let target_collection_id = match >::get(fid) { + Some(v) => v, + None => { + return Err(DispatchError::Other( + "Associated collection not found for asset", + )) + } + }; + let collection = + FungibleHandle::cast(>::try_get(target_collection_id)?); + let account = T::CrossAccountId::from_sub(who.clone()); + + let amount_data: pallet_fungible::CreateItemData = + (account.clone(), amount.into()); + + pallet_fungible::Pallet::::create_item_foreign( + &collection, + &account, + amount_data, + &Value::new(0), + )?; + + Ok(()) + } + } + } + + fn burn_from( + asset: Self::AssetId, + who: &::AccountId, + amount: Self::Balance, + ) -> Result { + // let f = DebitFlags { keep_alive: false, best_effort: false }; + log::trace!(target: "fassets::impl_foreign_assets", "impl_fungible burn_from"); + + match asset { + AssetIds::NativeAssetId(NativeCurrency::Here) => { + match as fungible::Mutate>::burn_from( + who, + amount.into(), + ) { + Ok(v) => Ok(v.into()), + Err(e) => Err(e), + } + } + AssetIds::NativeAssetId(NativeCurrency::Parent) => { + match as fungibles::Mutate>::burn_from( + AssetIds::NativeAssetId(NativeCurrency::Parent), + who, + amount.into(), + ) { + Ok(v) => Ok(v.into()), + Err(e) => Err(e), + } + } + AssetIds::ForeignAssetId(fid) => { + let target_collection_id = match >::get(fid) { + Some(v) => v, + None => { + return Err(DispatchError::Other( + "Associated collection not found for asset", + )) + } + }; + let collection = + FungibleHandle::cast(>::try_get(target_collection_id)?); + pallet_fungible::Pallet::::burn_foreign( + &collection, + &T::CrossAccountId::from_sub(who.clone()), + amount.into(), + )?; + + Ok(amount) + } + } + } + + fn slash( + asset: Self::AssetId, + who: &::AccountId, + amount: Self::Balance, + ) -> Result { + // let f = DebitFlags { keep_alive: false, best_effort: true }; + log::trace!(target: "fassets::impl_foreign_assets", "impl_fungible slash"); + Ok(Self::burn_from(asset, who, amount)?) 
+ } +} + +impl fungibles::Transfer for Pallet +where + T: orml_tokens::Config, + BalanceOf: From<::Balance>, + BalanceOf: From<::Balance>, + ::Balance: From>, + ::Balance: From>, + u128: From>, +{ + fn transfer( + asset: Self::AssetId, + source: &::AccountId, + dest: &::AccountId, + amount: Self::Balance, + keep_alive: bool, + ) -> Result { + // let f = TransferFlags { keep_alive, best_effort: false, burn_dust: false }; + log::trace!(target: "fassets::impl_foreign_assets", "impl_fungible transfer"); + + match asset { + AssetIds::NativeAssetId(NativeCurrency::Here) => { + match as fungible::Transfer>::transfer( + source, + dest, + amount.into(), + keep_alive, + ) { + Ok(_) => Ok(amount), + Err(_) => Err(DispatchError::Other( + "Bad amount to relay chain value conversion", + )), + } + } + AssetIds::NativeAssetId(NativeCurrency::Parent) => { + match as fungibles::Transfer>::transfer( + AssetIds::NativeAssetId(NativeCurrency::Parent), + source, + dest, + amount.into(), + keep_alive, + ) { + Ok(_) => Ok(amount), + Err(e) => Err(e), + } + } + AssetIds::ForeignAssetId(fid) => { + let target_collection_id = match >::get(fid) { + Some(v) => v, + None => { + return Err(DispatchError::Other( + "Associated collection not found for asset", + )) + } + }; + let collection = + FungibleHandle::cast(>::try_get(target_collection_id)?); + + pallet_fungible::Pallet::::transfer( + &collection, + &T::CrossAccountId::from_sub(source.clone()), + &T::CrossAccountId::from_sub(dest.clone()), + amount.into(), + &Value::new(0), + )?; + + Ok(amount) + } + } + } +} diff --git a/pallets/foreign-assets/src/lib.rs b/pallets/foreign-assets/src/lib.rs new file mode 100644 index 0000000000..8021267186 --- /dev/null +++ b/pallets/foreign-assets/src/lib.rs @@ -0,0 +1,496 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +//! # Foreign assets +//! +//! - [`Config`] +//! - [`Call`] +//! - [`Pallet`] +//! +//! ## Overview +//! +//! The foreign assets pallet provides functions for: +//! +//! - Local and foreign assets management. The foreign assets can be updated without runtime upgrade. +//! - Binding between an asset and a target collection for cross-chain and inner transfers. +//! +//! +//!
Under construction + +#![cfg_attr(not(feature = "std"), no_std)] +#![allow(clippy::unused_unit)] + +use frame_support::{ + dispatch::DispatchResult, + ensure, + pallet_prelude::*, + traits::{fungible, fungibles, Currency, EnsureOrigin}, + RuntimeDebug, +}; +use frame_system::pallet_prelude::*; +use up_data_structs::{CollectionMode}; +use pallet_fungible::{Pallet as PalletFungible}; +use scale_info::{TypeInfo}; +use sp_runtime::{ + traits::{One, Zero}, + ArithmeticError, +}; +use sp_std::{boxed::Box, vec::Vec}; +use up_data_structs::{CollectionId, TokenId, CreateCollectionData}; + +// NOTE:v1::MultiLocation is used in storages, we would need to do migration if upgrade the +// MultiLocation in the future. +use xcm::opaque::latest::{prelude::XcmError, Weight}; +use xcm::{v1::MultiLocation, VersionedMultiLocation}; +use xcm_executor::{traits::WeightTrader, Assets}; + +use pallet_common::erc::CrossAccountId; + +#[cfg(feature = "std")] +use serde::{Deserialize, Serialize}; + +// TODO: Move to primitives +// Id of native currency. +// 0 - QTZ\UNQ +// 1 - KSM\DOT +#[derive( + Clone, + Copy, + Eq, + PartialEq, + PartialOrd, + Ord, + MaxEncodedLen, + RuntimeDebug, + Encode, + Decode, + TypeInfo, +)] +#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] +pub enum NativeCurrency { + Here = 0, + Parent = 1, +} + +#[derive( + Clone, + Copy, + Eq, + PartialEq, + PartialOrd, + Ord, + MaxEncodedLen, + RuntimeDebug, + Encode, + Decode, + TypeInfo, +)] +#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] +pub enum AssetIds { + ForeignAssetId(ForeignAssetId), + NativeAssetId(NativeCurrency), +} + +pub trait TryAsForeign { + fn try_as_foreign(asset: T) -> Option; +} + +impl TryAsForeign for AssetIds { + fn try_as_foreign(asset: AssetIds) -> Option { + match asset { + AssetIds::ForeignAssetId(id) => Some(id), + _ => None, + } + } +} + +pub type ForeignAssetId = u32; +pub type CurrencyId = AssetIds; + +mod impl_fungibles; +pub mod weights; + +#[cfg(feature = "runtime-benchmarks")] +mod benchmarking; + +pub use module::*; +pub use weights::WeightInfo; + +/// Type alias for currency balance. +pub type BalanceOf = + <::Currency as Currency<::AccountId>>::Balance; + +/// A mapping between ForeignAssetId and AssetMetadata. +pub trait AssetIdMapping { + /// Returns the AssetMetadata associated with a given ForeignAssetId. + fn get_asset_metadata(foreign_asset_id: ForeignAssetId) -> Option; + /// Returns the MultiLocation associated with a given ForeignAssetId. + fn get_multi_location(foreign_asset_id: ForeignAssetId) -> Option; + /// Returns the CurrencyId associated with a given MultiLocation. 
+ fn get_currency_id(multi_location: MultiLocation) -> Option; +} + +pub struct XcmForeignAssetIdMapping(sp_std::marker::PhantomData); + +impl AssetIdMapping>> + for XcmForeignAssetIdMapping +{ + fn get_asset_metadata(foreign_asset_id: ForeignAssetId) -> Option>> { + log::trace!(target: "fassets::asset_metadatas", "call"); + Pallet::::asset_metadatas(AssetIds::ForeignAssetId(foreign_asset_id)) + } + + fn get_multi_location(foreign_asset_id: ForeignAssetId) -> Option { + log::trace!(target: "fassets::get_multi_location", "call"); + Pallet::::foreign_asset_locations(foreign_asset_id) + } + + fn get_currency_id(multi_location: MultiLocation) -> Option { + log::trace!(target: "fassets::get_currency_id", "call"); + Some(AssetIds::ForeignAssetId( + Pallet::::location_to_currency_ids(multi_location).unwrap_or(0), + )) + } +} + +#[frame_support::pallet] +pub mod module { + use super::*; + + #[pallet::config] + pub trait Config: + frame_system::Config + + pallet_common::Config + + pallet_fungible::Config + + orml_tokens::Config + + pallet_balances::Config + { + /// The overarching event type. + type RuntimeEvent: From> + IsType<::RuntimeEvent>; + + /// Currency type for withdraw and balance storage. + type Currency: Currency; + + /// Required origin for registering asset. + type RegisterOrigin: EnsureOrigin; + + /// Weight information for the extrinsics in this module. + type WeightInfo: WeightInfo; + } + + #[derive(Clone, Eq, PartialEq, RuntimeDebug, Encode, Decode, TypeInfo)] + pub struct AssetMetadata { + pub name: Vec, + pub symbol: Vec, + pub decimals: u8, + pub minimal_balance: Balance, + } + + #[pallet::error] + pub enum Error { + /// The given location could not be used (e.g. because it cannot be expressed in the + /// desired version of XCM). + BadLocation, + /// MultiLocation existed + MultiLocationExisted, + /// AssetId not exists + AssetIdNotExists, + /// AssetId exists + AssetIdExisted, + } + + #[pallet::event] + #[pallet::generate_deposit(fn deposit_event)] + pub enum Event { + /// The foreign asset registered. + ForeignAssetRegistered { + asset_id: ForeignAssetId, + asset_address: MultiLocation, + metadata: AssetMetadata>, + }, + /// The foreign asset updated. + ForeignAssetUpdated { + asset_id: ForeignAssetId, + asset_address: MultiLocation, + metadata: AssetMetadata>, + }, + /// The asset registered. + AssetRegistered { + asset_id: AssetIds, + metadata: AssetMetadata>, + }, + /// The asset updated. + AssetUpdated { + asset_id: AssetIds, + metadata: AssetMetadata>, + }, + } + + /// Next available Foreign AssetId ID. + /// + /// NextForeignAssetId: ForeignAssetId + #[pallet::storage] + #[pallet::getter(fn next_foreign_asset_id)] + pub type NextForeignAssetId = StorageValue<_, ForeignAssetId, ValueQuery>; + /// The storages for MultiLocations. + /// + /// ForeignAssetLocations: map ForeignAssetId => Option + #[pallet::storage] + #[pallet::getter(fn foreign_asset_locations)] + pub type ForeignAssetLocations = + StorageMap<_, Twox64Concat, ForeignAssetId, MultiLocation, OptionQuery>; + + /// The storages for CurrencyIds. + /// + /// LocationToCurrencyIds: map MultiLocation => Option + #[pallet::storage] + #[pallet::getter(fn location_to_currency_ids)] + pub type LocationToCurrencyIds = + StorageMap<_, Twox64Concat, MultiLocation, ForeignAssetId, OptionQuery>; + + /// The storages for AssetMetadatas. 
+ /// + /// AssetMetadatas: map AssetIds => Option + #[pallet::storage] + #[pallet::getter(fn asset_metadatas)] + pub type AssetMetadatas = + StorageMap<_, Twox64Concat, AssetIds, AssetMetadata>, OptionQuery>; + + /// The storages for assets to fungible collection binding + /// + #[pallet::storage] + #[pallet::getter(fn asset_binding)] + pub type AssetBinding = + StorageMap<_, Twox64Concat, ForeignAssetId, CollectionId, OptionQuery>; + + #[pallet::pallet] + #[pallet::without_storage_info] + pub struct Pallet(_); + + #[pallet::call] + impl Pallet { + #[pallet::weight(::WeightInfo::register_foreign_asset())] + pub fn register_foreign_asset( + origin: OriginFor, + owner: T::AccountId, + location: Box, + metadata: Box>>, + ) -> DispatchResult { + T::RegisterOrigin::ensure_origin(origin.clone())?; + + let location: MultiLocation = (*location) + .try_into() + .map_err(|()| Error::::BadLocation)?; + + let md = metadata.clone(); + let name: Vec = md.name.into_iter().map(|x| x as u16).collect::>(); + let mut description: Vec = "Foreign assets collection for " + .encode_utf16() + .collect::>(); + description.append(&mut name.clone()); + + let data: CreateCollectionData = CreateCollectionData { + name: name.try_into().unwrap(), + description: description.try_into().unwrap(), + mode: CollectionMode::Fungible(md.decimals), + ..Default::default() + }; + let owner = T::CrossAccountId::from_sub(owner); + let bounded_collection_id = + >::init_foreign_collection(owner.clone(), owner, data)?; + let foreign_asset_id = + Self::do_register_foreign_asset(&location, &metadata, bounded_collection_id)?; + + Self::deposit_event(Event::::ForeignAssetRegistered { + asset_id: foreign_asset_id, + asset_address: location, + metadata: *metadata, + }); + Ok(()) + } + + #[pallet::weight(::WeightInfo::update_foreign_asset())] + pub fn update_foreign_asset( + origin: OriginFor, + foreign_asset_id: ForeignAssetId, + location: Box, + metadata: Box>>, + ) -> DispatchResult { + T::RegisterOrigin::ensure_origin(origin)?; + + let location: MultiLocation = (*location) + .try_into() + .map_err(|()| Error::::BadLocation)?; + Self::do_update_foreign_asset(foreign_asset_id, &location, &metadata)?; + + Self::deposit_event(Event::::ForeignAssetUpdated { + asset_id: foreign_asset_id, + asset_address: location, + metadata: *metadata, + }); + Ok(()) + } + } +} + +impl Pallet { + fn get_next_foreign_asset_id() -> Result { + NextForeignAssetId::::try_mutate(|current| -> Result { + let id = *current; + *current = current + .checked_add(One::one()) + .ok_or(ArithmeticError::Overflow)?; + Ok(id) + }) + } + + fn do_register_foreign_asset( + location: &MultiLocation, + metadata: &AssetMetadata>, + bounded_collection_id: CollectionId, + ) -> Result { + let foreign_asset_id = Self::get_next_foreign_asset_id()?; + LocationToCurrencyIds::::try_mutate(location, |maybe_currency_ids| -> DispatchResult { + ensure!( + maybe_currency_ids.is_none(), + Error::::MultiLocationExisted + ); + *maybe_currency_ids = Some(foreign_asset_id); + // *maybe_currency_ids = Some(CurrencyId::ForeignAsset(foreign_asset_id)); + + ForeignAssetLocations::::try_mutate( + foreign_asset_id, + |maybe_location| -> DispatchResult { + ensure!(maybe_location.is_none(), Error::::MultiLocationExisted); + *maybe_location = Some(location.clone()); + + AssetMetadatas::::try_mutate( + AssetIds::ForeignAssetId(foreign_asset_id), + |maybe_asset_metadatas| -> DispatchResult { + ensure!(maybe_asset_metadatas.is_none(), Error::::AssetIdExisted); + *maybe_asset_metadatas = Some(metadata.clone()); + 
Ok(()) + }, + ) + }, + )?; + + AssetBinding::::try_mutate(foreign_asset_id, |collection_id| -> DispatchResult { + *collection_id = Some(bounded_collection_id); + Ok(()) + }) + })?; + + Ok(foreign_asset_id) + } + + fn do_update_foreign_asset( + foreign_asset_id: ForeignAssetId, + location: &MultiLocation, + metadata: &AssetMetadata>, + ) -> DispatchResult { + ForeignAssetLocations::::try_mutate( + foreign_asset_id, + |maybe_multi_locations| -> DispatchResult { + let old_multi_locations = maybe_multi_locations + .as_mut() + .ok_or(Error::::AssetIdNotExists)?; + + AssetMetadatas::::try_mutate( + AssetIds::ForeignAssetId(foreign_asset_id), + |maybe_asset_metadatas| -> DispatchResult { + ensure!( + maybe_asset_metadatas.is_some(), + Error::::AssetIdNotExists + ); + + // modify location + if location != old_multi_locations { + LocationToCurrencyIds::::remove(old_multi_locations.clone()); + LocationToCurrencyIds::::try_mutate( + location, + |maybe_currency_ids| -> DispatchResult { + ensure!( + maybe_currency_ids.is_none(), + Error::::MultiLocationExisted + ); + // *maybe_currency_ids = Some(CurrencyId::ForeignAsset(foreign_asset_id)); + *maybe_currency_ids = Some(foreign_asset_id); + Ok(()) + }, + )?; + } + *maybe_asset_metadatas = Some(metadata.clone()); + *old_multi_locations = location.clone(); + Ok(()) + }, + ) + }, + ) + } +} + +pub use frame_support::{ + traits::{ + fungibles::{Balanced, CreditOf}, + tokens::currency::Currency as CurrencyT, + OnUnbalanced as OnUnbalancedT, + }, + weights::{WeightToFeePolynomial, WeightToFee}, +}; + +pub struct FreeForAll< + WeightToFee: WeightToFeePolynomial, + AssetId: Get, + AccountId, + Currency: CurrencyT, + OnUnbalanced: OnUnbalancedT, +>( + Weight, + Currency::Balance, + PhantomData<(WeightToFee, AssetId, AccountId, Currency, OnUnbalanced)>, +); + +impl< + WeightToFee: WeightToFeePolynomial, + AssetId: Get, + AccountId, + Currency: CurrencyT, + OnUnbalanced: OnUnbalancedT, + > WeightTrader for FreeForAll +{ + fn new() -> Self { + Self(0, Zero::zero(), PhantomData) + } + + fn buy_weight(&mut self, weight: Weight, payment: Assets) -> Result { + log::trace!(target: "fassets::weight", "buy_weight weight: {:?}, payment: {:?}", weight, payment); + Ok(payment) + } +} +impl Drop + for FreeForAll +where + WeightToFee: WeightToFeePolynomial, + AssetId: Get, + Currency: CurrencyT, + OnUnbalanced: OnUnbalancedT, +{ + fn drop(&mut self) { + OnUnbalanced::on_unbalanced(Currency::issue(self.1)); + } +} diff --git a/pallets/foreign-assets/src/weights.rs b/pallets/foreign-assets/src/weights.rs new file mode 100644 index 0000000000..767539c334 --- /dev/null +++ b/pallets/foreign-assets/src/weights.rs @@ -0,0 +1,94 @@ +// Template adopted from https://github.com/paritytech/substrate/blob/master/.maintain/frame-weight-template.hbs + +//! Autogenerated weights for pallet_foreign_assets +//! +//! THIS FILE WAS AUTO-GENERATED USING THE SUBSTRATE BENCHMARK CLI VERSION 4.0.0-dev +//! DATE: 2022-09-16, STEPS: `50`, REPEAT: 80, LOW RANGE: `[]`, HIGH RANGE: `[]` +//! 
EXECUTION: None, WASM-EXECUTION: Compiled, CHAIN: None, DB CACHE: 1024 + +// Executed Command: +// target/release/unique-collator +// benchmark +// pallet +// --pallet +// pallet-foreign-assets +// --wasm-execution +// compiled +// --extrinsic +// * +// --template +// .maintain/frame-weight-template.hbs +// --steps=50 +// --repeat=80 +// --heap-pages=4096 +// --output=./pallets/foreign-assets/src/weights.rs + +#![cfg_attr(rustfmt, rustfmt_skip)] +#![allow(unused_parens)] +#![allow(unused_imports)] +#![allow(missing_docs)] +#![allow(clippy::unnecessary_cast)] + +use frame_support::{traits::Get, weights::{Weight, constants::RocksDbWeight}}; +use sp_std::marker::PhantomData; + +/// Weight functions needed for pallet_foreign_assets. +pub trait WeightInfo { + fn register_foreign_asset() -> Weight; + fn update_foreign_asset() -> Weight; +} + +/// Weights for pallet_foreign_assets using the Substrate node and recommended hardware. +pub struct SubstrateWeight(PhantomData); +impl WeightInfo for SubstrateWeight { + // Storage: Common CreatedCollectionCount (r:1 w:1) + // Storage: Common DestroyedCollectionCount (r:1 w:0) + // Storage: System Account (r:2 w:2) + // Storage: ForeignAssets NextForeignAssetId (r:1 w:1) + // Storage: ForeignAssets LocationToCurrencyIds (r:1 w:1) + // Storage: ForeignAssets ForeignAssetLocations (r:1 w:1) + // Storage: ForeignAssets AssetMetadatas (r:1 w:1) + // Storage: ForeignAssets AssetBinding (r:1 w:1) + // Storage: Common CollectionPropertyPermissions (r:0 w:1) + // Storage: Common CollectionProperties (r:0 w:1) + // Storage: Common CollectionById (r:0 w:1) + fn register_foreign_asset() -> Weight { + Weight::from_ref_time(52_161_000) + .saturating_add(T::DbWeight::get().reads(9 as u64)) + .saturating_add(T::DbWeight::get().writes(11 as u64)) + } + // Storage: ForeignAssets ForeignAssetLocations (r:1 w:1) + // Storage: ForeignAssets AssetMetadatas (r:1 w:1) + fn update_foreign_asset() -> Weight { + Weight::from_ref_time(19_111_000) + .saturating_add(T::DbWeight::get().reads(2 as u64)) + .saturating_add(T::DbWeight::get().writes(2 as u64)) + } +} + +// For backwards compatibility and tests +impl WeightInfo for () { + // Storage: Common CreatedCollectionCount (r:1 w:1) + // Storage: Common DestroyedCollectionCount (r:1 w:0) + // Storage: System Account (r:2 w:2) + // Storage: ForeignAssets NextForeignAssetId (r:1 w:1) + // Storage: ForeignAssets LocationToCurrencyIds (r:1 w:1) + // Storage: ForeignAssets ForeignAssetLocations (r:1 w:1) + // Storage: ForeignAssets AssetMetadatas (r:1 w:1) + // Storage: ForeignAssets AssetBinding (r:1 w:1) + // Storage: Common CollectionPropertyPermissions (r:0 w:1) + // Storage: Common CollectionProperties (r:0 w:1) + // Storage: Common CollectionById (r:0 w:1) + fn register_foreign_asset() -> Weight { + Weight::from_ref_time(52_161_000) + .saturating_add(RocksDbWeight::get().reads(9 as u64)) + .saturating_add(RocksDbWeight::get().writes(11 as u64)) + } + // Storage: ForeignAssets ForeignAssetLocations (r:1 w:1) + // Storage: ForeignAssets AssetMetadatas (r:1 w:1) + fn update_foreign_asset() -> Weight { + Weight::from_ref_time(19_111_000) + .saturating_add(RocksDbWeight::get().reads(2 as u64)) + .saturating_add(RocksDbWeight::get().writes(2 as u64)) + } +} diff --git a/pallets/fungible/CHANGELOG.md b/pallets/fungible/CHANGELOG.md new file mode 100644 index 0000000000..ef52e80d02 --- /dev/null +++ b/pallets/fungible/CHANGELOG.md @@ -0,0 +1,48 @@ +# Change Log + +All notable changes to this project will be documented in this file. 
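As a usage illustration for the `register_foreign_asset` extrinsic added above, here is a minimal, hypothetical test sketch. The mock runtime, its `new_test_ext()` helper, the `ForeignAssets` pallet name, the root `RegisterOrigin`, the `u64` account id and all literal values are assumptions for illustration only; the extrinsic signature and the storage getters are the ones introduced in this changeset.

```rust
// Hypothetical sketch only: `Test`, `new_test_ext`, `ForeignAssets`, `RuntimeOrigin`
// and the literal values are assumed, not defined in this diff.
use frame_support::assert_ok;
use pallet_foreign_assets::AssetMetadata;
use xcm::{v1::MultiLocation, VersionedMultiLocation};

#[test]
fn register_relay_token_as_foreign_asset() {
	new_test_ext().execute_with(|| {
		// Metadata for the relay-chain token mirrored locally (assumes Balance = u128 in the mock).
		let metadata = AssetMetadata {
			name: b"Relay Token".to_vec(),
			symbol: b"RLY".to_vec(),
			decimals: 12,
			minimal_balance: 1u128,
		};

		// The asset lives on the relay chain, i.e. at the parent location.
		let location = VersionedMultiLocation::V1(MultiLocation::parent());

		// Registers the asset: a backing fungible collection is created for the owner
		// and the location/metadata/collection bindings are stored.
		assert_ok!(ForeignAssets::register_foreign_asset(
			RuntimeOrigin::root(),
			1u64, // owner account id in the assumed mock runtime
			Box::new(location),
			Box::new(metadata),
		));

		// The first registered asset gets id 0 and is resolvable in both directions.
		assert_eq!(
			ForeignAssets::location_to_currency_ids(MultiLocation::parent()),
			Some(0)
		);
		assert!(ForeignAssets::asset_binding(0).is_some());
	});
}
```

On success the pallet also emits `ForeignAssetRegistered` and binds the new asset id to the freshly created fungible collection through `AssetBinding`.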
+ + +## [0.1.5] - 2022-08-29 + +### Added + + - Implementation of `mint` and `mint_bulk` methods for ERC20 API. + +## [v0.1.4] - 2022-08-24 + +### Change + - Add bound `AsRef<[u8; 32]>` to `T::CrossAccountId`. + + +## [v0.1.3] 2022-08-16 + +### Other changes + +- build: Upgrade polkadot to v0.9.27 2c498572636f2b34d53b1c51b7283a761a7dc90a + +- build: Upgrade polkadot to v0.9.26 85515e54c4ca1b82a2630034e55dcc804c643bf8 + +- refactor: Switch to new prefix removal methods 26734e9567589d75cdd99e404eabf11d5a97d975 + +New methods allows to call `remove_prefix` with limit multiple times +in the same block +However, we don't use prefix removal limits, so upgrade is +straightforward + +Upstream-Change: https://github.com/paritytech/substrate/pull/11490 + +- build: Upgrade polkadot to v0.9.25 cdfb9bdc7b205ff1b5134f034ef9973d769e5e6b + +## [0.1.2] - 2022-08-04 + +### Fixed + + - Issue with ItemCreated event containing total supply of tokens instead minted amount + +## [0.1.1] - 2022-07-14 + +### Added + + - Implementation of RPC method `token_owners` returning 10 owners in no particular order. + This was an internal request to improve the web interface and support fractionalization event. diff --git a/pallets/fungible/Cargo.toml b/pallets/fungible/Cargo.toml index 51ea0f108c..8669d9e75a 100644 --- a/pallets/fungible/Cargo.toml +++ b/pallets/fungible/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "pallet-fungible" -version = "0.1.0" +version = "0.1.5" license = "GPLv3" edition = "2021" @@ -11,19 +11,19 @@ package = 'parity-scale-codec' version = '3.1.2' [dependencies] -frame-support = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -frame-system = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-runtime = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-std = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-core = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } +frame-support = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +frame-system = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-runtime = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-std = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-core = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } pallet-common = { default-features = false, path = '../common' } pallet-structure = { default-features = false, path = '../structure' } up-data-structs = { default-features = false, path = '../../primitives/data-structs' } evm-coder = { default-features = false, path = '../../crates/evm-coder' } pallet-evm-coder-substrate = { default-features = false, path = '../evm-coder-substrate' } ethereum = { version = "0.12.0", default-features = false } -frame-benchmarking = { default-features = false, optional = true, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -pallet-evm = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } +frame-benchmarking = { default-features = 
false, optional = true, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +pallet-evm = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } scale-info = { version = "2.0.1", default-features = false, features = [ "derive", ] } @@ -42,6 +42,7 @@ std = [ "ethereum/std", "pallet-evm-coder-substrate/std", 'frame-benchmarking/std', - "pallet-evm/std" + "pallet-evm/std", ] runtime-benchmarks = ['frame-benchmarking', 'pallet-common/runtime-benchmarks'] +try-runtime = ["frame-support/try-runtime"] diff --git a/pallets/fungible/src/benchmarking.rs b/pallets/fungible/src/benchmarking.rs index c24df19dd3..430436bb37 100644 --- a/pallets/fungible/src/benchmarking.rs +++ b/pallets/fungible/src/benchmarking.rs @@ -31,7 +31,9 @@ fn create_collection( create_collection_raw( owner, CollectionMode::Fungible(0), - >::init_collection, + |owner: T::CrossAccountId, data| { + >::init_collection(owner.clone(), owner, data, Default::default()) + }, FungibleHandle::cast, ) } diff --git a/pallets/fungible/src/common.rs b/pallets/fungible/src/common.rs index e9a93813f6..79456de536 100644 --- a/pallets/fungible/src/common.rs +++ b/pallets/fungible/src/common.rs @@ -25,7 +25,8 @@ use sp_std::{vec::Vec, vec}; use up_data_structs::{Property, PropertyKey, PropertyValue, PropertyKeyPermission}; use crate::{ - Allowance, Balance, Config, Error, FungibleHandle, Pallet, SelfWeightOf, weights::WeightInfo, + Allowance, TotalSupply, Balance, Config, Error, FungibleHandle, Pallet, SelfWeightOf, + weights::WeightInfo, }; pub struct CommonWeights(PhantomData); @@ -44,7 +45,7 @@ impl CommonWeightInfo for CommonWeights { CreateItemExData::Fungible(f) => { >::create_multiple_items_ex(f.len() as u32) } - _ => 0, + _ => Weight::zero(), } } @@ -54,27 +55,27 @@ impl CommonWeightInfo for CommonWeights { fn set_collection_properties(_amount: u32) -> Weight { // Error - 0 + Weight::zero() } fn delete_collection_properties(_amount: u32) -> Weight { // Error - 0 + Weight::zero() } fn set_token_properties(_amount: u32) -> Weight { // Error - 0 + Weight::zero() } fn delete_token_properties(_amount: u32) -> Weight { // Error - 0 + Weight::zero() } fn set_token_property_permissions(_amount: u32) -> Weight { // Error - 0 + Weight::zero() } fn transfer() -> Weight { @@ -100,10 +101,16 @@ impl CommonWeightInfo for CommonWeights { fn burn_recursively_breadth_raw(_amount: u32) -> Weight { // Fungible tokens can't have children - 0 + Weight::zero() + } + + fn token_owner() -> Weight { + Weight::zero() } } +/// Implementation of `CommonCollectionOperations` for `FungibleHandle`. It wraps FungibleHandle Pallete +/// methods and adds weight info. impl CommonCollectionOperations for FungibleHandle { fn create_item( &self, @@ -359,6 +366,11 @@ impl CommonCollectionOperations for FungibleHandle { None } + /// Returns 10 tokens owners in no particular order. 
+ fn token_owners(&self, token: TokenId) -> Vec { + >::token_owners(self.id, token).unwrap_or_default() + } + fn token_property(&self, _token_id: TokenId, _key: &PropertyKey) -> Option { None } @@ -405,4 +417,11 @@ impl CommonCollectionOperations for FungibleHandle { fn refungible_extensions(&self) -> Option<&dyn RefungibleExtensions> { None } + + fn total_pieces(&self, token: TokenId) -> Option { + if token != TokenId::default() { + return None; + } + >::try_get(self.id).ok() + } } diff --git a/pallets/fungible/src/erc.rs b/pallets/fungible/src/erc.rs index 8364a08612..088f841b34 100644 --- a/pallets/fungible/src/erc.rs +++ b/pallets/fungible/src/erc.rs @@ -14,6 +14,8 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . +//! ERC-20 standart support implementation. + use core::char::{REPLACEMENT_CHARACTER, decode_utf16}; use core::convert::TryInto; use evm_coder::{ToLog, execution::*, generate_stubgen, solidity_interface, types::*, weight}; @@ -48,7 +50,7 @@ pub enum ERC20Events { }, } -#[solidity_interface(name = "ERC20", events(ERC20Events))] +#[solidity_interface(name = ERC20, events(ERC20Events))] impl FungibleHandle { fn name(&self) -> Result { Ok(decode_utf16(self.name.iter().copied()) @@ -127,8 +129,32 @@ impl FungibleHandle { } } -#[solidity_interface(name = "ERC20UniqueExtensions")] +#[solidity_interface(name = ERC20Mintable)] impl FungibleHandle { + /// Mint tokens for `to` account. + /// @param to account that will receive minted tokens + /// @param amount amount of tokens to mint + #[weight(>::create_item())] + fn mint(&mut self, caller: caller, to: address, amount: uint256) -> Result { + let caller = T::CrossAccountId::from_eth(caller); + let to = T::CrossAccountId::from_eth(to); + let amount = amount.try_into().map_err(|_| "amount overflow")?; + let budget = self + .recorder + .weight_calls_budget(>::find_parent()); + >::create_item(&self, &caller, (to, amount), &budget) + .map_err(dispatch_to_evm::)?; + Ok(true) + } +} + +#[solidity_interface(name = ERC20UniqueExtensions)] +impl FungibleHandle { + /// Burn tokens from account + /// @dev Function that burns an `amount` of the tokens of a given account, + /// deducting from the sender's allowance for said account. + /// @param from The account whose tokens will be burnt. + /// @param amount The amount that will be burnt. #[weight(>::burn_from())] fn burn_from(&mut self, caller: caller, from: address, amount: uint256) -> Result { let caller = T::CrossAccountId::from_eth(caller); @@ -142,24 +168,48 @@ impl FungibleHandle { .map_err(dispatch_to_evm::)?; Ok(true) } + + /// Mint tokens for multiple accounts. 
+ /// @param amounts array of pairs of account address and amount + #[weight(>::create_multiple_items_ex(amounts.len() as u32))] + fn mint_bulk(&mut self, caller: caller, amounts: Vec<(address, uint256)>) -> Result { + let caller = T::CrossAccountId::from_eth(caller); + let budget = self + .recorder + .weight_calls_budget(>::find_parent()); + let amounts = amounts + .into_iter() + .map(|(to, amount)| { + Ok(( + T::CrossAccountId::from_eth(to), + amount.try_into().map_err(|_| "amount overflow")?, + )) + }) + .collect::>()?; + + >::create_multiple_items(&self, &caller, amounts, &budget) + .map_err(dispatch_to_evm::)?; + Ok(true) + } } #[solidity_interface( - name = "UniqueFungible", + name = UniqueFungible, is( ERC20, + ERC20Mintable, ERC20UniqueExtensions, - via("CollectionHandle", common_mut, Collection) + Collection(via(common_mut returns CollectionHandle)), ) )] -impl FungibleHandle where T::AccountId: From<[u8; 32]> {} +impl FungibleHandle where T::AccountId: From<[u8; 32]> + AsRef<[u8; 32]> {} generate_stubgen!(gen_impl, UniqueFungibleCall<()>, true); generate_stubgen!(gen_iface, UniqueFungibleCall<()>, false); impl CommonEvmHandler for FungibleHandle where - T::AccountId: From<[u8; 32]>, + T::AccountId: From<[u8; 32]> + AsRef<[u8; 32]>, { const CODE: &'static [u8] = include_bytes!("./stubs/UniqueFungible.raw"); diff --git a/pallets/fungible/src/lib.rs b/pallets/fungible/src/lib.rs index d3a9d97c10..4fc62fbb78 100644 --- a/pallets/fungible/src/lib.rs +++ b/pallets/fungible/src/lib.rs @@ -14,6 +14,68 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . +//! # Fungible Pallet +//! +//! The Fungible pallet provides functionality for dealing with fungible assets. +//! +//! - [`CreateItemData`] +//! - [`Config`] +//! - [`FungibleHandle`] +//! - [`Pallet`] +//! - [`TotalSupply`] +//! - [`Balance`] +//! - [`Allowance`] +//! - [`Error`] +//! +//! ## Fungible tokens +//! +//! Fungible tokens or assets are divisible and non-unique. For instance, +//! fiat currencies like the dollar are fungible: A $1 bill +//! in New York City has the same value as a $1 bill in Miami. +//! A fungible token can also be a cryptocurrency like Bitcoin: 1 BTC is worth 1 BTC, +//! no matter where it is issued. Thus, the fungibility refers to a specific currency’s +//! ability to maintain one standard value. As well, it needs to have uniform acceptance. +//! This means that a currency’s history should not be able to affect its value, +//! and this is due to the fact that each piece that is a part of the currency is equal +//! in value when compared to every other piece of that exact same currency. +//! In the world of cryptocurrencies, this is essentially a coin or a token +//! that can be replaced by another identical coin or token, and they are +//! both mutually interchangeable. A popular implementation of fungible tokens is +//! the ERC-20 token standard. +//! +//! ### ERC-20 +//! +//! The [ERC-20](https://ethereum.org/en/developers/docs/standards/tokens/erc-20/) (Ethereum Request for Comments 20), proposed by Fabian Vogelsteller in November 2015, +//! is a Token Standard that implements an API for tokens within Smart Contracts. +//! +//! Example functionalities ERC-20 provides: +//! +//! * transfer tokens from one account to another +//! * get the current token balance of an account +//! * get the total supply of the token available on the network +//! * approve whether an amount of token from an account can be spent by a third-party account +//! +//! 
## Overview +//! +//! The module provides functionality for asset management of fungible asset, supports ERC-20 standart, includes: +//! +//! * Asset Issuance +//! * Asset Transferal +//! * Asset Destruction +//! * Delegated Asset Transfers +//! +//! **NOTE:** The created fungible asset always has `token_id` = 0. +//! So `tokenA` and `tokenB` will have different `collection_id`. +//! +//! ### Implementations +//! +//! The Fungible pallet provides implementations for the following traits. +//! +//! - [`WithRecorder`](pallet_evm_coder_substrate::WithRecorder): Trait for EVM support +//! - [`CommonCollectionOperations`](pallet_common::CommonCollectionOperations): Functions for dealing with collections +//! - [`CommonWeightInfo`](pallet_common::CommonWeightInfo): Functions for retrieval of transaction weight +//! - [`CommonEvmHandler`](pallet_common::erc::CommonEvmHandler): Function for handling EVM runtime calls + #![cfg_attr(not(feature = "std"), no_std)] use core::ops::Deref; @@ -21,8 +83,8 @@ use evm_coder::ToLog; use frame_support::{ensure}; use pallet_evm::account::CrossAccountId; use up_data_structs::{ - AccessMode, CollectionId, TokenId, CreateCollectionData, mapping::TokenAddressMapping, - budget::Budget, + AccessMode, CollectionId, CollectionFlags, TokenId, CreateCollectionData, + mapping::TokenAddressMapping, budget::Budget, }; use pallet_common::{ Error as CommonError, Event as CommonEvent, Pallet as PalletCommon, @@ -33,7 +95,7 @@ use pallet_structure::Pallet as PalletStructure; use pallet_evm_coder_substrate::WithRecorder; use sp_core::H160; use sp_runtime::{ArithmeticError, DispatchError, DispatchResult}; -use sp_std::{collections::btree_map::BTreeMap}; +use sp_std::{collections::btree_map::BTreeMap, vec::Vec}; pub use pallet::*; @@ -57,13 +119,13 @@ pub mod pallet { pub enum Error { /// Not Fungible item data used to mint in Fungible collection. NotFungibleDataUsedToMintFungibleCollectionToken, - /// Not default id passed as TokenId argument + /// Fungible tokens hold no ID, and the default value of TokenId for Fungible collection is 0. FungibleItemsHaveNoId, - /// Tried to set data for fungible item + /// Tried to set data for fungible item. FungibleItemsDontHaveData, - /// Fungible token does not support nested + /// Fungible token does not support nesting. FungibleDisallowsNesting, - /// Setting item properties is not allowed + /// Setting item properties is not allowed. SettingPropertiesNotAllowed, } @@ -78,10 +140,12 @@ pub mod pallet { #[pallet::generate_store(pub(super) trait Store)] pub struct Pallet(_); + /// Total amount of fungible tokens inside a collection. #[pallet::storage] pub type TotalSupply = StorageMap; + /// Amount of tokens owned by an account inside a collection. #[pallet::storage] pub type Balance = StorageNMap< Key = ( @@ -92,6 +156,7 @@ pub mod pallet { QueryKind = ValueQuery, >; + /// Storage for assets delegated to a limited extent to other users. #[pallet::storage] pub type Allowance = StorageNMap< Key = ( @@ -104,14 +169,22 @@ pub mod pallet { >; } +/// Wrapper around untyped collection handle, asserting inner collection is of fungible type. +/// Required for interaction with Fungible collections, type safety and implementation [`solidity_interface`][`evm_coder::solidity_interface`]. pub struct FungibleHandle(pallet_common::CollectionHandle); + +/// Implementation of methods required for dispatching during runtime. impl FungibleHandle { + /// Casts [`CollectionHandle`][`pallet_common::CollectionHandle`] into [`FungibleHandle`]. 
pub fn cast(inner: pallet_common::CollectionHandle) -> Self { Self(inner) } + + /// Casts [`FungibleHandle`] into [`CollectionHandle`][`pallet_common::CollectionHandle`]. pub fn into_inner(self) -> pallet_common::CollectionHandle { self.0 } + /// Returns a mutable reference to the internal [`CollectionHandle`][`pallet_common::CollectionHandle`]. pub fn common_mut(&mut self) -> &mut pallet_common::CollectionHandle { &mut self.0 } @@ -132,13 +205,37 @@ impl Deref for FungibleHandle { } } +/// Pallet implementation for fungible assets impl Pallet { + /// Initializes the collection. Returns [CollectionId] on success, [DispatchError] otherwise. pub fn init_collection( owner: T::CrossAccountId, + payer: T::CrossAccountId, data: CreateCollectionData, + flags: CollectionFlags, ) -> Result { - >::init_collection(owner, data, false) + >::init_collection(owner, payer, data, flags) } + + /// Initializes the collection with ForeignCollection flag. Returns [CollectionId] on success, [DispatchError] otherwise. + pub fn init_foreign_collection( + owner: T::CrossAccountId, + payer: T::CrossAccountId, + data: CreateCollectionData, + ) -> Result { + let id = >::init_collection( + owner, + payer, + data, + CollectionFlags { + foreign: true, + ..Default::default() + }, + )?; + Ok(id) + } + + /// Destroys a collection. pub fn destroy_collection( collection: FungibleHandle, sender: &T::CrossAccountId, @@ -154,15 +251,19 @@ impl Pallet { PalletCommon::destroy_collection(collection.0, sender)?; >::remove(id); - >::remove_prefix((id,), None); - >::remove_prefix((id,), None); + let _ = >::clear_prefix((id,), u32::MAX, None); + let _ = >::clear_prefix((id,), u32::MAX, None); Ok(()) } + ///Checks if collection has tokens. Return `true` if it has. fn collection_has_tokens(collection_id: CollectionId) -> bool { >::get(collection_id) != 0 } + /// Burns the specified amount of the token. If the token balance + /// or total supply is less than the given value, + /// it will return [DispatchError]. pub fn burn( collection: &FungibleHandle, owner: &T::CrossAccountId, @@ -176,6 +277,9 @@ impl Pallet { .checked_sub(amount) .ok_or(>::TokenValueTooLow)?; + // Foreign collection check + ensure!(!collection.flags.foreign, >::NoPermission); + if collection.permissions.access() == AccessMode::AllowList { collection.check_allowlist(owner)?; } @@ -207,6 +311,53 @@ impl Pallet { Ok(()) } + /// Burns the specified amount of the token. + pub fn burn_foreign( + collection: &FungibleHandle, + owner: &T::CrossAccountId, + amount: u128, + ) -> DispatchResult { + let total_supply = >::get(collection.id) + .checked_sub(amount) + .ok_or(>::TokenValueTooLow)?; + + let balance = >::get((collection.id, owner)) + .checked_sub(amount) + .ok_or(>::TokenValueTooLow)?; + // ========= + + if balance == 0 { + >::remove((collection.id, owner)); + >::unnest_if_nested(owner, collection.id, TokenId::default()); + } else { + >::insert((collection.id, owner), balance); + } + >::insert(collection.id, total_supply); + + >::deposit_log( + ERC20Events::Transfer { + from: *owner.as_eth(), + to: H160::default(), + value: amount.into(), + } + .to_log(collection_id_to_address(collection.id)), + ); + >::deposit_event(CommonEvent::ItemDestroyed( + collection.id, + TokenId::default(), + owner.clone(), + amount, + )); + Ok(()) + } + + /// Transfers the specified amount of tokens. Will check that + /// the transfer is allowed for the token. + /// + /// - `from`: Owner of tokens to transfer. + /// - `to`: Recepient of transfered tokens. 
+ /// - `amount`: Amount of tokens to transfer. + /// - `collection`: Collection that contains the token pub fn transfer( collection: &FungibleHandle, from: &T::CrossAccountId, @@ -277,24 +428,15 @@ impl Pallet { Ok(()) } - pub fn create_multiple_items( + /// Minting tokens for multiple IDs. + /// It is a utility function used in [`create_multiple_items`][`Pallet::create_multiple_items`] + /// and [`create_multiple_items_foreign`][`Pallet::create_multiple_items_foreign`] + pub fn create_multiple_items_common( collection: &FungibleHandle, sender: &T::CrossAccountId, data: BTreeMap, nesting_budget: &dyn Budget, ) -> DispatchResult { - if !collection.is_owner_or_admin(sender) { - ensure!( - collection.permissions.mint_mode(), - >::PublicMintingNotAllowed - ); - collection.check_allowlist(sender)?; - - for (owner, _) in data.iter() { - collection.check_allowlist(owner)?; - } - } - let total_supply = data .iter() .map(|(_, v)| *v) @@ -303,14 +445,7 @@ impl Pallet { }) .ok_or(ArithmeticError::Overflow)?; - let mut balances = data; - for (k, v) in balances.iter_mut() { - *v = >::get((collection.id, &k)) - .checked_add(*v) - .ok_or(ArithmeticError::Overflow)?; - } - - for (to, _) in balances.iter() { + for (to, _) in data.iter() { >::check_nesting( sender.clone(), to, @@ -320,11 +455,21 @@ impl Pallet { )?; } + let updated_balances = data + .into_iter() + .map(|(user, amount)| { + let updated_balance = >::get((collection.id, &user)) + .checked_add(amount) + .ok_or(ArithmeticError::Overflow)?; + Ok((user, amount, updated_balance)) + }) + .collect::, DispatchError>>()?; + // ========= >::insert(collection.id, total_supply); - for (user, amount) in balances { - >::insert((collection.id, &user), amount); + for (user, amount, updated_balance) in updated_balances { + >::insert((collection.id, &user), updated_balance); >::nest_if_sent_to_token_unchecked( &user, collection.id, @@ -349,6 +494,43 @@ impl Pallet { Ok(()) } + /// Minting tokens for multiple IDs. + /// See [`create_item`][`Pallet::create_item`] for more details. + pub fn create_multiple_items( + collection: &FungibleHandle, + sender: &T::CrossAccountId, + data: BTreeMap, + nesting_budget: &dyn Budget, + ) -> DispatchResult { + // Foreign collection check + ensure!(!collection.flags.foreign, >::NoPermission); + + if !collection.is_owner_or_admin(sender) { + ensure!( + collection.permissions.mint_mode(), + >::PublicMintingNotAllowed + ); + collection.check_allowlist(sender)?; + + for (owner, _) in data.iter() { + collection.check_allowlist(owner)?; + } + } + + Self::create_multiple_items_common(collection, sender, data, nesting_budget) + } + + /// Minting tokens for multiple IDs. + /// See [`create_item_foreign`][`Pallet::create_item_foreign`] for more details. + pub fn create_multiple_items_foreign( + collection: &FungibleHandle, + sender: &T::CrossAccountId, + data: BTreeMap, + nesting_budget: &dyn Budget, + ) -> DispatchResult { + Self::create_multiple_items_common(collection, sender, data, nesting_budget) + } + fn set_allowance_unchecked( collection: &FungibleHandle, owner: &T::CrossAccountId, @@ -378,6 +560,12 @@ impl Pallet { )); } + /// Set allowance for the spender to `transfer` or `burn` owner's tokens. + /// + /// - `collection`: Collection that contains the token + /// - `owner`: Owner of tokens that sets the allowance. + /// - `spender`: Recipient of the allowance rights. + /// - `amount`: Amount of tokens the spender is allowed to `transfer` or `burn`. 
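The allowance documentation above may be easier to follow as a single flow, sketched below. This is a hypothetical helper, not code from this changeset: the name `approve_then_spend`, the placeholder accounts and the amounts are illustrative, and `Value` is assumed to be the `Budget` implementation from `up_data_structs::budget` that the foreign-assets glue earlier in this diff passes as `Value::new(0)`.

```rust
// Hypothetical sketch of the allowance flow; names and amounts are illustrative.
use frame_support::dispatch::DispatchResult;
use pallet_fungible::{FungibleHandle, Pallet as Fungible};
use up_data_structs::budget::Value; // assumed import path, mirroring this diff's usage

fn approve_then_spend<T: pallet_fungible::Config>(
	collection: &FungibleHandle<T>,
	owner: &T::CrossAccountId,
	spender: &T::CrossAccountId,
	recipient: &T::CrossAccountId,
) -> DispatchResult {
	// Zero-depth nesting budget, as used elsewhere in this diff for plain accounts.
	let budget = Value::new(0);

	// 1. Mint 100 tokens to `owner` (the caller must be owner/admin, or public
	//    minting must be enabled on the collection).
	Fungible::<T>::create_item(collection, owner, (owner.clone(), 100), &budget)?;

	// 2. `owner` allows `spender` to move up to 40 of those tokens.
	Fungible::<T>::set_allowance(collection, owner, spender, 40)?;

	// 3. `spender` moves 25 of `owner`'s tokens to `recipient`;
	//    the stored allowance drops to 15.
	Fungible::<T>::transfer_from(collection, spender, owner, recipient, 25, &budget)?;

	Ok(())
}
```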
pub fn set_allowance( collection: &FungibleHandle, owner: &T::CrossAccountId, @@ -402,6 +590,13 @@ impl Pallet { Ok(()) } + /// Checks if a non-owner has (enough) allowance from the owner to perform operations on the tokens. + /// Returns the expected remaining allowance - it should be set manually if the transaction proceeds. + /// + /// - `collection`: Collection that contains the token. + /// - `spender`: CrossAccountId who has the allowance rights. + /// - `from`: The owner of the tokens who sets the allowance. + /// - `amount`: Amount of tokens by which the allowance should be reduced. fn check_allowed( collection: &FungibleHandle, spender: &T::CrossAccountId, @@ -441,6 +636,11 @@ impl Pallet { Ok(allowance) } + /// Transfer fungible tokens from one account to another. + /// Same as the [`transfer`][`Pallet::transfer`] but the spender doesn't need to be an owner of the token pieces. + /// The owner should set an allowance for the spender to transfer the pieces. + /// See [`set_allowance`][`Pallet::set_allowance`] for more details. + pub fn transfer_from( collection: &FungibleHandle, spender: &T::CrossAccountId, @@ -460,6 +660,11 @@ impl Pallet { Ok(()) } + /// Burn fungible tokens from the account. + /// + /// Same as the [`burn`][`Pallet::burn`] but the spender doesn't need to be an owner of the tokens. The `from` account should + /// set an allowance for the spender to burn tokens. + /// See [`set_allowance`][`Pallet::set_allowance`] for more details. pub fn burn_from( collection: &FungibleHandle, spender: &T::CrossAccountId, @@ -478,7 +683,13 @@ impl Pallet { Ok(()) } - /// Delegated to `create_multiple_items` + /// Creates a fungible token. + /// + /// The sender should be the owner/admin of the collection, or the collection should be configured + /// to allow public minting. + /// + /// - `data`: Contains the user who will become the owner of the tokens and the amount + /// of tokens they will receive. pub fn create_item( collection: &FungibleHandle, sender: &T::CrossAccountId, @@ -492,4 +703,44 @@ impl Pallet { nesting_budget, ) } + + /// Creates a fungible token. + /// + /// - `data`: Contains the user who will become the owner of the tokens and the amount + /// of tokens they will receive. + pub fn create_item_foreign( + collection: &FungibleHandle, + sender: &T::CrossAccountId, + data: CreateItemData, + nesting_budget: &dyn Budget, + ) -> DispatchResult { + Self::create_multiple_items_foreign( + collection, + sender, + [(data.0, data.1)].into_iter().collect(), + nesting_budget, + ) + } + + /// Returns 10 token owners in no particular order. + /// + /// There is no direct way to get token holders in ascending order, + /// since `iter_prefix` returns values in no particular order. + /// Therefore, getting the 10 largest holders when there are many holders + /// could require significant memory allocation plus an `n * log(n)` sort.
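Similarly, the bulk-minting helpers documented above accept a map from recipient to amount. Below is a hedged sketch under the same assumptions (placeholder names, the assumed `Value::new(0)` nesting budget); the `token_owners` implementation itself follows right after.

```rust
// Hypothetical bulk-mint sketch; `airdrop`, `admin` and the recipients are placeholders.
use frame_support::dispatch::DispatchResult;
use pallet_fungible::{FungibleHandle, Pallet as Fungible};
use sp_std::collections::btree_map::BTreeMap;
use up_data_structs::budget::Value; // assumed import path, mirroring this diff's usage

fn airdrop<T: pallet_fungible::Config>(
	collection: &FungibleHandle<T>,
	admin: &T::CrossAccountId,
	recipients: &[T::CrossAccountId],
) -> DispatchResult {
	// One entry per recipient: (who, amount in indivisible token units).
	let data: BTreeMap<T::CrossAccountId, u128> =
		recipients.iter().cloned().map(|who| (who, 10)).collect();

	// Fails with `PublicMintingNotAllowed` (or an allow-list error) unless `admin`
	// is the collection owner/admin or public minting is enabled.
	Fungible::<T>::create_multiple_items(collection, admin, data, &Value::new(0))
}
```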
+ pub fn token_owners( + collection: CollectionId, + _token: TokenId, + ) -> Option> { + let res: Vec = >::iter_prefix((collection,)) + .map(|(owner, _amount)| owner) + .take(10) + .collect(); + + if res.is_empty() { + None + } else { + Some(res) + } + } } diff --git a/pallets/fungible/src/stubs/UniqueFungible.raw b/pallets/fungible/src/stubs/UniqueFungible.raw index 627405d41f..6a25cedd15 100644 Binary files a/pallets/fungible/src/stubs/UniqueFungible.raw and b/pallets/fungible/src/stubs/UniqueFungible.raw differ diff --git a/pallets/fungible/src/stubs/UniqueFungible.sol b/pallets/fungible/src/stubs/UniqueFungible.sol index 4dda643292..91d55ee0e4 100644 --- a/pallets/fungible/src/stubs/UniqueFungible.sol +++ b/pallets/fungible/src/stubs/UniqueFungible.sol @@ -3,37 +3,317 @@ pragma solidity >=0.8.0 <0.9.0; -// Common stubs holder +/// @dev common stubs holder contract Dummy { uint8 dummy; string stub_error = "this contract is implemented in native"; } contract ERC165 is Dummy { - function supportsInterface(bytes4 interfaceID) - external - view - returns (bool) - { + function supportsInterface(bytes4 interfaceID) external view returns (bool) { require(false, stub_error); interfaceID; return true; } } -// Inline -contract ERC20Events { - event Transfer(address indexed from, address indexed to, uint256 value); - event Approval( - address indexed owner, - address indexed spender, - uint256 value - ); +/// @title A contract that allows you to work with collections. +/// @dev the ERC-165 identifier for this interface is 0x62e22290 +contract Collection is Dummy, ERC165 { + /// Set collection property. + /// + /// @param key Property key. + /// @param value Propery value. + /// @dev EVM selector for this function is: 0x2f073f66, + /// or in textual repr: setCollectionProperty(string,bytes) + function setCollectionProperty(string memory key, bytes memory value) public { + require(false, stub_error); + key; + value; + dummy = 0; + } + + /// Delete collection property. + /// + /// @param key Property key. + /// @dev EVM selector for this function is: 0x7b7debce, + /// or in textual repr: deleteCollectionProperty(string) + function deleteCollectionProperty(string memory key) public { + require(false, stub_error); + key; + dummy = 0; + } + + /// Get collection property. + /// + /// @dev Throws error if key not found. + /// + /// @param key Property key. + /// @return bytes The property corresponding to the key. + /// @dev EVM selector for this function is: 0xcf24fd6d, + /// or in textual repr: collectionProperty(string) + function collectionProperty(string memory key) public view returns (bytes memory) { + require(false, stub_error); + key; + dummy; + return hex""; + } + + /// Set the sponsor of the collection. + /// + /// @dev In order for sponsorship to work, it must be confirmed on behalf of the sponsor. + /// + /// @param sponsor Address of the sponsor from whose account funds will be debited for operations with the contract. + /// @dev EVM selector for this function is: 0x7623402e, + /// or in textual repr: setCollectionSponsor(address) + function setCollectionSponsor(address sponsor) public { + require(false, stub_error); + sponsor; + dummy = 0; + } + + /// Whether there is a pending sponsor. + /// @dev EVM selector for this function is: 0x058ac185, + /// or in textual repr: hasCollectionPendingSponsor() + function hasCollectionPendingSponsor() public view returns (bool) { + require(false, stub_error); + dummy; + return false; + } + + /// Collection sponsorship confirmation. 
+ /// + /// @dev After setting the sponsor for the collection, it must be confirmed with this function. + /// @dev EVM selector for this function is: 0x3c50e97a, + /// or in textual repr: confirmCollectionSponsorship() + function confirmCollectionSponsorship() public { + require(false, stub_error); + dummy = 0; + } + + /// Remove collection sponsor. + /// @dev EVM selector for this function is: 0x6e0326a3, + /// or in textual repr: removeCollectionSponsor() + function removeCollectionSponsor() public { + require(false, stub_error); + dummy = 0; + } + + /// Get current sponsor. + /// + /// @return Tuble with sponsor address and his substrate mirror. If there is no confirmed sponsor error "Contract has no sponsor" throw. + /// @dev EVM selector for this function is: 0x6ec0a9f1, + /// or in textual repr: collectionSponsor() + function collectionSponsor() public view returns (Tuple6 memory) { + require(false, stub_error); + dummy; + return Tuple6(0x0000000000000000000000000000000000000000, 0); + } + + /// Set limits for the collection. + /// @dev Throws error if limit not found. + /// @param limit Name of the limit. Valid names: + /// "accountTokenOwnershipLimit", + /// "sponsoredDataSize", + /// "sponsoredDataRateLimit", + /// "tokenLimit", + /// "sponsorTransferTimeout", + /// "sponsorApproveTimeout" + /// @param value Value of the limit. + /// @dev EVM selector for this function is: 0x6a3841db, + /// or in textual repr: setCollectionLimit(string,uint32) + function setCollectionLimit(string memory limit, uint32 value) public { + require(false, stub_error); + limit; + value; + dummy = 0; + } + + /// Set limits for the collection. + /// @dev Throws error if limit not found. + /// @param limit Name of the limit. Valid names: + /// "ownerCanTransfer", + /// "ownerCanDestroy", + /// "transfersEnabled" + /// @param value Value of the limit. + /// @dev EVM selector for this function is: 0x993b7fba, + /// or in textual repr: setCollectionLimit(string,bool) + function setCollectionLimit(string memory limit, bool value) public { + require(false, stub_error); + limit; + value; + dummy = 0; + } + + /// Get contract address. + /// @dev EVM selector for this function is: 0xf6b4dfb4, + /// or in textual repr: contractAddress() + function contractAddress() public view returns (address) { + require(false, stub_error); + dummy; + return 0x0000000000000000000000000000000000000000; + } + + /// Add collection admin. + /// @param newAdmin Address of the added administrator. + /// @dev EVM selector for this function is: 0x92e462c7, + /// or in textual repr: addCollectionAdmin(address) + function addCollectionAdmin(address newAdmin) public { + require(false, stub_error); + newAdmin; + dummy = 0; + } + + /// Remove collection admin. + /// + /// @param admin Address of the removed administrator. + /// @dev EVM selector for this function is: 0xfafd7b42, + /// or in textual repr: removeCollectionAdmin(address) + function removeCollectionAdmin(address admin) public { + require(false, stub_error); + admin; + dummy = 0; + } + + /// Toggle accessibility of collection nesting. + /// + /// @param enable If "true" degenerates to nesting: 'Owner' else to nesting: 'Disabled' + /// @dev EVM selector for this function is: 0x112d4586, + /// or in textual repr: setCollectionNesting(bool) + function setCollectionNesting(bool enable) public { + require(false, stub_error); + enable; + dummy = 0; + } + + /// Toggle accessibility of collection nesting. 
+ /// + /// @param enable If "true" degenerates to nesting: {OwnerRestricted: [1, 2, 3]} else to nesting: 'Disabled' + /// @param collections Addresses of collections that will be available for nesting. + /// @dev EVM selector for this function is: 0x64872396, + /// or in textual repr: setCollectionNesting(bool,address[]) + function setCollectionNesting(bool enable, address[] memory collections) public { + require(false, stub_error); + enable; + collections; + dummy = 0; + } + + /// Set the collection access method. + /// @param mode Access mode + /// 0 for Normal + /// 1 for AllowList + /// @dev EVM selector for this function is: 0x41835d4c, + /// or in textual repr: setCollectionAccess(uint8) + function setCollectionAccess(uint8 mode) public { + require(false, stub_error); + mode; + dummy = 0; + } + + /// Checks that user allowed to operate with collection. + /// + /// @param user User address to check. + /// @dev EVM selector for this function is: 0xd63a8e11, + /// or in textual repr: allowed(address) + function allowed(address user) public view returns (bool) { + require(false, stub_error); + user; + dummy; + return false; + } + + /// Add the user to the allowed list. + /// + /// @param user Address of a trusted user. + /// @dev EVM selector for this function is: 0x67844fe6, + /// or in textual repr: addToCollectionAllowList(address) + function addToCollectionAllowList(address user) public { + require(false, stub_error); + user; + dummy = 0; + } + + /// Remove the user from the allowed list. + /// + /// @param user Address of a removed user. + /// @dev EVM selector for this function is: 0x85c51acb, + /// or in textual repr: removeFromCollectionAllowList(address) + function removeFromCollectionAllowList(address user) public { + require(false, stub_error); + user; + dummy = 0; + } + + /// Switch permission for minting. + /// + /// @param mode Enable if "true". + /// @dev EVM selector for this function is: 0x00018e84, + /// or in textual repr: setCollectionMintMode(bool) + function setCollectionMintMode(bool mode) public { + require(false, stub_error); + mode; + dummy = 0; + } + + /// Check that account is the owner or admin of the collection + /// + /// @param user account to verify + /// @return "true" if account is the owner or admin + /// @dev EVM selector for this function is: 0x9811b0c7, + /// or in textual repr: isOwnerOrAdmin(address) + function isOwnerOrAdmin(address user) public view returns (bool) { + require(false, stub_error); + user; + dummy; + return false; + } + + /// Returns collection type + /// + /// @return `Fungible` or `NFT` or `ReFungible` + /// @dev EVM selector for this function is: 0xd34b55b8, + /// or in textual repr: uniqueCollectionType() + function uniqueCollectionType() public view returns (string memory) { + require(false, stub_error); + dummy; + return ""; + } + + /// Get collection owner. + /// + /// @return Tuble with sponsor address and his substrate mirror. + /// If address is canonical then substrate mirror is zero and vice versa. 
+ /// @dev EVM selector for this function is: 0xdf727d3b, + /// or in textual repr: collectionOwner() + function collectionOwner() public view returns (Tuple6 memory) { + require(false, stub_error); + dummy; + return Tuple6(0x0000000000000000000000000000000000000000, 0); + } + + /// Changes collection owner to another account + /// + /// @dev Owner can be changed only by current owner + /// @param newOwner new owner account + /// @dev EVM selector for this function is: 0x4f53e226, + /// or in textual repr: changeCollectionOwner(address) + function changeCollectionOwner(address newOwner) public { + require(false, stub_error); + newOwner; + dummy = 0; + } } -// Selector: 79cc6790 +/// @dev the ERC-165 identifier for this interface is 0x63034ac5 contract ERC20UniqueExtensions is Dummy, ERC165 { - // Selector: burnFrom(address,uint256) 79cc6790 + /// Burn tokens from account + /// @dev Function that burns an `amount` of the tokens of a given account, + /// deducting from the sender's allowance for said account. + /// @param from The account whose tokens will be burnt. + /// @param amount The amount that will be burnt. + /// @dev EVM selector for this function is: 0x79cc6790, + /// or in textual repr: burnFrom(address,uint256) function burnFrom(address from, uint256 amount) public returns (bool) { require(false, stub_error); from; @@ -41,39 +321,83 @@ contract ERC20UniqueExtensions is Dummy, ERC165 { dummy = 0; return false; } + + /// Mint tokens for multiple accounts. + /// @param amounts array of pairs of account address and amount + /// @dev EVM selector for this function is: 0x1acf2d55, + /// or in textual repr: mintBulk((address,uint256)[]) + function mintBulk(Tuple6[] memory amounts) public returns (bool) { + require(false, stub_error); + amounts; + dummy = 0; + return false; + } +} + +/// @dev anonymous struct +struct Tuple6 { + address field_0; + uint256 field_1; +} + +/// @dev the ERC-165 identifier for this interface is 0x40c10f19 +contract ERC20Mintable is Dummy, ERC165 { + /// Mint tokens for `to` account. 
+ /// @param to account that will receive minted tokens + /// @param amount amount of tokens to mint + /// @dev EVM selector for this function is: 0x40c10f19, + /// or in textual repr: mint(address,uint256) + function mint(address to, uint256 amount) public returns (bool) { + require(false, stub_error); + to; + amount; + dummy = 0; + return false; + } } -// Selector: 942e8b22 +/// @dev inlined interface +contract ERC20Events { + event Transfer(address indexed from, address indexed to, uint256 value); + event Approval(address indexed owner, address indexed spender, uint256 value); +} + +/// @dev the ERC-165 identifier for this interface is 0x942e8b22 contract ERC20 is Dummy, ERC165, ERC20Events { - // Selector: name() 06fdde03 + /// @dev EVM selector for this function is: 0x06fdde03, + /// or in textual repr: name() function name() public view returns (string memory) { require(false, stub_error); dummy; return ""; } - // Selector: symbol() 95d89b41 + /// @dev EVM selector for this function is: 0x95d89b41, + /// or in textual repr: symbol() function symbol() public view returns (string memory) { require(false, stub_error); dummy; return ""; } - // Selector: totalSupply() 18160ddd + /// @dev EVM selector for this function is: 0x18160ddd, + /// or in textual repr: totalSupply() function totalSupply() public view returns (uint256) { require(false, stub_error); dummy; return 0; } - // Selector: decimals() 313ce567 + /// @dev EVM selector for this function is: 0x313ce567, + /// or in textual repr: decimals() function decimals() public view returns (uint8) { require(false, stub_error); dummy; return 0; } - // Selector: balanceOf(address) 70a08231 + /// @dev EVM selector for this function is: 0x70a08231, + /// or in textual repr: balanceOf(address) function balanceOf(address owner) public view returns (uint256) { require(false, stub_error); owner; @@ -81,7 +405,8 @@ contract ERC20 is Dummy, ERC165, ERC20Events { return 0; } - // Selector: transfer(address,uint256) a9059cbb + /// @dev EVM selector for this function is: 0xa9059cbb, + /// or in textual repr: transfer(address,uint256) function transfer(address to, uint256 amount) public returns (bool) { require(false, stub_error); to; @@ -90,7 +415,8 @@ contract ERC20 is Dummy, ERC165, ERC20Events { return false; } - // Selector: transferFrom(address,address,uint256) 23b872dd + /// @dev EVM selector for this function is: 0x23b872dd, + /// or in textual repr: transferFrom(address,address,uint256) function transferFrom( address from, address to, @@ -104,7 +430,8 @@ contract ERC20 is Dummy, ERC165, ERC20Events { return false; } - // Selector: approve(address,uint256) 095ea7b3 + /// @dev EVM selector for this function is: 0x095ea7b3, + /// or in textual repr: approve(address,uint256) function approve(address spender, uint256 amount) public returns (bool) { require(false, stub_error); spender; @@ -113,12 +440,9 @@ contract ERC20 is Dummy, ERC165, ERC20Events { return false; } - // Selector: allowance(address,address) dd62ed3e - function allowance(address owner, address spender) - public - view - returns (uint256) - { + /// @dev EVM selector for this function is: 0xdd62ed3e, + /// or in textual repr: allowance(address,address) + function allowance(address owner, address spender) public view returns (uint256) { require(false, stub_error); owner; spender; @@ -127,80 +451,4 @@ contract ERC20 is Dummy, ERC165, ERC20Events { } } -// Selector: c894dc35 -contract Collection is Dummy, ERC165 { - // Selector: setCollectionProperty(string,bytes) 2f073f66 - function 
setCollectionProperty(string memory key, bytes memory value) - public - { - require(false, stub_error); - key; - value; - dummy = 0; - } - - // Selector: deleteCollectionProperty(string) 7b7debce - function deleteCollectionProperty(string memory key) public { - require(false, stub_error); - key; - dummy = 0; - } - - // Throws error if key not found - // - // Selector: collectionProperty(string) cf24fd6d - function collectionProperty(string memory key) - public - view - returns (bytes memory) - { - require(false, stub_error); - key; - dummy; - return hex""; - } - - // Selector: ethSetSponsor(address) 8f9af356 - function ethSetSponsor(address sponsor) public { - require(false, stub_error); - sponsor; - dummy = 0; - } - - // Selector: ethConfirmSponsorship() a8580d1a - function ethConfirmSponsorship() public { - require(false, stub_error); - dummy = 0; - } - - // Selector: setLimit(string,uint32) 68db30ca - function setLimit(string memory limit, uint32 value) public { - require(false, stub_error); - limit; - value; - dummy = 0; - } - - // Selector: setLimit(string,bool) ea67e4c2 - function setLimit(string memory limit, bool value) public { - require(false, stub_error); - limit; - value; - dummy = 0; - } - - // Selector: contractAddress() f6b4dfb4 - function contractAddress() public view returns (address) { - require(false, stub_error); - dummy; - return 0x0000000000000000000000000000000000000000; - } -} - -contract UniqueFungible is - Dummy, - ERC165, - ERC20, - ERC20UniqueExtensions, - Collection -{} +contract UniqueFungible is Dummy, ERC165, ERC20, ERC20Mintable, ERC20UniqueExtensions, Collection {} diff --git a/pallets/fungible/src/weights.rs b/pallets/fungible/src/weights.rs index c89e930a9e..68ee906092 100644 --- a/pallets/fungible/src/weights.rs +++ b/pallets/fungible/src/weights.rs @@ -3,7 +3,7 @@ //! Autogenerated weights for pallet_fungible //! //! THIS FILE WAS AUTO-GENERATED USING THE SUBSTRATE BENCHMARK CLI VERSION 4.0.0-dev -//! DATE: 2022-06-15, STEPS: `50`, REPEAT: 80, LOW RANGE: `[]`, HIGH RANGE: `[]` +//! DATE: 2022-08-15, STEPS: `50`, REPEAT: 80, LOW RANGE: `[]`, HIGH RANGE: `[]` //! 
EXECUTION: None, WASM-EXECUTION: Compiled, CHAIN: None, DB CACHE: 1024 // Executed Command: @@ -48,55 +48,55 @@ impl WeightInfo for SubstrateWeight { // Storage: Fungible TotalSupply (r:1 w:1) // Storage: Fungible Balance (r:1 w:1) fn create_item() -> Weight { - (17_828_000 as Weight) - .saturating_add(T::DbWeight::get().reads(2 as Weight)) - .saturating_add(T::DbWeight::get().writes(2 as Weight)) + Weight::from_ref_time(18_195_000) + .saturating_add(T::DbWeight::get().reads(2 as u64)) + .saturating_add(T::DbWeight::get().writes(2 as u64)) } // Storage: Fungible TotalSupply (r:1 w:1) // Storage: Fungible Balance (r:4 w:4) fn create_multiple_items_ex(b: u32, ) -> Weight { - (17_574_000 as Weight) + Weight::from_ref_time(19_218_000) // Standard Error: 3_000 - .saturating_add((4_288_000 as Weight).saturating_mul(b as Weight)) - .saturating_add(T::DbWeight::get().reads(1 as Weight)) - .saturating_add(T::DbWeight::get().reads((1 as Weight).saturating_mul(b as Weight))) - .saturating_add(T::DbWeight::get().writes(1 as Weight)) - .saturating_add(T::DbWeight::get().writes((1 as Weight).saturating_mul(b as Weight))) + .saturating_add(Weight::from_ref_time(4_516_000).saturating_mul(b as u64)) + .saturating_add(T::DbWeight::get().reads(1 as u64)) + .saturating_add(T::DbWeight::get().reads((1 as u64).saturating_mul(b as u64))) + .saturating_add(T::DbWeight::get().writes(1 as u64)) + .saturating_add(T::DbWeight::get().writes((1 as u64).saturating_mul(b as u64))) } // Storage: Fungible TotalSupply (r:1 w:1) // Storage: Fungible Balance (r:1 w:1) fn burn_item() -> Weight { - (18_417_000 as Weight) - .saturating_add(T::DbWeight::get().reads(2 as Weight)) - .saturating_add(T::DbWeight::get().writes(2 as Weight)) + Weight::from_ref_time(18_719_000) + .saturating_add(T::DbWeight::get().reads(2 as u64)) + .saturating_add(T::DbWeight::get().writes(2 as u64)) } // Storage: Fungible Balance (r:2 w:2) fn transfer() -> Weight { - (20_090_000 as Weight) - .saturating_add(T::DbWeight::get().reads(2 as Weight)) - .saturating_add(T::DbWeight::get().writes(2 as Weight)) + Weight::from_ref_time(20_563_000) + .saturating_add(T::DbWeight::get().reads(2 as u64)) + .saturating_add(T::DbWeight::get().writes(2 as u64)) } // Storage: Fungible Balance (r:1 w:0) // Storage: Fungible Allowance (r:0 w:1) fn approve() -> Weight { - (17_532_000 as Weight) - .saturating_add(T::DbWeight::get().reads(1 as Weight)) - .saturating_add(T::DbWeight::get().writes(1 as Weight)) + Weight::from_ref_time(17_583_000) + .saturating_add(T::DbWeight::get().reads(1 as u64)) + .saturating_add(T::DbWeight::get().writes(1 as u64)) } // Storage: Fungible Allowance (r:1 w:1) // Storage: Fungible Balance (r:2 w:2) fn transfer_from() -> Weight { - (29_869_000 as Weight) - .saturating_add(T::DbWeight::get().reads(3 as Weight)) - .saturating_add(T::DbWeight::get().writes(3 as Weight)) + Weight::from_ref_time(29_845_000) + .saturating_add(T::DbWeight::get().reads(3 as u64)) + .saturating_add(T::DbWeight::get().writes(3 as u64)) } // Storage: Fungible Allowance (r:1 w:1) // Storage: Fungible TotalSupply (r:1 w:1) // Storage: Fungible Balance (r:1 w:1) fn burn_from() -> Weight { - (27_835_000 as Weight) - .saturating_add(T::DbWeight::get().reads(3 as Weight)) - .saturating_add(T::DbWeight::get().writes(3 as Weight)) + Weight::from_ref_time(28_248_000) + .saturating_add(T::DbWeight::get().reads(3 as u64)) + .saturating_add(T::DbWeight::get().writes(3 as u64)) } } @@ -105,54 +105,54 @@ impl WeightInfo for () { // Storage: Fungible TotalSupply (r:1 w:1) // Storage: 
Fungible Balance (r:1 w:1) fn create_item() -> Weight { - (17_828_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(2 as Weight)) - .saturating_add(RocksDbWeight::get().writes(2 as Weight)) + Weight::from_ref_time(18_195_000) + .saturating_add(RocksDbWeight::get().reads(2 as u64)) + .saturating_add(RocksDbWeight::get().writes(2 as u64)) } // Storage: Fungible TotalSupply (r:1 w:1) // Storage: Fungible Balance (r:4 w:4) fn create_multiple_items_ex(b: u32, ) -> Weight { - (17_574_000 as Weight) + Weight::from_ref_time(19_218_000) // Standard Error: 3_000 - .saturating_add((4_288_000 as Weight).saturating_mul(b as Weight)) - .saturating_add(RocksDbWeight::get().reads(1 as Weight)) - .saturating_add(RocksDbWeight::get().reads((1 as Weight).saturating_mul(b as Weight))) - .saturating_add(RocksDbWeight::get().writes(1 as Weight)) - .saturating_add(RocksDbWeight::get().writes((1 as Weight).saturating_mul(b as Weight))) + .saturating_add(Weight::from_ref_time(4_516_000).saturating_mul(b as u64)) + .saturating_add(RocksDbWeight::get().reads(1 as u64)) + .saturating_add(RocksDbWeight::get().reads((1 as u64).saturating_mul(b as u64))) + .saturating_add(RocksDbWeight::get().writes(1 as u64)) + .saturating_add(RocksDbWeight::get().writes((1 as u64).saturating_mul(b as u64))) } // Storage: Fungible TotalSupply (r:1 w:1) // Storage: Fungible Balance (r:1 w:1) fn burn_item() -> Weight { - (18_417_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(2 as Weight)) - .saturating_add(RocksDbWeight::get().writes(2 as Weight)) + Weight::from_ref_time(18_719_000) + .saturating_add(RocksDbWeight::get().reads(2 as u64)) + .saturating_add(RocksDbWeight::get().writes(2 as u64)) } // Storage: Fungible Balance (r:2 w:2) fn transfer() -> Weight { - (20_090_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(2 as Weight)) - .saturating_add(RocksDbWeight::get().writes(2 as Weight)) + Weight::from_ref_time(20_563_000) + .saturating_add(RocksDbWeight::get().reads(2 as u64)) + .saturating_add(RocksDbWeight::get().writes(2 as u64)) } // Storage: Fungible Balance (r:1 w:0) // Storage: Fungible Allowance (r:0 w:1) fn approve() -> Weight { - (17_532_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(1 as Weight)) - .saturating_add(RocksDbWeight::get().writes(1 as Weight)) + Weight::from_ref_time(17_583_000) + .saturating_add(RocksDbWeight::get().reads(1 as u64)) + .saturating_add(RocksDbWeight::get().writes(1 as u64)) } // Storage: Fungible Allowance (r:1 w:1) // Storage: Fungible Balance (r:2 w:2) fn transfer_from() -> Weight { - (29_869_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(3 as Weight)) - .saturating_add(RocksDbWeight::get().writes(3 as Weight)) + Weight::from_ref_time(29_845_000) + .saturating_add(RocksDbWeight::get().reads(3 as u64)) + .saturating_add(RocksDbWeight::get().writes(3 as u64)) } // Storage: Fungible Allowance (r:1 w:1) // Storage: Fungible TotalSupply (r:1 w:1) // Storage: Fungible Balance (r:1 w:1) fn burn_from() -> Weight { - (27_835_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(3 as Weight)) - .saturating_add(RocksDbWeight::get().writes(3 as Weight)) + Weight::from_ref_time(28_248_000) + .saturating_add(RocksDbWeight::get().reads(3 as u64)) + .saturating_add(RocksDbWeight::get().writes(3 as u64)) } } diff --git a/pallets/inflation/CHANGELOG.md b/pallets/inflation/CHANGELOG.md new file mode 100644 index 0000000000..732ab15dc7 --- /dev/null +++ b/pallets/inflation/CHANGELOG.md @@ -0,0 +1,10 @@ + +## [v0.1.1] 2022-08-16 + +### Other changes 
+ +- build: Upgrade polkadot to v0.9.27 2c498572636f2b34d53b1c51b7283a761a7dc90a + +- build: Upgrade polkadot to v0.9.26 85515e54c4ca1b82a2630034e55dcc804c643bf8 + +- build: Upgrade polkadot to v0.9.25 cdfb9bdc7b205ff1b5134f034ef9973d769e5e6b \ No newline at end of file diff --git a/pallets/inflation/Cargo.toml b/pallets/inflation/Cargo.toml index dcb94ff2e2..b4d4e6dc56 100644 --- a/pallets/inflation/Cargo.toml +++ b/pallets/inflation/Cargo.toml @@ -9,7 +9,7 @@ homepage = 'https://unique.network' license = 'GPLv3' name = 'pallet-inflation' repository = 'https://github.com/UniqueNetwork/unique-chain' -version = '0.1.0' +version = "0.1.1" [package.metadata.docs.rs] targets = ['x86_64-unknown-linux-gnu'] @@ -29,6 +29,7 @@ std = [ 'sp-runtime/std', 'frame-benchmarking/std', ] +try-runtime = ["frame-support/try-runtime"] ################################################################################ # Substrate Dependencies @@ -43,37 +44,37 @@ version = '3.1.2' default-features = false optional = true git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.frame-support] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.frame-system] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.pallet-balances] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.pallet-timestamp] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.pallet-randomness-collective-flip] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-std] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.serde] default-features = false @@ -83,17 +84,17 @@ version = '1.0.130' [dependencies.sp-runtime] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-core] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-io] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies] scale-info = { version = "2.0.1", default-features = false, features = [ diff --git a/pallets/inflation/src/lib.rs b/pallets/inflation/src/lib.rs index d4ddfe7853..1e2f984a38 100644 --- a/pallets/inflation/src/lib.rs +++ b/pallets/inflation/src/lib.rs @@ -14,6 +14,20 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . +//! # Inflation +//! +//! The inflation pallet is designed to increase the number of tokens at certain intervals. +//! With each iteration, increases the `total_issuance` value for the native token. +//! Executing an `on_initialize` hook at the beginning of each block, causing inflation to begin. +//! +//! ## Interface +//! +//! ### Dispatchable Functions +//! +//! * `start_inflation` - This method sets the inflation start date. Can be only called once. +//! 
Inflation start block can be backdated and will catch up. The method will create Treasury +//! account if it does not exist and perform the first inflation deposit. + // #![recursion_limit = "1024"] #![cfg_attr(not(feature = "std"), no_std)] @@ -96,7 +110,7 @@ pub mod pallet { where ::BlockNumber: From, { - let mut consumed_weight = 0; + let mut consumed_weight = Weight::zero(); let mut add_weight = |reads, writes, weight| { consumed_weight += T::DbWeight::get().reads_writes(reads, writes); consumed_weight += weight; @@ -105,7 +119,7 @@ pub mod pallet { let block_interval: u32 = T::InflationBlockInterval::get().try_into().unwrap_or(0); let current_relay_block = T::BlockNumberProvider::current_block_number(); let next_inflation: T::BlockNumber = >::get(); - add_weight(1, 0, 5_000_000); + add_weight(1, 0, Weight::from_ref_time(5_000_000)); // Apply inflation every InflationBlockInterval blocks // If next_inflation == 0, this means inflation wasn't yet initialized @@ -114,10 +128,10 @@ pub mod pallet { // Do the "current_relay_block >= next_recalculation" check in the "current_relay_block >= next_inflation" // block because it saves InflationBlockInterval DB reads for NextRecalculationBlock. let next_recalculation: T::BlockNumber = >::get(); - add_weight(1, 0, 0); + add_weight(1, 0, Weight::zero()); if current_relay_block >= next_recalculation { Self::recalculate_inflation(next_recalculation); - add_weight(0, 4, 5_000_000); + add_weight(0, 4, Weight::from_ref_time(5_000_000)); } T::Currency::deposit_into_existing( @@ -129,7 +143,7 @@ pub mod pallet { // Update inflation block >::set(next_inflation + block_interval.into()); - add_weight(3, 3, 10_000_000); + add_weight(3, 3, Weight::from_ref_time(10_000_000)); } consumed_weight diff --git a/pallets/inflation/src/tests.rs b/pallets/inflation/src/tests.rs index dd63f4b164..5ce553c2ad 100644 --- a/pallets/inflation/src/tests.rs +++ b/pallets/inflation/src/tests.rs @@ -21,6 +21,7 @@ use crate as pallet_inflation; use frame_support::{ assert_ok, parameter_types, traits::{Currency, OnInitialize, Everything, ConstU32}, + weights::Weight, }; use frame_system::RawOrigin; use sp_core::H256; @@ -46,7 +47,7 @@ impl pallet_balances::Config for Test { type AccountStore = System; type Balance = u64; type DustRemoval = (); - type Event = (); + type RuntimeEvent = (); type ExistentialDeposit = ExistentialDeposit; type WeightInfo = (); type MaxLocks = MaxLocks; @@ -69,7 +70,7 @@ frame_support::construct_runtime!( parameter_types! 
{ pub const BlockHashCount: u64 = 250; pub BlockWeights: frame_system::limits::BlockWeights = - frame_system::limits::BlockWeights::simple_max(1024); + frame_system::limits::BlockWeights::simple_max(Weight::from_ref_time(1024)); pub const SS58Prefix: u8 = 42; } @@ -78,8 +79,8 @@ impl frame_system::Config for Test { type BlockWeights = (); type BlockLength = (); type DbWeight = (); - type Origin = Origin; - type Call = Call; + type RuntimeOrigin = RuntimeOrigin; + type RuntimeCall = RuntimeCall; type Index = u64; type BlockNumber = u64; type Hash = H256; @@ -87,7 +88,7 @@ impl frame_system::Config for Test { type AccountId = u64; type Lookup = IdentityLookup; type Header = Header; - type Event = (); + type RuntimeEvent = (); type BlockHashCount = BlockHashCount; type Version = (); type PalletInfo = PalletInfo; diff --git a/pallets/maintenance/Cargo.toml b/pallets/maintenance/Cargo.toml index 6b54f5a98d..3b19834ca3 100644 --- a/pallets/maintenance/Cargo.toml +++ b/pallets/maintenance/Cargo.toml @@ -12,10 +12,10 @@ readme = "README.md" [dependencies] codec = { package = "parity-scale-codec", version = "3.0.0", default-features = false, features = ["derive"] } scale-info = { version = "2.1.1", default-features = false, features = ["derive"] } -frame-support = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -frame-system = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -frame-benchmarking = { default-features = false, optional = true, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-std = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } +frame-support = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +frame-system = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +frame-benchmarking = { default-features = false, optional = true, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-std = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } [features] default = ["std"] diff --git a/pallets/maintenance/src/lib.rs b/pallets/maintenance/src/lib.rs index 0baaed9aa5..6d08840d64 100644 --- a/pallets/maintenance/src/lib.rs +++ b/pallets/maintenance/src/lib.rs @@ -31,7 +31,7 @@ pub mod pallet { #[pallet::config] pub trait Config: frame_system::Config { - type Event: From> + IsType<::Event>; + type RuntimeEvent: From> + IsType<::RuntimeEvent>; type WeightInfo: WeightInfo; } diff --git a/pallets/maintenance/src/weights.rs b/pallets/maintenance/src/weights.rs index daa20850f6..7eca8fe93c 100644 --- a/pallets/maintenance/src/weights.rs +++ b/pallets/maintenance/src/weights.rs @@ -42,13 +42,13 @@ pub struct SubstrateWeight(PhantomData); impl WeightInfo for SubstrateWeight { // Storage: Maintenance Enabled (r:0 w:1) fn enable() -> Weight { - (7_367_000 as Weight) - .saturating_add(T::DbWeight::get().writes(1 as Weight)) + Weight::from_ref_time(7_367_000) + .saturating_add(T::DbWeight::get().writes(1)) } // Storage: Maintenance Enabled (r:0 w:1) fn disable() -> Weight { - (7_273_000 as Weight) - .saturating_add(T::DbWeight::get().writes(1 as Weight)) + Weight::from_ref_time(7_273_000) + .saturating_add(T::DbWeight::get().writes(1)) } } @@ -56,12 +56,12 @@ impl WeightInfo for SubstrateWeight { impl 
WeightInfo for () { // Storage: Maintenance Enabled (r:0 w:1) fn enable() -> Weight { - (7_367_000 as Weight) - .saturating_add(RocksDbWeight::get().writes(1 as Weight)) + Weight::from_ref_time(7_367_000) + .saturating_add(RocksDbWeight::get().writes(1)) } // Storage: Maintenance Enabled (r:0 w:1) fn disable() -> Weight { - (7_273_000 as Weight) - .saturating_add(RocksDbWeight::get().writes(1 as Weight)) + Weight::from_ref_time(7_273_000) + .saturating_add(RocksDbWeight::get().writes(1)) } } diff --git a/pallets/nonfungible/CHANGELOG.md b/pallets/nonfungible/CHANGELOG.md new file mode 100644 index 0000000000..90cd277bff --- /dev/null +++ b/pallets/nonfungible/CHANGELOG.md @@ -0,0 +1,46 @@ +# Change Log + +All notable changes to this project will be documented in this file. + +## [v0.1.5] - 2022-08-24 + +### Change + - Add bound `AsRef<[u8; 32]>` to `T::CrossAccountId`. + + +## [v0.1.4] 2022-08-16 + +### Other changes + +- build: Upgrade polkadot to v0.9.27 2c498572636f2b34d53b1c51b7283a761a7dc90a + +- build: Upgrade polkadot to v0.9.26 85515e54c4ca1b82a2630034e55dcc804c643bf8 + +- refactor: Switch to new prefix removal methods 26734e9567589d75cdd99e404eabf11d5a97d975 + +New methods allows to call `remove_prefix` with limit multiple times +in the same block +However, we don't use prefix removal limits, so upgrade is +straightforward + +Upstream-Change: https://github.com/paritytech/substrate/pull/11490 + +- build: Upgrade polkadot to v0.9.25 cdfb9bdc7b205ff1b5134f034ef9973d769e5e6b + +## [0.1.2] - 2022-07-25 +### Changed +- New `token_uri` retrieval logic: + + If the collection has a `url` property and it is not empty, it is returned. + Else If the collection does not have a property with key `schemaName` or its value is not equal to `ERC721Metadata`, it return an error `tokenURI not set`. + + If the property `baseURI` is empty or absent, return "" (empty string) + otherwise, if property `suffix` present and is non-empty, return concatenation of baseURI and suffix + otherwise, return concatenation of `baseURI` and stringified token id (decimal stringifying, without paddings). + +## [0.1.1] - 2022-07-14 +### Added + +- Implementation of RPC method `token_owners`. + For reasons of compatibility with this pallet, returns only one owner if token exists. + This was an internal request to improve the web interface and support fractionalization event. 
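The `[0.1.2]` entry above describes the new `token_uri` fallback order in prose. Purely as an illustration of that decision order (plain Rust, with the property values passed in as `Option`s rather than read from the pallet's property storage), a sketch might look like this:

```rust
/// Sketch of the token_uri fallback described in the 0.1.2 entry above.
/// All inputs are plain Options here; the real pallet reads token and
/// collection properties from storage instead.
fn token_uri(
    url: Option<&str>,
    schema_name: Option<&str>,
    base_uri: Option<&str>,
    suffix: Option<&str>,
    token_id: u32,
) -> Result<String, &'static str> {
    // 1. A non-empty `url` property wins outright.
    if let Some(url) = url.filter(|u| !u.is_empty()) {
        return Ok(url.to_string());
    }
    // 2. Without the ERC721Metadata schema there is no URI to derive.
    if schema_name != Some("ERC721Metadata") {
        return Err("tokenURI not set");
    }
    // 3. An empty or absent baseURI yields an empty string.
    let base_uri = match base_uri {
        None | Some("") => return Ok(String::new()),
        Some(b) => b,
    };
    // 4. Otherwise append `suffix` if present and non-empty,
    //    else the decimal token id.
    Ok(match suffix.filter(|s| !s.is_empty()) {
        Some(suffix) => format!("{base_uri}{suffix}"),
        None => format!("{base_uri}{token_id}"),
    })
}
```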
diff --git a/pallets/nonfungible/Cargo.toml b/pallets/nonfungible/Cargo.toml index 4cbd5cc9f3..a392482e60 100644 --- a/pallets/nonfungible/Cargo.toml +++ b/pallets/nonfungible/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "pallet-nonfungible" -version = "0.1.0" +version = "0.1.5" license = "GPLv3" edition = "2021" @@ -11,19 +11,19 @@ package = 'parity-scale-codec' version = '3.1.2' [dependencies] -frame-support = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -frame-system = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-runtime = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-std = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-core = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -pallet-evm = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } +frame-support = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +frame-system = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-runtime = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-std = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-core = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +pallet-evm = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } pallet-common = { default-features = false, path = '../common' } pallet-structure = { default-features = false, path = '../structure' } up-data-structs = { default-features = false, path = '../../primitives/data-structs' } evm-coder = { default-features = false, path = '../../crates/evm-coder' } pallet-evm-coder-substrate = { default-features = false, path = '../evm-coder-substrate' } ethereum = { version = "0.12.0", default-features = false } -frame-benchmarking = { default-features = false, optional = true, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } +frame-benchmarking = { default-features = false, optional = true, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } scale-info = { version = "2.0.1", default-features = false, features = [ "derive", ] } @@ -43,7 +43,7 @@ std = [ "ethereum/std", "pallet-evm-coder-substrate/std", 'frame-benchmarking/std', - "pallet-evm/std" + "pallet-evm/std", ] runtime-benchmarks = [ 'frame-benchmarking', @@ -51,3 +51,4 @@ runtime-benchmarks = [ 'frame-system/runtime-benchmarks', 'up-data-structs/runtime-benchmarks', ] +try-runtime = ["frame-support/try-runtime"] diff --git a/pallets/nonfungible/src/benchmarking.rs b/pallets/nonfungible/src/benchmarking.rs index 5e8b51be2b..256276d88c 100644 --- a/pallets/nonfungible/src/benchmarking.rs +++ b/pallets/nonfungible/src/benchmarking.rs @@ -17,11 +17,14 @@ use super::*; use crate::{Pallet, Config, NonfungibleHandle}; -use sp_std::prelude::*; -use pallet_common::benchmarking::{create_collection_raw, property_key, property_value}; use frame_benchmarking::{benchmarks, account}; +use pallet_common::{ + bench_init, + 
benchmarking::{create_collection_raw, property_key, property_value}, + CommonCollectionOperations, +}; +use sp_std::prelude::*; use up_data_structs::{CollectionMode, MAX_ITEMS_PER_BATCH, MAX_PROPERTIES_PER_ITEM, budget::Unlimited}; -use pallet_common::bench_init; const SEED: u32 = 1; @@ -51,7 +54,9 @@ fn create_collection( create_collection_raw( owner, CollectionMode::NFT, - |owner, data| >::init_collection(owner, data, true), + |owner: T::CrossAccountId, data| { + >::init_collection(owner.clone(), owner, data, Default::default()) + }, NonfungibleHandle::cast, ) } @@ -183,7 +188,7 @@ benchmarks! { value: property_value(), }).collect::>(); let item = create_max_item(&collection, &owner, owner.clone())?; - }: {>::set_token_properties(&collection, &owner, item, props, false, &Unlimited)?} + }: {>::set_token_properties(&collection, &owner, item, props.into_iter(), false, &Unlimited)?} delete_token_properties { let b in 0..MAX_PROPERTIES_PER_ITEM; @@ -205,7 +210,16 @@ benchmarks! { value: property_value(), }).collect::>(); let item = create_max_item(&collection, &owner, owner.clone())?; - >::set_token_properties(&collection, &owner, item, props, false, &Unlimited)?; + >::set_token_properties(&collection, &owner, item, props.into_iter(), false, &Unlimited)?; let to_delete = (0..b).map(|k| property_key(k as usize)).collect::>(); - }: {>::delete_token_properties(&collection, &owner, item, to_delete, &Unlimited)?} + }: {>::delete_token_properties(&collection, &owner, item, to_delete.into_iter(), &Unlimited)?} + + token_owner { + bench_init!{ + owner: sub; collection: collection(owner); + owner: cross_from_sub; + }; + let item = create_max_item(&collection, &owner, owner.clone())?; + + }: {collection.token_owner(item)} } diff --git a/pallets/nonfungible/src/common.rs b/pallets/nonfungible/src/common.rs index c545903ed6..a91828ae9c 100644 --- a/pallets/nonfungible/src/common.rs +++ b/pallets/nonfungible/src/common.rs @@ -26,7 +26,7 @@ use pallet_common::{ weights::WeightInfo as _, }; use sp_runtime::DispatchError; -use sp_std::vec::Vec; +use sp_std::{vec::Vec, vec}; use crate::{ AccountBalance, Allowance, Config, CreateItemData, Error, NonfungibleHandle, Owned, Pallet, @@ -44,16 +44,16 @@ impl CommonWeightInfo for CommonWeights { CreateItemExData::NFT(t) => { >::create_multiple_items_ex(t.len() as u32) + t.iter() - .map(|t| { + .filter_map(|t| { if t.properties.len() > 0 { - Self::set_token_properties(t.properties.len() as u32) + Some(Self::set_token_properties(t.properties.len() as u32)) } else { - 0 + None } }) - .sum::() + .fold(Weight::zero(), |a, b| a.saturating_add(b)) } - _ => 0, + _ => Weight::zero(), } } @@ -67,7 +67,7 @@ impl CommonWeightInfo for CommonWeights { } _ => None, }) - .sum::() + .fold(Weight::zero(), |a, b| a.saturating_add(b)) } fn burn_item() -> Weight { @@ -118,6 +118,10 @@ impl CommonWeightInfo for CommonWeights { >::burn_recursively_breadth_plus_self_plus_self_per_each_raw(amount) .saturating_sub(Self::burn_recursively_self_raw().saturating_mul(amount as u64 + 1)) } + + fn token_owner() -> Weight { + >::token_owner() + } } fn map_create_data( @@ -133,6 +137,8 @@ fn map_create_data( } } +/// Implementation of `CommonCollectionOperations` for `NonfungibleHandle`. It wraps Nonfungible Pallete +/// methods and adds weight info. impl CommonCollectionOperations for NonfungibleHandle { fn create_item( &self, @@ -420,6 +426,11 @@ impl CommonCollectionOperations for NonfungibleHandle { >::get((self.id, token)).map(|t| t.owner) } + /// Returns token owners. 
+ fn token_owners(&self, token: TokenId) -> Vec { + self.token_owner(token).map_or_else(|| vec![], |t| vec![t]) + } + fn token_property(&self, token_id: TokenId, key: &PropertyKey) -> Option { >::token_properties((self.id, token_id)) .get(key) @@ -487,4 +498,12 @@ impl CommonCollectionOperations for NonfungibleHandle { fn refungible_extensions(&self) -> Option<&dyn RefungibleExtensions> { None } + + fn total_pieces(&self, token: TokenId) -> Option { + if >::contains_key((self.id, token)) { + Some(1) + } else { + None + } + } } diff --git a/pallets/nonfungible/src/erc.rs b/pallets/nonfungible/src/erc.rs index 5b32660a63..677efc6c0d 100644 --- a/pallets/nonfungible/src/erc.rs +++ b/pallets/nonfungible/src/erc.rs @@ -14,6 +14,11 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . +//! # Nonfungible Pallet EVM API +//! +//! Provides ERC-721 standart support implementation and EVM API for unique extensions for Nonfungible Pallet. +//! Method implementations are mostly doing parameter conversion and calling Nonfungible Pallet methods. + extern crate alloc; use core::{ char::{REPLACEMENT_CHARACTER, decode_utf16}, @@ -28,7 +33,7 @@ use up_data_structs::{ use pallet_evm_coder_substrate::dispatch_to_evm; use sp_std::vec::Vec; use pallet_common::{ - erc::{CommonEvmHandler, PrecompileResult, CollectionCall, token_uri_key}, + erc::{CommonEvmHandler, PrecompileResult, CollectionCall, static_property::key}, CollectionHandle, CollectionPropertyPermissions, }; use pallet_evm::{account::CrossAccountId, PrecompileHandle}; @@ -40,8 +45,15 @@ use crate::{ SelfWeightOf, weights::WeightInfo, TokenProperties, }; -#[solidity_interface(name = "TokenProperties")] +/// @title A contract that allows to set and delete token properties and change token property permissions. +#[solidity_interface(name = TokenProperties)] impl NonfungibleHandle { + /// @notice Set permissions for token property. + /// @dev Throws error if `msg.sender` is not admin or owner of the collection. + /// @param key Property key. + /// @param isMutable Permission to mutate property. + /// @param collectionAdmin Permission to mutate property by collection admin if property is mutable. + /// @param tokenOwner Permission to mutate property by token owner if property is mutable. fn set_token_property_permission( &mut self, caller: caller, @@ -68,6 +80,11 @@ impl NonfungibleHandle { .map_err(dispatch_to_evm::) } + /// @notice Set token property value. + /// @dev Throws error if `msg.sender` has no permission to edit the property. + /// @param tokenId ID of the token. + /// @param key Property key. + /// @param value Property value. fn set_property( &mut self, caller: caller, @@ -96,6 +113,10 @@ impl NonfungibleHandle { .map_err(dispatch_to_evm::) } + /// @notice Delete token property value. + /// @dev Throws error if `msg.sender` has no permission to edit the property. + /// @param tokenId ID of the token. + /// @param key Property key. fn delete_property(&mut self, token_id: uint256, caller: caller, key: string) -> Result<()> { let caller = T::CrossAccountId::from_eth(caller); let token_id: u32 = token_id.try_into().map_err(|_| "token id overflow")?; @@ -111,7 +132,11 @@ impl NonfungibleHandle { .map_err(dispatch_to_evm::) } - /// Throws error if key not found + /// @notice Get token property value. + /// @dev Throws error if key not found + /// @param tokenId ID of the token. + /// @param key Property key. 
+ /// @return Property value bytes fn property(&self, token_id: uint256, key: string) -> Result { let token_id: u32 = token_id.try_into().map_err(|_| "token id overflow")?; let key = >::from(key) @@ -127,6 +152,11 @@ impl NonfungibleHandle { #[derive(ToLog)] pub enum ERC721Events { + /// @dev This emits when ownership of any NFT changes by any mechanism. + /// This event emits when NFTs are created (`from` == 0) and destroyed + /// (`to` == 0). Exception: during contract creation, any number of NFTs + /// may be created and assigned without emitting Transfer. At the time of + /// any transfer, the approved address for that NFT (if any) is reset to none. Transfer { #[indexed] from: address, @@ -135,6 +165,10 @@ pub enum ERC721Events { #[indexed] token_id: uint256, }, + /// @dev This emits when the approved address for an NFT is changed or + /// reaffirmed. The zero address indicates there is no approved address. + /// When a Transfer event emits, this also indicates that the approved + /// address for that NFT (if any) is reset to none. Approval { #[indexed] owner: address, @@ -143,6 +177,8 @@ pub enum ERC721Events { #[indexed] token_id: uint256, }, + /// @dev This emits when an operator is enabled or disabled for an owner. + /// The operator can manage all NFTs of the owner. #[allow(dead_code)] ApprovalForAll { #[indexed] @@ -154,70 +190,124 @@ pub enum ERC721Events { } #[derive(ToLog)] -pub enum ERC721MintableEvents { +pub enum ERC721UniqueMintableEvents { #[allow(dead_code)] MintingFinished {}, } -#[solidity_interface(name = "ERC721Metadata")] +/// @title ERC-721 Non-Fungible Token Standard, optional metadata extension +/// @dev See https://eips.ethereum.org/EIPS/eip-721 +#[solidity_interface(name = ERC721Metadata, expect_selector = 0x5b5e139f)] impl NonfungibleHandle { - fn name(&self) -> Result { - Ok(decode_utf16(self.name.iter().copied()) - .map(|r| r.unwrap_or(REPLACEMENT_CHARACTER)) - .collect::()) + /// @notice A descriptive name for a collection of NFTs in this contract + /// @dev real implementation of this function lies in `ERC721UniqueExtensions` + #[solidity(hide, rename_selector = "name")] + fn name_proxy(&self) -> Result { + self.name() } - fn symbol(&self) -> Result { - Ok(string::from_utf8_lossy(&self.token_prefix).into()) + /// @notice An abbreviated name for NFTs in this contract + /// @dev real implementation of this function lies in `ERC721UniqueExtensions` + #[solidity(hide, rename_selector = "symbol")] + fn symbol_proxy(&self) -> Result { + self.symbol() } - /// Returns token's const_metadata + /// @notice A distinct Uniform Resource Identifier (URI) for a given asset. + /// + /// @dev If the token has a `url` property and it is not empty, it is returned. + /// Else If the collection does not have a property with key `schemaName` or its value is not equal to `ERC721Metadata`, it return an error `tokenURI not set`. + /// If the collection property `baseURI` is empty or absent, return "" (empty string) + /// otherwise, if token property `suffix` present and is non-empty, return concatenation of baseURI and suffix + /// otherwise, return concatenation of `baseURI` and stringified token id (decimal stringifying, without paddings). 
+ /// + /// @return token's const_metadata #[solidity(rename_selector = "tokenURI")] fn token_uri(&self, token_id: uint256) -> Result { - let key = token_uri_key(); - if !has_token_permission::(self.id, &key) { - return Err("No tokenURI permission".into()); - } + let token_id_u32: u32 = token_id.try_into().map_err(|_| "token id overflow")?; - self.consume_store_reads(1)?; - let token_id: u32 = token_id.try_into().map_err(|_| "token id overflow")?; - - let properties = >::try_get((self.id, token_id)) - .map_err(|_| Error::Revert("Token properties not found".into()))?; - if let Some(property) = properties.get(&key) { - return Ok(string::from_utf8_lossy(property).into()); - } + match get_token_property(self, token_id_u32, &key::url()).as_deref() { + Err(_) | Ok("") => (), + Ok(url) => { + return Ok(url.into()); + } + }; + + let base_uri = + pallet_common::Pallet::::get_collection_property(self.id, &key::base_uri()) + .map(BoundedVec::into_inner) + .map(string::from_utf8) + .transpose() + .map_err(|e| { + Error::Revert(alloc::format!( + "Can not convert value \"baseURI\" to string with error \"{}\"", + e + )) + })?; + + let base_uri = match base_uri.as_deref() { + None | Some("") => { + return Ok("".into()); + } + Some(base_uri) => base_uri.into(), + }; - Err("Property tokenURI not found".into()) + Ok( + match get_token_property(self, token_id_u32, &key::suffix()).as_deref() { + Err(_) | Ok("") => base_uri, + Ok(suffix) => base_uri + suffix, + }, + ) } } -#[solidity_interface(name = "ERC721Enumerable")] +/// @title ERC-721 Non-Fungible Token Standard, optional enumeration extension +/// @dev See https://eips.ethereum.org/EIPS/eip-721 +#[solidity_interface(name = ERC721Enumerable, expect_selector = 0x780e9d63)] impl NonfungibleHandle { + /// @notice Enumerate valid NFTs + /// @param index A counter less than `totalSupply()` + /// @return The token identifier for the `index`th NFT, + /// (sort order not specified) fn token_by_index(&self, index: uint256) -> Result { Ok(index) } - /// Not implemented + /// @dev Not implemented fn token_of_owner_by_index(&self, _owner: address, _index: uint256) -> Result { // TODO: Not implemetable Err("not implemented".into()) } + /// @notice Count NFTs tracked by this contract + /// @return A count of valid NFTs tracked by this contract, where each one of + /// them has an assigned and queryable owner not equal to the zero address fn total_supply(&self) -> Result { self.consume_store_reads(1)?; Ok(>::total_supply(self).into()) } } -#[solidity_interface(name = "ERC721", events(ERC721Events))] +/// @title ERC-721 Non-Fungible Token Standard +/// @dev See https://github.com/ethereum/EIPs/blob/master/EIPS/eip-721.md +#[solidity_interface(name = ERC721, events(ERC721Events), expect_selector = 0x80ac58cd)] impl NonfungibleHandle { + /// @notice Count all NFTs assigned to an owner + /// @dev NFTs assigned to the zero address are considered invalid, and this + /// function throws for queries about the zero address. + /// @param owner An address for whom to query the balance + /// @return The number of NFTs owned by `owner`, possibly zero fn balance_of(&self, owner: address) -> Result { self.consume_store_reads(1)?; let owner = T::CrossAccountId::from_eth(owner); let balance = >::get((self.id, owner)); Ok(balance.into()) } + /// @notice Find the owner of an NFT + /// @dev NFTs assigned to zero address are considered invalid, and queries + /// about them do throw. 
+ /// @param tokenId The identifier for an NFT + /// @return The address of the owner of the NFT fn owner_of(&self, token_id: uint256) -> Result
{ self.consume_store_reads(1)?; let token: TokenId = token_id.try_into()?; @@ -226,30 +316,38 @@ impl NonfungibleHandle { .owner .as_eth()) } - /// Not implemented + /// @dev Not implemented + #[solidity(rename_selector = "safeTransferFrom")] fn safe_transfer_from_with_data( &mut self, _from: address, _to: address, _token_id: uint256, _data: bytes, - _value: value, ) -> Result { // TODO: Not implemetable Err("not implemented".into()) } - /// Not implemented + /// @dev Not implemented fn safe_transfer_from( &mut self, _from: address, _to: address, _token_id: uint256, - _value: value, ) -> Result { // TODO: Not implemetable Err("not implemented".into()) } + /// @notice Transfer ownership of an NFT -- THE CALLER IS RESPONSIBLE + /// TO CONFIRM THAT `to` IS CAPABLE OF RECEIVING NFTS OR ELSE + /// THEY MAY BE PERMANENTLY LOST + /// @dev Throws unless `msg.sender` is the current owner or an authorized + /// operator for this NFT. Throws if `from` is not the current owner. Throws + /// if `to` is the zero address. Throws if `tokenId` is not a valid NFT. + /// @param from The current owner of the NFT + /// @param to The new owner + /// @param tokenId The NFT to transfer #[weight(>::transfer_from())] fn transfer_from( &mut self, @@ -257,7 +355,6 @@ impl NonfungibleHandle { from: address, to: address, token_id: uint256, - _value: value, ) -> Result { let caller = T::CrossAccountId::from_eth(caller); let from = T::CrossAccountId::from_eth(from); @@ -272,14 +369,14 @@ impl NonfungibleHandle { Ok(()) } + /// @notice Set or reaffirm the approved address for an NFT + /// @dev The zero address indicates there is no approved address. + /// @dev Throws unless `msg.sender` is the current NFT owner, or an authorized + /// operator of the current owner. + /// @param approved The new approved NFT controller + /// @param tokenId The NFT to approve #[weight(>::approve())] - fn approve( - &mut self, - caller: caller, - approved: address, - token_id: uint256, - _value: value, - ) -> Result { + fn approve(&mut self, caller: caller, approved: address, token_id: uint256) -> Result { let caller = T::CrossAccountId::from_eth(caller); let approved = T::CrossAccountId::from_eth(approved); let token = token_id.try_into()?; @@ -289,7 +386,7 @@ impl NonfungibleHandle { Ok(()) } - /// Not implemented + /// @dev Not implemented fn set_approval_for_all( &mut self, _caller: caller, @@ -300,21 +397,26 @@ impl NonfungibleHandle { Err("not implemented".into()) } - /// Not implemented + /// @dev Not implemented fn get_approved(&self, _token_id: uint256) -> Result
{ // TODO: Not implemetable Err("not implemented".into()) } - /// Not implemented + /// @dev Not implemented fn is_approved_for_all(&self, _owner: address, _operator: address) -> Result
{ // TODO: Not implemetable Err("not implemented".into()) } } -#[solidity_interface(name = "ERC721Burnable")] +/// @title ERC721 Token that can be irreversibly burned (destroyed). +#[solidity_interface(name = ERC721Burnable)] impl NonfungibleHandle { + /// @notice Burns a specific ERC721 token. + /// @dev Throws unless `msg.sender` is the current NFT owner, or an authorized + /// operator of the current owner. + /// @param tokenId The NFT to approve #[weight(>::burn_item())] fn burn(&mut self, caller: caller, token_id: uint256) -> Result { let caller = T::CrossAccountId::from_eth(caller); @@ -325,16 +427,34 @@ impl NonfungibleHandle { } } -#[solidity_interface(name = "ERC721Mintable", events(ERC721MintableEvents))] +/// @title ERC721 minting logic. +#[solidity_interface(name = ERC721UniqueMintable, events(ERC721UniqueMintableEvents))] impl NonfungibleHandle { fn minting_finished(&self) -> Result { Ok(false) } - /// `token_id` should be obtained with `next_token_id` method, - /// unlike standard, you can't specify it manually + /// @notice Function to mint token. + /// @param to The new owner + /// @return uint256 The id of the newly minted token #[weight(>::create_item())] - fn mint(&mut self, caller: caller, to: address, token_id: uint256) -> Result { + fn mint(&mut self, caller: caller, to: address) -> Result { + let token_id: uint256 = >::get(self.id) + .checked_add(1) + .ok_or("item id overflow")? + .into(); + self.mint_check_id(caller, to, token_id)?; + Ok(token_id) + } + + /// @notice Function to mint token. + /// @dev `tokenId` should be obtained with `nextTokenId` method, + /// unlike standard, you can't specify it manually + /// @param to The new owner + /// @param tokenId ID of the minted NFT + #[solidity(hide, rename_selector = "mint")] + #[weight(>::create_item())] + fn mint_check_id(&mut self, caller: caller, to: address, token_id: uint256) -> Result { let caller = T::CrossAccountId::from_eth(caller); let to = T::CrossAccountId::from_eth(to); let token_id: u32 = token_id.try_into()?; @@ -364,18 +484,42 @@ impl NonfungibleHandle { Ok(true) } - /// `token_id` should be obtained with `next_token_id` method, - /// unlike standard, you can't specify it manually + /// @notice Function to mint token with the given tokenUri. + /// @param to The new owner + /// @param tokenUri Token URI that would be stored in the NFT properties + /// @return uint256 The id of the newly minted token #[solidity(rename_selector = "mintWithTokenURI")] #[weight(>::create_item())] fn mint_with_token_uri( + &mut self, + caller: caller, + to: address, + token_uri: string, + ) -> Result { + let token_id: uint256 = >::get(self.id) + .checked_add(1) + .ok_or("item id overflow")? + .into(); + self.mint_with_token_uri_check_id(caller, to, token_id, token_uri)?; + Ok(token_id) + } + + /// @notice Function to mint token with the given tokenUri. 
+ /// @dev `tokenId` should be obtained with `nextTokenId` method, + /// unlike standard, you can't specify it manually + /// @param to The new owner + /// @param tokenId ID of the minted NFT + /// @param tokenUri Token URI that would be stored in the NFT properties + #[solidity(hide, rename_selector = "mintWithTokenURI")] + #[weight(>::create_item())] + fn mint_with_token_uri_check_id( &mut self, caller: caller, to: address, token_id: uint256, token_uri: string, ) -> Result { - let key = token_uri_key(); + let key = key::url(); let permission = get_token_permission::(self.id, &key)?; if !permission.collection_admin { return Err("Operation is not allowed".into()); @@ -420,12 +564,27 @@ impl NonfungibleHandle { Ok(true) } - /// Not implemented + /// @dev Not implemented fn finish_minting(&mut self, _caller: caller) -> Result { Err("not implementable".into()) } } +fn get_token_property( + collection: &CollectionHandle, + token_id: u32, + key: &up_data_structs::PropertyKey, +) -> Result { + collection.consume_store_reads(1)?; + let properties = >::try_get((collection.id, token_id)) + .map_err(|_| Error::Revert("Token properties not found".into()))?; + if let Some(property) = properties.get(key) { + return Ok(string::from_utf8_lossy(property).into()); + } + + Err("Property tokenURI not found".into()) +} + fn get_token_permission( collection_id: CollectionId, key: &PropertyKey, @@ -434,31 +593,36 @@ fn get_token_permission( .map_err(|_| Error::Revert("No permissions for collection".into()))?; let a = token_property_permissions .get(key) - .map(|p| p.clone()) - .ok_or_else(|| Error::Revert("No permission".into()))?; + .map(Clone::clone) + .ok_or_else(|| { + let key = string::from_utf8(key.clone().into_inner()).unwrap_or_default(); + Error::Revert(alloc::format!("No permission for key {}", key)) + })?; Ok(a) } -fn has_token_permission(collection_id: CollectionId, key: &PropertyKey) -> bool { - if let Ok(token_property_permissions) = - CollectionPropertyPermissions::::try_get(collection_id) - { - return token_property_permissions.contains_key(key); +/// @title Unique extensions for ERC721. +#[solidity_interface(name = ERC721UniqueExtensions)] +impl NonfungibleHandle { + /// @notice A descriptive name for a collection of NFTs in this contract + fn name(&self) -> Result { + Ok(decode_utf16(self.name.iter().copied()) + .map(|r| r.unwrap_or(REPLACEMENT_CHARACTER)) + .collect::()) } - false -} + /// @notice An abbreviated name for NFTs in this contract + fn symbol(&self) -> Result { + Ok(string::from_utf8_lossy(&self.token_prefix).into()) + } -#[solidity_interface(name = "ERC721UniqueExtensions")] -impl NonfungibleHandle { + /// @notice Transfer ownership of an NFT + /// @dev Throws unless `msg.sender` is the current owner. Throws if `to` + /// is the zero address. Throws if `tokenId` is not a valid NFT. + /// @param to The new owner + /// @param tokenId The NFT to transfer #[weight(>::transfer())] - fn transfer( - &mut self, - caller: caller, - to: address, - token_id: uint256, - _value: value, - ) -> Result { + fn transfer(&mut self, caller: caller, to: address, token_id: uint256) -> Result { let caller = T::CrossAccountId::from_eth(caller); let to = T::CrossAccountId::from_eth(to); let token = token_id.try_into()?; @@ -470,14 +634,14 @@ impl NonfungibleHandle { Ok(()) } + /// @notice Burns a specific ERC721 token. + /// @dev Throws unless `msg.sender` is the current owner or an authorized + /// operator for this NFT. Throws if `from` is not the current owner. 
Throws + /// if `to` is the zero address. Throws if `tokenId` is not a valid NFT. + /// @param from The current owner of the NFT + /// @param tokenId The NFT to transfer #[weight(>::burn_from())] - fn burn_from( - &mut self, - caller: caller, - from: address, - token_id: uint256, - _value: value, - ) -> Result { + fn burn_from(&mut self, caller: caller, from: address, token_id: uint256) -> Result { let caller = T::CrossAccountId::from_eth(caller); let from = T::CrossAccountId::from_eth(from); let token = token_id.try_into()?; @@ -490,6 +654,7 @@ impl NonfungibleHandle { Ok(()) } + /// @notice Returns next free NFT ID. fn next_token_id(&self) -> Result { self.consume_store_reads(1)?; Ok(>::get(self.id) @@ -498,6 +663,12 @@ impl NonfungibleHandle { .into()) } + /// @notice Function to mint multiple tokens. + /// @dev `tokenIds` should be an array of consecutive numbers and first number + /// should be obtained with `nextTokenId` method + /// @param to The new owner + /// @param tokenIds IDs of the minted NFTs + #[solidity(hide)] #[weight(>::create_multiple_items(token_ids.len() as u32))] fn mint_bulk(&mut self, caller: caller, to: address, token_ids: Vec) -> Result { let caller = T::CrossAccountId::from_eth(caller); @@ -529,7 +700,12 @@ impl NonfungibleHandle { Ok(true) } - #[solidity(rename_selector = "mintBulkWithTokenURI")] + /// @notice Function to mint multiple tokens with the given tokenUris. + /// @dev `tokenIds` is array of pairs of token ID and token URI. Token IDs should be consecutive + /// numbers and first number should be obtained with `nextTokenId` method + /// @param to The new owner + /// @param tokens array of pairs of token ID and token URI for minted tokens + #[solidity(hide, rename_selector = "mintBulkWithTokenURI")] #[weight(>::create_multiple_items(tokens.len() as u32))] fn mint_bulk_with_token_uri( &mut self, @@ -537,7 +713,7 @@ impl NonfungibleHandle { to: address, tokens: Vec<(uint256, string)>, ) -> Result { - let key = token_uri_key(); + let key = key::url(); let caller = T::CrossAccountId::from_eth(caller); let to = T::CrossAccountId::from_eth(to); let mut expected_index = >::get(self.id) @@ -579,19 +755,19 @@ impl NonfungibleHandle { } #[solidity_interface( - name = "UniqueNFT", + name = UniqueNFT, is( ERC721, - ERC721Metadata, ERC721Enumerable, ERC721UniqueExtensions, - ERC721Mintable, + ERC721UniqueMintable, ERC721Burnable, - via("CollectionHandle", common_mut, Collection), + ERC721Metadata(if(this.flags.erc721metadata)), + Collection(via(common_mut returns CollectionHandle)), TokenProperties, ) )] -impl NonfungibleHandle where T::AccountId: From<[u8; 32]> {} +impl NonfungibleHandle where T::AccountId: From<[u8; 32]> + AsRef<[u8; 32]> {} // Not a tests, but code generators generate_stubgen!(gen_impl, UniqueNFTCall<()>, true); @@ -599,7 +775,7 @@ generate_stubgen!(gen_iface, UniqueNFTCall<()>, false); impl CommonEvmHandler for NonfungibleHandle where - T::AccountId: From<[u8; 32]>, + T::AccountId: From<[u8; 32]> + AsRef<[u8; 32]>, { const CODE: &'static [u8] = include_bytes!("./stubs/UniqueNFT.raw"); diff --git a/pallets/nonfungible/src/lib.rs b/pallets/nonfungible/src/lib.rs index 273ec03418..4da741500f 100644 --- a/pallets/nonfungible/src/lib.rs +++ b/pallets/nonfungible/src/lib.rs @@ -14,6 +14,80 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . +//! # Nonfungible Pallet +//! +//! The Nonfungible pallet provides functionality for handling nonfungible collections and tokens. +//! +//! 
- [`Config`] +//! - [`NonfungibleHandle`] +//! - [`Pallet`] +//! - [`CommonWeights`](common::CommonWeights) +//! +//! ## Overview +//! +//! The Nonfungible pallet provides functions for: +//! +//! - NFT collection creation and removal +//! - Minting and burning of NFT tokens +//! - Retrieving account balances +//! - Transferring NFT tokens +//! - Setting and checking allowance for NFT tokens +//! - Setting properties and permissions for NFT collections and tokens +//! - Nesting and unnesting tokens +//! +//! ### Terminology +//! +//! - **NFT token:** Non-fungible token. +//! +//! - **NFT Collection:** A collection of NFT tokens. All NFT tokens are part of a collection. +//! Each collection can define its own properties, properties for its tokens, and a set of permissions. +//! +//! - **Balance:** Number of NFT tokens owned by an account. +//! +//! - **Allowance:** NFT tokens owned by one account that another account is allowed to operate on. +//! +//! - **Burning:** The process of “deleting” a token from a collection and from +//! the account balance of the owner. +//! +//! - **Nesting:** Setting up a parent-child relationship between tokens. Nested tokens inherit their +//! owner from their parent. There can be multiple levels of nesting. A token cannot be nested in +//! its own child token, i.e. the parent-child relationship graph must not have cycles. +//! +//! - **Properties:** Key-value pairs. Token properties are attached to a token. Collection properties are +//! attached to a collection. A set of permissions can be defined for each property. +//! +//! ### Implementations +//! +//! The Nonfungible pallet provides implementations for the following traits. If these traits provide +//! the functionality that you need, then you can avoid coupling with the Nonfungible pallet. +//! +//! - [`CommonWeightInfo`](pallet_common::CommonWeightInfo): Functions for retrieval of transaction weight +//! - [`CommonCollectionOperations`](pallet_common::CommonCollectionOperations): Functions for dealing +//! with collections +//! +//! ## Interface +//! +//! ### Dispatchable Functions +//! +//! - `init_collection` - Create an NFT collection. The collection can be configured to allow or deny access for +//! some accounts. +//! - `destroy_collection` - Destroy an existing NFT collection. There should be no tokens in the collection. +//! - `burn` - Burn an NFT token owned by an account. +//! - `transfer` - Transfer an NFT token. Transfers should be enabled for the collection. +//! Nests the NFT token if it is sent to another token. +//! - `create_item` - Mint an NFT token in a collection. The sender should have permission to mint tokens. +//! - `set_allowance` - Set allowance for another account. +//! - `set_token_property` - Set a token property value. +//! - `delete_token_property` - Remove a property from the token. +//! - `set_collection_properties` - Set collection properties. +//! - `delete_collection_properties` - Remove properties from the collection. +//! - `set_property_permission` - Set a collection property permission. +//! - `set_token_property_permissions` - Set token property permissions. +//! +//! ## Assumptions +//! +//! * To perform operations on tokens, the sender should be in the collection's allow list if the collection access mode is `AllowList`.
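The nesting terminology above implies a budget-limited walk over the parent-child graph (ownership is inherited from the parent, cycles are forbidden, and `nesting_budget`-style limits bound the traversal depth). Purely as a conceptual sketch with hypothetical stand-in types, not the actual pallet-structure API, such a walk could look like:

```rust
// Hypothetical sketch of a depth-budgeted walk to the top-level (root) token.
// `Token` and `parent_of` are illustrative stand-ins, not pallet types.
#[derive(Clone, Copy, PartialEq, Eq)]
struct Token(u32);

fn root_token(
    mut token: Token,
    parent_of: impl Fn(Token) -> Option<Token>,
    mut depth_budget: u32,
) -> Result<Token, &'static str> {
    // Follow parent links until a token with no parent is reached,
    // spending one unit of budget per level so the search stays bounded.
    while let Some(parent) = parent_of(token) {
        if depth_budget == 0 {
            return Err("nesting budget exceeded");
        }
        depth_budget -= 1;
        token = parent;
    }
    Ok(token)
}
```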
+ #![cfg_attr(not(feature = "std"), no_std)] use erc::ERC721Events; @@ -23,13 +97,13 @@ use frame_support::{ storage::with_transaction, pallet_prelude::DispatchResultWithPostInfo, pallet_prelude::Weight, - weights::{PostDispatchInfo, Pays}, + dispatch::{PostDispatchInfo, Pays}, }; use up_data_structs::{ - AccessMode, CollectionId, CustomDataLimit, TokenId, CreateCollectionData, CreateNftExData, - mapping::TokenAddressMapping, budget::Budget, Property, PropertyPermission, PropertyKey, - PropertyValue, PropertyKeyPermission, Properties, PropertyScope, TrySetProperty, TokenChild, - AuxPropertyValue, + AccessMode, CollectionId, CollectionFlags, CustomDataLimit, TokenId, CreateCollectionData, + CreateNftExData, mapping::TokenAddressMapping, budget::Budget, Property, PropertyPermission, + PropertyKey, PropertyValue, PropertyKeyPermission, Properties, PropertyScope, TrySetProperty, + TokenChild, AuxPropertyValue, }; use pallet_evm::{account::CrossAccountId, Pallet as PalletEvm}; use pallet_common::{ @@ -40,7 +114,7 @@ use pallet_structure::{Pallet as PalletStructure, Error as StructureError}; use pallet_evm_coder_substrate::{SubstrateRecorder, WithRecorder}; use sp_core::H160; use sp_runtime::{ArithmeticError, DispatchError, DispatchResult, TransactionOutcome}; -use sp_std::{vec::Vec, vec, collections::btree_map::BTreeMap, collections::btree_set::BTreeSet}; +use sp_std::{vec::Vec, vec, collections::btree_map::BTreeMap}; use core::ops::Deref; use codec::{Encode, Decode, MaxEncodedLen}; use scale_info::TypeInfo; @@ -56,6 +130,8 @@ pub mod weights; pub type CreateItemData = CreateNftExData<::CrossAccountId>; pub(crate) type SelfWeightOf = ::WeightInfo; +/// Token data, stored independently from other data used to describe it +/// for the convenience of database access. Notably contains the owner account address. #[struct_versioning::versioned(version = 2, upper)] #[derive(Encode, Decode, TypeInfo, MaxEncodedLen)] pub struct ItemData { @@ -102,13 +178,17 @@ pub mod pallet { #[pallet::generate_store(pub(super) trait Store)] pub struct Pallet(_); + /// Total amount of minted tokens in a collection. #[pallet::storage] pub type TokensMinted = StorageMap; + + /// Amount of burnt tokens in a collection. #[pallet::storage] pub type TokensBurnt = StorageMap; + /// Token data, used to partially describe a token. #[pallet::storage] pub type TokenData = StorageNMap< Key = (Key, Key), @@ -116,6 +196,7 @@ pub mod pallet { QueryKind = OptionQuery, >; + /// Map of key-value pairs, describing the metadata of a token. #[pallet::storage] #[pallet::getter(fn token_properties)] pub type TokenProperties = StorageNMap< @@ -125,6 +206,15 @@ pub mod pallet { OnEmpty = up_data_structs::TokenProperties, >; + /// Custom data of a token that is serialized to bytes, + /// primarily reserved for on-chain operations, + /// normally obscured from the external users. + /// + /// Auxiliary properties are slightly different from + /// usual [`TokenProperties`] due to an unlimited number + /// and separately stored and written-to key-value pairs. + /// + /// Currently used to store RMRK data. #[pallet::storage] #[pallet::getter(fn token_aux_property)] pub type TokenAuxProperties = StorageNMap< @@ -138,7 +228,7 @@ pub mod pallet { QueryKind = OptionQuery, >; - /// Used to enumerate tokens owned by account + /// Used to enumerate tokens owned by account. 
#[pallet::storage] pub type Owned = StorageNMap< Key = ( @@ -150,7 +240,7 @@ pub mod pallet { QueryKind = ValueQuery, >; - /// Used to enumerate token's children + /// Used to enumerate token's children. #[pallet::storage] #[pallet::getter(fn token_children)] pub type TokenChildren = StorageNMap< @@ -163,6 +253,7 @@ pub mod pallet { QueryKind = ValueQuery, >; + /// Amount of tokens owned by an account in a collection. #[pallet::storage] pub type AccountBalance = StorageNMap< Key = ( @@ -173,6 +264,7 @@ pub mod pallet { QueryKind = ValueQuery, >; + /// Allowance set by a token owner for another user to perform one of certain transactions on a token. #[pallet::storage] pub type Allowance = StorageNMap< Key = (Key, Key), @@ -180,12 +272,13 @@ pub mod pallet { QueryKind = OptionQuery, >; + /// Upgrade from the old schema to properties. #[pallet::hooks] impl Hooks> for Pallet { fn on_runtime_upgrade() -> Weight { StorageVersion::new(1).put::>(); - 0 + Weight::zero() } } } @@ -202,6 +295,7 @@ impl NonfungibleHandle { &mut self.0 } } + impl WithRecorder for NonfungibleHandle { fn recorder(&self) -> &SubstrateRecorder { self.0.recorder() @@ -219,13 +313,21 @@ impl Deref for NonfungibleHandle { } impl Pallet { + /// Get number of NFT tokens in collection. pub fn total_supply(collection: &NonfungibleHandle) -> u32 { >::get(collection.id) - >::get(collection.id) } + + /// Check that NFT token exists. + /// + /// - `token`: Token ID. pub fn token_exists(collection: &NonfungibleHandle, token: TokenId) -> bool { >::contains_key((collection.id, token)) } + /// Set the token property with the scope. + /// + /// - `property`: Contains key-value pair. pub fn set_scoped_token_property( collection_id: CollectionId, token_id: TokenId, @@ -240,6 +342,7 @@ impl Pallet { Ok(()) } + /// Batch operation to set multiple properties with the same scope. pub fn set_scoped_token_properties( collection_id: CollectionId, token_id: TokenId, @@ -254,6 +357,9 @@ impl Pallet { Ok(()) } + /// Add or edit auxiliary data for the property. + /// + /// - `f`: function that adds or edits auxiliary data. pub fn try_mutate_token_aux_property( collection_id: CollectionId, token_id: TokenId, @@ -264,6 +370,7 @@ impl Pallet { >::try_mutate((collection_id, token_id, scope, key), f) } + /// Remove auxiliary data for the property. pub fn remove_token_aux_property( collection_id: CollectionId, token_id: TokenId, @@ -273,6 +380,9 @@ impl Pallet { >::remove((collection_id, token_id, scope, key)); } + /// Get all auxiliary data in a given scope. + /// + /// Returns iterator over Property Key - Data pairs. pub fn iterate_token_aux_properties( collection_id: CollectionId, token_id: TokenId, @@ -281,6 +391,7 @@ impl Pallet { >::iter_prefix((collection_id, token_id, scope)) } + /// Get ID of the last minted token pub fn current_token_id(collection_id: CollectionId) -> TokenId { TokenId(>::get(collection_id)) } @@ -288,13 +399,24 @@ impl Pallet { // unchecked calls skips any permission checks impl Pallet { + /// Create NFT collection + /// + /// `init_collection` will take non-refundable deposit for collection creation. + /// + /// - `data`: Contains settings for collection limits and permissions. 
pub fn init_collection( owner: T::CrossAccountId, + payer: T::CrossAccountId, data: CreateCollectionData, - is_external: bool, + flags: CollectionFlags, ) -> Result { - >::init_collection(owner, data, is_external) + >::init_collection(owner, payer, data, flags) } + + /// Destroy an NFT collection + /// + /// `destroy_collection` will throw an error if the collection contains any tokens. + /// Only the owner can destroy the collection. pub fn destroy_collection( collection: NonfungibleHandle, sender: &T::CrossAccountId, @@ -309,16 +431,25 @@ impl Pallet { PalletCommon::destroy_collection(collection.0, sender)?; - >::remove_prefix((id,), None); - >::remove_prefix((id,), None); - >::remove_prefix((id,), None); + let _ = >::clear_prefix((id,), u32::MAX, None); + let _ = >::clear_prefix((id,), u32::MAX, None); + let _ = >::clear_prefix((id,), u32::MAX, None); >::remove(id); >::remove(id); - >::remove_prefix((id,), None); - >::remove_prefix((id,), None); + let _ = >::clear_prefix((id,), u32::MAX, None); + let _ = >::clear_prefix((id,), u32::MAX, None); Ok(()) } + /// Burn an NFT token + /// + /// `burn` removes `token` from the `collection`, from its owner and from the parent token + /// if the token is nested. + /// Only the owner can `burn` the token. The `token` shouldn't have any nested tokens. + /// Also removes all corresponding properties and auxiliary properties. + /// + /// - `token`: Token that should be burned + /// - `collection`: Collection that contains the token pub fn burn( collection: &NonfungibleHandle, sender: &T::CrossAccountId, @@ -358,7 +489,7 @@ impl Pallet { >::insert(collection.id, burnt); >::remove((collection.id, token)); >::remove((collection.id, token)); - >::remove_prefix((collection.id, token), None); + let _ = >::clear_prefix((collection.id, token), u32::MAX, None); let old_spender = >::take((collection.id, token)); if let Some(old_spender) = old_spender { @@ -388,6 +519,12 @@ impl Pallet { Ok(()) } + /// Same as [`burn`] but burns all the tokens that are nested in the token first + /// + /// - `self_budget`: Limit for searching children in depth. + /// - `breadth_budget`: Limit on the breadth of searching children. + /// + /// [`burn`]: struct.Pallet.html#method.burn #[transactional] pub fn burn_recursively( collection: &NonfungibleHandle, @@ -401,7 +538,7 @@ impl Pallet { let current_token_account = T::CrossTokenAddressMapping::token_to_address(collection.id, token); - let mut weight = 0 as Weight; + let mut weight = Weight::zero(); // This method is transactional, if user in fact doesn't have permissions to remove token - // tokens removed here will be restored after rejected transaction @@ -427,6 +564,14 @@ impl Pallet { }) } + /// Batch operation to add, edit or remove properties for the token + /// + /// All affected properties should have mutable permission and the sender should have + /// permission to edit those properties. + /// + /// - `nesting_budget`: Limit for searching parents in depth to check ownership. + /// - `is_token_create`: Indicates that the method is called during token initialization. + /// Allows bypassing the ownership check.
#[transactional] fn modify_token_properties( collection: &NonfungibleHandle, @@ -520,6 +665,11 @@ impl Pallet { Ok(()) } + /// Batch operation to add or edit properties for the token + /// + /// Same as [`modify_token_properties`] but doesn't allow removing properties + /// + /// [`modify_token_properties`]: struct.Pallet.html#method.modify_token_properties pub fn set_token_properties( collection: &NonfungibleHandle, sender: &T::CrossAccountId, @@ -538,6 +688,11 @@ impl Pallet { ) } + /// Add or edit a single property for the token + /// + /// Calls [`set_token_properties`] internally + /// + /// [`set_token_properties`]: struct.Pallet.html#method.set_token_properties pub fn set_token_property( collection: &NonfungibleHandle, sender: &T::CrossAccountId, @@ -557,6 +712,11 @@ impl Pallet { ) } + /// Batch operation to remove properties from the token + /// + /// Same as [`modify_token_properties`] but doesn't allow adding or editing properties + /// + /// [`modify_token_properties`]: struct.Pallet.html#method.modify_token_properties pub fn delete_token_properties( collection: &NonfungibleHandle, sender: &T::CrossAccountId, @@ -576,6 +736,11 @@ impl Pallet { ) } + /// Remove a single property from the token + /// + /// Calls [`delete_token_properties`] internally + /// + /// [`delete_token_properties`]: struct.Pallet.html#method.delete_token_properties pub fn delete_token_property( collection: &NonfungibleHandle, sender: &T::CrossAccountId, @@ -592,6 +757,7 @@ impl Pallet { ) } + /// Add or edit properties for the collection pub fn set_collection_properties( collection: &NonfungibleHandle, sender: &T::CrossAccountId, @@ -600,6 +766,7 @@ impl Pallet { >::set_collection_properties(collection, sender, properties) } + /// Remove properties from the collection pub fn delete_collection_properties( collection: &CollectionHandle, sender: &T::CrossAccountId, @@ -608,6 +775,9 @@ impl Pallet { >::delete_collection_properties(collection, sender, property_keys) } + /// Set property permissions for the token. + /// + /// The sender should be the owner or admin of the token's collection. pub fn set_token_property_permissions( collection: &CollectionHandle, sender: &T::CrossAccountId, @@ -616,6 +786,26 @@ impl Pallet { >::set_token_property_permissions(collection, sender, property_permissions) } + /// Set property permissions for the token with scope. + /// + /// The sender should be the owner or admin of the token's collection. + pub fn set_scoped_token_property_permissions( + collection: &CollectionHandle, + sender: &T::CrossAccountId, + scope: PropertyScope, + property_permissions: Vec, + ) -> DispatchResult { + >::set_scoped_token_property_permissions( + collection, + sender, + scope, + property_permissions, + ) + } + + /// Set property permissions for the collection. + /// + /// The sender should be the owner or admin of the collection. pub fn set_property_permission( collection: &CollectionHandle, sender: &T::CrossAccountId, @@ -624,6 +814,15 @@ impl Pallet { >::set_property_permission(collection, sender, permission) } + /// Transfer NFT token from one account to another. + /// + /// The `from` account stops being the owner and the `to` account becomes the owner of the token. + /// If `to` is a token, then `to` becomes the owner of the token and the token becomes nested. + /// Unnests the token from the previous parent if it was nested before. + /// Removes the allowance for the token if there was any. + /// Throws if transfers aren't allowed for the collection or if the receiver has reached the token ownership limit.
+ /// + /// - `nesting_budget`: Limit for token nesting depth pub fn transfer( collection: &NonfungibleHandle, from: &T::CrossAccountId, @@ -715,6 +914,16 @@ impl Pallet { Ok(()) } + /// Batch operation to mint multiple NFT tokens. + /// + /// The sender should be the owner/admin of the collection or the collection should be configured + /// to allow public minting. + /// Throws if the amount of tokens has reached its limit for the collection or if the caller has reached + /// the token ownership limit. + /// + /// - `data`: Contains a list of token properties and users who will become the owners of the + /// corresponding tokens. + /// - `nesting_budget`: Limit for token nesting depth pub fn create_multiple_items( collection: &NonfungibleHandle, sender: &T::CrossAccountId, @@ -899,6 +1108,9 @@ impl Pallet { } } + /// Set allowance for the spender to `transfer` or `burn` the sender's token. + /// + /// - `token`: Token the spender is allowed to `transfer` or `burn`. pub fn set_allowance( collection: &NonfungibleHandle, sender: &T::CrossAccountId, @@ -931,6 +1143,7 @@ impl Pallet { Ok(()) } + /// Checks allowance for the spender to use the token. fn check_allowed( collection: &NonfungibleHandle, spender: &T::CrossAccountId, @@ -973,6 +1186,12 @@ impl Pallet { Ok(()) } + /// Transfer NFT token from one account to another. + /// + /// Same as [`transfer`], but the spender doesn't need to be the owner of the token. + /// The owner should set allowance for the spender to transfer the token. + /// + /// [`transfer`]: struct.Pallet.html#method.transfer pub fn transfer_from( collection: &NonfungibleHandle, spender: &T::CrossAccountId, @@ -989,6 +1208,12 @@ impl Pallet { Self::transfer(collection, from, to, token, nesting_budget) } + /// Burn NFT token for the `from` account. + /// + /// Same as [`burn`], but the spender doesn't need to be the owner of the token. The owner should + /// set allowance for the spender to burn the token. + /// + /// [`burn`]: struct.Pallet.html#method.burn pub fn burn_from( collection: &NonfungibleHandle, spender: &T::CrossAccountId, @@ -1003,6 +1228,8 @@ impl Pallet { Self::burn(collection, from, token) } + /// Check that the `from` token can be nested in the `under` token. + /// pub fn check_nesting( handle: &NonfungibleHandle, sender: T::CrossAccountId, @@ -1072,7 +1299,11 @@ impl Pallet { .collect() } - /// Delegated to `create_multiple_items` + /// Mint a single NFT token.
+ /// + /// Delegated to [`create_multiple_items`] + /// + /// [`create_multiple_items`]: struct.Pallet.html#method.create_multiple_items pub fn create_item( collection: &NonfungibleHandle, sender: &T::CrossAccountId, diff --git a/pallets/nonfungible/src/stubs/UniqueNFT.raw b/pallets/nonfungible/src/stubs/UniqueNFT.raw index b2b4ee8cf0..7447cb23a3 100644 Binary files a/pallets/nonfungible/src/stubs/UniqueNFT.raw and b/pallets/nonfungible/src/stubs/UniqueNFT.raw differ diff --git a/pallets/nonfungible/src/stubs/UniqueNFT.sol b/pallets/nonfungible/src/stubs/UniqueNFT.sol index 0e8a051dc8..29c312b420 100644 --- a/pallets/nonfungible/src/stubs/UniqueNFT.sol +++ b/pallets/nonfungible/src/stubs/UniqueNFT.sol @@ -3,57 +3,31 @@ pragma solidity >=0.8.0 <0.9.0; -// Anonymous struct -struct Tuple0 { - uint256 field_0; - string field_1; -} - -// Common stubs holder +/// @dev common stubs holder contract Dummy { uint8 dummy; string stub_error = "this contract is implemented in native"; } contract ERC165 is Dummy { - function supportsInterface(bytes4 interfaceID) - external - view - returns (bool) - { + function supportsInterface(bytes4 interfaceID) external view returns (bool) { require(false, stub_error); interfaceID; return true; } } -// Inline -contract ERC721Events { - event Transfer( - address indexed from, - address indexed to, - uint256 indexed tokenId - ); - event Approval( - address indexed owner, - address indexed approved, - uint256 indexed tokenId - ); - event ApprovalForAll( - address indexed owner, - address indexed operator, - bool approved - ); -} - -// Inline -contract ERC721MintableEvents { - event MintingFinished(); -} - -// Selector: 41369377 +/// @title A contract that allows to set and delete token properties and change token property permissions. +/// @dev the ERC-165 identifier for this interface is 0x41369377 contract TokenProperties is Dummy, ERC165 { - // Selector: setTokenPropertyPermission(string,bool,bool,bool) 222d97fa + /// @notice Set permissions for token property. + /// @dev Throws error if `msg.sender` is not admin or owner of the collection. + /// @param key Property key. + /// @param isMutable Permission to mutate property. + /// @param collectionAdmin Permission to mutate property by collection admin if property is mutable. + /// @param tokenOwner Permission to mutate property by token owner if property is mutable. + /// @dev EVM selector for this function is: 0x222d97fa, + /// or in textual repr: setTokenPropertyPermission(string,bool,bool,bool) function setTokenPropertyPermission( string memory key, bool isMutable, @@ -68,7 +42,13 @@ contract TokenProperties is Dummy, ERC165 { dummy = 0; } - // Selector: setProperty(uint256,string,bytes) 1752d67b + /// @notice Set token property value. + /// @dev Throws error if `msg.sender` has no permission to edit the property. + /// @param tokenId ID of the token. + /// @param key Property key. + /// @param value Property value. + /// @dev EVM selector for this function is: 0x1752d67b, + /// or in textual repr: setProperty(uint256,string,bytes) function setProperty( uint256 tokenId, string memory key, @@ -81,7 +61,12 @@ contract TokenProperties is Dummy, ERC165 { dummy = 0; } - // Selector: deleteProperty(uint256,string) 066111d1 + /// @notice Delete token property value. + /// @dev Throws error if `msg.sender` has no permission to edit the property. + /// @param tokenId ID of the token. + /// @param key Property key. 
+ /// @dev EVM selector for this function is: 0x066111d1, + /// or in textual repr: deleteProperty(uint256,string) function deleteProperty(uint256 tokenId, string memory key) public { require(false, stub_error); tokenId; @@ -89,14 +74,14 @@ contract TokenProperties is Dummy, ERC165 { dummy = 0; } - // Throws error if key not found - // - // Selector: property(uint256,string) 7228c327 - function property(uint256 tokenId, string memory key) - public - view - returns (bytes memory) - { + /// @notice Get token property value. + /// @dev Throws an error if the key is not found. + /// @param tokenId ID of the token. + /// @param key Property key. + /// @return Property value bytes + /// @dev EVM selector for this function is: 0x7228c327, + /// or in textual repr: property(uint256,string) + function property(uint256 tokenId, string memory key) public view returns (bytes memory) { require(false, stub_error); tokenId; key; @@ -105,421 +90,717 @@ } } -// Selector: 42966c68 -contract ERC721Burnable is Dummy, ERC165 { - // Selector: burn(uint256) 42966c68 - function burn(uint256 tokenId) public { +/// @title A contract that allows you to work with collections. +/// @dev the ERC-165 identifier for this interface is 0x62e22290 +contract Collection is Dummy, ERC165 { + /// Set collection property. + /// + /// @param key Property key. + /// @param value Property value. + /// @dev EVM selector for this function is: 0x2f073f66, + /// or in textual repr: setCollectionProperty(string,bytes) + function setCollectionProperty(string memory key, bytes memory value) public { require(false, stub_error); - tokenId; + key; + value; dummy = 0; } -} -// Selector: 58800161 -contract ERC721 is Dummy, ERC165, ERC721Events { - // Selector: balanceOf(address) 70a08231 - function balanceOf(address owner) public view returns (uint256) { + /// Delete collection property. + /// + /// @param key Property key. + /// @dev EVM selector for this function is: 0x7b7debce, + /// or in textual repr: deleteCollectionProperty(string) + function deleteCollectionProperty(string memory key) public { require(false, stub_error); - owner; - dummy; - return 0; + key; + dummy = 0; } - // Selector: ownerOf(uint256) 6352211e - function ownerOf(uint256 tokenId) public view returns (address) { + /// Get collection property. + /// + /// @dev Throws an error if the key is not found. + /// + /// @param key Property key. + /// @return bytes The property corresponding to the key. + /// @dev EVM selector for this function is: 0xcf24fd6d, + /// or in textual repr: collectionProperty(string) + function collectionProperty(string memory key) public view returns (bytes memory) { require(false, stub_error); - tokenId; + key; dummy; - return 0x0000000000000000000000000000000000000000; + return hex""; } - // Not implemented - // - // Selector: safeTransferFromWithData(address,address,uint256,bytes) 60a11672 - function safeTransferFromWithData( - address from, - address to, - uint256 tokenId, - bytes memory data - ) public { + /// Set the sponsor of the collection. + /// + /// @dev In order for sponsorship to work, it must be confirmed on behalf of the sponsor. + /// + /// @param sponsor Address of the sponsor from whose account funds will be debited for operations with the contract.
+ /// @dev EVM selector for this function is: 0x7623402e, + /// or in textual repr: setCollectionSponsor(address) + function setCollectionSponsor(address sponsor) public { require(false, stub_error); - from; - to; - tokenId; - data; + sponsor; dummy = 0; } - // Not implemented - // - // Selector: safeTransferFrom(address,address,uint256) 42842e0e - function safeTransferFrom( - address from, - address to, - uint256 tokenId - ) public { + /// Whether there is a pending sponsor. + /// @dev EVM selector for this function is: 0x058ac185, + /// or in textual repr: hasCollectionPendingSponsor() + function hasCollectionPendingSponsor() public view returns (bool) { + require(false, stub_error); + dummy; + return false; + } + + /// Collection sponsorship confirmation. + /// + /// @dev After setting the sponsor for the collection, it must be confirmed with this function. + /// @dev EVM selector for this function is: 0x3c50e97a, + /// or in textual repr: confirmCollectionSponsorship() + function confirmCollectionSponsorship() public { require(false, stub_error); - from; - to; - tokenId; dummy = 0; } - // Selector: transferFrom(address,address,uint256) 23b872dd - function transferFrom( - address from, - address to, - uint256 tokenId - ) public { + /// Remove collection sponsor. + /// @dev EVM selector for this function is: 0x6e0326a3, + /// or in textual repr: removeCollectionSponsor() + function removeCollectionSponsor() public { require(false, stub_error); - from; - to; - tokenId; dummy = 0; } - // Selector: approve(address,uint256) 095ea7b3 - function approve(address approved, uint256 tokenId) public { + /// Get current sponsor. + /// + /// @return Tuple with the sponsor address and its substrate mirror. If there is no confirmed sponsor, the error "Contract has no sponsor" is thrown. + /// @dev EVM selector for this function is: 0x6ec0a9f1, + /// or in textual repr: collectionSponsor() + function collectionSponsor() public view returns (Tuple17 memory) { require(false, stub_error); - approved; - tokenId; + dummy; + return Tuple17(0x0000000000000000000000000000000000000000, 0); + } + + /// Set limits for the collection. + /// @dev Throws an error if the limit is not found. + /// @param limit Name of the limit. Valid names: + /// "accountTokenOwnershipLimit", + /// "sponsoredDataSize", + /// "sponsoredDataRateLimit", + /// "tokenLimit", + /// "sponsorTransferTimeout", + /// "sponsorApproveTimeout" + /// @param value Value of the limit. + /// @dev EVM selector for this function is: 0x6a3841db, + /// or in textual repr: setCollectionLimit(string,uint32) + function setCollectionLimit(string memory limit, uint32 value) public { + require(false, stub_error); + limit; + value; dummy = 0; } - // Not implemented - // - // Selector: setApprovalForAll(address,bool) a22cb465 - function setApprovalForAll(address operator, bool approved) public { + /// Set limits for the collection. + /// @dev Throws an error if the limit is not found. + /// @param limit Name of the limit. Valid names: + /// "ownerCanTransfer", + /// "ownerCanDestroy", + /// "transfersEnabled" + /// @param value Value of the limit. + /// @dev EVM selector for this function is: 0x993b7fba, + /// or in textual repr: setCollectionLimit(string,bool) + function setCollectionLimit(string memory limit, bool value) public { require(false, stub_error); - operator; - approved; + limit; + value; dummy = 0; } - // Not implemented - // - // Selector: getApproved(uint256) 081812fc - function getApproved(uint256 tokenId) public view returns (address) { + /// Get contract address.
+ /// @dev EVM selector for this function is: 0xf6b4dfb4, + /// or in textual repr: contractAddress() + function contractAddress() public view returns (address) { require(false, stub_error); - tokenId; dummy; return 0x0000000000000000000000000000000000000000; } - // Not implemented - // - // Selector: isApprovedForAll(address,address) e985e9c5 - function isApprovedForAll(address owner, address operator) - public - view - returns (address) - { + /// Add collection admin. + /// @param newAdmin Address of the added administrator. + /// @dev EVM selector for this function is: 0x92e462c7, + /// or in textual repr: addCollectionAdmin(address) + function addCollectionAdmin(address newAdmin) public { require(false, stub_error); - owner; - operator; - dummy; - return 0x0000000000000000000000000000000000000000; + newAdmin; + dummy = 0; } -} -// Selector: 5b5e139f -contract ERC721Metadata is Dummy, ERC165 { - // Selector: name() 06fdde03 - function name() public view returns (string memory) { + /// Remove collection admin. + /// + /// @param admin Address of the removed administrator. + /// @dev EVM selector for this function is: 0xfafd7b42, + /// or in textual repr: removeCollectionAdmin(address) + function removeCollectionAdmin(address admin) public { require(false, stub_error); - dummy; - return ""; + admin; + dummy = 0; } - // Selector: symbol() 95d89b41 - function symbol() public view returns (string memory) { + /// Toggle accessibility of collection nesting. + /// + /// @param enable If "true", sets the nesting permission to 'Owner', otherwise to 'Disabled'. + /// @dev EVM selector for this function is: 0x112d4586, + /// or in textual repr: setCollectionNesting(bool) + function setCollectionNesting(bool enable) public { require(false, stub_error); - dummy; - return ""; + enable; + dummy = 0; } - // Returns token's const_metadata - // - // Selector: tokenURI(uint256) c87b56dd - function tokenURI(uint256 tokenId) public view returns (string memory) { + /// Toggle accessibility of collection nesting. + /// + /// @param enable If "true", sets the nesting permission to {OwnerRestricted: [1, 2, 3]}, otherwise to 'Disabled'. + /// @param collections Addresses of collections that will be available for nesting. + /// @dev EVM selector for this function is: 0x64872396, + /// or in textual repr: setCollectionNesting(bool,address[]) + function setCollectionNesting(bool enable, address[] memory collections) public { require(false, stub_error); - tokenId; - dummy; - return ""; + enable; + collections; + dummy = 0; } -} -// Selector: 68ccfe89 -contract ERC721Mintable is Dummy, ERC165, ERC721MintableEvents { - // Selector: mintingFinished() 05d2035b - function mintingFinished() public view returns (bool) { + /// Set the collection access method. + /// @param mode Access mode + /// 0 for Normal + /// 1 for AllowList + /// @dev EVM selector for this function is: 0x41835d4c, + /// or in textual repr: setCollectionAccess(uint8) + function setCollectionAccess(uint8 mode) public { require(false, stub_error); + mode; + dummy = 0; + } + + /// Checks that the user is allowed to operate with the collection. + /// + /// @param user User address to check.
+ /// @dev EVM selector for this function is: 0xd63a8e11, + /// or in textual repr: allowed(address) + function allowed(address user) public view returns (bool) { + require(false, stub_error); + user; dummy; return false; } - // `token_id` should be obtained with `next_token_id` method, - // unlike standard, you can't specify it manually - // - // Selector: mint(address,uint256) 40c10f19 - function mint(address to, uint256 tokenId) public returns (bool) { + /// Add the user to the allowed list. + /// + /// @param user Address of a trusted user. + /// @dev EVM selector for this function is: 0x67844fe6, + /// or in textual repr: addToCollectionAllowList(address) + function addToCollectionAllowList(address user) public { require(false, stub_error); - to; - tokenId; + user; dummy = 0; - return false; } - // `token_id` should be obtained with `next_token_id` method, - // unlike standard, you can't specify it manually - // - // Selector: mintWithTokenURI(address,uint256,string) 50bb4e7f - function mintWithTokenURI( - address to, - uint256 tokenId, - string memory tokenUri - ) public returns (bool) { + /// Remove the user from the allowed list. + /// + /// @param user Address of a removed user. + /// @dev EVM selector for this function is: 0x85c51acb, + /// or in textual repr: removeFromCollectionAllowList(address) + function removeFromCollectionAllowList(address user) public { require(false, stub_error); - to; - tokenId; - tokenUri; + user; dummy = 0; - return false; } - // Not implemented - // - // Selector: finishMinting() 7d64bcb4 - function finishMinting() public returns (bool) { + /// Switch permission for minting. + /// + /// @param mode Enable if "true". + /// @dev EVM selector for this function is: 0x00018e84, + /// or in textual repr: setCollectionMintMode(bool) + function setCollectionMintMode(bool mode) public { require(false, stub_error); + mode; dummy = 0; - return false; } -} -// Selector: 780e9d63 -contract ERC721Enumerable is Dummy, ERC165 { - // Selector: tokenByIndex(uint256) 4f6ccce7 - function tokenByIndex(uint256 index) public view returns (uint256) { + /// Check that the account is the owner or admin of the collection + /// + /// @param user Account to verify. + /// @return "true" if the account is the owner or admin + /// @dev EVM selector for this function is: 0x9811b0c7, + /// or in textual repr: isOwnerOrAdmin(address) + function isOwnerOrAdmin(address user) public view returns (bool) { require(false, stub_error); - index; + user; dummy; - return 0; + return false; } - // Not implemented - // - // Selector: tokenOfOwnerByIndex(address,uint256) 2f745c59 - function tokenOfOwnerByIndex(address owner, uint256 index) - public - view - returns (uint256) - { + /// Returns the collection type + /// + /// @return `Fungible` or `NFT` or `ReFungible` + /// @dev EVM selector for this function is: 0xd34b55b8, + /// or in textual repr: uniqueCollectionType() + function uniqueCollectionType() public view returns (string memory) { require(false, stub_error); - owner; - index; dummy; - return 0; + return ""; } - // Selector: totalSupply() 18160ddd - function totalSupply() public view returns (uint256) { + /// Get collection owner. + /// + /// @return Tuple with the owner address and its substrate mirror. + /// If the address is canonical then the substrate mirror is zero and vice versa.
+ /// @dev EVM selector for this function is: 0xdf727d3b, + /// or in textual repr: collectionOwner() + function collectionOwner() public view returns (Tuple17 memory) { require(false, stub_error); dummy; - return 0; + return Tuple17(0x0000000000000000000000000000000000000000, 0); } -} -// Selector: 7d9262e6 -contract Collection is Dummy, ERC165 { - // Selector: setCollectionProperty(string,bytes) 2f073f66 - function setCollectionProperty(string memory key, bytes memory value) - public - { + /// Changes the collection owner to another account + /// + /// @dev The owner can be changed only by the current owner. + /// @param newOwner New owner account. + /// @dev EVM selector for this function is: 0x4f53e226, + /// or in textual repr: changeCollectionOwner(address) + function changeCollectionOwner(address newOwner) public { require(false, stub_error); - key; - value; + newOwner; dummy = 0; } +} - // Selector: deleteCollectionProperty(string) 7b7debce - function deleteCollectionProperty(string memory key) public { - require(false, stub_error); - key; - dummy = 0; - } +/// @dev anonymous struct +struct Tuple17 { + address field_0; + uint256 field_1; +} - // Throws error if key not found - // - // Selector: collectionProperty(string) cf24fd6d - function collectionProperty(string memory key) - public - view - returns (bytes memory) - { +/// @title ERC-721 Non-Fungible Token Standard, optional metadata extension +/// @dev See https://eips.ethereum.org/EIPS/eip-721 +/// @dev the ERC-165 identifier for this interface is 0x5b5e139f +contract ERC721Metadata is Dummy, ERC165 { + // /// @notice A descriptive name for a collection of NFTs in this contract + // /// @dev real implementation of this function lies in `ERC721UniqueExtensions` + // /// @dev EVM selector for this function is: 0x06fdde03, + // /// or in textual repr: name() + // function name() public view returns (string memory) { + // require(false, stub_error); + // dummy; + // return ""; + // } + + // /// @notice An abbreviated name for NFTs in this contract + // /// @dev real implementation of this function lies in `ERC721UniqueExtensions` + // /// @dev EVM selector for this function is: 0x95d89b41, + // /// or in textual repr: symbol() + // function symbol() public view returns (string memory) { + // require(false, stub_error); + // dummy; + // return ""; + // } + + /// @notice A distinct Uniform Resource Identifier (URI) for a given asset. + /// + /// @dev If the token has a `url` property and it is not empty, it is returned. + /// Otherwise, if the collection does not have a property with key `schemaName` or its value is not equal to `ERC721Metadata`, it returns an error `tokenURI not set`. + /// If the collection property `baseURI` is empty or absent, return "" (empty string); + /// otherwise, if the token property `suffix` is present and non-empty, return the concatenation of `baseURI` and `suffix`; + /// otherwise, return the concatenation of `baseURI` and the stringified token id (decimal stringifying, without padding). + /// + /// @return token's const_metadata + /// @dev EVM selector for this function is: 0xc87b56dd, + /// or in textual repr: tokenURI(uint256) + function tokenURI(uint256 tokenId) public view returns (string memory) { require(false, stub_error); - key; + tokenId; dummy; - return hex""; + return ""; } +} - // Selector: setCollectionSponsor(address) 7623402e - function setCollectionSponsor(address sponsor) public { +/// @title ERC721 Token that can be irreversibly burned (destroyed).
+/// @dev the ERC-165 identifier for this interface is 0x42966c68 +contract ERC721Burnable is Dummy, ERC165 { + /// @notice Burns a specific ERC721 token. + /// @dev Throws unless `msg.sender` is the current NFT owner, or an authorized + /// operator of the current owner. + /// @param tokenId The NFT to approve + /// @dev EVM selector for this function is: 0x42966c68, + /// or in textual repr: burn(uint256) + function burn(uint256 tokenId) public { require(false, stub_error); - sponsor; + tokenId; dummy = 0; } +} - // Selector: confirmCollectionSponsorship() 3c50e97a - function confirmCollectionSponsorship() public { +/// @dev inlined interface +contract ERC721UniqueMintableEvents { + event MintingFinished(); +} + +/// @title ERC721 minting logic. +/// @dev the ERC-165 identifier for this interface is 0x476ff149 +contract ERC721UniqueMintable is Dummy, ERC165, ERC721UniqueMintableEvents { + /// @dev EVM selector for this function is: 0x05d2035b, + /// or in textual repr: mintingFinished() + function mintingFinished() public view returns (bool) { require(false, stub_error); + dummy; + return false; + } + + /// @notice Function to mint token. + /// @param to The new owner + /// @return uint256 The id of the newly minted token + /// @dev EVM selector for this function is: 0x6a627842, + /// or in textual repr: mint(address) + function mint(address to) public returns (uint256) { + require(false, stub_error); + to; dummy = 0; + return 0; } - // Selector: setCollectionLimit(string,uint32) 6a3841db - function setCollectionLimit(string memory limit, uint32 value) public { + // /// @notice Function to mint token. + // /// @dev `tokenId` should be obtained with `nextTokenId` method, + // /// unlike standard, you can't specify it manually + // /// @param to The new owner + // /// @param tokenId ID of the minted NFT + // /// @dev EVM selector for this function is: 0x40c10f19, + // /// or in textual repr: mint(address,uint256) + // function mint(address to, uint256 tokenId) public returns (bool) { + // require(false, stub_error); + // to; + // tokenId; + // dummy = 0; + // return false; + // } + + /// @notice Function to mint token with the given tokenUri. + /// @param to The new owner + /// @param tokenUri Token URI that would be stored in the NFT properties + /// @return uint256 The id of the newly minted token + /// @dev EVM selector for this function is: 0x45c17782, + /// or in textual repr: mintWithTokenURI(address,string) + function mintWithTokenURI(address to, string memory tokenUri) public returns (uint256) { require(false, stub_error); - limit; - value; + to; + tokenUri; dummy = 0; + return 0; } - // Selector: setCollectionLimit(string,bool) 993b7fba - function setCollectionLimit(string memory limit, bool value) public { + // /// @notice Function to mint token with the given tokenUri. 
+ // /// @dev `tokenId` should be obtained with `nextTokenId` method, + // /// unlike standard, you can't specify it manually + // /// @param to The new owner + // /// @param tokenId ID of the minted NFT + // /// @param tokenUri Token URI that would be stored in the NFT properties + // /// @dev EVM selector for this function is: 0x50bb4e7f, + // /// or in textual repr: mintWithTokenURI(address,uint256,string) + // function mintWithTokenURI(address to, uint256 tokenId, string memory tokenUri) public returns (bool) { + // require(false, stub_error); + // to; + // tokenId; + // tokenUri; + // dummy = 0; + // return false; + // } + + /// @dev Not implemented + /// @dev EVM selector for this function is: 0x7d64bcb4, + /// or in textual repr: finishMinting() + function finishMinting() public returns (bool) { require(false, stub_error); - limit; - value; dummy = 0; + return false; } +} - // Selector: contractAddress() f6b4dfb4 - function contractAddress() public view returns (address) { +/// @title Unique extensions for ERC721. +/// @dev the ERC-165 identifier for this interface is 0x4468500d +contract ERC721UniqueExtensions is Dummy, ERC165 { + /// @notice A descriptive name for a collection of NFTs in this contract + /// @dev EVM selector for this function is: 0x06fdde03, + /// or in textual repr: name() + function name() public view returns (string memory) { require(false, stub_error); dummy; - return 0x0000000000000000000000000000000000000000; + return ""; } - // Selector: addCollectionAdminSubstrate(uint256) 5730062b - function addCollectionAdminSubstrate(uint256 newAdmin) public view { + /// @notice An abbreviated name for NFTs in this contract + /// @dev EVM selector for this function is: 0x95d89b41, + /// or in textual repr: symbol() + function symbol() public view returns (string memory) { require(false, stub_error); - newAdmin; dummy; + return ""; } - // Selector: removeCollectionAdminSubstrate(uint256) 4048fcf9 - function removeCollectionAdminSubstrate(uint256 newAdmin) public view { + /// @notice Transfer ownership of an NFT + /// @dev Throws unless `msg.sender` is the current owner. Throws if `to` + /// is the zero address. Throws if `tokenId` is not a valid NFT. + /// @param to The new owner + /// @param tokenId The NFT to transfer + /// @dev EVM selector for this function is: 0xa9059cbb, + /// or in textual repr: transfer(address,uint256) + function transfer(address to, uint256 tokenId) public { require(false, stub_error); - newAdmin; - dummy; + to; + tokenId; + dummy = 0; } - // Selector: addCollectionAdmin(address) 92e462c7 - function addCollectionAdmin(address newAdmin) public view { + /// @notice Burns a specific ERC721 token. + /// @dev Throws unless `msg.sender` is the current owner or an authorized + /// operator for this NFT. Throws if `from` is not the current owner. Throws + /// if `to` is the zero address. Throws if `tokenId` is not a valid NFT. + /// @param from The current owner of the NFT + /// @param tokenId The NFT to transfer + /// @dev EVM selector for this function is: 0x79cc6790, + /// or in textual repr: burnFrom(address,uint256) + function burnFrom(address from, uint256 tokenId) public { require(false, stub_error); - newAdmin; - dummy; + from; + tokenId; + dummy = 0; } - // Selector: removeCollectionAdmin(address) fafd7b42 - function removeCollectionAdmin(address admin) public view { + /// @notice Returns next free NFT ID. 
+ /// @dev EVM selector for this function is: 0x75794a3c, + /// or in textual repr: nextTokenId() + function nextTokenId() public view returns (uint256) { require(false, stub_error); - admin; dummy; + return 0; } + // /// @notice Function to mint multiple tokens. + // /// @dev `tokenIds` should be an array of consecutive numbers and first number + // /// should be obtained with `nextTokenId` method + // /// @param to The new owner + // /// @param tokenIds IDs of the minted NFTs + // /// @dev EVM selector for this function is: 0x44a9945e, + // /// or in textual repr: mintBulk(address,uint256[]) + // function mintBulk(address to, uint256[] memory tokenIds) public returns (bool) { + // require(false, stub_error); + // to; + // tokenIds; + // dummy = 0; + // return false; + // } + + // /// @notice Function to mint multiple tokens with the given tokenUris. + // /// @dev `tokenIds` is array of pairs of token ID and token URI. Token IDs should be consecutive + // /// numbers and first number should be obtained with `nextTokenId` method + // /// @param to The new owner + // /// @param tokens array of pairs of token ID and token URI for minted tokens + // /// @dev EVM selector for this function is: 0x36543006, + // /// or in textual repr: mintBulkWithTokenURI(address,(uint256,string)[]) + // function mintBulkWithTokenURI(address to, Tuple6[] memory tokens) public returns (bool) { + // require(false, stub_error); + // to; + // tokens; + // dummy = 0; + // return false; + // } - // Selector: setCollectionNesting(bool) 112d4586 - function setCollectionNesting(bool enable) public { +} + +/// @dev anonymous struct +struct Tuple6 { + uint256 field_0; + string field_1; +} + +/// @title ERC-721 Non-Fungible Token Standard, optional enumeration extension +/// @dev See https://eips.ethereum.org/EIPS/eip-721 +/// @dev the ERC-165 identifier for this interface is 0x780e9d63 +contract ERC721Enumerable is Dummy, ERC165 { + /// @notice Enumerate valid NFTs + /// @param index A counter less than `totalSupply()` + /// @return The token identifier for the `index`th NFT, + /// (sort order not specified) + /// @dev EVM selector for this function is: 0x4f6ccce7, + /// or in textual repr: tokenByIndex(uint256) + function tokenByIndex(uint256 index) public view returns (uint256) { require(false, stub_error); - enable; - dummy = 0; + index; + dummy; + return 0; } - // Selector: setCollectionNesting(bool,address[]) 64872396 - function setCollectionNesting(bool enable, address[] memory collections) - public - { + /// @dev Not implemented + /// @dev EVM selector for this function is: 0x2f745c59, + /// or in textual repr: tokenOfOwnerByIndex(address,uint256) + function tokenOfOwnerByIndex(address owner, uint256 index) public view returns (uint256) { require(false, stub_error); - enable; - collections; - dummy = 0; + owner; + index; + dummy; + return 0; } - // Selector: setCollectionAccess(uint8) 41835d4c - function setCollectionAccess(uint8 mode) public { + /// @notice Count NFTs tracked by this contract + /// @return A count of valid NFTs tracked by this contract, where each one of + /// them has an assigned and queryable owner not equal to the zero address + /// @dev EVM selector for this function is: 0x18160ddd, + /// or in textual repr: totalSupply() + function totalSupply() public view returns (uint256) { require(false, stub_error); - mode; - dummy = 0; + dummy; + return 0; } +} + +/// @dev inlined interface +contract ERC721Events { + event Transfer(address indexed from, address indexed to, uint256 indexed tokenId); + 
event Approval(address indexed owner, address indexed approved, uint256 indexed tokenId); + event ApprovalForAll(address indexed owner, address indexed operator, bool approved); +} - // Selector: addToCollectionAllowList(address) 67844fe6 - function addToCollectionAllowList(address user) public view { +/// @title ERC-721 Non-Fungible Token Standard +/// @dev See https://github.com/ethereum/EIPs/blob/master/EIPS/eip-721.md +/// @dev the ERC-165 identifier for this interface is 0x80ac58cd +contract ERC721 is Dummy, ERC165, ERC721Events { + /// @notice Count all NFTs assigned to an owner + /// @dev NFTs assigned to the zero address are considered invalid, and this + /// function throws for queries about the zero address. + /// @param owner An address for whom to query the balance + /// @return The number of NFTs owned by `owner`, possibly zero + /// @dev EVM selector for this function is: 0x70a08231, + /// or in textual repr: balanceOf(address) + function balanceOf(address owner) public view returns (uint256) { require(false, stub_error); - user; + owner; dummy; + return 0; } - // Selector: removeFromCollectionAllowList(address) 85c51acb - function removeFromCollectionAllowList(address user) public view { + /// @notice Find the owner of an NFT + /// @dev NFTs assigned to zero address are considered invalid, and queries + /// about them do throw. + /// @param tokenId The identifier for an NFT + /// @return The address of the owner of the NFT + /// @dev EVM selector for this function is: 0x6352211e, + /// or in textual repr: ownerOf(uint256) + function ownerOf(uint256 tokenId) public view returns (address) { require(false, stub_error); - user; + tokenId; dummy; + return 0x0000000000000000000000000000000000000000; } - // Selector: setCollectionMintMode(bool) 00018e84 - function setCollectionMintMode(bool mode) public { + /// @dev Not implemented + /// @dev EVM selector for this function is: 0xb88d4fde, + /// or in textual repr: safeTransferFrom(address,address,uint256,bytes) + function safeTransferFrom( + address from, + address to, + uint256 tokenId, + bytes memory data + ) public { require(false, stub_error); - mode; + from; + to; + tokenId; + data; dummy = 0; } -} -// Selector: d74d154f -contract ERC721UniqueExtensions is Dummy, ERC165 { - // Selector: transfer(address,uint256) a9059cbb - function transfer(address to, uint256 tokenId) public { + /// @dev Not implemented + /// @dev EVM selector for this function is: 0x42842e0e, + /// or in textual repr: safeTransferFrom(address,address,uint256) + function safeTransferFrom( + address from, + address to, + uint256 tokenId + ) public { require(false, stub_error); + from; to; tokenId; dummy = 0; } - // Selector: burnFrom(address,uint256) 79cc6790 - function burnFrom(address from, uint256 tokenId) public { + /// @notice Transfer ownership of an NFT -- THE CALLER IS RESPONSIBLE + /// TO CONFIRM THAT `to` IS CAPABLE OF RECEIVING NFTS OR ELSE + /// THEY MAY BE PERMANENTLY LOST + /// @dev Throws unless `msg.sender` is the current owner or an authorized + /// operator for this NFT. Throws if `from` is not the current owner. Throws + /// if `to` is the zero address. Throws if `tokenId` is not a valid NFT. 
+ /// @param from The current owner of the NFT + /// @param to The new owner + /// @param tokenId The NFT to transfer + /// @dev EVM selector for this function is: 0x23b872dd, + /// or in textual repr: transferFrom(address,address,uint256) + function transferFrom( + address from, + address to, + uint256 tokenId + ) public { require(false, stub_error); from; + to; tokenId; dummy = 0; } - // Selector: nextTokenId() 75794a3c - function nextTokenId() public view returns (uint256) { + /// @notice Set or reaffirm the approved address for an NFT + /// @dev The zero address indicates there is no approved address. + /// @dev Throws unless `msg.sender` is the current NFT owner, or an authorized + /// operator of the current owner. + /// @param approved The new approved NFT controller + /// @param tokenId The NFT to approve + /// @dev EVM selector for this function is: 0x095ea7b3, + /// or in textual repr: approve(address,uint256) + function approve(address approved, uint256 tokenId) public { require(false, stub_error); - dummy; - return 0; + approved; + tokenId; + dummy = 0; } - // Selector: mintBulk(address,uint256[]) 44a9945e - function mintBulk(address to, uint256[] memory tokenIds) - public - returns (bool) - { + /// @dev Not implemented + /// @dev EVM selector for this function is: 0xa22cb465, + /// or in textual repr: setApprovalForAll(address,bool) + function setApprovalForAll(address operator, bool approved) public { require(false, stub_error); - to; - tokenIds; + operator; + approved; dummy = 0; - return false; } - // Selector: mintBulkWithTokenURI(address,(uint256,string)[]) 36543006 - function mintBulkWithTokenURI(address to, Tuple0[] memory tokens) - public - returns (bool) - { + /// @dev Not implemented + /// @dev EVM selector for this function is: 0x081812fc, + /// or in textual repr: getApproved(uint256) + function getApproved(uint256 tokenId) public view returns (address) { require(false, stub_error); - to; - tokens; - dummy = 0; - return false; + tokenId; + dummy; + return 0x0000000000000000000000000000000000000000; + } + + /// @dev Not implemented + /// @dev EVM selector for this function is: 0xe985e9c5, + /// or in textual repr: isApprovedForAll(address,address) + function isApprovedForAll(address owner, address operator) public view returns (address) { + require(false, stub_error); + owner; + operator; + dummy; + return 0x0000000000000000000000000000000000000000; } } @@ -527,11 +808,11 @@ contract UniqueNFT is Dummy, ERC165, ERC721, - ERC721Metadata, ERC721Enumerable, ERC721UniqueExtensions, - ERC721Mintable, + ERC721UniqueMintable, ERC721Burnable, + ERC721Metadata, Collection, TokenProperties {} diff --git a/pallets/nonfungible/src/weights.rs b/pallets/nonfungible/src/weights.rs index af5de78ee5..5af18ac9e3 100644 --- a/pallets/nonfungible/src/weights.rs +++ b/pallets/nonfungible/src/weights.rs @@ -3,7 +3,7 @@ //! Autogenerated weights for pallet_nonfungible //! //! THIS FILE WAS AUTO-GENERATED USING THE SUBSTRATE BENCHMARK CLI VERSION 4.0.0-dev -//! DATE: 2022-06-15, STEPS: `50`, REPEAT: 80, LOW RANGE: `[]`, HIGH RANGE: `[]` +//! DATE: 2022-08-15, STEPS: `50`, REPEAT: 80, LOW RANGE: `[]`, HIGH RANGE: `[]` //! 
EXECUTION: None, WASM-EXECUTION: Compiled, CHAIN: None, DB CACHE: 1024 // Executed Command: @@ -46,6 +46,7 @@ pub trait WeightInfo { fn set_token_property_permissions(b: u32, ) -> Weight; fn set_token_properties(b: u32, ) -> Weight; fn delete_token_properties(b: u32, ) -> Weight; + fn token_owner() -> Weight; } /// Weights for pallet_nonfungible using the Substrate node and recommended hardware. @@ -56,34 +57,34 @@ impl WeightInfo for SubstrateWeight { // Storage: Nonfungible TokenData (r:0 w:1) // Storage: Nonfungible Owned (r:0 w:1) fn create_item() -> Weight { - (24_135_000 as Weight) - .saturating_add(T::DbWeight::get().reads(2 as Weight)) - .saturating_add(T::DbWeight::get().writes(4 as Weight)) + Weight::from_ref_time(25_905_000) + .saturating_add(T::DbWeight::get().reads(2 as u64)) + .saturating_add(T::DbWeight::get().writes(4 as u64)) } // Storage: Nonfungible TokensMinted (r:1 w:1) // Storage: Nonfungible AccountBalance (r:1 w:1) // Storage: Nonfungible TokenData (r:0 w:4) // Storage: Nonfungible Owned (r:0 w:4) fn create_multiple_items(b: u32, ) -> Weight { - (21_952_000 as Weight) - // Standard Error: 5_000 - .saturating_add((4_727_000 as Weight).saturating_mul(b as Weight)) - .saturating_add(T::DbWeight::get().reads(2 as Weight)) - .saturating_add(T::DbWeight::get().writes(2 as Weight)) - .saturating_add(T::DbWeight::get().writes((2 as Weight).saturating_mul(b as Weight))) + Weight::from_ref_time(24_955_000) + // Standard Error: 3_000 + .saturating_add(Weight::from_ref_time(5_340_000).saturating_mul(b as u64)) + .saturating_add(T::DbWeight::get().reads(2 as u64)) + .saturating_add(T::DbWeight::get().writes(2 as u64)) + .saturating_add(T::DbWeight::get().writes((2 as u64).saturating_mul(b as u64))) } // Storage: Nonfungible TokensMinted (r:1 w:1) // Storage: Nonfungible AccountBalance (r:4 w:4) // Storage: Nonfungible TokenData (r:0 w:4) // Storage: Nonfungible Owned (r:0 w:4) fn create_multiple_items_ex(b: u32, ) -> Weight { - (10_432_000 as Weight) - // Standard Error: 6_000 - .saturating_add((7_383_000 as Weight).saturating_mul(b as Weight)) - .saturating_add(T::DbWeight::get().reads(1 as Weight)) - .saturating_add(T::DbWeight::get().reads((1 as Weight).saturating_mul(b as Weight))) - .saturating_add(T::DbWeight::get().writes(1 as Weight)) - .saturating_add(T::DbWeight::get().writes((3 as Weight).saturating_mul(b as Weight))) + Weight::from_ref_time(13_666_000) + // Standard Error: 5_000 + .saturating_add(Weight::from_ref_time(8_299_000).saturating_mul(b as u64)) + .saturating_add(T::DbWeight::get().reads(1 as u64)) + .saturating_add(T::DbWeight::get().reads((1 as u64).saturating_mul(b as u64))) + .saturating_add(T::DbWeight::get().writes(1 as u64)) + .saturating_add(T::DbWeight::get().writes((3 as u64).saturating_mul(b as u64))) } // Storage: Nonfungible TokenData (r:1 w:1) // Storage: Nonfungible TokenChildren (r:1 w:0) @@ -93,9 +94,9 @@ impl WeightInfo for SubstrateWeight { // Storage: Nonfungible Owned (r:0 w:1) // Storage: Nonfungible TokenProperties (r:0 w:1) fn burn_item() -> Weight { - (29_798_000 as Weight) - .saturating_add(T::DbWeight::get().reads(5 as Weight)) - .saturating_add(T::DbWeight::get().writes(5 as Weight)) + Weight::from_ref_time(36_205_000) + .saturating_add(T::DbWeight::get().reads(5 as u64)) + .saturating_add(T::DbWeight::get().writes(5 as u64)) } // Storage: Nonfungible TokenChildren (r:1 w:0) // Storage: Nonfungible TokenData (r:1 w:1) @@ -105,9 +106,9 @@ impl WeightInfo for SubstrateWeight { // Storage: Nonfungible Owned (r:0 w:1) // Storage: 
Nonfungible TokenProperties (r:0 w:1) fn burn_recursively_self_raw() -> Weight { - (37_955_000 as Weight) - .saturating_add(T::DbWeight::get().reads(5 as Weight)) - .saturating_add(T::DbWeight::get().writes(5 as Weight)) + Weight::from_ref_time(44_550_000) + .saturating_add(T::DbWeight::get().reads(5 as u64)) + .saturating_add(T::DbWeight::get().writes(5 as u64)) } // Storage: Nonfungible TokenChildren (r:1 w:0) // Storage: Nonfungible TokenData (r:1 w:1) @@ -118,38 +119,38 @@ impl WeightInfo for SubstrateWeight { // Storage: Nonfungible TokenProperties (r:0 w:1) // Storage: Common CollectionById (r:1 w:0) fn burn_recursively_breadth_plus_self_plus_self_per_each_raw(b: u32, ) -> Weight { - (0 as Weight) - // Standard Error: 1_349_000 - .saturating_add((275_145_000 as Weight).saturating_mul(b as Weight)) - .saturating_add(T::DbWeight::get().reads(7 as Weight)) - .saturating_add(T::DbWeight::get().reads((4 as Weight).saturating_mul(b as Weight))) - .saturating_add(T::DbWeight::get().writes(6 as Weight)) - .saturating_add(T::DbWeight::get().writes((4 as Weight).saturating_mul(b as Weight))) + (Weight::from_ref_time(0)) + // Standard Error: 1_536_000 + .saturating_add(Weight::from_ref_time(312_125_000).saturating_mul(b as u64)) + .saturating_add(T::DbWeight::get().reads(7 as u64)) + .saturating_add(T::DbWeight::get().reads((4 as u64).saturating_mul(b as u64))) + .saturating_add(T::DbWeight::get().writes(6 as u64)) + .saturating_add(T::DbWeight::get().writes((4 as u64).saturating_mul(b as u64))) } // Storage: Nonfungible TokenData (r:1 w:1) // Storage: Nonfungible AccountBalance (r:2 w:2) // Storage: Nonfungible Allowance (r:1 w:0) // Storage: Nonfungible Owned (r:0 w:2) fn transfer() -> Weight { - (27_867_000 as Weight) - .saturating_add(T::DbWeight::get().reads(4 as Weight)) - .saturating_add(T::DbWeight::get().writes(5 as Weight)) + Weight::from_ref_time(31_116_000) + .saturating_add(T::DbWeight::get().reads(4 as u64)) + .saturating_add(T::DbWeight::get().writes(5 as u64)) } // Storage: Nonfungible TokenData (r:1 w:0) // Storage: Nonfungible Allowance (r:1 w:1) fn approve() -> Weight { - (18_824_000 as Weight) - .saturating_add(T::DbWeight::get().reads(2 as Weight)) - .saturating_add(T::DbWeight::get().writes(1 as Weight)) + Weight::from_ref_time(20_802_000) + .saturating_add(T::DbWeight::get().reads(2 as u64)) + .saturating_add(T::DbWeight::get().writes(1 as u64)) } // Storage: Nonfungible Allowance (r:1 w:1) // Storage: Nonfungible TokenData (r:1 w:1) // Storage: Nonfungible AccountBalance (r:2 w:2) // Storage: Nonfungible Owned (r:0 w:2) fn transfer_from() -> Weight { - (32_879_000 as Weight) - .saturating_add(T::DbWeight::get().reads(4 as Weight)) - .saturating_add(T::DbWeight::get().writes(6 as Weight)) + Weight::from_ref_time(36_083_000) + .saturating_add(T::DbWeight::get().reads(4 as u64)) + .saturating_add(T::DbWeight::get().writes(6 as u64)) } // Storage: Nonfungible Allowance (r:1 w:1) // Storage: Nonfungible TokenData (r:1 w:1) @@ -159,37 +160,40 @@ impl WeightInfo for SubstrateWeight { // Storage: Nonfungible Owned (r:0 w:1) // Storage: Nonfungible TokenProperties (r:0 w:1) fn burn_from() -> Weight { - (37_061_000 as Weight) - .saturating_add(T::DbWeight::get().reads(5 as Weight)) - .saturating_add(T::DbWeight::get().writes(6 as Weight)) + Weight::from_ref_time(41_781_000) + .saturating_add(T::DbWeight::get().reads(5 as u64)) + .saturating_add(T::DbWeight::get().writes(6 as u64)) } // Storage: Common CollectionPropertyPermissions (r:1 w:1) fn set_token_property_permissions(b: 
u32, ) -> Weight { - (0 as Weight) - // Standard Error: 57_000 - .saturating_add((15_149_000 as Weight).saturating_mul(b as Weight)) - .saturating_add(T::DbWeight::get().reads(1 as Weight)) - .saturating_add(T::DbWeight::get().writes(1 as Weight)) + (Weight::from_ref_time(0)) + // Standard Error: 58_000 + .saturating_add(Weight::from_ref_time(15_705_000).saturating_mul(b as u64)) + .saturating_add(T::DbWeight::get().reads(1 as u64)) + .saturating_add(T::DbWeight::get().writes(1 as u64)) } // Storage: Common CollectionPropertyPermissions (r:1 w:0) - // Storage: Nonfungible TokenData (r:1 w:0) // Storage: Nonfungible TokenProperties (r:1 w:1) fn set_token_properties(b: u32, ) -> Weight { - (0 as Weight) - // Standard Error: 2_278_000 - .saturating_add((409_613_000 as Weight).saturating_mul(b as Weight)) - .saturating_add(T::DbWeight::get().reads(3 as Weight)) - .saturating_add(T::DbWeight::get().writes(1 as Weight)) + (Weight::from_ref_time(0)) + // Standard Error: 3_595_000 + .saturating_add(Weight::from_ref_time(590_344_000).saturating_mul(b as u64)) + .saturating_add(T::DbWeight::get().reads(2 as u64)) + .saturating_add(T::DbWeight::get().writes(1 as u64)) } // Storage: Common CollectionPropertyPermissions (r:1 w:0) - // Storage: Nonfungible TokenData (r:1 w:0) // Storage: Nonfungible TokenProperties (r:1 w:1) fn delete_token_properties(b: u32, ) -> Weight { - (0 as Weight) - // Standard Error: 2_234_000 - .saturating_add((408_185_000 as Weight).saturating_mul(b as Weight)) - .saturating_add(T::DbWeight::get().reads(3 as Weight)) - .saturating_add(T::DbWeight::get().writes(1 as Weight)) + (Weight::from_ref_time(0)) + // Standard Error: 3_664_000 + .saturating_add(Weight::from_ref_time(605_836_000).saturating_mul(b as u64)) + .saturating_add(T::DbWeight::get().reads(2 as u64)) + .saturating_add(T::DbWeight::get().writes(1 as u64)) + } + // Storage: Nonfungible TokenData (r:1 w:0) + fn token_owner() -> Weight { + Weight::from_ref_time(4_366_000) + .saturating_add(T::DbWeight::get().reads(1 as u64)) } } @@ -200,34 +204,34 @@ impl WeightInfo for () { // Storage: Nonfungible TokenData (r:0 w:1) // Storage: Nonfungible Owned (r:0 w:1) fn create_item() -> Weight { - (24_135_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(2 as Weight)) - .saturating_add(RocksDbWeight::get().writes(4 as Weight)) + Weight::from_ref_time(25_905_000) + .saturating_add(RocksDbWeight::get().reads(2 as u64)) + .saturating_add(RocksDbWeight::get().writes(4 as u64)) } // Storage: Nonfungible TokensMinted (r:1 w:1) // Storage: Nonfungible AccountBalance (r:1 w:1) // Storage: Nonfungible TokenData (r:0 w:4) // Storage: Nonfungible Owned (r:0 w:4) fn create_multiple_items(b: u32, ) -> Weight { - (21_952_000 as Weight) - // Standard Error: 5_000 - .saturating_add((4_727_000 as Weight).saturating_mul(b as Weight)) - .saturating_add(RocksDbWeight::get().reads(2 as Weight)) - .saturating_add(RocksDbWeight::get().writes(2 as Weight)) - .saturating_add(RocksDbWeight::get().writes((2 as Weight).saturating_mul(b as Weight))) + Weight::from_ref_time(24_955_000) + // Standard Error: 3_000 + .saturating_add(Weight::from_ref_time(5_340_000).saturating_mul(b as u64)) + .saturating_add(RocksDbWeight::get().reads(2 as u64)) + .saturating_add(RocksDbWeight::get().writes(2 as u64)) + .saturating_add(RocksDbWeight::get().writes((2 as u64).saturating_mul(b as u64))) } // Storage: Nonfungible TokensMinted (r:1 w:1) // Storage: Nonfungible AccountBalance (r:4 w:4) // Storage: Nonfungible TokenData (r:0 w:4) // Storage: Nonfungible 
Owned (r:0 w:4) fn create_multiple_items_ex(b: u32, ) -> Weight { - (10_432_000 as Weight) - // Standard Error: 6_000 - .saturating_add((7_383_000 as Weight).saturating_mul(b as Weight)) - .saturating_add(RocksDbWeight::get().reads(1 as Weight)) - .saturating_add(RocksDbWeight::get().reads((1 as Weight).saturating_mul(b as Weight))) - .saturating_add(RocksDbWeight::get().writes(1 as Weight)) - .saturating_add(RocksDbWeight::get().writes((3 as Weight).saturating_mul(b as Weight))) + Weight::from_ref_time(13_666_000) + // Standard Error: 5_000 + .saturating_add(Weight::from_ref_time(8_299_000).saturating_mul(b as u64)) + .saturating_add(RocksDbWeight::get().reads(1 as u64)) + .saturating_add(RocksDbWeight::get().reads((1 as u64).saturating_mul(b as u64))) + .saturating_add(RocksDbWeight::get().writes(1 as u64)) + .saturating_add(RocksDbWeight::get().writes((3 as u64).saturating_mul(b as u64))) } // Storage: Nonfungible TokenData (r:1 w:1) // Storage: Nonfungible TokenChildren (r:1 w:0) @@ -237,9 +241,9 @@ impl WeightInfo for () { // Storage: Nonfungible Owned (r:0 w:1) // Storage: Nonfungible TokenProperties (r:0 w:1) fn burn_item() -> Weight { - (29_798_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(5 as Weight)) - .saturating_add(RocksDbWeight::get().writes(5 as Weight)) + Weight::from_ref_time(36_205_000) + .saturating_add(RocksDbWeight::get().reads(5 as u64)) + .saturating_add(RocksDbWeight::get().writes(5 as u64)) } // Storage: Nonfungible TokenChildren (r:1 w:0) // Storage: Nonfungible TokenData (r:1 w:1) @@ -249,9 +253,9 @@ impl WeightInfo for () { // Storage: Nonfungible Owned (r:0 w:1) // Storage: Nonfungible TokenProperties (r:0 w:1) fn burn_recursively_self_raw() -> Weight { - (37_955_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(5 as Weight)) - .saturating_add(RocksDbWeight::get().writes(5 as Weight)) + Weight::from_ref_time(44_550_000) + .saturating_add(RocksDbWeight::get().reads(5 as u64)) + .saturating_add(RocksDbWeight::get().writes(5 as u64)) } // Storage: Nonfungible TokenChildren (r:1 w:0) // Storage: Nonfungible TokenData (r:1 w:1) @@ -262,38 +266,38 @@ impl WeightInfo for () { // Storage: Nonfungible TokenProperties (r:0 w:1) // Storage: Common CollectionById (r:1 w:0) fn burn_recursively_breadth_plus_self_plus_self_per_each_raw(b: u32, ) -> Weight { - (0 as Weight) - // Standard Error: 1_349_000 - .saturating_add((275_145_000 as Weight).saturating_mul(b as Weight)) - .saturating_add(RocksDbWeight::get().reads(7 as Weight)) - .saturating_add(RocksDbWeight::get().reads((4 as Weight).saturating_mul(b as Weight))) - .saturating_add(RocksDbWeight::get().writes(6 as Weight)) - .saturating_add(RocksDbWeight::get().writes((4 as Weight).saturating_mul(b as Weight))) + (Weight::from_ref_time(0)) + // Standard Error: 1_536_000 + .saturating_add(Weight::from_ref_time(312_125_000).saturating_mul(b as u64)) + .saturating_add(RocksDbWeight::get().reads(7 as u64)) + .saturating_add(RocksDbWeight::get().reads((4 as u64).saturating_mul(b as u64))) + .saturating_add(RocksDbWeight::get().writes(6 as u64)) + .saturating_add(RocksDbWeight::get().writes((4 as u64).saturating_mul(b as u64))) } // Storage: Nonfungible TokenData (r:1 w:1) // Storage: Nonfungible AccountBalance (r:2 w:2) // Storage: Nonfungible Allowance (r:1 w:0) // Storage: Nonfungible Owned (r:0 w:2) fn transfer() -> Weight { - (27_867_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(4 as Weight)) - .saturating_add(RocksDbWeight::get().writes(5 as Weight)) + 
Weight::from_ref_time(31_116_000) + .saturating_add(RocksDbWeight::get().reads(4 as u64)) + .saturating_add(RocksDbWeight::get().writes(5 as u64)) } // Storage: Nonfungible TokenData (r:1 w:0) // Storage: Nonfungible Allowance (r:1 w:1) fn approve() -> Weight { - (18_824_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(2 as Weight)) - .saturating_add(RocksDbWeight::get().writes(1 as Weight)) + Weight::from_ref_time(20_802_000) + .saturating_add(RocksDbWeight::get().reads(2 as u64)) + .saturating_add(RocksDbWeight::get().writes(1 as u64)) } // Storage: Nonfungible Allowance (r:1 w:1) // Storage: Nonfungible TokenData (r:1 w:1) // Storage: Nonfungible AccountBalance (r:2 w:2) // Storage: Nonfungible Owned (r:0 w:2) fn transfer_from() -> Weight { - (32_879_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(4 as Weight)) - .saturating_add(RocksDbWeight::get().writes(6 as Weight)) + Weight::from_ref_time(36_083_000) + .saturating_add(RocksDbWeight::get().reads(4 as u64)) + .saturating_add(RocksDbWeight::get().writes(6 as u64)) } // Storage: Nonfungible Allowance (r:1 w:1) // Storage: Nonfungible TokenData (r:1 w:1) @@ -303,36 +307,39 @@ impl WeightInfo for () { // Storage: Nonfungible Owned (r:0 w:1) // Storage: Nonfungible TokenProperties (r:0 w:1) fn burn_from() -> Weight { - (37_061_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(5 as Weight)) - .saturating_add(RocksDbWeight::get().writes(6 as Weight)) + Weight::from_ref_time(41_781_000) + .saturating_add(RocksDbWeight::get().reads(5 as u64)) + .saturating_add(RocksDbWeight::get().writes(6 as u64)) } // Storage: Common CollectionPropertyPermissions (r:1 w:1) fn set_token_property_permissions(b: u32, ) -> Weight { - (0 as Weight) - // Standard Error: 57_000 - .saturating_add((15_149_000 as Weight).saturating_mul(b as Weight)) - .saturating_add(RocksDbWeight::get().reads(1 as Weight)) - .saturating_add(RocksDbWeight::get().writes(1 as Weight)) + (Weight::from_ref_time(0)) + // Standard Error: 58_000 + .saturating_add(Weight::from_ref_time(15_705_000).saturating_mul(b as u64)) + .saturating_add(RocksDbWeight::get().reads(1 as u64)) + .saturating_add(RocksDbWeight::get().writes(1 as u64)) } // Storage: Common CollectionPropertyPermissions (r:1 w:0) - // Storage: Nonfungible TokenData (r:1 w:0) // Storage: Nonfungible TokenProperties (r:1 w:1) fn set_token_properties(b: u32, ) -> Weight { - (0 as Weight) - // Standard Error: 2_278_000 - .saturating_add((409_613_000 as Weight).saturating_mul(b as Weight)) - .saturating_add(RocksDbWeight::get().reads(3 as Weight)) - .saturating_add(RocksDbWeight::get().writes(1 as Weight)) + (Weight::from_ref_time(0)) + // Standard Error: 3_595_000 + .saturating_add(Weight::from_ref_time(590_344_000).saturating_mul(b as u64)) + .saturating_add(RocksDbWeight::get().reads(2 as u64)) + .saturating_add(RocksDbWeight::get().writes(1 as u64)) } // Storage: Common CollectionPropertyPermissions (r:1 w:0) - // Storage: Nonfungible TokenData (r:1 w:0) // Storage: Nonfungible TokenProperties (r:1 w:1) fn delete_token_properties(b: u32, ) -> Weight { - (0 as Weight) - // Standard Error: 2_234_000 - .saturating_add((408_185_000 as Weight).saturating_mul(b as Weight)) - .saturating_add(RocksDbWeight::get().reads(3 as Weight)) - .saturating_add(RocksDbWeight::get().writes(1 as Weight)) + (Weight::from_ref_time(0)) + // Standard Error: 3_664_000 + .saturating_add(Weight::from_ref_time(605_836_000).saturating_mul(b as u64)) + .saturating_add(RocksDbWeight::get().reads(2 as u64)) + 
.saturating_add(RocksDbWeight::get().writes(1 as u64)) + } + // Storage: Nonfungible TokenData (r:1 w:0) + fn token_owner() -> Weight { + Weight::from_ref_time(4_366_000) + .saturating_add(RocksDbWeight::get().reads(1 as u64)) } } diff --git a/pallets/proxy-rmrk-core/CHANGELOG.md b/pallets/proxy-rmrk-core/CHANGELOG.md new file mode 100644 index 0000000000..e8229c5612 --- /dev/null +++ b/pallets/proxy-rmrk-core/CHANGELOG.md @@ -0,0 +1,17 @@ + +## [v0.1.2] 2022-08-16 + +### Other changes + +- build: Upgrade polkadot to v0.9.27 2c498572636f2b34d53b1c51b7283a761a7dc90a + +- build: Upgrade polkadot to v0.9.26 85515e54c4ca1b82a2630034e55dcc804c643bf8 + +- refactor: Remove `#[transactional]` from extrinsics 7fd36cea2f6e00c02c67ccc1de9649ae404efd31 + +Every extrinsic now runs in transaction implicitly, and +`#[transactional]` on pallet dispatchable is now meaningless + +Upstream-Change: https://github.com/paritytech/substrate/issues/10806 + +- build: Upgrade polkadot to v0.9.25 cdfb9bdc7b205ff1b5134f034ef9973d769e5e6b \ No newline at end of file diff --git a/pallets/proxy-rmrk-core/Cargo.toml b/pallets/proxy-rmrk-core/Cargo.toml index 57cc4b4373..16bd4c987f 100644 --- a/pallets/proxy-rmrk-core/Cargo.toml +++ b/pallets/proxy-rmrk-core/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "pallet-rmrk-core" -version = "0.1.0" +version = "0.1.2" license = "GPLv3" edition = "2021" @@ -11,19 +11,21 @@ package = 'parity-scale-codec' version = '3.1.2' [dependencies] -frame-support = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -frame-system = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-runtime = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-std = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-core = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } +frame-support = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +frame-system = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-runtime = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-std = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-core = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } pallet-common = { default-features = false, path = '../common' } pallet-nonfungible = { default-features = false, path = "../../pallets/nonfungible" } pallet-structure = { default-features = false, path = "../../pallets/structure" } up-data-structs = { default-features = false, path = '../../primitives/data-structs' } -pallet-evm = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } -frame-benchmarking = { default-features = false, optional = true, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } +pallet-evm = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } +frame-benchmarking = { default-features = false, optional = true, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } 
rmrk-traits = { default-features = false, path = "../../primitives/rmrk-traits" } -scale-info = { version = "2.0.1", default-features = false, features = ["derive"] } +scale-info = { version = "2.0.1", default-features = false, features = [ + "derive", +] } derivative = { version = "2.2.0", features = ["use_core"] } [features] @@ -46,3 +48,4 @@ runtime-benchmarks = [ 'frame-support/runtime-benchmarks', 'frame-system/runtime-benchmarks', ] +try-runtime = ["frame-support/try-runtime"] diff --git a/pallets/proxy-rmrk-core/src/benchmarking.rs b/pallets/proxy-rmrk-core/src/benchmarking.rs index 2f86c67d9f..26b1ffa79d 100644 --- a/pallets/proxy-rmrk-core/src/benchmarking.rs +++ b/pallets/proxy-rmrk-core/src/benchmarking.rs @@ -1,3 +1,19 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + use sp_std::vec; use frame_benchmarking::{benchmarks, account}; diff --git a/pallets/proxy-rmrk-core/src/lib.rs b/pallets/proxy-rmrk-core/src/lib.rs index 99945492ed..33db5f30b0 100644 --- a/pallets/proxy-rmrk-core/src/lib.rs +++ b/pallets/proxy-rmrk-core/src/lib.rs @@ -14,9 +14,138 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . +//! # RMRK Core Proxy Pallet +//! +//! A pallet used as proxy for RMRK Core (). +//! +//! - [`Config`] +//! - [`Call`] +//! - [`Pallet`] +//! +//! ## Overview +//! +//! The RMRK Core Proxy pallet mirrors the functionality of RMRK Core, +//! binding its externalities to Unique's own underlying structure. +//! It is purposed to mimic RMRK Core exactly, allowing seamless integrations +//! of solutions based on RMRK. +//! +//! RMRK Core itself contains essential functionality for RMRK's nested and +//! multi-resourced NFTs. +//! +//! *Note*, that while RMRK itself is subject to active development and restructuring, +//! the proxy may be caught temporarily out of date. +//! +//! ### What is RMRK? +//! +//! RMRK is a set of NFT standards which compose several "NFT 2.0 lego" primitives. +//! Putting these legos together allows a user to create NFT systems of arbitrary complexity. +//! +//! Meaning, RMRK NFTs are dynamic, able to nest into each other and form a hierarchy, +//! make use of specific changeable and partially shared metadata in the form of resources, +//! and more. +//! +//! Visit RMRK documentation and repositories to learn more: +//! - Docs: +//! - FAQ: +//! - Substrate code repository: +//! - RMRK specification repository: +//! +//! ## Terminology +//! +//! For more information on RMRK, see RMRK's own documentation. +//! +//! ### Intro to RMRK +//! +//! - **Resource:** Additional piece of metadata of an NFT usually serving to add +//! a piece of media on top of the root metadata (NFT's own), be it a different wing +//! on the root template bird or something entirely unrelated. +//! +//! 
- **Base:** A list of possible "components" - Parts, a combination of which can +//! be appended/equipped to/on an NFT. +//! +//! - **Part:** Something that, together with other Parts, can constitute an NFT. +//! Parts are defined in the Base to which they belong. Parts can be either +//! of the `slot` type or `fixed` type. Slots are intended for equippables. +//! Note that "part of something" and "Part of a Base" can be easily confused, +//! and so in this documentation these words are distinguished by the capital letter. +//! +//! - **Theme:** Named objects of variable => value pairs which get interpolated into +//! the Base's `themable` Parts. Themes can hold any value, but are often represented +//! in RMRK's examples as colors applied to visible Parts. +//! +//! ### Peculiarities in Unique +//! +//! - **Scoped properties:** Properties that are normally obscured from users. +//! Their purpose is to contain structured metadata that was not included in the Unique standard +//! for collections and tokens, meant to be operated on by proxies and other outliers. +//! Scoped property keys are prefixed with `some-scope:`, where `some-scope` is +//! an arbitrary keyword, like "rmrk". `:` is considered an unacceptable symbol in user-defined +//! properties, which, along with other safeguards, makes scoped ones impossible to tamper with. +//! +//! - **Auxiliary properties:** A slightly different structure of properties, +//! trading universality of use for more convenient storage, writes and access. +//! Meant to be inaccessible to end users. +//! +//! ## Proxy Implementation +//! +//! An external user is supposed to be able to utilize this proxy as they would +//! utilize RMRK, and get exactly the same results. Normally, Unique transactions +//! are off-limits to RMRK collections and tokens, and vice versa. However, +//! the information stored on chain can be freely interpreted by storage reads and Unique RPCs. +//! +//! ### ID Mapping +//! +//! RMRK's collections' IDs are counted independently of Unique's and start at 0. +//! Note that tokens' IDs still start at 1. +//! The collections themselves, as well as tokens, are stored as Unique collections, +//! and thus RMRK IDs are mapped to Unique IDs (but not vice versa). +//! +//! ### External/Internal Collection Insulation +//! +//! A Unique transaction cannot target collections purposed for RMRK, +//! and they are flagged as `external` to specify that. On the other hand, +//! due to the mapping, RMRK transactions and RPCs simply cannot reach Unique collections. +//! +//! ### Native Properties +//! +//! Many of RMRK's native parameters are stored as scoped properties of a collection +//! or an NFT on the chain. Scoped properties are prefixed with `rmrk:`, where `:` +//! is an unacceptable symbol in user-defined properties, which, along with other safeguards, +//! makes them impossible to tamper with. +//! +//! ### Collection and NFT Types, or Base, Parts and Themes Handling +//! +//! RMRK introduces the concept of a Base, which is a catalogue of Parts, +//! possible components of an NFT. Due to its similarity with the functionality +//! of a token collection, a Base is stored and handled as one, and the Base's Parts and Themes +//! are this collection's NFTs. See [`CollectionType`] and [`NftType`]. +//! +//! ## Interface +//! +//! ### Dispatchables +//! +//! - `create_collection` - Create a new collection of NFTs. +//! - `destroy_collection` - Destroy a collection. +//! - `change_collection_issuer` - Change the issuer of a collection. +//! 
Analogous to Unique's collection's [`owner`](up_data_structs::Collection). +//! - `lock_collection` - "Lock" the collection and prevent new token creation. **Cannot be undone.** +//! - `mint_nft` - Mint an NFT in a specified collection. +//! - `burn_nft` - Burn an NFT, destroying it and its nested tokens. +//! - `send` - Transfer an NFT from an account/NFT A to another account/NFT B. +//! - `accept_nft` - Accept an NFT sent from another account to self or an owned NFT. +//! - `reject_nft` - Reject an NFT sent from another account to self or owned NFT and **burn it**. +//! - `accept_resource` - Accept the addition of a newly created pending resource to an existing NFT. +//! - `accept_resource_removal` - Accept the removal of a removal-pending resource from an NFT. +//! - `set_property` - Add or edit a custom user property of a token or a collection. +//! - `set_priority` - Set a different order of resource priorities for an NFT. +//! - `add_basic_resource` - Create and set/propose a basic resource for an NFT. +//! - `add_composable_resource` - Create and set/propose a composable resource for an NFT. +//! - `add_slot_resource` - Create and set/propose a slot resource for an NFT. +//! - `remove_resource` - Remove and erase a resource from an NFT. + #![cfg_attr(not(feature = "std"), no_std)] -use frame_support::{pallet_prelude::*, transactional, BoundedVec, dispatch::DispatchResult}; +use frame_support::{pallet_prelude::*, BoundedVec, dispatch::DispatchResult}; use frame_system::{pallet_prelude::*, ensure_signed}; use sp_runtime::{DispatchError, Permill, traits::StaticLookup}; use sp_std::{ @@ -49,6 +178,7 @@ pub use property::*; use RmrkProperty::*; +/// Maximum number of levels of depth in the token nesting tree. pub const NESTING_BUDGET: u32 = 5; type PendingTarget = (CollectionId, TokenId); @@ -66,14 +196,19 @@ pub mod pallet { pub trait Config: frame_system::Config + pallet_common::Config + pallet_nonfungible::Config + account::Config { - type Event: From> + IsType<::Event>; + /// Overarching event type. + type RuntimeEvent: From> + IsType<::RuntimeEvent>; + + /// The weight information of this pallet. type WeightInfo: WeightInfo; } + /// Latest yet-unused collection ID. #[pallet::storage] #[pallet::getter(fn collection_index)] pub type CollectionIndex = StorageValue<_, RmrkCollectionId, ValueQuery>; + /// Mapping from RMRK collection ID to Unique's. #[pallet::storage] pub type UniqueCollectionId = StorageMap<_, Twox64Concat, RmrkCollectionId, CollectionId, ValueQuery>; @@ -159,35 +294,67 @@ pub mod pallet { #[pallet::error] pub enum Error { - /* Unique-specific events */ + /* Unique proxy-specific events */ + /// Property of the type of RMRK collection could not be read successfully. CorruptedCollectionType, - NftTypeEncodeError, + // NftTypeEncodeError, + /// Too many symbols supplied as the property key. The maximum is [256](up_data_structs::MAX_PROPERTY_KEY_LENGTH). RmrkPropertyKeyIsTooLong, + /// Too many bytes supplied as the property value. The maximum is [32768](up_data_structs::MAX_PROPERTY_VALUE_LENGTH). RmrkPropertyValueIsTooLong, + /// Could not find a property by the supplied key. RmrkPropertyIsNotFound, + /// Something went wrong when decoding encoded data from the storage. + /// Perhaps, there was a wrong key supplied for the type, or the data was improperly stored. UnableToDecodeRmrkData, /* RMRK compatible events */ + /// Only destroying collections without tokens is allowed. CollectionNotEmpty, + /// Could not find an ID for a collection. 
It is likely there were too many collections created on the chain, causing an overflow. NoAvailableCollectionId, + /// Token does not exist, or there is no suitable ID for it, likely too many tokens were created in a collection, causing an overflow. NoAvailableNftId, + /// Collection does not exist, has a wrong type, or does not map to a Unique ID. CollectionUnknown, + /// No permission to perform action. NoPermission, + /// Token is marked as non-transferable, and thus cannot be transferred. NonTransferable, + /// Too many tokens created in the collection, no new ones are allowed. CollectionFullOrLocked, + /// No such resource found. ResourceDoesntExist, + /// If an NFT is sent to a descendant, that would form a nesting loop, an ouroboros. + /// Sending to self is redundant. CannotSendToDescendentOrSelf, + /// Not the target owner of the sent NFT. CannotAcceptNonOwnedNft, + /// Not the target owner of the sent NFT. CannotRejectNonOwnedNft, + /// NFT was not sent and is not pending. CannotRejectNonPendingNft, + /// Resource is not pending for the operation. ResourceNotPending, + /// Could not find an ID for the resource. It is likely there were too many resources created on an NFT, causing an overflow. NoAvailableResourceId, } #[pallet::call] impl Pallet { - /// Create a collection - #[transactional] + // todo :refactor replace every collection_id with rmrk_collection_id (and nft_id) in arguments for uniformity? + + /// Create a new collection of NFTs. + /// + /// # Permissions: + /// * Anyone - will be assigned as the issuer of the collection. + /// + /// # Arguments: + /// - `origin`: sender of the transaction + /// - `metadata`: Metadata describing the collection, e.g. IPFS hash. Cannot be changed. + /// - `max`: Optional maximum number of tokens. + /// - `symbol`: UTF-8 string with token prefix, by which to represent the token in wallets and UIs. + /// Analogous to Unique's [`token_prefix`](up_data_structs::Collection). Cannot be changed. #[pallet::weight(>::create_collection())] pub fn create_collection( origin: OriginFor, @@ -226,8 +393,8 @@ pub mod pallet { T::CrossAccountId::from_sub(sender.clone()), data, [ - Self::rmrk_property(Metadata, &metadata)?, - Self::rmrk_property(CollectionType, &misc::CollectionType::Regular)?, + Self::encode_rmrk_property(Metadata, &metadata)?, + Self::encode_rmrk_property(CollectionType, &misc::CollectionType::Regular)?, ] .into_iter(), )?; @@ -237,8 +404,8 @@ pub mod pallet { >::set_scoped_collection_property( unique_collection_id, - PropertyScope::Rmrk, - Self::rmrk_property(RmrkInternalCollectionId, &rmrk_collection_id)?, + RMRK_SCOPE, + Self::encode_rmrk_property(RmrkInternalCollectionId, &rmrk_collection_id)?, )?; >::mutate(|n| *n += 1); @@ -251,8 +418,16 @@ pub mod pallet { Ok(()) } - /// destroy collection - #[transactional] + /// Destroy a collection. + /// + /// Only empty collections can be destroyed. If it has any tokens, they must be burned first. + /// + /// # Permissions: + /// * Collection issuer + /// + /// # Arguments: + /// - `origin`: sender of the transaction + /// - `collection_id`: RMRK ID of the collection to destroy. #[pallet::weight(>::destroy_collection())] pub fn destroy_collection( origin: OriginFor, @@ -278,13 +453,15 @@ pub mod pallet { Ok(()) } - /// Change the issuer of a collection + /// Change the issuer of a collection. Analogous to Unique's collection's [`owner`](up_data_structs::Collection). 
+ /// + /// # Permissions: + /// * Collection issuer /// - /// Parameters: + /// # Arguments: /// - `origin`: sender of the transaction - /// - `collection_id`: collection id of the nft to change issuer of - /// - `new_issuer`: Collection's new issuer - #[transactional] + /// - `collection_id`: RMRK collection ID to change the issuer of. + /// - `new_issuer`: Collection's new issuer. #[pallet::weight(>::change_collection_issuer())] pub fn change_collection_issuer( origin: OriginFor, @@ -314,8 +491,14 @@ pub mod pallet { Ok(()) } - /// lock collection - #[transactional] + /// "Lock" the collection and prevent new token creation. Cannot be undone. + /// + /// # Permissions: + /// * Collection issuer + /// + /// # Arguments: + /// - `origin`: sender of the transaction + /// - `collection_id`: RMRK ID of the collection to lock. #[pallet::weight(>::lock_collection())] pub fn lock_collection( origin: OriginFor, @@ -346,17 +529,20 @@ pub mod pallet { Ok(()) } - /// Mints an NFT in the specified collection - /// Sets metadata and the royalty attribute + /// Mint an NFT in a specified collection. + /// + /// # Permissions: + /// * Collection issuer /// - /// Parameters: - /// - `collection_id`: The class of the asset to be minted. - /// - `nft_id`: The nft value of the asset to be minted. - /// - `recipient`: Receiver of the royalty - /// - `royalty`: Permillage reward from each trade for the Recipient - /// - `metadata`: Arbitrary data about an nft, e.g. IPFS hash - /// - `transferable`: Ability to transfer this NFT - #[transactional] + /// # Arguments: + /// - `origin`: sender of the transaction + /// - `owner`: Owner account of the NFT. If set to None, defaults to the sender (collection issuer). + /// - `collection_id`: RMRK collection ID for the NFT to be minted within. Cannot be changed. + /// - `recipient`: Receiver account of the royalty. Has no effect if the `royalty_amount` is not set. Cannot be changed. + /// - `royalty_amount`: Optional permillage reward from each trade for the `recipient`. Cannot be changed. + /// - `metadata`: Arbitrary data about an NFT, e.g. IPFS hash. Cannot be changed. + /// - `transferable`: Can this NFT be transferred? Cannot be changed. + /// - `resources`: Resource data to be added to the NFT immediately after minting. 
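The fields listed above end up stored as `rmrk:`-scoped token properties (see the `encode_rmrk_property` calls in the minting code below). A minimal standalone sketch of that key convention, with plain strings standing in for the pallet's `PropertyScope` machinery; illustration only, not the pallet's actual key handling:

// Illustration only: the "rmrk:"-scoped key convention described in the module docs.
// The pallet builds scoped keys via PropertyScope::Rmrk.apply(..); this sketch just
// shows the idea with plain strings.
fn scoped_key(scope: &str, key: &str) -> String {
    format!("{scope}:{key}")
}

/// User-defined keys must not contain ':', so they can never collide with scoped ones.
fn is_valid_user_key(key: &str) -> bool {
    !key.contains(':')
}

fn main() {
    assert_eq!(scoped_key("rmrk", "metadata"), "rmrk:metadata");
    assert!(is_valid_user_key("my-custom-key"));
    assert!(!is_valid_user_key("rmrk:metadata")); // cannot be forged by a user
}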
#[pallet::weight(>::mint_nft(resources.as_ref().map(|r| r.len() as u32).unwrap_or(0)))] pub fn mint_nft( origin: OriginFor, @@ -390,16 +576,16 @@ pub mod pallet { &cross_owner, &collection, [ - Self::rmrk_property(TokenType, &NftType::Regular)?, - Self::rmrk_property(Transferable, &transferable)?, - Self::rmrk_property(PendingNftAccept, &None::)?, - Self::rmrk_property(RoyaltyInfo, &royalty_info)?, - Self::rmrk_property(Metadata, &metadata)?, - Self::rmrk_property(Equipped, &false)?, - Self::rmrk_property(ResourcePriorities, &>::new())?, - Self::rmrk_property(NextResourceId, &(0 as RmrkResourceId))?, - Self::rmrk_property(PendingChildren, &PendingChildrenSet::new())?, - Self::rmrk_property(AssociatedBases, &BasesMap::new())?, + Self::encode_rmrk_property(TokenType, &NftType::Regular)?, + Self::encode_rmrk_property(Transferable, &transferable)?, + Self::encode_rmrk_property(PendingNftAccept, &None::)?, + Self::encode_rmrk_property(RoyaltyInfo, &royalty_info)?, + Self::encode_rmrk_property(Metadata, &metadata)?, + Self::encode_rmrk_property(Equipped, &false)?, + Self::encode_rmrk_property(ResourcePriorities, &>::new())?, + Self::encode_rmrk_property(NextResourceId, &(0 as RmrkResourceId))?, + Self::encode_rmrk_property(PendingChildren, &PendingChildrenSet::new())?, + Self::encode_rmrk_property(AssociatedBases, &BasesMap::new())?, ] .into_iter(), ) @@ -423,8 +609,23 @@ pub mod pallet { Ok(()) } - /// burn nft - #[transactional] + /// Burn an NFT, destroying it and its nested tokens up to the specified limit. + /// If the burning budget is exceeded, the transaction is reverted. + /// + /// This is the way to burn a nested token as well. + /// + /// For more information, see [`burn_recursively`](pallet_nonfungible::pallet::Pallet::burn_recursively). + /// + /// # Permissions: + /// * Token owner + /// + /// # Arguments: + /// - `origin`: sender of the transaction + /// - `collection_id`: RMRK ID of the collection in which the NFT to burn belongs to. + /// - `nft_id`: ID of the NFT to be destroyed. + /// - `max_burns`: Maximum number of tokens to burn, assuming nesting. The transaction + /// is reverted if there are more tokens to burn in the nesting tree than this number. + /// This is primarily a mechanism of transaction weight control. #[pallet::weight(>::burn_nft(*max_burns))] pub fn burn_nft( origin: OriginFor, @@ -458,14 +659,20 @@ pub mod pallet { Ok(()) } - /// Transfers a NFT from an Account or NFT A to another Account or NFT B + /// Transfer an NFT from an account/NFT A to another account/NFT B. + /// The token must be transferable. Nesting cannot occur deeper than the [`NESTING_BUDGET`]. /// - /// Parameters: + /// If the target owner is an NFT owned by another account, then the NFT will enter + /// the pending state and will have to be accepted by the other account. + /// + /// # Permissions: + /// - Token owner + /// + /// # Arguments: /// - `origin`: sender of the transaction - /// - `rmrk_collection_id`: collection id of the nft to be transferred - /// - `rmrk_nft_id`: nft id of the nft to be transferred - /// - `new_owner`: new owner of the nft which can be either an account or a NFT - #[transactional] + /// - `rmrk_collection_id`: RMRK ID of the collection of the NFT to be transferred. + /// - `rmrk_nft_id`: ID of the NFT to be transferred. + /// - `new_owner`: New owner of the nft which can be either an account or a NFT. 
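A small self-contained sketch of the `NESTING_BUDGET` restriction mentioned above. The pallet delegates this check to a budget type from `up_data_structs`, so the walk below is only an assumed illustration of the rule:

// Illustration only: walking up a child -> parent chain and refusing the operation
// once the configured nesting budget (5 levels in this pallet) is exhausted.
use std::collections::HashMap;

const NESTING_BUDGET: u32 = 5;

/// Returns the nesting depth of `token`, or None if the chain is deeper than the budget.
fn depth_within_budget(parents: &HashMap<u32, u32>, mut token: u32) -> Option<u32> {
    let mut depth = 0;
    while let Some(&parent) = parents.get(&token) {
        depth += 1;
        if depth > NESTING_BUDGET {
            return None; // too deep: the real extrinsic would fail here
        }
        token = parent;
    }
    Some(depth)
}

fn main() {
    // Token 4 is nested in 3, which is nested in 2, which is owned by an account (no parent).
    let parents = HashMap::from([(4, 3), (3, 2)]);
    assert_eq!(depth_within_budget(&parents, 4), Some(2));
    assert_eq!(depth_within_budget(&parents, 1), Some(0));
}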
#[pallet::weight(>::send())] pub fn send( origin: OriginFor, @@ -535,8 +742,8 @@ pub mod pallet { >::set_scoped_token_property( collection.id, nft_id, - PropertyScope::Rmrk, - Self::rmrk_property::>( + RMRK_SCOPE, + Self::encode_rmrk_property::>( PendingNftAccept, &Some((target_collection_id, target_nft_id.into())), )?, @@ -578,15 +785,19 @@ pub mod pallet { Ok(()) } - /// Accepts an NFT sent from another account to self or owned NFT + /// Accept an NFT sent from another account to self or an owned NFT. + /// + /// The NFT in question must be pending, and, thus, be [sent](`Pallet::send`) first. + /// + /// # Permissions: + /// - Token-owner-to-be /// - /// Parameters: + /// # Arguments: /// - `origin`: sender of the transaction - /// - `rmrk_collection_id`: collection id of the nft to be accepted - /// - `rmrk_nft_id`: nft id of the nft to be accepted - /// - `new_owner`: either origin's account ID or origin-owned NFT, whichever the NFT was - /// sent to - #[transactional] + /// - `rmrk_collection_id`: RMRK collection ID of the NFT to be accepted. + /// - `rmrk_nft_id`: ID of the NFT to be accepted. + /// - `new_owner`: Either the sender's account ID or a sender-owned NFT, + /// whichever the accepted NFT was sent to. #[pallet::weight(>::accept_nft())] pub fn accept_nft( origin: OriginFor, @@ -650,8 +861,8 @@ pub mod pallet { >::set_scoped_token_property( collection.id, nft_id, - PropertyScope::Rmrk, - Self::rmrk_property(PendingNftAccept, &None::)?, + RMRK_SCOPE, + Self::encode_rmrk_property(PendingNftAccept, &None::)?, )?; } @@ -665,13 +876,18 @@ pub mod pallet { Ok(()) } - /// Rejects an NFT sent from another account to self or owned NFT + /// Reject an NFT sent from another account to self or owned NFT. + /// The NFT in question will not be sent back and burnt instead. /// - /// Parameters: + /// The NFT in question must be pending, and, thus, be [sent](`Pallet::send`) first. + /// + /// # Permissions: + /// - Token-owner-to-be-not + /// + /// # Arguments: /// - `origin`: sender of the transaction - /// - `rmrk_collection_id`: collection id of the nft to be accepted - /// - `rmrk_nft_id`: nft id of the nft to be accepted - #[transactional] + /// - `rmrk_collection_id`: RMRK ID of the NFT to be rejected. + /// - `rmrk_nft_id`: ID of the NFT to be rejected. #[pallet::weight(>::reject_nft())] pub fn reject_nft( origin: OriginFor, @@ -724,8 +940,21 @@ pub mod pallet { Ok(()) } + /// Accept the addition of a newly created pending resource to an existing NFT. + /// + /// This transaction is needed when a resource is created and assigned to an NFT + /// by a non-owner, i.e. the collection issuer, with one of the + /// [`add_...` transactions](Pallet::add_basic_resource). + /// + /// # Permissions: + /// - Token owner + /// + /// # Arguments: + /// - `origin`: sender of the transaction + /// - `rmrk_collection_id`: RMRK collection ID of the NFT. + /// - `rmrk_nft_id`: ID of the NFT with a pending resource to be accepted. + /// - `resource_id`: ID of the newly created pending resource. /// accept the addition of a new resource to an existing NFT - #[transactional] #[pallet::weight(>::accept_resource())] pub fn accept_resource( origin: OriginFor, @@ -767,8 +996,19 @@ pub mod pallet { Ok(()) } - /// accept the removal of a resource of an existing NFT - #[transactional] + /// Accept the removal of a removal-pending resource from an NFT. + /// + /// This transaction is needed when a non-owner, i.e. 
the collection issuer, + /// requests a [removal](`Pallet::remove_resource`) of a resource from an NFT. + /// + /// # Permissions: + /// - Token owner + /// + /// # Arguments: + /// - `origin`: sender of the transaction + /// - `rmrk_collection_id`: RMRK collection ID of the NFT. + /// - `rmrk_nft_id`: ID of the NFT with a resource to be removed. + /// - `resource_id`: ID of the removal-pending resource. #[pallet::weight(>::accept_resource_removal())] pub fn accept_resource_removal( origin: OriginFor, @@ -795,17 +1035,17 @@ pub mod pallet { ensure!(cross_sender == nft_owner, >::NoPermission); - let resource_id_key = Self::rmrk_property_key(ResourceId(resource_id))?; + let resource_id_key = Self::get_scoped_property_key(ResourceId(resource_id))?; let resource_info = >::token_aux_property(( collection_id, nft_id, - PropertyScope::Rmrk, + RMRK_SCOPE, resource_id_key.clone(), )) .ok_or(>::ResourceDoesntExist)?; - let resource_info: RmrkResourceInfo = Self::decode_property(&resource_info)?; + let resource_info: RmrkResourceInfo = Self::decode_property_value(&resource_info)?; ensure!( resource_info.pending_removal, @@ -815,7 +1055,7 @@ pub mod pallet { >::remove_token_aux_property( collection_id, nft_id, - PropertyScope::Rmrk, + RMRK_SCOPE, resource_id_key, ); @@ -833,8 +1073,23 @@ pub mod pallet { Ok(()) } - /// set a custom value on an NFT - #[transactional] + /// Add or edit a custom user property, a key-value pair, describing the metadata + /// of a token or a collection, on either one of these. + /// + /// Note that in this proxy implementation many details regarding RMRK are stored + /// as scoped properties prefixed with "rmrk:", normally inaccessible + /// to external transactions and RPCs. + /// + /// # Permissions: + /// - Collection issuer - in case of collection property + /// - Token owner - in case of NFT property + /// + /// # Arguments: + /// - `origin`: sender of the transaction + /// - `rmrk_collection_id`: RMRK collection ID. + /// - `maybe_nft_id`: Optional ID of the NFT. If left empty, then the property is set for the collection. + /// - `key`: Key of the custom property to be referenced by. + /// - `value`: Value of the custom property to be stored. #[pallet::weight(>::set_property())] pub fn set_property( origin: OriginFor, @@ -863,8 +1118,8 @@ pub mod pallet { >::set_scoped_token_property( collection_id, token_id, - PropertyScope::Rmrk, - Self::rmrk_property(UserProperty(key.as_slice()), &value)?, + RMRK_SCOPE, + Self::encode_rmrk_property(UserProperty(key.as_slice()), &value)?, )?; } None => { @@ -877,8 +1132,8 @@ pub mod pallet { >::set_scoped_collection_property( collection_id, - PropertyScope::Rmrk, - Self::rmrk_property(UserProperty(key.as_slice()), &value)?, + RMRK_SCOPE, + Self::encode_rmrk_property(UserProperty(key.as_slice()), &value)?, )?; } } @@ -893,8 +1148,21 @@ pub mod pallet { Ok(()) } - /// set a different order of resource priority - #[transactional] + /// Set a different order of resource priorities for an NFT. Priorities can be used, + /// for example, for order of rendering. + /// + /// Note that the priorities are not updated automatically, and are an empty vector + /// by default. There is no pre-set definition for the order to be particular, + /// it can be interpreted arbitrarily use-case by use-case. + /// + /// # Permissions: + /// - Token owner + /// + /// # Arguments: + /// - `origin`: sender of the transaction + /// - `rmrk_collection_id`: RMRK collection ID of the NFT. + /// - `rmrk_nft_id`: ID of the NFT to rearrange resource priorities for. 
+ /// - `priorities`: Ordered vector of resource IDs. #[pallet::weight(>::set_priority())] pub fn set_priority( origin: OriginFor, @@ -920,8 +1188,8 @@ pub mod pallet { >::set_scoped_token_property( collection_id, nft_id, - PropertyScope::Rmrk, - Self::rmrk_property(ResourcePriorities, &priorities.into_inner())?, + RMRK_SCOPE, + Self::encode_rmrk_property(ResourcePriorities, &priorities.into_inner())?, )?; Self::deposit_event(Event::::PrioritySet { @@ -932,8 +1200,20 @@ pub mod pallet { Ok(()) } - /// Create basic resource - #[transactional] + /// Create and set/propose a basic resource for an NFT. + /// + /// A basic resource is the simplest, lacking a Base and anything that comes with it. + /// See RMRK docs for more information and examples. + /// + /// # Permissions: + /// - Collection issuer - if not the token owner, adding the resource will warrant + /// the owner's [acceptance](Pallet::accept_resource). + /// + /// # Arguments: + /// - `origin`: sender of the transaction + /// - `rmrk_collection_id`: RMRK collection ID of the NFT. + /// - `nft_id`: ID of the NFT to assign a resource to. + /// - `resource`: Data of the resource to be created. #[pallet::weight(>::add_basic_resource())] pub fn add_basic_resource( origin: OriginFor, @@ -962,8 +1242,20 @@ pub mod pallet { Ok(()) } - /// Create composable resource - #[transactional] + /// Create and set/propose a composable resource for an NFT. + /// + /// A composable resource links to a Base and has a subset of its Parts it is composed of. + /// See RMRK docs for more information and examples. + /// + /// # Permissions: + /// - Collection issuer - if not the token owner, adding the resource will warrant + /// the owner's [acceptance](Pallet::accept_resource). + /// + /// # Arguments: + /// - `origin`: sender of the transaction + /// - `rmrk_collection_id`: RMRK collection ID of the NFT. + /// - `nft_id`: ID of the NFT to assign a resource to. + /// - `resource`: Data of the resource to be created. #[pallet::weight(>::add_composable_resource())] pub fn add_composable_resource( origin: OriginFor, @@ -990,17 +1282,17 @@ pub mod pallet { >::try_mutate_token_aux_property( collection_id, nft_id.into(), - PropertyScope::Rmrk, - Self::rmrk_property_key(AssociatedBases)?, + RMRK_SCOPE, + Self::get_scoped_property_key(AssociatedBases)?, |value| -> DispatchResult { let mut bases: BasesMap = match value { - Some(value) => Self::decode_property(value)?, + Some(value) => Self::decode_property_value(value)?, None => BasesMap::new(), }; *bases.entry(base_id).or_insert(0) += 1; - *value = Some(Self::encode_property(&bases)?); + *value = Some(Self::encode_property_value(&bases)?); Ok(()) }, )?; @@ -1012,8 +1304,20 @@ pub mod pallet { Ok(()) } - /// Create slot resource - #[transactional] + /// Create and set/propose a slot resource for an NFT. + /// + /// A slot resource links to a Base and a slot ID in it which it can fit into. + /// See RMRK docs for more information and examples. + /// + /// # Permissions: + /// - Collection issuer - if not the token owner, adding the resource will warrant + /// the owner's [acceptance](Pallet::accept_resource). + /// + /// # Arguments: + /// - `origin`: sender of the transaction + /// - `rmrk_collection_id`: RMRK collection ID of the NFT. + /// - `nft_id`: ID of the NFT to assign a resource to. + /// - `resource`: Data of the resource to be created. 
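A hedged, standalone summary of the three `add_*_resource` flavours documented above and the acceptance rule they share (a resource added by the collection issuer to a token they do not own starts out pending). All names below are illustrative stand-ins, not the pallet's own types:

// Illustration only: the three resource flavours and the "pending unless added by
// the token owner" rule described in the docs above.
#[derive(Debug)]
enum ResourceKind {
    Basic,                                     // no Base involved
    Composable { base: u32, parts: Vec<u32> }, // linked to a Base and a subset of its Parts
    Slot { base: u32, slot: u32 },             // fits into one slot of a Base
}

struct NewResource {
    kind: ResourceKind,
    pending: bool,
}

fn add_resource(kind: ResourceKind, added_by_owner: bool) -> NewResource {
    NewResource {
        kind,
        // Added by the issuer to a token they do not own => needs acceptance.
        pending: !added_by_owner,
    }
}

fn main() {
    let slot = add_resource(ResourceKind::Slot { base: 7, slot: 1 }, false);
    assert!(slot.pending); // owner must call accept_resource before it becomes active
    let basic = add_resource(ResourceKind::Basic, true);
    assert!(!basic.pending);
    println!("{:?} {:?}", slot.kind, basic.kind);
}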
#[pallet::weight(>::add_slot_resource())] pub fn add_slot_resource( origin: OriginFor, @@ -1042,8 +1346,19 @@ pub mod pallet { Ok(()) } - /// remove resource - #[transactional] + /// Remove and erase a resource from an NFT. + /// + /// If the sender does not own the NFT, then it will be pending confirmation, + /// and will have to be [accepted](Pallet::accept_resource_removal) by the token owner. + /// + /// # Permissions + /// - Collection issuer + /// + /// # Arguments + /// - `origin`: sender of the transaction + /// - `rmrk_collection_id`: RMRK ID of a collection to which the NFT making use of the resource belongs to. + /// - `nft_id`: ID of the NFT with a resource to be removed. + /// - `resource_id`: ID of the resource to be removed. #[pallet::weight(>::remove_resource())] pub fn remove_resource( origin: OriginFor, @@ -1070,31 +1385,34 @@ pub mod pallet { } impl Pallet { - pub fn rmrk_property_key(rmrk_key: RmrkProperty) -> Result { + /// Transform one of possible RMRK keys into a byte key with a RMRK scope. + pub fn get_scoped_property_key(rmrk_key: RmrkProperty) -> Result { let key = rmrk_key.to_key::()?; - let scoped_key = PropertyScope::Rmrk + let scoped_key = RMRK_SCOPE .apply(key) .map_err(|_| >::RmrkPropertyKeyIsTooLong)?; Ok(scoped_key) } - // todo think about renaming these - pub fn rmrk_property( + /// Form a Unique property, transforming a RMRK key into bytes (without assigning the scope yet) + /// and encoding the value from an arbitrary type into bytes. + pub fn encode_rmrk_property( rmrk_key: RmrkProperty, value: &E, ) -> Result { let key = rmrk_key.to_key::()?; - let value = Self::encode_property(value)?; + let value = Self::encode_property_value(value)?; let property = Property { key, value }; Ok(property) } - pub fn encode_property>( + /// Encode property value from an arbitrary type into bytes for storage. + pub fn encode_property_value>( value: &E, ) -> Result, DispatchError> { let value = value @@ -1105,13 +1423,15 @@ impl Pallet { Ok(value) } - pub fn decode_property>( + /// Decode property value from bytes into an arbitrary type. + pub fn decode_property_value>( vec: &BoundedBytes, ) -> Result { vec.decode() .map_err(|_| >::UnableToDecodeRmrkData.into()) } + /// Change the limit of a property value byte vector. pub fn rebind(vec: &BoundedVec) -> Result, DispatchError> where BoundedVec: TryFrom>, @@ -1120,12 +1440,23 @@ impl Pallet { .map_err(|_| >::RmrkPropertyValueIsTooLong.into()) } + /// Initialize a new NFT collection with certain RMRK-scoped properties. + /// + /// See [`init_collection`](pallet_nonfungible::pallet::Pallet::init_collection) for more details. fn init_collection( sender: T::CrossAccountId, data: CreateCollectionData, properties: impl Iterator, ) -> Result { - let collection_id = >::init_collection(sender, data, true); + let collection_id = >::init_collection( + sender.clone(), + sender, + data, + up_data_structs::CollectionFlags { + external: true, + ..Default::default() + }, + ); if let Err(DispatchError::Arithmetic(_)) = &collection_id { return Err(>::NoAvailableCollectionId.into()); @@ -1133,13 +1464,16 @@ impl Pallet { >::set_scoped_collection_properties( collection_id?, - PropertyScope::Rmrk, + RMRK_SCOPE, properties, )?; collection_id } + /// Mint a new NFT with certain RMRK-scoped properties. Sender must be the collection owner. + /// + /// See [`create_item`](pallet_nonfungible::pallet::Pallet::create_item) for more details. 
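The `encode_property_value`/`decode_property_value` helpers above amount to a SCALE round trip into the property byte vector. A minimal sketch with the `codec` crate (parity-scale-codec, assuming its `derive` feature is enabled), leaving out the bounded-length check and the pallet error mapping; the struct is a made-up stand-in:

// Illustration only: SCALE-encoding an arbitrary value into bytes for property storage
// and decoding it back, as encode/decode_property_value do (minus bound checks).
use codec::{Decode, Encode};

#[derive(Encode, Decode, PartialEq, Debug)]
struct RoyaltyInfo {
    recipient: u64, // stand-in for an AccountId
    part_per_mill: u32,
}

fn main() {
    let original = RoyaltyInfo { recipient: 42, part_per_mill: 25_000 };

    // What would be written into the scoped property value.
    let bytes: Vec<u8> = original.encode();

    // What decode_property_value recovers when reading the property back.
    let decoded = RoyaltyInfo::decode(&mut bytes.as_slice()).expect("valid encoding");
    assert_eq!(decoded, original);
}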
pub fn create_nft( sender: &T::CrossAccountId, owner: &T::CrossAccountId, @@ -1157,16 +1491,14 @@ impl Pallet { let nft_id = >::current_token_id(collection.id); - >::set_scoped_token_properties( - collection.id, - nft_id, - PropertyScope::Rmrk, - properties, - )?; + >::set_scoped_token_properties(collection.id, nft_id, RMRK_SCOPE, properties)?; Ok(nft_id) } + /// Burn an NFT, along with its nested children, limited by `max_burns`. The sender must be the token owner. + /// + /// See [`burn_recursively`](pallet_nonfungible::pallet::Pallet::burn_recursively) for more details. fn destroy_nft( sender: T::CrossAccountId, collection_id: CollectionId, @@ -1207,48 +1539,54 @@ impl Pallet { ) } + /// Add a sent token pending acceptance to the target owning token as a property. fn insert_pending_child( target: (CollectionId, TokenId), child: (RmrkCollectionId, RmrkNftId), ) -> DispatchResult { - Self::mutate_pending_child(target, |pending_children| { + Self::mutate_pending_children(target, |pending_children| { pending_children.insert(child); }) } + /// Remove a sent token pending acceptance from the target token's properties. fn remove_pending_child( target: (CollectionId, TokenId), child: (RmrkCollectionId, RmrkNftId), ) -> DispatchResult { - Self::mutate_pending_child(target, |pending_children| { + Self::mutate_pending_children(target, |pending_children| { pending_children.remove(&child); }) } - fn mutate_pending_child( + /// Apply a mutation to the property of a token containing sent tokens + /// that are currently pending acceptance. + fn mutate_pending_children( (target_collection_id, target_nft_id): (CollectionId, TokenId), f: impl FnOnce(&mut PendingChildrenSet), ) -> DispatchResult { >::try_mutate_token_aux_property( target_collection_id, target_nft_id, - PropertyScope::Rmrk, - Self::rmrk_property_key(PendingChildren)?, + RMRK_SCOPE, + Self::get_scoped_property_key(PendingChildren)?, |pending_children| -> DispatchResult { let mut map = match pending_children { - Some(map) => Self::decode_property(map)?, + Some(map) => Self::decode_property_value(map)?, None => PendingChildrenSet::new(), }; f(&mut map); - *pending_children = Some(Self::encode_property(&map)?); + *pending_children = Some(Self::encode_property_value(&map)?); Ok(()) }, ) } + /// Get an iterator from a token's property containing tokens sent to it + /// that are currently pending acceptance. fn iterate_pending_children( collection_id: CollectionId, nft_id: TokenId, @@ -1256,18 +1594,22 @@ impl Pallet { let property = >::token_aux_property(( collection_id, nft_id, - PropertyScope::Rmrk, - Self::rmrk_property_key(PendingChildren)?, + RMRK_SCOPE, + Self::get_scoped_property_key(PendingChildren)?, )); let pending_children = match property { - Some(map) => Self::decode_property(&map)?, + Some(map) => Self::decode_property_value(&map)?, None => PendingChildrenSet::new(), }; Ok(pending_children.into_iter()) } + /// Get incremented resource ID from within an NFT's properties and store the new latest ID. + /// Thus, the returned resource ID should be used. + /// + /// Resource IDs are unique only across an NFT. 
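The counter behaviour just described (read the per-NFT `NextResourceId`, hand out the current value, persist the increment) sketched over an in-memory map standing in for the token property; an assumed illustration of the function defined below:

// Illustration only: resource IDs are a per-NFT counter, so they are unique within
// one NFT but not across the collection. A HashMap stands in for the NextResourceId
// scoped token property here.
use std::collections::HashMap;

type NftKey = (u32, u32); // (collection_id, token_id)

fn acquire_next_resource_id(next_ids: &mut HashMap<NftKey, u32>, nft: NftKey) -> u32 {
    let next = next_ids.entry(nft).or_insert(0);
    let id = *next; // the ID to use for the new resource
    *next = id + 1; // persisted as the new NextResourceId
    id
}

fn main() {
    let mut next_ids = HashMap::new();
    assert_eq!(acquire_next_resource_id(&mut next_ids, (1, 1)), 0);
    assert_eq!(acquire_next_resource_id(&mut next_ids, (1, 1)), 1);
    // A different NFT starts its own sequence.
    assert_eq!(acquire_next_resource_id(&mut next_ids, (1, 2)), 0);
}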
fn acquire_next_resource_id( collection_id: CollectionId, nft_id: TokenId, @@ -1282,13 +1624,15 @@ impl Pallet { >::set_scoped_token_property( collection_id, nft_id, - PropertyScope::Rmrk, - Self::rmrk_property(NextResourceId, &next_id)?, + RMRK_SCOPE, + Self::encode_rmrk_property(NextResourceId, &next_id)?, )?; Ok(resource_id) } + /// Create and add a resource for a regular NFT, mark it as pending if the sender + /// is not the token owner. The sender must be the collection owner. fn resource_add( sender: T::AccountId, collection_id: CollectionId, @@ -1319,10 +1663,10 @@ impl Pallet { >::try_mutate_token_aux_property( collection_id, nft_id, - PropertyScope::Rmrk, - Self::rmrk_property_key(ResourceId(id))?, + RMRK_SCOPE, + Self::get_scoped_property_key(ResourceId(id))?, |value| -> DispatchResult { - *value = Some(Self::encode_property(&resource_info)?); + *value = Some(Self::encode_property_value(&resource_info)?); Ok(()) }, @@ -1331,6 +1675,8 @@ impl Pallet { Ok(id) } + /// Designate a resource for erasure from an NFT, and remove it if the sender is the token owner. + /// The sender must be the collection owner. fn resource_remove( sender: T::AccountId, collection_id: CollectionId, @@ -1341,18 +1687,17 @@ impl Pallet { Self::get_typed_nft_collection(collection_id, misc::CollectionType::Regular)?; ensure!(collection.owner == sender, Error::::NoPermission); - let resource_id_key = Self::rmrk_property_key(ResourceId(resource_id))?; - let scope = PropertyScope::Rmrk; + let resource_id_key = Self::get_scoped_property_key(ResourceId(resource_id))?; let resource = >::token_aux_property(( collection_id, nft_id, - scope, + RMRK_SCOPE, resource_id_key.clone(), )) .ok_or(>::ResourceDoesntExist)?; - let resource_info: RmrkResourceInfo = Self::decode_property(&resource)?; + let resource_info: RmrkResourceInfo = Self::decode_property_value(&resource)?; let budget = up_data_structs::budget::Value::new(NESTING_BUDGET); let topmost_owner = @@ -1363,8 +1708,8 @@ impl Pallet { >::remove_token_aux_property( collection_id, nft_id, - PropertyScope::Rmrk, - Self::rmrk_property_key(ResourceId(resource_id))?, + RMRK_SCOPE, + Self::get_scoped_property_key(ResourceId(resource_id))?, ); if let RmrkResourceTypes::Composable(resource) = resource_info.resource { @@ -1383,6 +1728,8 @@ impl Pallet { Ok(()) } + /// Remove a Base ID from an NFT if they are associated. + /// The Base itself is deleted if the number of associated NFTs reaches 0. fn remove_associated_base_id( collection_id: CollectionId, nft_id: TokenId, @@ -1391,11 +1738,11 @@ impl Pallet { >::try_mutate_token_aux_property( collection_id, nft_id, - PropertyScope::Rmrk, - Self::rmrk_property_key(AssociatedBases)?, + RMRK_SCOPE, + Self::get_scoped_property_key(AssociatedBases)?, |value| -> DispatchResult { let mut bases: BasesMap = match value { - Some(value) => Self::decode_property(value)?, + Some(value) => Self::decode_property_value(value)?, None => BasesMap::new(), }; @@ -1407,12 +1754,13 @@ impl Pallet { } } - *value = Some(Self::encode_property(&bases)?); + *value = Some(Self::encode_property_value(&bases)?); Ok(()) }, ) } + /// Apply a mutation to a resource stored in the token properties of an NFT. 
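The same read–decode–mutate–encode–write pattern also backs the pending-children helpers above. A simplified standalone version, with a `HashMap` standing in for token aux properties and a cut-down resource struct; assumptions only, not the pallet's storage API:

// Illustration only: load the raw property bytes, decode, let a closure edit the
// value, then encode and write it back, as try_mutate_resource_info does below.
use codec::{Decode, Encode};
use std::collections::HashMap;

#[derive(Encode, Decode, Default)]
struct ResourceInfo {
    pending: bool,
    pending_removal: bool,
}

fn try_mutate<F>(store: &mut HashMap<u32, Vec<u8>>, key: u32, f: F) -> Result<(), &'static str>
where
    F: FnOnce(&mut ResourceInfo),
{
    let bytes = store.get(&key).ok_or("ResourceDoesntExist")?;
    let mut info =
        ResourceInfo::decode(&mut bytes.as_slice()).map_err(|_| "UnableToDecodeRmrkData")?;
    f(&mut info);
    store.insert(key, info.encode());
    Ok(())
}

fn main() {
    let mut store = HashMap::from([(0u32, ResourceInfo::default().encode())]);
    try_mutate(&mut store, 0, |info| info.pending_removal = true).unwrap();
    let updated = ResourceInfo::decode(&mut store[&0].as_slice()).unwrap();
    assert!(updated.pending_removal);
}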
fn try_mutate_resource_info( collection_id: CollectionId, nft_id: TokenId, @@ -1422,15 +1770,15 @@ impl Pallet { >::try_mutate_token_aux_property( collection_id, nft_id, - PropertyScope::Rmrk, - Self::rmrk_property_key(ResourceId(resource_id))?, + RMRK_SCOPE, + Self::get_scoped_property_key(ResourceId(resource_id))?, |value| match value { Some(value) => { - let mut resource_info: RmrkResourceInfo = Self::decode_property(value)?; + let mut resource_info: RmrkResourceInfo = Self::decode_property_value(value)?; f(&mut resource_info)?; - *value = Self::encode_property(&resource_info)?; + *value = Self::encode_property_value(&resource_info)?; Ok(()) } @@ -1439,6 +1787,7 @@ impl Pallet { ) } + /// Change the owner of an NFT collection, ensuring that the sender is the current owner. fn change_collection_owner( collection_id: CollectionId, collection_type: misc::CollectionType, @@ -1454,6 +1803,7 @@ impl Pallet { collection.save() } + /// Ensure that an account is the collection owner/issuer, return an error if not. pub fn check_collection_owner( collection: &NonfungibleHandle, account: &T::CrossAccountId, @@ -1463,10 +1813,12 @@ impl Pallet { .map_err(Self::map_unique_err_to_proxy) } + /// Get the latest yet-unused RMRK collection index from the storage. pub fn last_collection_idx() -> RmrkCollectionId { >::get() } + /// Get a mapping from a RMRK collection ID to its corresponding Unique collection ID. pub fn unique_collection_id( rmrk_collection_id: RmrkCollectionId, ) -> Result { @@ -1474,12 +1826,14 @@ impl Pallet { .map_err(|_| >::CollectionUnknown.into()) } + /// Get a mapping from a Unique collection ID to its RMRK collection ID counterpart, if it exists. pub fn rmrk_collection_id( unique_collection_id: CollectionId, ) -> Result { Self::get_collection_property_decoded(unique_collection_id, RmrkInternalCollectionId) } + /// Fetch a Unique NFT collection. pub fn get_nft_collection( collection_id: CollectionId, ) -> Result, DispatchError> { @@ -1492,29 +1846,35 @@ impl Pallet { } } + /// Check if an NFT collection with such an ID exists. pub fn collection_exists(collection_id: CollectionId) -> bool { >::try_get(collection_id).is_ok() } + /// Fetch and decode a RMRK-scoped collection property value in bytes. pub fn get_collection_property( collection_id: CollectionId, key: RmrkProperty, ) -> Result { let collection_property = >::collection_properties(collection_id) - .get(&Self::rmrk_property_key(key)?) + .get(&Self::get_scoped_property_key(key)?) .ok_or(>::CollectionUnknown)? .clone(); Ok(collection_property) } + /// Fetch a RMRK-scoped collection property and decode it from bytes into an appropriate type. pub fn get_collection_property_decoded( collection_id: CollectionId, key: RmrkProperty, ) -> Result { - Self::decode_property(&Self::get_collection_property(collection_id, key)?) + Self::decode_property_value(&Self::get_collection_property(collection_id, key)?) } + /// Get the type of a collection stored as a scoped property. + /// + /// RMRK Core proxy differentiates between regular collections as well as RMRK Bases as collections. pub fn get_collection_type( collection_id: CollectionId, ) -> Result { @@ -1527,6 +1887,8 @@ impl Pallet { }) } + /// Ensure that the type of the collection equals the provided type, + /// otherwise return an error. pub fn ensure_collection_type( collection_id: CollectionId, collection_type: misc::CollectionType, @@ -1540,6 +1902,7 @@ impl Pallet { Ok(()) } + /// Fetch an NFT collection, but make sure it has the appropriate type. 
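The ID mapping realised by `unique_collection_id` and `rmrk_collection_id` above can be pictured with a standalone sketch: a forward map from RMRK IDs (counted from 0) to Unique collection IDs, plus a reverse entry standing in for the `RmrkInternalCollectionId` property kept on the Unique collection. Names and storage are illustrative only:

// Illustration only: RMRK collection IDs map to Unique collection IDs one way via a
// storage map; the reverse direction is recovered from a property on the collection.
use std::collections::HashMap;

struct Mapping {
    forward: HashMap<u32, u32>, // RMRK id -> Unique id (the UniqueCollectionId map)
    reverse: HashMap<u32, u32>, // Unique id -> RMRK id (stand-in for the scoped property)
    next_rmrk_id: u32,
}

impl Mapping {
    fn register(&mut self, unique_id: u32) -> u32 {
        let rmrk_id = self.next_rmrk_id;
        self.next_rmrk_id += 1;
        self.forward.insert(rmrk_id, unique_id);
        self.reverse.insert(unique_id, rmrk_id);
        rmrk_id
    }

    fn unique_collection_id(&self, rmrk_id: u32) -> Option<u32> {
        self.forward.get(&rmrk_id).copied()
    }
}

fn main() {
    let mut m = Mapping { forward: HashMap::new(), reverse: HashMap::new(), next_rmrk_id: 0 };
    let rmrk_id = m.register(42); // Unique collection 42 becomes RMRK collection 0
    assert_eq!(rmrk_id, 0);
    assert_eq!(m.unique_collection_id(0), Some(42));
    assert_eq!(m.reverse.get(&42), Some(&0));
}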
pub fn get_typed_nft_collection( collection_id: CollectionId, collection_type: misc::CollectionType, @@ -1549,6 +1912,8 @@ impl Pallet { Self::get_nft_collection(collection_id) } + /// Same as [`get_typed_nft_collection`](crate::pallet::Pallet::get_typed_nft_collection), + /// but also return the Unique collection ID. pub fn get_typed_nft_collection_mapped( rmrk_collection_id: RmrkCollectionId, collection_type: misc::CollectionType, @@ -1563,31 +1928,37 @@ impl Pallet { Ok((collection, unique_collection_id)) } + /// Fetch and decode a RMRK-scoped NFT property value in bytes. pub fn get_nft_property( collection_id: CollectionId, nft_id: TokenId, key: RmrkProperty, ) -> Result { let nft_property = >::token_properties((collection_id, nft_id)) - .get(&Self::rmrk_property_key(key)?) + .get(&Self::get_scoped_property_key(key)?) .ok_or(>::RmrkPropertyIsNotFound)? .clone(); Ok(nft_property) } + /// Fetch a RMRK-scoped NFT property and decode it from bytes into an appropriate type. pub fn get_nft_property_decoded( collection_id: CollectionId, nft_id: TokenId, key: RmrkProperty, ) -> Result { - Self::decode_property(&Self::get_nft_property(collection_id, nft_id, key)?) + Self::decode_property_value(&Self::get_nft_property(collection_id, nft_id, key)?) } + /// Check that an NFT exists. pub fn nft_exists(collection_id: CollectionId, nft_id: TokenId) -> bool { >::contains_key((collection_id, nft_id)) } + /// Get the type of an NFT stored as a scoped property. + /// + /// RMRK Core proxy differentiates between regular NFTs, and RMRK Parts and Themes. pub fn get_nft_type( collection_id: CollectionId, token_id: TokenId, @@ -1596,6 +1967,7 @@ impl Pallet { .map_err(|_| >::NoAvailableNftId.into()) } + /// Ensure that the type of the NFT equals the provided type, otherwise return an error. pub fn ensure_nft_type( collection_id: CollectionId, token_id: TokenId, @@ -1607,6 +1979,8 @@ impl Pallet { Ok(()) } + /// Ensure that an account is the owner of the token, either directly + /// or at the top of the nesting hierarchy; return an error if it is not. pub fn ensure_nft_owner( collection_id: CollectionId, token_id: TokenId, @@ -1627,6 +2001,8 @@ impl Pallet { Ok(()) } + /// Fetch non-scoped properties of a collection or a token that match the filter keys supplied, + /// or, if None are provided, return all non-scoped properties. pub fn filter_user_properties( collection_id: CollectionId, token_id: Option, @@ -1672,6 +2048,8 @@ impl Pallet { }) } + /// Get all non-scoped properties from a collection or a token, and apply some transformation, + /// supplied by `mapper`, to each key-value pair. pub fn iterate_user_properties( collection_id: CollectionId, token_id: Option, @@ -1699,6 +2077,7 @@ impl Pallet { Ok(properties) } + /// Match Unique errors to RMRK's own and return the RMRK error if a match is successful. fn map_unique_err_to_proxy(err: DispatchError) -> DispatchError { map_unique_err_to_proxy! { match err { diff --git a/pallets/proxy-rmrk-core/src/misc.rs b/pallets/proxy-rmrk-core/src/misc.rs index 0c911f96f3..9ff6291dce 100644 --- a/pallets/proxy-rmrk-core/src/misc.rs +++ b/pallets/proxy-rmrk-core/src/misc.rs @@ -14,9 +14,13 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . +//! Miscellaneous helpers and utilities used by the proxy pallet. + use super::*; use codec::{Encode, Decode, Error}; +/// Match an error to a provided pattern matcher and get +/// the corresponding error of another type if a match is successful. 
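A self-contained sketch of the idea behind the macro defined below: translate specific variants of one error type into another and pass everything else through. The real macro compares a `DispatchError` against Unique pallet errors; the enums here are simplified stand-ins:

// Illustration only: mapping one set of errors onto another, in the spirit of
// map_unique_err_to_proxy!. Enum names are made up for the sketch.
#[derive(Debug, PartialEq)]
enum UniqueErr {
    CollectionNotFound,
    NoPermission,
    Other,
}

#[derive(Debug, PartialEq)]
enum ProxyErr {
    CollectionUnknown,
    NoPermission,
    Unmapped(UniqueErr),
}

macro_rules! map_err {
    (match $err:ident { $($from:ident => $to:ident),+ $(,)? }) => {
        match $err {
            $(UniqueErr::$from => ProxyErr::$to,)+
            other => ProxyErr::Unmapped(other),
        }
    };
}

fn translate(err: UniqueErr) -> ProxyErr {
    map_err!(match err {
        CollectionNotFound => CollectionUnknown,
        NoPermission => NoPermission,
    })
}

fn main() {
    assert_eq!(translate(UniqueErr::CollectionNotFound), ProxyErr::CollectionUnknown);
    assert_eq!(translate(UniqueErr::Other), ProxyErr::Unmapped(UniqueErr::Other));
}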
#[macro_export] macro_rules! map_unique_err_to_proxy { (match $err:ident { $($unique_err_ty:ident :: $unique_err:ident => $proxy_err:ident),+ $(,)? }) => { @@ -30,8 +34,10 @@ macro_rules! map_unique_err_to_proxy { }; } -// Utilize the RmrkCore pallet for access to Runtime errors. +/// Interface to decode some serialized bytes into an arbitrary type `T`, +/// preferably if these bytes were originally encoded from `T`. pub trait RmrkDecode { + /// Try to decode self into an arbitrary type `T`. fn decode(&self) -> Result; } @@ -43,8 +49,9 @@ impl RmrkDecode for BoundedVec { } } -// Utilize the RmrkCore pallet for access to Runtime errors. +/// Interface to "rebind" - change the limit of a bounded byte vector. pub trait RmrkRebind { + /// Try to change the limit of a bounded byte vector. fn rebind(&self) -> Result, Error>; } @@ -58,12 +65,16 @@ where } } +/// RMRK Base shares functionality with a regular collection, and is thus +/// stored as one, but they are used for different purposes and need to be differentiated. #[derive(Encode, Decode, PartialEq, Eq)] pub enum CollectionType { Regular, Base, } +/// RMRK Base, being stored as a collection, can have different kinds of tokens, +/// all except the `Regular` type, which is attributed to `Regular` collection. #[derive(Encode, Decode, PartialEq, Eq)] pub enum NftType { Regular, diff --git a/pallets/proxy-rmrk-core/src/property.rs b/pallets/proxy-rmrk-core/src/property.rs index 1d80b03d6a..a8e08552d7 100644 --- a/pallets/proxy-rmrk-core/src/property.rs +++ b/pallets/proxy-rmrk-core/src/property.rs @@ -14,13 +14,21 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . +//! Details of storing and handling RMRK properties. + use super::*; use up_data_structs::PropertyScope; use core::convert::AsRef; +/// Property prefix for storing resources. pub const RESOURCE_ID_PREFIX: &str = "rsid-"; +/// Property prefix for storing custom user-defined properties. pub const USER_PROPERTY_PREFIX: &str = "userprop-"; +/// Property scope for RMRK, used to signify that this property +/// was created and is used by RMRK. +pub const RMRK_SCOPE: PropertyScope = PropertyScope::Rmrk; +/// Predefined RMRK property keys for storage of RMRK data format on the Unique chain. pub enum RmrkProperty<'r> { Metadata, CollectionType, @@ -49,6 +57,7 @@ pub enum RmrkProperty<'r> { } impl<'r> RmrkProperty<'r> { + /// Convert a predefined RMRK property key enum into string bytes. pub fn to_key(self) -> Result> { fn get_bytes>(container: &T) -> &[u8] { container.as_ref() @@ -94,9 +103,10 @@ impl<'r> RmrkProperty<'r> { } } +/// Strip a property key of its prefix and RMRK scope. pub fn strip_key_prefix(key: &PropertyKey, prefix: &str) -> Option { let key_prefix = PropertyKey::try_from(prefix.as_bytes().to_vec()).ok()?; - let key_prefix = PropertyScope::Rmrk.apply(key_prefix).ok()?; + let key_prefix = RMRK_SCOPE.apply(key_prefix).ok()?; key.as_slice() .strip_prefix(key_prefix.as_slice())? @@ -105,6 +115,7 @@ pub fn strip_key_prefix(key: &PropertyKey, prefix: &str) -> Option .ok() } +/// Check that the key has the prefix. pub fn is_valid_key_prefix(key: &PropertyKey, prefix: &str) -> bool { strip_key_prefix(key, prefix).is_some() } diff --git a/pallets/proxy-rmrk-core/src/rpc.rs b/pallets/proxy-rmrk-core/src/rpc.rs index df38f71b1f..d82a8acdb1 100644 --- a/pallets/proxy-rmrk-core/src/rpc.rs +++ b/pallets/proxy-rmrk-core/src/rpc.rs @@ -1,9 +1,29 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. 
+// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +//! Realizations of RMRK RPCs (remote procedure calls) related to the Core pallet. + use super::*; +/// Get the latest created collection ID. pub fn last_collection_idx() -> Result { Ok(>::last_collection_idx()) } +/// Get collection info by ID. pub fn collection_by_id( collection_id: RmrkCollectionId, ) -> Result>, DispatchError> { @@ -29,6 +49,7 @@ pub fn collection_by_id( })) } +/// Get NFT info by collection and NFT IDs. pub fn nft_by_id( collection_id: RmrkCollectionId, nft_by_id: RmrkNftId, @@ -83,6 +104,7 @@ pub fn nft_by_id( })) } +/// Get tokens owned by an account in a collection. pub fn account_tokens( account_id: T::AccountId, collection_id: RmrkCollectionId, @@ -116,6 +138,7 @@ pub fn account_tokens( Ok(tokens) } +/// Get tokens nested in an NFT - its direct children (not the children's children). pub fn nft_children( collection_id: RmrkCollectionId, nft_id: RmrkNftId, @@ -152,6 +175,7 @@ pub fn nft_children( ) } +/// Get collection properties, created by the user - not the proxy-specific properties. pub fn collection_properties( collection_id: RmrkCollectionId, filter_keys: Option>, @@ -174,6 +198,7 @@ pub fn collection_properties( Ok(properties) } +/// Get NFT properties, created by the user - not the proxy-specific properties. pub fn nft_properties( collection_id: RmrkCollectionId, nft_id: RmrkNftId, @@ -199,6 +224,7 @@ pub fn nft_properties( Ok(properties) } +/// Get full information on each resource of an NFT, including pending. pub fn nft_resources( collection_id: RmrkCollectionId, nft_id: RmrkNftId, @@ -226,7 +252,7 @@ pub fn nft_resources( return None; } - let resource_info: RmrkResourceInfo = >::decode_property(&value).ok()?; + let resource_info: RmrkResourceInfo = >::decode_property_value(&value).ok()?; Some(resource_info) }) @@ -235,6 +261,7 @@ pub fn nft_resources( Ok(resources) } +/// Get the priority of a resource in an NFT. pub fn nft_resource_priority( collection_id: RmrkCollectionId, nft_id: RmrkNftId, diff --git a/pallets/proxy-rmrk-core/src/weights.rs b/pallets/proxy-rmrk-core/src/weights.rs index 033440cf43..6220185947 100644 --- a/pallets/proxy-rmrk-core/src/weights.rs +++ b/pallets/proxy-rmrk-core/src/weights.rs @@ -3,7 +3,7 @@ //! Autogenerated weights for pallet_proxy_rmrk_core //! //! THIS FILE WAS AUTO-GENERATED USING THE SUBSTRATE BENCHMARK CLI VERSION 4.0.0-dev -//! DATE: 2022-07-01, STEPS: `50`, REPEAT: 80, LOW RANGE: `[]`, HIGH RANGE: `[]` +//! DATE: 2022-08-15, STEPS: `50`, REPEAT: 80, LOW RANGE: `[]`, HIGH RANGE: `[]` //! 
EXECUTION: None, WASM-EXECUTION: Compiled, CHAIN: None, DB CACHE: 1024 // Executed Command: @@ -64,9 +64,9 @@ impl WeightInfo for SubstrateWeight { // Storage: Common CollectionById (r:0 w:1) // Storage: RmrkCore UniqueCollectionId (r:0 w:1) fn create_collection() -> Weight { - (49_365_000 as Weight) - .saturating_add(T::DbWeight::get().reads(5 as Weight)) - .saturating_add(T::DbWeight::get().writes(8 as Weight)) + Weight::from_ref_time(49_754_000) + .saturating_add(T::DbWeight::get().reads(5 as u64)) + .saturating_add(T::DbWeight::get().writes(8 as u64)) } // Storage: RmrkCore UniqueCollectionId (r:1 w:0) // Storage: Common CollectionProperties (r:1 w:1) @@ -77,17 +77,17 @@ impl WeightInfo for SubstrateWeight { // Storage: Nonfungible TokensBurnt (r:0 w:1) // Storage: Common AdminAmount (r:0 w:1) fn destroy_collection() -> Weight { - (49_903_000 as Weight) - .saturating_add(T::DbWeight::get().reads(5 as Weight)) - .saturating_add(T::DbWeight::get().writes(6 as Weight)) + Weight::from_ref_time(48_588_000) + .saturating_add(T::DbWeight::get().reads(5 as u64)) + .saturating_add(T::DbWeight::get().writes(6 as u64)) } // Storage: RmrkCore UniqueCollectionId (r:1 w:0) // Storage: Common CollectionById (r:1 w:1) // Storage: Common CollectionProperties (r:1 w:0) fn change_collection_issuer() -> Weight { - (26_134_000 as Weight) - .saturating_add(T::DbWeight::get().reads(3 as Weight)) - .saturating_add(T::DbWeight::get().writes(1 as Weight)) + Weight::from_ref_time(25_054_000) + .saturating_add(T::DbWeight::get().reads(3 as u64)) + .saturating_add(T::DbWeight::get().writes(1 as u64)) } // Storage: RmrkCore UniqueCollectionId (r:1 w:0) // Storage: Common CollectionProperties (r:1 w:0) @@ -95,9 +95,9 @@ impl WeightInfo for SubstrateWeight { // Storage: Nonfungible TokensMinted (r:1 w:0) // Storage: Nonfungible TokensBurnt (r:1 w:0) fn lock_collection() -> Weight { - (28_020_000 as Weight) - .saturating_add(T::DbWeight::get().reads(5 as Weight)) - .saturating_add(T::DbWeight::get().writes(1 as Weight)) + Weight::from_ref_time(26_483_000) + .saturating_add(T::DbWeight::get().reads(5 as u64)) + .saturating_add(T::DbWeight::get().writes(1 as u64)) } // Storage: RmrkCore UniqueCollectionId (r:1 w:0) // Storage: Common CollectionProperties (r:1 w:0) @@ -109,13 +109,13 @@ impl WeightInfo for SubstrateWeight { // Storage: Nonfungible Owned (r:0 w:1) // Storage: Nonfungible TokenAuxProperties (r:2 w:2) fn mint_nft(b: u32, ) -> Weight { - (67_476_000 as Weight) - // Standard Error: 19_000 - .saturating_add((12_373_000 as Weight).saturating_mul(b as Weight)) - .saturating_add(T::DbWeight::get().reads(6 as Weight)) - .saturating_add(T::DbWeight::get().reads((1 as Weight).saturating_mul(b as Weight))) - .saturating_add(T::DbWeight::get().writes(5 as Weight)) - .saturating_add(T::DbWeight::get().writes((1 as Weight).saturating_mul(b as Weight))) + Weight::from_ref_time(62_419_000) + // Standard Error: 7_000 + .saturating_add(Weight::from_ref_time(12_325_000).saturating_mul(b as u64)) + .saturating_add(T::DbWeight::get().reads(6 as u64)) + .saturating_add(T::DbWeight::get().reads((1 as u64).saturating_mul(b as u64))) + .saturating_add(T::DbWeight::get().writes(5 as u64)) + .saturating_add(T::DbWeight::get().writes((1 as u64).saturating_mul(b as u64))) } // Storage: RmrkCore UniqueCollectionId (r:1 w:0) // Storage: Common CollectionProperties (r:1 w:0) @@ -128,13 +128,13 @@ impl WeightInfo for SubstrateWeight { // Storage: Nonfungible Owned (r:0 w:1) // Storage: Nonfungible TokenProperties (r:0 w:1) fn burn_nft(b: 
u32, ) -> Weight { - (0 as Weight) - // Standard Error: 1_500_000 - .saturating_add((300_520_000 as Weight).saturating_mul(b as Weight)) - .saturating_add(T::DbWeight::get().reads(9 as Weight)) - .saturating_add(T::DbWeight::get().reads((4 as Weight).saturating_mul(b as Weight))) - .saturating_add(T::DbWeight::get().writes(6 as Weight)) - .saturating_add(T::DbWeight::get().writes((4 as Weight).saturating_mul(b as Weight))) + (Weight::from_ref_time(0)) + // Standard Error: 1_488_000 + .saturating_add(Weight::from_ref_time(298_367_000).saturating_mul(b as u64)) + .saturating_add(T::DbWeight::get().reads(9 as u64)) + .saturating_add(T::DbWeight::get().reads((4 as u64).saturating_mul(b as u64))) + .saturating_add(T::DbWeight::get().writes(6 as u64)) + .saturating_add(T::DbWeight::get().writes((4 as u64).saturating_mul(b as u64))) } // Storage: RmrkCore UniqueCollectionId (r:1 w:0) // Storage: Common CollectionProperties (r:1 w:0) @@ -146,9 +146,9 @@ impl WeightInfo for SubstrateWeight { // Storage: Nonfungible TokenChildren (r:0 w:1) // Storage: Nonfungible Owned (r:0 w:2) fn send() -> Weight { - (84_023_000 as Weight) - .saturating_add(T::DbWeight::get().reads(12 as Weight)) - .saturating_add(T::DbWeight::get().writes(6 as Weight)) + Weight::from_ref_time(81_942_000) + .saturating_add(T::DbWeight::get().reads(12 as u64)) + .saturating_add(T::DbWeight::get().writes(6 as u64)) } // Storage: RmrkCore UniqueCollectionId (r:2 w:0) // Storage: Common CollectionProperties (r:1 w:0) @@ -161,9 +161,9 @@ impl WeightInfo for SubstrateWeight { // Storage: Nonfungible TokenChildren (r:0 w:1) // Storage: Nonfungible Owned (r:0 w:2) fn accept_nft() -> Weight { - (98_502_000 as Weight) - .saturating_add(T::DbWeight::get().reads(16 as Weight)) - .saturating_add(T::DbWeight::get().writes(8 as Weight)) + Weight::from_ref_time(97_925_000) + .saturating_add(T::DbWeight::get().reads(16 as u64)) + .saturating_add(T::DbWeight::get().writes(8 as u64)) } // Storage: RmrkCore UniqueCollectionId (r:1 w:0) // Storage: Common CollectionProperties (r:1 w:0) @@ -177,9 +177,9 @@ impl WeightInfo for SubstrateWeight { // Storage: Nonfungible Allowance (r:5 w:0) // Storage: Nonfungible Owned (r:0 w:5) fn reject_nft() -> Weight { - (277_017_000 as Weight) - .saturating_add(T::DbWeight::get().reads(30 as Weight)) - .saturating_add(T::DbWeight::get().writes(26 as Weight)) + Weight::from_ref_time(277_794_000) + .saturating_add(T::DbWeight::get().reads(30 as u64)) + .saturating_add(T::DbWeight::get().writes(26 as u64)) } // Storage: RmrkCore UniqueCollectionId (r:1 w:0) // Storage: Common CollectionProperties (r:1 w:0) @@ -187,9 +187,9 @@ impl WeightInfo for SubstrateWeight { // Storage: Nonfungible TokenProperties (r:1 w:1) // Storage: Nonfungible TokenData (r:5 w:0) fn set_property() -> Weight { - (55_408_000 as Weight) - .saturating_add(T::DbWeight::get().reads(9 as Weight)) - .saturating_add(T::DbWeight::get().writes(1 as Weight)) + Weight::from_ref_time(54_657_000) + .saturating_add(T::DbWeight::get().reads(9 as u64)) + .saturating_add(T::DbWeight::get().writes(1 as u64)) } // Storage: RmrkCore UniqueCollectionId (r:1 w:0) // Storage: Common CollectionProperties (r:1 w:0) @@ -197,9 +197,9 @@ impl WeightInfo for SubstrateWeight { // Storage: Nonfungible TokenProperties (r:1 w:1) // Storage: Nonfungible TokenData (r:5 w:0) fn set_priority() -> Weight { - (55_063_000 as Weight) - .saturating_add(T::DbWeight::get().reads(9 as Weight)) - .saturating_add(T::DbWeight::get().writes(1 as Weight)) + Weight::from_ref_time(55_056_000) + 
.saturating_add(T::DbWeight::get().reads(9 as u64)) + .saturating_add(T::DbWeight::get().writes(1 as u64)) } // Storage: RmrkCore UniqueCollectionId (r:1 w:0) // Storage: Common CollectionProperties (r:1 w:0) @@ -208,9 +208,9 @@ impl WeightInfo for SubstrateWeight { // Storage: Nonfungible TokenProperties (r:1 w:1) // Storage: Nonfungible TokenAuxProperties (r:1 w:1) fn add_basic_resource() -> Weight { - (64_656_000 as Weight) - .saturating_add(T::DbWeight::get().reads(10 as Weight)) - .saturating_add(T::DbWeight::get().writes(2 as Weight)) + Weight::from_ref_time(61_673_000) + .saturating_add(T::DbWeight::get().reads(10 as u64)) + .saturating_add(T::DbWeight::get().writes(2 as u64)) } // Storage: RmrkCore UniqueCollectionId (r:1 w:0) // Storage: Common CollectionProperties (r:1 w:0) @@ -219,9 +219,9 @@ impl WeightInfo for SubstrateWeight { // Storage: Nonfungible TokenProperties (r:1 w:1) // Storage: Nonfungible TokenAuxProperties (r:2 w:2) fn add_composable_resource() -> Weight { - (67_593_000 as Weight) - .saturating_add(T::DbWeight::get().reads(11 as Weight)) - .saturating_add(T::DbWeight::get().writes(3 as Weight)) + Weight::from_ref_time(67_125_000) + .saturating_add(T::DbWeight::get().reads(11 as u64)) + .saturating_add(T::DbWeight::get().writes(3 as u64)) } // Storage: RmrkCore UniqueCollectionId (r:1 w:0) // Storage: Common CollectionProperties (r:1 w:0) @@ -230,9 +230,9 @@ impl WeightInfo for SubstrateWeight { // Storage: Nonfungible TokenProperties (r:1 w:1) // Storage: Nonfungible TokenAuxProperties (r:1 w:1) fn add_slot_resource() -> Weight { - (63_845_000 as Weight) - .saturating_add(T::DbWeight::get().reads(10 as Weight)) - .saturating_add(T::DbWeight::get().writes(2 as Weight)) + Weight::from_ref_time(69_058_000) + .saturating_add(T::DbWeight::get().reads(10 as u64)) + .saturating_add(T::DbWeight::get().writes(2 as u64)) } // Storage: RmrkCore UniqueCollectionId (r:1 w:0) // Storage: Common CollectionProperties (r:1 w:0) @@ -240,9 +240,9 @@ impl WeightInfo for SubstrateWeight { // Storage: Nonfungible TokenAuxProperties (r:1 w:1) // Storage: Nonfungible TokenData (r:5 w:0) fn remove_resource() -> Weight { - (53_414_000 as Weight) - .saturating_add(T::DbWeight::get().reads(9 as Weight)) - .saturating_add(T::DbWeight::get().writes(1 as Weight)) + Weight::from_ref_time(53_427_000) + .saturating_add(T::DbWeight::get().reads(9 as u64)) + .saturating_add(T::DbWeight::get().writes(1 as u64)) } // Storage: RmrkCore UniqueCollectionId (r:1 w:0) // Storage: Common CollectionProperties (r:1 w:0) @@ -250,9 +250,9 @@ impl WeightInfo for SubstrateWeight { // Storage: Nonfungible TokenData (r:5 w:0) // Storage: Nonfungible TokenAuxProperties (r:1 w:1) fn accept_resource() -> Weight { - (51_869_000 as Weight) - .saturating_add(T::DbWeight::get().reads(9 as Weight)) - .saturating_add(T::DbWeight::get().writes(1 as Weight)) + Weight::from_ref_time(50_623_000) + .saturating_add(T::DbWeight::get().reads(9 as u64)) + .saturating_add(T::DbWeight::get().writes(1 as u64)) } // Storage: RmrkCore UniqueCollectionId (r:1 w:0) // Storage: Common CollectionProperties (r:1 w:0) @@ -260,9 +260,9 @@ impl WeightInfo for SubstrateWeight { // Storage: Nonfungible TokenData (r:5 w:0) // Storage: Nonfungible TokenAuxProperties (r:1 w:1) fn accept_resource_removal() -> Weight { - (53_168_000 as Weight) - .saturating_add(T::DbWeight::get().reads(9 as Weight)) - .saturating_add(T::DbWeight::get().writes(1 as Weight)) + Weight::from_ref_time(50_917_000) + .saturating_add(T::DbWeight::get().reads(9 as u64)) + 
.saturating_add(T::DbWeight::get().writes(1 as u64)) } } @@ -277,9 +277,9 @@ impl WeightInfo for () { // Storage: Common CollectionById (r:0 w:1) // Storage: RmrkCore UniqueCollectionId (r:0 w:1) fn create_collection() -> Weight { - (49_365_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(5 as Weight)) - .saturating_add(RocksDbWeight::get().writes(8 as Weight)) + Weight::from_ref_time(49_754_000) + .saturating_add(RocksDbWeight::get().reads(5 as u64)) + .saturating_add(RocksDbWeight::get().writes(8 as u64)) } // Storage: RmrkCore UniqueCollectionId (r:1 w:0) // Storage: Common CollectionProperties (r:1 w:1) @@ -290,17 +290,17 @@ impl WeightInfo for () { // Storage: Nonfungible TokensBurnt (r:0 w:1) // Storage: Common AdminAmount (r:0 w:1) fn destroy_collection() -> Weight { - (49_903_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(5 as Weight)) - .saturating_add(RocksDbWeight::get().writes(6 as Weight)) + Weight::from_ref_time(48_588_000) + .saturating_add(RocksDbWeight::get().reads(5 as u64)) + .saturating_add(RocksDbWeight::get().writes(6 as u64)) } // Storage: RmrkCore UniqueCollectionId (r:1 w:0) // Storage: Common CollectionById (r:1 w:1) // Storage: Common CollectionProperties (r:1 w:0) fn change_collection_issuer() -> Weight { - (26_134_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(3 as Weight)) - .saturating_add(RocksDbWeight::get().writes(1 as Weight)) + Weight::from_ref_time(25_054_000) + .saturating_add(RocksDbWeight::get().reads(3 as u64)) + .saturating_add(RocksDbWeight::get().writes(1 as u64)) } // Storage: RmrkCore UniqueCollectionId (r:1 w:0) // Storage: Common CollectionProperties (r:1 w:0) @@ -308,9 +308,9 @@ impl WeightInfo for () { // Storage: Nonfungible TokensMinted (r:1 w:0) // Storage: Nonfungible TokensBurnt (r:1 w:0) fn lock_collection() -> Weight { - (28_020_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(5 as Weight)) - .saturating_add(RocksDbWeight::get().writes(1 as Weight)) + Weight::from_ref_time(26_483_000) + .saturating_add(RocksDbWeight::get().reads(5 as u64)) + .saturating_add(RocksDbWeight::get().writes(1 as u64)) } // Storage: RmrkCore UniqueCollectionId (r:1 w:0) // Storage: Common CollectionProperties (r:1 w:0) @@ -322,13 +322,13 @@ impl WeightInfo for () { // Storage: Nonfungible Owned (r:0 w:1) // Storage: Nonfungible TokenAuxProperties (r:2 w:2) fn mint_nft(b: u32, ) -> Weight { - (67_476_000 as Weight) - // Standard Error: 19_000 - .saturating_add((12_373_000 as Weight).saturating_mul(b as Weight)) - .saturating_add(RocksDbWeight::get().reads(6 as Weight)) - .saturating_add(RocksDbWeight::get().reads((1 as Weight).saturating_mul(b as Weight))) - .saturating_add(RocksDbWeight::get().writes(5 as Weight)) - .saturating_add(RocksDbWeight::get().writes((1 as Weight).saturating_mul(b as Weight))) + Weight::from_ref_time(62_419_000) + // Standard Error: 7_000 + .saturating_add(Weight::from_ref_time(12_325_000).saturating_mul(b as u64)) + .saturating_add(RocksDbWeight::get().reads(6 as u64)) + .saturating_add(RocksDbWeight::get().reads((1 as u64).saturating_mul(b as u64))) + .saturating_add(RocksDbWeight::get().writes(5 as u64)) + .saturating_add(RocksDbWeight::get().writes((1 as u64).saturating_mul(b as u64))) } // Storage: RmrkCore UniqueCollectionId (r:1 w:0) // Storage: Common CollectionProperties (r:1 w:0) @@ -341,13 +341,13 @@ impl WeightInfo for () { // Storage: Nonfungible Owned (r:0 w:1) // Storage: Nonfungible TokenProperties (r:0 w:1) fn burn_nft(b: u32, ) -> Weight { - (0 as Weight) - // 
Standard Error: 1_500_000 - .saturating_add((300_520_000 as Weight).saturating_mul(b as Weight)) - .saturating_add(RocksDbWeight::get().reads(9 as Weight)) - .saturating_add(RocksDbWeight::get().reads((4 as Weight).saturating_mul(b as Weight))) - .saturating_add(RocksDbWeight::get().writes(6 as Weight)) - .saturating_add(RocksDbWeight::get().writes((4 as Weight).saturating_mul(b as Weight))) + (Weight::from_ref_time(0)) + // Standard Error: 1_488_000 + .saturating_add(Weight::from_ref_time(298_367_000).saturating_mul(b as u64)) + .saturating_add(RocksDbWeight::get().reads(9 as u64)) + .saturating_add(RocksDbWeight::get().reads((4 as u64).saturating_mul(b as u64))) + .saturating_add(RocksDbWeight::get().writes(6 as u64)) + .saturating_add(RocksDbWeight::get().writes((4 as u64).saturating_mul(b as u64))) } // Storage: RmrkCore UniqueCollectionId (r:1 w:0) // Storage: Common CollectionProperties (r:1 w:0) @@ -359,9 +359,9 @@ impl WeightInfo for () { // Storage: Nonfungible TokenChildren (r:0 w:1) // Storage: Nonfungible Owned (r:0 w:2) fn send() -> Weight { - (84_023_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(12 as Weight)) - .saturating_add(RocksDbWeight::get().writes(6 as Weight)) + Weight::from_ref_time(81_942_000) + .saturating_add(RocksDbWeight::get().reads(12 as u64)) + .saturating_add(RocksDbWeight::get().writes(6 as u64)) } // Storage: RmrkCore UniqueCollectionId (r:2 w:0) // Storage: Common CollectionProperties (r:1 w:0) @@ -374,9 +374,9 @@ impl WeightInfo for () { // Storage: Nonfungible TokenChildren (r:0 w:1) // Storage: Nonfungible Owned (r:0 w:2) fn accept_nft() -> Weight { - (98_502_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(16 as Weight)) - .saturating_add(RocksDbWeight::get().writes(8 as Weight)) + Weight::from_ref_time(97_925_000) + .saturating_add(RocksDbWeight::get().reads(16 as u64)) + .saturating_add(RocksDbWeight::get().writes(8 as u64)) } // Storage: RmrkCore UniqueCollectionId (r:1 w:0) // Storage: Common CollectionProperties (r:1 w:0) @@ -390,9 +390,9 @@ impl WeightInfo for () { // Storage: Nonfungible Allowance (r:5 w:0) // Storage: Nonfungible Owned (r:0 w:5) fn reject_nft() -> Weight { - (277_017_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(30 as Weight)) - .saturating_add(RocksDbWeight::get().writes(26 as Weight)) + Weight::from_ref_time(277_794_000) + .saturating_add(RocksDbWeight::get().reads(30 as u64)) + .saturating_add(RocksDbWeight::get().writes(26 as u64)) } // Storage: RmrkCore UniqueCollectionId (r:1 w:0) // Storage: Common CollectionProperties (r:1 w:0) @@ -400,9 +400,9 @@ impl WeightInfo for () { // Storage: Nonfungible TokenProperties (r:1 w:1) // Storage: Nonfungible TokenData (r:5 w:0) fn set_property() -> Weight { - (55_408_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(9 as Weight)) - .saturating_add(RocksDbWeight::get().writes(1 as Weight)) + Weight::from_ref_time(54_657_000) + .saturating_add(RocksDbWeight::get().reads(9 as u64)) + .saturating_add(RocksDbWeight::get().writes(1 as u64)) } // Storage: RmrkCore UniqueCollectionId (r:1 w:0) // Storage: Common CollectionProperties (r:1 w:0) @@ -410,9 +410,9 @@ impl WeightInfo for () { // Storage: Nonfungible TokenProperties (r:1 w:1) // Storage: Nonfungible TokenData (r:5 w:0) fn set_priority() -> Weight { - (55_063_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(9 as Weight)) - .saturating_add(RocksDbWeight::get().writes(1 as Weight)) + Weight::from_ref_time(55_056_000) + .saturating_add(RocksDbWeight::get().reads(9 as u64)) + 
.saturating_add(RocksDbWeight::get().writes(1 as u64)) } // Storage: RmrkCore UniqueCollectionId (r:1 w:0) // Storage: Common CollectionProperties (r:1 w:0) @@ -421,9 +421,9 @@ impl WeightInfo for () { // Storage: Nonfungible TokenProperties (r:1 w:1) // Storage: Nonfungible TokenAuxProperties (r:1 w:1) fn add_basic_resource() -> Weight { - (64_656_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(10 as Weight)) - .saturating_add(RocksDbWeight::get().writes(2 as Weight)) + Weight::from_ref_time(61_673_000) + .saturating_add(RocksDbWeight::get().reads(10 as u64)) + .saturating_add(RocksDbWeight::get().writes(2 as u64)) } // Storage: RmrkCore UniqueCollectionId (r:1 w:0) // Storage: Common CollectionProperties (r:1 w:0) @@ -432,9 +432,9 @@ impl WeightInfo for () { // Storage: Nonfungible TokenProperties (r:1 w:1) // Storage: Nonfungible TokenAuxProperties (r:2 w:2) fn add_composable_resource() -> Weight { - (67_593_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(11 as Weight)) - .saturating_add(RocksDbWeight::get().writes(3 as Weight)) + Weight::from_ref_time(67_125_000) + .saturating_add(RocksDbWeight::get().reads(11 as u64)) + .saturating_add(RocksDbWeight::get().writes(3 as u64)) } // Storage: RmrkCore UniqueCollectionId (r:1 w:0) // Storage: Common CollectionProperties (r:1 w:0) @@ -443,9 +443,9 @@ impl WeightInfo for () { // Storage: Nonfungible TokenProperties (r:1 w:1) // Storage: Nonfungible TokenAuxProperties (r:1 w:1) fn add_slot_resource() -> Weight { - (63_845_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(10 as Weight)) - .saturating_add(RocksDbWeight::get().writes(2 as Weight)) + Weight::from_ref_time(69_058_000) + .saturating_add(RocksDbWeight::get().reads(10 as u64)) + .saturating_add(RocksDbWeight::get().writes(2 as u64)) } // Storage: RmrkCore UniqueCollectionId (r:1 w:0) // Storage: Common CollectionProperties (r:1 w:0) @@ -453,9 +453,9 @@ impl WeightInfo for () { // Storage: Nonfungible TokenAuxProperties (r:1 w:1) // Storage: Nonfungible TokenData (r:5 w:0) fn remove_resource() -> Weight { - (53_414_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(9 as Weight)) - .saturating_add(RocksDbWeight::get().writes(1 as Weight)) + Weight::from_ref_time(53_427_000) + .saturating_add(RocksDbWeight::get().reads(9 as u64)) + .saturating_add(RocksDbWeight::get().writes(1 as u64)) } // Storage: RmrkCore UniqueCollectionId (r:1 w:0) // Storage: Common CollectionProperties (r:1 w:0) @@ -463,9 +463,9 @@ impl WeightInfo for () { // Storage: Nonfungible TokenData (r:5 w:0) // Storage: Nonfungible TokenAuxProperties (r:1 w:1) fn accept_resource() -> Weight { - (51_869_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(9 as Weight)) - .saturating_add(RocksDbWeight::get().writes(1 as Weight)) + Weight::from_ref_time(50_623_000) + .saturating_add(RocksDbWeight::get().reads(9 as u64)) + .saturating_add(RocksDbWeight::get().writes(1 as u64)) } // Storage: RmrkCore UniqueCollectionId (r:1 w:0) // Storage: Common CollectionProperties (r:1 w:0) @@ -473,8 +473,8 @@ impl WeightInfo for () { // Storage: Nonfungible TokenData (r:5 w:0) // Storage: Nonfungible TokenAuxProperties (r:1 w:1) fn accept_resource_removal() -> Weight { - (53_168_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(9 as Weight)) - .saturating_add(RocksDbWeight::get().writes(1 as Weight)) + Weight::from_ref_time(50_917_000) + .saturating_add(RocksDbWeight::get().reads(9 as u64)) + .saturating_add(RocksDbWeight::get().writes(1 as u64)) } } diff --git 
a/pallets/proxy-rmrk-equip/CHANGELOG.md b/pallets/proxy-rmrk-equip/CHANGELOG.md new file mode 100644 index 0000000000..e8229c5612 --- /dev/null +++ b/pallets/proxy-rmrk-equip/CHANGELOG.md @@ -0,0 +1,17 @@ + +## [v0.1.2] 2022-08-16 + +### Other changes + +- build: Upgrade polkadot to v0.9.27 2c498572636f2b34d53b1c51b7283a761a7dc90a + +- build: Upgrade polkadot to v0.9.26 85515e54c4ca1b82a2630034e55dcc804c643bf8 + +- refactor: Remove `#[transactional]` from extrinsics 7fd36cea2f6e00c02c67ccc1de9649ae404efd31 + +Every extrinsic now runs in transaction implicitly, and +`#[transactional]` on pallet dispatchable is now meaningless + +Upstream-Change: https://github.com/paritytech/substrate/issues/10806 + +- build: Upgrade polkadot to v0.9.25 cdfb9bdc7b205ff1b5134f034ef9973d769e5e6b \ No newline at end of file diff --git a/pallets/proxy-rmrk-equip/Cargo.toml b/pallets/proxy-rmrk-equip/Cargo.toml index 6dfa39f223..c786411b6a 100644 --- a/pallets/proxy-rmrk-equip/Cargo.toml +++ b/pallets/proxy-rmrk-equip/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "pallet-rmrk-equip" -version = "0.1.0" +version = "0.1.2" license = "GPLv3" edition = "2021" @@ -11,18 +11,20 @@ package = 'parity-scale-codec' version = '3.1.2' [dependencies] -frame-support = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -frame-system = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-runtime = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-std = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-core = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } +frame-support = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +frame-system = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-runtime = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-std = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-core = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } pallet-common = { default-features = false, path = '../common' } pallet-nonfungible = { default-features = false, path = "../../pallets/nonfungible" } up-data-structs = { default-features = false, path = '../../primitives/data-structs' } -pallet-evm = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } -frame-benchmarking = { default-features = false, optional = true, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } +pallet-evm = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } +frame-benchmarking = { default-features = false, optional = true, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } rmrk-traits = { default-features = false, path = "../../primitives/rmrk-traits" } -scale-info = { version = "2.0.1", default-features = false, features = ["derive"] } +scale-info = { version = "2.0.1", default-features = false, features = [ + "derive", +] } pallet-rmrk-core = { default-features = false, path = 
"../proxy-rmrk-core" } [features] @@ -45,3 +47,4 @@ runtime-benchmarks = [ 'frame-support/runtime-benchmarks', 'frame-system/runtime-benchmarks', ] +try-runtime = ["frame-support/try-runtime"] diff --git a/pallets/proxy-rmrk-equip/src/benchmarking.rs b/pallets/proxy-rmrk-equip/src/benchmarking.rs index a5e72e0a15..574418c47f 100644 --- a/pallets/proxy-rmrk-equip/src/benchmarking.rs +++ b/pallets/proxy-rmrk-equip/src/benchmarking.rs @@ -1,3 +1,19 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + use sp_std::vec; use frame_benchmarking::{benchmarks, account}; diff --git a/pallets/proxy-rmrk-equip/src/lib.rs b/pallets/proxy-rmrk-equip/src/lib.rs index 601811bb69..82b2719626 100644 --- a/pallets/proxy-rmrk-equip/src/lib.rs +++ b/pallets/proxy-rmrk-equip/src/lib.rs @@ -14,9 +14,126 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . +//! # RMRK Core Proxy Pallet +//! +//! A pallet used as proxy for RMRK Core (). +//! +//! - [`Config`] +//! - [`Call`] +//! - [`Pallet`] +//! +//! ## Overview +//! +//! The RMRK Equip Proxy pallet mirrors the functionality of RMRK Equip, +//! binding its externalities to Unique's own underlying structure. +//! It is purposed to mimic RMRK Equip exactly, allowing seamless integrations +//! of solutions based on RMRK. +//! +//! RMRK Equip itself contains functionality to equip NFTs, and work with Bases, +//! Parts, and Themes. See [Proxy Implementation](#proxy-implementation) for details. +//! +//! Equip Proxy is responsible for a more specific area of RMRK, and heavily relies on the Core. +//! For a more foundational description of proxy implementation, please refer to [`pallet_rmrk_core`]. +//! +//! *Note*, that while RMRK itself is subject to active development and restructuring, +//! the proxy may be caught temporarily out of date. +//! +//! ### What is RMRK? +//! +//! RMRK is a set of NFT standards which compose several "NFT 2.0 lego" primitives. +//! Putting these legos together allows a user to create NFT systems of arbitrary complexity. +//! +//! Meaning, RMRK NFTs are dynamic, able to nest into each other and form a hierarchy, +//! make use of specific changeable and partially shared metadata in the form of resources, +//! and more. +//! +//! Visit RMRK documentation and repositories to learn more: +//! - Docs: +//! - FAQ: +//! - Substrate code repository: +//! - RMRK spec repository: +//! +//! ## Terminology +//! +//! For more information on RMRK, see RMRK's own documentation. +//! +//! ### Intro to RMRK +//! +//! - **Resource:** Additional piece of metadata of an NFT usually serving to add +//! a piece of media on top of the root metadata (NFT's own), be it a different wing +//! on the root template bird or something entirely unrelated. +//! +//! 
- **Base:** A list of possible "components" - Parts, a combination of which can +//! be appended/equipped to/on an NFT. +//! +//! - **Part:** Something that, together with other Parts, can constitute an NFT. +//! Parts are defined in the Base to which they belong. Parts can be either +//! of the `slot` type or `fixed` type. Slots are intended for equippables. +//! Note that "part of something" and "Part of a Base" can be easily confused, +//! and so in this documentation these words are distinguished by the capital letter. +//! +//! - **Theme:** Named objects of variable => value pairs which get interpolated into +//! the Base's `themable` Parts. Themes can hold any value, but are often represented +//! in RMRK's examples as colors applied to visible Parts. +//! +//! ### Peculiarities in Unique +//! +//! - **Scoped properties:** Properties that are normally obscured from users. +//! Their purpose is to contain structured metadata that was not included in the Unique standard +//! for collections and tokens, meant to be operated on by proxies and other outliers. +//! Scoped property keys are prefixed with `some-scope:`, where `some-scope` is +//! an arbitrary keyword, like "rmrk". `:` is considered an unacceptable symbol in user-defined +//! properties, which, along with other safeguards, makes scoped ones impossible to tamper with. +//! +//! - **Auxiliary properties:** A slightly different structure of properties, +//! trading universality of use for more convenient storage, writes and access. +//! Meant to be inaccessible to end users. +//! +//! ## Proxy Implementation +//! +//! An external user is supposed to be able to utilize this proxy as they would +//! utilize RMRK, and get exactly the same results. Normally, Unique transactions +//! are off-limits to RMRK collections and tokens, and vice versa. However, +//! the information stored on chain can be freely interpreted by storage reads and Unique RPCs. +//! +//! ### ID Mapping +//! +//! RMRK's collections' IDs are counted independently of Unique's and start at 0. +//! Note that tokens' IDs still start at 1. +//! The collections themselves, as well as tokens, are stored as Unique collections, +//! and thus RMRK IDs are mapped to Unique IDs (but not vice versa). +//! +//! ### External/Internal Collection Insulation +//! +//! A Unique transaction cannot target collections purposed for RMRK, +//! and they are flagged as `external` to specify that. On the other hand, +//! due to the mapping, RMRK transactions and RPCs simply cannot reach Unique collections. +//! +//! ### Native Properties +//! +//! Many of RMRK's native parameters are stored as scoped properties of a collection +//! or an NFT on the chain. Scoped properties are prefixed with `rmrk:`, where `:` +//! is an unacceptable symbol in user-defined properties, which, along with other safeguards, +//! makes them impossible to tamper with. +//! +//! ### Collection and NFT Types, or Base, Parts and Themes Handling +//! +//! RMRK introduces the concept of a Base, which is a catalogue of Parts, +//! possible components of an NFT. Due to its similarity with the functionality +//! of a token collection, a Base is stored and handled as one, and the Base's Parts and Themes +//! are this collection's NFTs. See [`CollectionType`] and [`NftType`]. +//! +//! ## Interface +//! +//! ### Dispatchables +//! +//! - `create_base` - Create a new Base. +//! - `theme_add` - Add a Theme to a Base. +//! - `equippable` - Update the array of Collections allowed to be equipped to a Base's specified Slot Part. 
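As a rough model of the key layout described under "Scoped properties" and "Native Properties" above, the standalone sketch below assembles a scoped user-property key from plain strings. The literal "<scope>:<prefix><key>" format is an assumption for illustration; on chain the layout is produced by `PropertyScope::apply` and the key helpers in `property.rs`.

// Conceptual sketch of a scoped user-property key: "<scope>:" + "<prefix>" + "<user key>".
fn scoped_user_property_key(user_key: &str) -> String {
    const SCOPE: &str = "rmrk"; // cf. `RMRK_SCOPE` in property.rs
    const USER_PREFIX: &str = "userprop-"; // cf. `USER_PROPERTY_PREFIX`
    format!("{SCOPE}:{USER_PREFIX}{user_key}")
}

fn main() {
    // A user property "color" lands under a key shaped like "rmrk:userprop-color".
    // Since `:` is rejected in user-supplied keys, ordinary property transactions
    // cannot forge a key inside the scope.
    assert_eq!(scoped_user_property_key("color"), "rmrk:userprop-color");
}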
+ #![cfg_attr(not(feature = "std"), no_std)] -use frame_support::{pallet_prelude::*, transactional, BoundedVec, dispatch::DispatchResult}; +use frame_support::{pallet_prelude::*, BoundedVec, dispatch::DispatchResult}; use frame_system::{pallet_prelude::*, ensure_signed}; use sp_runtime::DispatchError; use up_data_structs::*; @@ -45,15 +162,20 @@ pub mod pallet { #[pallet::config] pub trait Config: frame_system::Config + pallet_rmrk_core::Config { - type Event: From> + IsType<::Event>; + /// Overarching event type. + type RuntimeEvent: From> + IsType<::RuntimeEvent>; + + /// The weight information of this pallet. type WeightInfo: WeightInfo; } + /// Map of a Base ID and a Part ID to an NFT in the Base collection serving as the Part. #[pallet::storage] #[pallet::getter(fn internal_part_id)] pub type InernalPartId = StorageDoubleMap<_, Twox64Concat, CollectionId, Twox64Concat, RmrkPartId, TokenId>; + /// Checkmark that a Base has a Theme NFT named "default". #[pallet::storage] #[pallet::getter(fn base_has_default_theme)] pub type BaseHasDefaultTheme = @@ -78,27 +200,37 @@ pub mod pallet { #[pallet::error] pub enum Error { + /// No permission to perform action. PermissionError, + /// Could not find an ID for a Base collection. It is likely there were too many collections created on the chain, causing an overflow. NoAvailableBaseId, + /// Could not find a suitable ID for a Part, likely too many Part tokens were created in the Base, causing an overflow NoAvailablePartId, + /// Base collection linked to this ID does not exist. BaseDoesntExist, + /// No Theme named "default" is associated with the Base. NeedsDefaultThemeFirst, + /// Part linked to this ID does not exist. PartDoesntExist, + /// Cannot assign equippables to a fixed Part. NoEquippableOnFixedPart, } #[pallet::call] impl Pallet { - /// Creates a new Base. - /// Modeled after [base interaction](https://github.com/rmrk-team/rmrk-spec/blob/master/standards/rmrk2.0.0/interactions/base.md) + /// Create a new Base. + /// + /// Modeled after the [Base interaction](https://github.com/rmrk-team/rmrk-spec/blob/master/standards/rmrk2.0.0/interactions/base.md) /// - /// Parameters: - /// - origin: Caller, will be assigned as the issuer of the Base - /// - base_type: media type, e.g. "svg" - /// - symbol: arbitrary client-chosen symbol - /// - parts: array of Fixed and Slot parts composing the base, confined in length by - /// RmrkPartsLimit - #[transactional] + /// # Permissions + /// - Anyone - will be assigned as the issuer of the Base. + /// + /// # Arguments: + /// - `origin`: Caller, will be assigned as the issuer of the Base + /// - `base_type`: Arbitrary media type, e.g. "svg". + /// - `symbol`: Arbitrary client-chosen symbol. + /// - `parts`: Array of Fixed and Slot Parts composing the Base, + /// confined in length by [`RmrkPartsLimit`](up_data_structs::RmrkPartsLimit). 
#[pallet::weight(>::create_base(parts.len() as u32))] pub fn create_base( origin: OriginFor, @@ -118,8 +250,15 @@ pub mod pallet { ..Default::default() }; - let collection_id_res = - >::init_collection(cross_sender.clone(), data, true); + let collection_id_res = >::init_collection( + cross_sender.clone(), + cross_sender.clone(), + data, + up_data_structs::CollectionFlags { + external: true, + ..Default::default() + }, + ); if let Err(DispatchError::Arithmetic(_)) = &collection_id_res { return Err(>::NoAvailableBaseId.into()); @@ -131,8 +270,11 @@ pub mod pallet { collection_id, PropertyScope::Rmrk, [ - >::rmrk_property(CollectionType, &misc::CollectionType::Base)?, - >::rmrk_property(BaseType, &base_type)?, + >::encode_rmrk_property( + CollectionType, + &misc::CollectionType::Base, + )?, + >::encode_rmrk_property(BaseType, &base_type)?, ] .into_iter(), )?; @@ -151,20 +293,22 @@ pub mod pallet { Ok(()) } - /// Adds a Theme to a Base. - /// Modeled after [themeadd interaction](https://github.com/rmrk-team/rmrk-spec/blob/master/standards/rmrk2.0.0/interactions/themeadd.md) - /// Themes are stored in the Themes storage + /// Add a Theme to a Base. /// A Theme named "default" is required prior to adding other Themes. /// - /// Parameters: - /// - origin: The caller of the function, must be issuer of the base - /// - base_id: The Base containing the Theme to be updated - /// - theme: The Theme to add to the Base. A Theme has a name and properties, which are an + /// Modeled after [Themeadd interaction](https://github.com/rmrk-team/rmrk-spec/blob/master/standards/rmrk2.0.0/interactions/themeadd.md). + /// + /// # Permissions: + /// - Base issuer + /// + /// # Arguments: + /// - `origin`: sender of the transaction + /// - `base_id`: Base ID containing the Theme to be updated. + /// - `theme`: Theme to add to the Base. A Theme has a name and properties, which are an /// array of [key, value, inherit]. - /// - key: arbitrary BoundedString, defined by client - /// - value: arbitrary BoundedString, defined by client - /// - inherit: optional bool - #[transactional] + /// - `key`: Arbitrary BoundedString, defined by client. + /// - `value`: Arbitrary BoundedString, defined by client. + /// - `inherit`: Optional bool. #[pallet::weight(>::theme_add(theme.properties.len() as u32))] pub fn theme_add( origin: OriginFor, @@ -191,9 +335,9 @@ pub mod pallet { owner, &collection, [ - >::rmrk_property(TokenType, &NftType::Theme)?, - >::rmrk_property(ThemeName, &theme.name)?, - >::rmrk_property(ThemeInherit, &theme.inherit)?, + >::encode_rmrk_property(TokenType, &NftType::Theme)?, + >::encode_rmrk_property(ThemeName, &theme.name)?, + >::encode_rmrk_property(ThemeInherit, &theme.inherit)?, ] .into_iter(), ) @@ -204,7 +348,7 @@ pub mod pallet { collection_id, token_id, PropertyScope::Rmrk, - >::rmrk_property( + >::encode_rmrk_property( UserProperty(property.key.as_slice()), &property.value, )?, @@ -214,7 +358,18 @@ pub mod pallet { Ok(()) } - #[transactional] + /// Update the array of Collections allowed to be equipped to a Base's specified Slot Part. + /// + /// Modeled after [equippable interaction](https://github.com/rmrk-team/rmrk-spec/blob/master/standards/rmrk2.0.0/interactions/equippable.md). + /// + /// # Permissions: + /// - Base issuer + /// + /// # Arguments: + /// - `origin`: sender of the transaction + /// - `base_id`: Base containing the Slot Part to be updated. + /// - `slot_id`: Slot Part whose Equippable List is being updated . 
+ /// - `equippables`: List of equippables that will override the current Equippables list. #[pallet::weight(>::equippable())] pub fn equippable( origin: OriginFor, @@ -253,7 +408,7 @@ pub mod pallet { base_collection_id, part_id, PropertyScope::Rmrk, - >::rmrk_property(EquippableList, &equippables)?, + >::encode_rmrk_property(EquippableList, &equippables)?, )?; } } @@ -266,6 +421,8 @@ pub mod pallet { } impl Pallet { + /// Create (or overwrite) a Part in a Base. + /// The Part and the Base are represented as an NFT and a Collection. fn create_part( sender: &T::CrossAccountId, collection: &NonfungibleHandle, @@ -298,7 +455,7 @@ impl Pallet { collection.id, token_id, PropertyScope::Rmrk, - >::rmrk_property(ExternalPartId, &part_id)?, + >::encode_rmrk_property(ExternalPartId, &part_id)?, )?; token_id @@ -310,9 +467,9 @@ impl Pallet { token_id, PropertyScope::Rmrk, [ - >::rmrk_property(TokenType, &nft_type)?, - >::rmrk_property(Src, &src)?, - >::rmrk_property(ZIndex, &z_index)?, + >::encode_rmrk_property(TokenType, &nft_type)?, + >::encode_rmrk_property(Src, &src)?, + >::encode_rmrk_property(ZIndex, &z_index)?, ] .into_iter(), )?; @@ -322,13 +479,15 @@ impl Pallet { collection.id, token_id, PropertyScope::Rmrk, - >::rmrk_property(EquippableList, &part.equippable)?, + >::encode_rmrk_property(EquippableList, &part.equippable)?, )?; } Ok(()) } + /// Ensure that the collection under the Base ID is a Base collection, + /// and fetch it. fn get_base(base_id: CollectionId) -> Result, DispatchError> { let collection = >::get_typed_nft_collection(base_id, misc::CollectionType::Base) diff --git a/pallets/proxy-rmrk-equip/src/rpc.rs b/pallets/proxy-rmrk-equip/src/rpc.rs index 7ca9350298..21baf261d0 100644 --- a/pallets/proxy-rmrk-equip/src/rpc.rs +++ b/pallets/proxy-rmrk-equip/src/rpc.rs @@ -1,7 +1,26 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +//! Realizations of RMRK RPCs (remote procedure calls) related to the Equip pallet. + use super::*; use pallet_rmrk_core::{misc, property::*}; use sp_std::vec::Vec; +/// Get base info by its ID. pub fn base( base_id: RmrkBaseId, ) -> Result>, DispatchError> { @@ -22,6 +41,7 @@ pub fn base( })) } +/// Get all parts of a base. pub fn base_parts(base_id: RmrkBaseId) -> Result, DispatchError> { use pallet_common::CommonCollectionOperations; @@ -93,6 +113,7 @@ pub fn base_parts(base_id: RmrkBaseId) -> Result, D Ok(parts) } +/// Get the theme names belonging to a base. pub fn theme_names(base_id: RmrkBaseId) -> Result, DispatchError> { use pallet_common::CommonCollectionOperations; @@ -124,6 +145,7 @@ pub fn theme_names(base_id: RmrkBaseId) -> Result, Ok(theme_names) } +/// Get theme info, including properties, optionally limited to the provided keys. 
pub fn theme( base_id: RmrkBaseId, theme_name: RmrkThemeName, diff --git a/pallets/proxy-rmrk-equip/src/weights.rs b/pallets/proxy-rmrk-equip/src/weights.rs index aca21e466f..ab3a887e93 100644 --- a/pallets/proxy-rmrk-equip/src/weights.rs +++ b/pallets/proxy-rmrk-equip/src/weights.rs @@ -3,7 +3,7 @@ //! Autogenerated weights for pallet_proxy_rmrk_equip //! //! THIS FILE WAS AUTO-GENERATED USING THE SUBSTRATE BENCHMARK CLI VERSION 4.0.0-dev -//! DATE: 2022-07-01, STEPS: `50`, REPEAT: 80, LOW RANGE: `[]`, HIGH RANGE: `[]` +//! DATE: 2022-08-15, STEPS: `50`, REPEAT: 80, LOW RANGE: `[]`, HIGH RANGE: `[]` //! EXECUTION: None, WASM-EXECUTION: Compiled, CHAIN: None, DB CACHE: 1024 // Executed Command: @@ -54,13 +54,13 @@ impl WeightInfo for SubstrateWeight { // Storage: Nonfungible TokenData (r:0 w:1) // Storage: Nonfungible Owned (r:0 w:1) fn create_base(b: u32, ) -> Weight { - (57_871_000 as Weight) - // Standard Error: 21_000 - .saturating_add((19_870_000 as Weight).saturating_mul(b as Weight)) - .saturating_add(T::DbWeight::get().reads(6 as Weight)) - .saturating_add(T::DbWeight::get().reads((2 as Weight).saturating_mul(b as Weight))) - .saturating_add(T::DbWeight::get().writes(8 as Weight)) - .saturating_add(T::DbWeight::get().writes((4 as Weight).saturating_mul(b as Weight))) + Weight::from_ref_time(58_417_000) + // Standard Error: 27_000 + .saturating_add(Weight::from_ref_time(20_439_000).saturating_mul(b as u64)) + .saturating_add(T::DbWeight::get().reads(6 as u64)) + .saturating_add(T::DbWeight::get().reads((2 as u64).saturating_mul(b as u64))) + .saturating_add(T::DbWeight::get().writes(8 as u64)) + .saturating_add(T::DbWeight::get().writes((4 as u64).saturating_mul(b as u64))) } // Storage: Common CollectionProperties (r:1 w:0) // Storage: Common CollectionById (r:1 w:0) @@ -71,20 +71,20 @@ impl WeightInfo for SubstrateWeight { // Storage: Nonfungible TokenData (r:0 w:1) // Storage: Nonfungible Owned (r:0 w:1) fn theme_add(b: u32, ) -> Weight { - (46_121_000 as Weight) - // Standard Error: 31_000 - .saturating_add((2_988_000 as Weight).saturating_mul(b as Weight)) - .saturating_add(T::DbWeight::get().reads(6 as Weight)) - .saturating_add(T::DbWeight::get().writes(5 as Weight)) + Weight::from_ref_time(46_005_000) + // Standard Error: 42_000 + .saturating_add(Weight::from_ref_time(2_922_000).saturating_mul(b as u64)) + .saturating_add(T::DbWeight::get().reads(6 as u64)) + .saturating_add(T::DbWeight::get().writes(5 as u64)) } // Storage: Common CollectionProperties (r:1 w:0) // Storage: Common CollectionById (r:1 w:0) // Storage: RmrkEquip InernalPartId (r:1 w:0) // Storage: Nonfungible TokenProperties (r:1 w:1) fn equippable() -> Weight { - (32_032_000 as Weight) - .saturating_add(T::DbWeight::get().reads(4 as Weight)) - .saturating_add(T::DbWeight::get().writes(1 as Weight)) + Weight::from_ref_time(32_526_000) + .saturating_add(T::DbWeight::get().reads(4 as u64)) + .saturating_add(T::DbWeight::get().writes(1 as u64)) } } @@ -103,13 +103,13 @@ impl WeightInfo for () { // Storage: Nonfungible TokenData (r:0 w:1) // Storage: Nonfungible Owned (r:0 w:1) fn create_base(b: u32, ) -> Weight { - (57_871_000 as Weight) - // Standard Error: 21_000 - .saturating_add((19_870_000 as Weight).saturating_mul(b as Weight)) - .saturating_add(RocksDbWeight::get().reads(6 as Weight)) - .saturating_add(RocksDbWeight::get().reads((2 as Weight).saturating_mul(b as Weight))) - .saturating_add(RocksDbWeight::get().writes(8 as Weight)) - .saturating_add(RocksDbWeight::get().writes((4 as Weight).saturating_mul(b 
as Weight))) + Weight::from_ref_time(58_417_000) + // Standard Error: 27_000 + .saturating_add(Weight::from_ref_time(20_439_000).saturating_mul(b as u64)) + .saturating_add(RocksDbWeight::get().reads(6 as u64)) + .saturating_add(RocksDbWeight::get().reads((2 as u64).saturating_mul(b as u64))) + .saturating_add(RocksDbWeight::get().writes(8 as u64)) + .saturating_add(RocksDbWeight::get().writes((4 as u64).saturating_mul(b as u64))) } // Storage: Common CollectionProperties (r:1 w:0) // Storage: Common CollectionById (r:1 w:0) @@ -120,19 +120,19 @@ impl WeightInfo for () { // Storage: Nonfungible TokenData (r:0 w:1) // Storage: Nonfungible Owned (r:0 w:1) fn theme_add(b: u32, ) -> Weight { - (46_121_000 as Weight) - // Standard Error: 31_000 - .saturating_add((2_988_000 as Weight).saturating_mul(b as Weight)) - .saturating_add(RocksDbWeight::get().reads(6 as Weight)) - .saturating_add(RocksDbWeight::get().writes(5 as Weight)) + Weight::from_ref_time(46_005_000) + // Standard Error: 42_000 + .saturating_add(Weight::from_ref_time(2_922_000).saturating_mul(b as u64)) + .saturating_add(RocksDbWeight::get().reads(6 as u64)) + .saturating_add(RocksDbWeight::get().writes(5 as u64)) } // Storage: Common CollectionProperties (r:1 w:0) // Storage: Common CollectionById (r:1 w:0) // Storage: RmrkEquip InernalPartId (r:1 w:0) // Storage: Nonfungible TokenProperties (r:1 w:1) fn equippable() -> Weight { - (32_032_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(4 as Weight)) - .saturating_add(RocksDbWeight::get().writes(1 as Weight)) + Weight::from_ref_time(32_526_000) + .saturating_add(RocksDbWeight::get().reads(4 as u64)) + .saturating_add(RocksDbWeight::get().writes(1 as u64)) } } diff --git a/pallets/refungible/CHANGELOG.md b/pallets/refungible/CHANGELOG.md new file mode 100644 index 0000000000..3041556898 --- /dev/null +++ b/pallets/refungible/CHANGELOG.md @@ -0,0 +1,80 @@ +# Change Log + +All notable changes to this project will be documented in this file. + +## [v0.2.4] - 2022-08-24 + +### Change + - Add bound `AsRef<[u8; 32]>` to `T::CrossAccountId`. + + +## [v0.2.3] 2022-08-16 + +### Other changes + +- build: Upgrade polkadot to v0.9.27 2c498572636f2b34d53b1c51b7283a761a7dc90a + +- build: Upgrade polkadot to v0.9.26 85515e54c4ca1b82a2630034e55dcc804c643bf8 + +- refactor: Switch to new prefix removal methods 26734e9567589d75cdd99e404eabf11d5a97d975 + +New methods allows to call `remove_prefix` with limit multiple times +in the same block +However, we don't use prefix removal limits, so upgrade is +straightforward + +Upstream-Change: https://github.com/paritytech/substrate/pull/11490 + +- build: Upgrade polkadot to v0.9.25 cdfb9bdc7b205ff1b5134f034ef9973d769e5e6b + +## [v0.2.2] 2022-08-04 + +### Product changes + +- Now RefungibleMultipleItems may only receive single user on type level. + +### Added features + +- Implement property RPC 7bf45b532e32daa91f03c157b58874d21b42ae1f + +### Other changes + +- refactor: Disallow invalid bulk mints 53fec71cf728dddd012257b407ea30441e699f88 + +`create_multiple_items_ex` was allowing invalid (that will be always +rejected at runtime level) refungible mint extrinsics, by passing +multiple users into `RefungibleMultipleItems` call. 
+ +## [v0.2.1] - 2022-07-27 + +### New features + +Implementation of ERC-721 EVM API ([#452](https://github.com/UniqueNetwork/unique-chain/pull/452)) + +## [v0.2.0] - 2022-08-01 + +### Deprecated + +`const_data` field is removed + +- `ItemData` +- `TokenData` + +## [v0.1.2] - 2022-07-14 + +### Other changes + +feat(refungible-pallet): add ERC-20 EVM API for RFT token pieces ([#413](https://github.com/UniqueNetwork/unique-chain/pull/413)) +test(refungible-pallet): add tests for ERC-20 EVM API for RFT token pieces ([#413](https://github.com/UniqueNetwork/unique-chain/pull/413)) + +## [v0.1.1] - 2022-07-14 + +### Added features + +- Support for properties for RFT collections and tokens. + +### Other changes + +- feat: RPC method `token_owners` returning 10 owners in no particular order. + +This was an internal request to improve the web interface and support fractionalization event. diff --git a/pallets/refungible/Cargo.toml b/pallets/refungible/Cargo.toml index c64f51ccb6..93de8f9923 100644 --- a/pallets/refungible/Cargo.toml +++ b/pallets/refungible/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "pallet-refungible" -version = "0.1.0" +version = "0.2.4" license = "GPLv3" edition = "2021" @@ -11,36 +11,44 @@ package = 'parity-scale-codec' version = '3.1.2' [dependencies] -frame-support = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -frame-system = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-runtime = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-std = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-core = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -pallet-evm = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } +frame-support = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +frame-system = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-runtime = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-std = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-core = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +pallet-evm = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } +evm-coder = { default-features = false, path = '../../crates/evm-coder' } +pallet-evm-coder-substrate = { default-features = false, path = '../../pallets/evm-coder-substrate' } pallet-common = { default-features = false, path = '../common' } pallet-structure = { default-features = false, path = '../structure' } -up-data-structs = { default-features = false, path = '../../primitives/data-structs' } -frame-benchmarking = { default-features = false, optional = true, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -scale-info = { version = "2.0.1", default-features = false, features = [ - "derive", -] } +frame-benchmarking = { default-features = false, optional = true, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } + 
struct-versioning = { path = "../../crates/struct-versioning" } +up-data-structs = { default-features = false, path = '../../primitives/data-structs' } +ethereum = { version = "0.12.0", default-features = false } +scale-info = { version = "2.0.1", default-features = false, features = ["derive",] } +derivative = { version = "2.2.0", features = ["use_core"] } [features] default = ["std"] std = [ + "ethereum/std", + "evm-coder/std", + 'frame-benchmarking/std', "frame-support/std", "frame-system/std", + "pallet-common/std", + "pallet-evm/std", + "pallet-evm-coder-substrate/std", + "pallet-structure/std", "sp-runtime/std", "sp-std/std", "up-data-structs/std", - "pallet-common/std", - "pallet-structure/std", - 'frame-benchmarking/std', - "pallet-evm/std", ] runtime-benchmarks = [ 'frame-benchmarking', 'frame-support/runtime-benchmarks', 'frame-system/runtime-benchmarks', + 'up-data-structs/runtime-benchmarks', ] +try-runtime = ["frame-support/try-runtime"] diff --git a/pallets/refungible/src/benchmarking.rs b/pallets/refungible/src/benchmarking.rs index cb92910417..15d793fe07 100644 --- a/pallets/refungible/src/benchmarking.rs +++ b/pallets/refungible/src/benchmarking.rs @@ -17,35 +17,41 @@ use super::*; use crate::{Pallet, Config, RefungibleHandle}; -use sp_std::prelude::*; -use pallet_common::benchmarking::{create_collection_raw, create_data}; -use frame_benchmarking::{benchmarks, account}; -use up_data_structs::{CollectionMode, MAX_ITEMS_PER_BATCH, CUSTOM_DATA_LIMIT, budget::Unlimited}; -use pallet_common::bench_init; use core::convert::TryInto; use core::iter::IntoIterator; +use frame_benchmarking::{benchmarks, account}; +use pallet_common::{ + bench_init, + benchmarking::{create_collection_raw, property_key, property_value, create_data}, +}; +use sp_core::H160; +use sp_std::prelude::*; +use up_data_structs::{ + CollectionMode, MAX_ITEMS_PER_BATCH, MAX_PROPERTIES_PER_ITEM, CUSTOM_DATA_LIMIT, + budget::Unlimited, +}; const SEED: u32 = 1; fn create_max_item_data( users: impl IntoIterator, -) -> CreateRefungibleExData { - let const_data = create_data::(); - CreateRefungibleExData { - const_data, +) -> CreateItemData { + CreateItemData { users: users .into_iter() .collect::>() .try_into() .unwrap(), + properties: Default::default(), } } + fn create_max_item( collection: &RefungibleHandle, sender: &T::CrossAccountId, users: impl IntoIterator, ) -> Result { - let data: CreateRefungibleExData = create_max_item_data(users); + let data: CreateItemData = create_max_item_data(users); >::create_item(&collection, sender, data, &Unlimited)?; Ok(TokenId(>::get(&collection.id))) } @@ -55,11 +61,14 @@ fn create_collection( ) -> Result, DispatchError> { create_collection_raw( owner, - CollectionMode::NFT, - >::init_collection, + CollectionMode::ReFungible, + |owner: T::CrossAccountId, data| { + >::init_collection(owner.clone(), owner, data, Default::default()) + }, RefungibleHandle::cast, ) } + benchmarks! { create_item { bench_init!{ @@ -204,6 +213,68 @@ benchmarks! 
{ >::set_allowance(&collection, &sender, &burner, item, 200)?; }: {>::burn_from(&collection, &burner, &sender, item, 200, &Unlimited)?} + set_token_property_permissions { + let b in 0..MAX_PROPERTIES_PER_ITEM; + bench_init!{ + owner: sub; collection: collection(owner); + owner: cross_from_sub; + }; + let perms = (0..b).map(|k| PropertyKeyPermission { + key: property_key(k as usize), + permission: PropertyPermission { + mutable: false, + collection_admin: false, + token_owner: false, + }, + }).collect::>(); + }: {>::set_token_property_permissions(&collection, &owner, perms)?} + + set_token_properties { + let b in 0..MAX_PROPERTIES_PER_ITEM; + bench_init!{ + owner: sub; collection: collection(owner); + owner: cross_from_sub; + }; + let perms = (0..b).map(|k| PropertyKeyPermission { + key: property_key(k as usize), + permission: PropertyPermission { + mutable: false, + collection_admin: true, + token_owner: true, + }, + }).collect::>(); + >::set_token_property_permissions(&collection, &owner, perms)?; + let props = (0..b).map(|k| Property { + key: property_key(k as usize), + value: property_value(), + }).collect::>(); + let item = create_max_item(&collection, &owner, [(owner.clone(), 200)])?; + }: {>::set_token_properties(&collection, &owner, item, props.into_iter(), false, &Unlimited)?} + + delete_token_properties { + let b in 0..MAX_PROPERTIES_PER_ITEM; + bench_init!{ + owner: sub; collection: collection(owner); + owner: cross_from_sub; + }; + let perms = (0..b).map(|k| PropertyKeyPermission { + key: property_key(k as usize), + permission: PropertyPermission { + mutable: true, + collection_admin: true, + token_owner: true, + }, + }).collect::>(); + >::set_token_property_permissions(&collection, &owner, perms)?; + let props = (0..b).map(|k| Property { + key: property_key(k as usize), + value: property_value(), + }).collect::>(); + let item = create_max_item(&collection, &owner, [(owner.clone(), 200)])?; + >::set_token_properties(&collection, &owner, item, props.into_iter(), false, &Unlimited)?; + let to_delete = (0..b).map(|k| property_key(k as usize)).collect::>(); + }: {>::delete_token_properties(&collection, &owner, item, to_delete.into_iter(), &Unlimited)?} + repartition_item { bench_init!{ owner: sub; collection: collection(owner); @@ -211,4 +282,12 @@ benchmarks! 
{ }; let item = create_max_item(&collection, &sender, [(owner.clone(), 100)])?; }: {>::repartition(&collection, &owner, item, 200)?} + + token_owner { + bench_init!{ + owner: sub; collection: collection(owner); + sender: cross_from_sub(owner); owner: cross_sub; + }; + let item = create_max_item(&collection, &sender, [(owner.clone(), 100)])?; + }: {>::token_owner(collection.id, item)} } diff --git a/pallets/refungible/src/common.rs b/pallets/refungible/src/common.rs index eb0c1930f8..9a7a416bf0 100644 --- a/pallets/refungible/src/common.rs +++ b/pallets/refungible/src/common.rs @@ -19,47 +19,74 @@ use core::marker::PhantomData; use sp_std::collections::btree_map::BTreeMap; use frame_support::{dispatch::DispatchResultWithPostInfo, ensure, fail, weights::Weight, traits::Get}; use up_data_structs::{ - CollectionId, TokenId, CreateItemExData, CreateRefungibleExData, budget::Budget, Property, - PropertyKey, PropertyValue, PropertyKeyPermission, CreateItemData, + CollectionId, TokenId, CreateItemExData, budget::Budget, Property, PropertyKey, PropertyValue, + PropertyKeyPermission, CollectionPropertiesVec, CreateRefungibleExMultipleOwners, + CreateRefungibleExSingleOwner, +}; +use pallet_common::{ + CommonCollectionOperations, CommonWeightInfo, RefungibleExtensions, with_weight, + weights::WeightInfo as _, }; -use pallet_common::{CommonCollectionOperations, CommonWeightInfo, RefungibleExtensions, with_weight}; use pallet_structure::Error as StructureError; use sp_runtime::{DispatchError}; use sp_std::{vec::Vec, vec}; use crate::{ AccountBalance, Allowance, Balance, Config, Error, Owned, Pallet, RefungibleHandle, - SelfWeightOf, TokenData, weights::WeightInfo, TokensMinted, + SelfWeightOf, weights::WeightInfo, TokensMinted, TotalSupply, CreateItemData, }; macro_rules! 
max_weight_of { ($($method:ident ($($args:tt)*)),*) => { - 0 + Weight::zero() $( .max(>::$method($($args)*)) )* }; } +fn properties_weight(properties: &CollectionPropertiesVec) -> Weight { + if properties.len() > 0 { + >::set_token_properties(properties.len() as u32) + } else { + Weight::zero() + } +} + pub struct CommonWeights(PhantomData); impl CommonWeightInfo for CommonWeights { fn create_item() -> Weight { >::create_item() } - fn create_multiple_items(data: &[CreateItemData]) -> Weight { - >::create_multiple_items(data.len() as u32) + fn create_multiple_items(data: &[up_data_structs::CreateItemData]) -> Weight { + >::create_multiple_items(data.len() as u32).saturating_add( + data.iter() + .map(|data| match data { + up_data_structs::CreateItemData::ReFungible(rft_data) => { + properties_weight::(&rft_data.properties) + } + _ => Weight::zero(), + }) + .fold(Weight::zero(), |a, b| a.saturating_add(b)), + ) } fn create_multiple_items_ex(call: &CreateItemExData) -> Weight { match call { CreateItemExData::RefungibleMultipleOwners(i) => { >::create_multiple_items_ex_multiple_owners(i.users.len() as u32) + .saturating_add(properties_weight::(&i.properties)) } CreateItemExData::RefungibleMultipleItems(i) => { >::create_multiple_items_ex_multiple_items(i.len() as u32) + .saturating_add( + i.iter() + .map(|d| properties_weight::(&d.properties)) + .fold(Weight::zero(), |a, b| a.saturating_add(b)), + ) } - _ => 0, + _ => Weight::zero(), } } @@ -67,29 +94,24 @@ impl CommonWeightInfo for CommonWeights { max_weight_of!(burn_item_partial(), burn_item_fully()) } - fn set_collection_properties(_amount: u32) -> Weight { - // Error - 0 + fn set_collection_properties(amount: u32) -> Weight { + >::set_collection_properties(amount) } - fn delete_collection_properties(_amount: u32) -> Weight { - // Error - 0 + fn delete_collection_properties(amount: u32) -> Weight { + >::delete_collection_properties(amount) } - fn set_token_properties(_amount: u32) -> Weight { - // Error - 0 + fn set_token_properties(amount: u32) -> Weight { + >::set_token_properties(amount) } - fn delete_token_properties(_amount: u32) -> Weight { - // Error - 0 + fn delete_token_properties(amount: u32) -> Weight { + >::delete_token_properties(amount) } - fn set_token_property_permissions(_amount: u32) -> Weight { - // Error - 0 + fn set_token_property_permissions(amount: u32) -> Weight { + >::set_token_property_permissions(amount) } fn transfer() -> Weight { @@ -124,27 +146,33 @@ impl CommonWeightInfo for CommonWeights { } fn burn_recursively_breadth_raw(_amount: u32) -> Weight { // Refungible token can't have children - 0 + Weight::zero() + } + + fn token_owner() -> Weight { + >::token_owner() } } fn map_create_data( data: up_data_structs::CreateItemData, to: &T::CrossAccountId, -) -> Result, DispatchError> { +) -> Result, DispatchError> { match data { - up_data_structs::CreateItemData::ReFungible(data) => Ok(CreateRefungibleExData { - const_data: data.const_data, + up_data_structs::CreateItemData::ReFungible(data) => Ok(CreateItemData { users: { let mut out = BTreeMap::new(); out.insert(to.clone(), data.pieces); out.try_into().expect("limit > 0") }, + properties: data.properties, }), _ => fail!(>::NotRefungibleDataUsedToMintFungibleCollectionToken), } } +/// Implementation of `CommonCollectionOperations` for `RefungibleHandle`. It wraps Refungible Pallete +/// methods and adds weight info. 
impl CommonCollectionOperations for RefungibleHandle { fn create_item( &self, @@ -191,12 +219,26 @@ impl CommonCollectionOperations for RefungibleHandle { ) -> DispatchResultWithPostInfo { let weight = >::create_multiple_items_ex(&data); let data = match data { - CreateItemExData::RefungibleMultipleOwners(r) => vec![r], - CreateItemExData::RefungibleMultipleItems(r) - if r.iter().all(|i| i.users.len() == 1) => - { - r.into_inner() - } + CreateItemExData::RefungibleMultipleOwners(CreateRefungibleExMultipleOwners { + users, + properties, + }) => vec![CreateItemData { users, properties }], + CreateItemExData::RefungibleMultipleItems(r) => r + .into_inner() + .into_iter() + .map( + |CreateRefungibleExSingleOwner { + user, + pieces, + properties, + }| CreateItemData { + users: BTreeMap::from([(user, pieces)]) + .try_into() + .expect("limit >= 1"), + properties, + }, + ) + .collect(), _ => fail!(>::NotRefungibleDataUsedToMintFungibleCollectionToken), }; @@ -295,46 +337,85 @@ impl CommonCollectionOperations for RefungibleHandle { fn set_collection_properties( &self, - _sender: T::CrossAccountId, - _property: Vec, + sender: T::CrossAccountId, + properties: Vec, ) -> DispatchResultWithPostInfo { - fail!(>::SettingPropertiesNotAllowed) + let weight = >::set_collection_properties(properties.len() as u32); + + with_weight( + >::set_collection_properties(self, &sender, properties), + weight, + ) } fn delete_collection_properties( &self, - _sender: &T::CrossAccountId, - _property_keys: Vec, + sender: &T::CrossAccountId, + property_keys: Vec, ) -> DispatchResultWithPostInfo { - fail!(>::SettingPropertiesNotAllowed) + let weight = >::delete_collection_properties(property_keys.len() as u32); + + with_weight( + >::delete_collection_properties(self, sender, property_keys), + weight, + ) } fn set_token_properties( &self, - _sender: T::CrossAccountId, - _token_id: TokenId, - _property: Vec, - _nesting_budget: &dyn Budget, + sender: T::CrossAccountId, + token_id: TokenId, + properties: Vec, + nesting_budget: &dyn Budget, ) -> DispatchResultWithPostInfo { - fail!(>::SettingPropertiesNotAllowed) + let weight = >::set_token_properties(properties.len() as u32); + + with_weight( + >::set_token_properties( + self, + &sender, + token_id, + properties.into_iter(), + false, + nesting_budget, + ), + weight, + ) } fn set_token_property_permissions( &self, - _sender: &T::CrossAccountId, - _property_permissions: Vec, + sender: &T::CrossAccountId, + property_permissions: Vec, ) -> DispatchResultWithPostInfo { - fail!(>::SettingPropertiesNotAllowed) + let weight = + >::set_token_property_permissions(property_permissions.len() as u32); + + with_weight( + >::set_token_property_permissions(self, sender, property_permissions), + weight, + ) } fn delete_token_properties( &self, - _sender: T::CrossAccountId, - _token_id: TokenId, - _property_keys: Vec, - _nesting_budget: &dyn Budget, + sender: T::CrossAccountId, + token_id: TokenId, + property_keys: Vec, + nesting_budget: &dyn Budget, ) -> DispatchResultWithPostInfo { - fail!(>::SettingPropertiesNotAllowed) + let weight = >::delete_token_properties(property_keys.len() as u32); + + with_weight( + >::delete_token_properties( + self, + &sender, + token_id, + property_keys.into_iter(), + nesting_budget, + ), + weight, + ) } fn check_nesting( @@ -358,7 +439,7 @@ impl CommonCollectionOperations for RefungibleHandle { } fn collection_tokens(&self) -> Vec { - >::iter_prefix((self.id,)) + >::iter_prefix((self.id,)) .map(|(id, _)| id) .collect() } @@ -371,20 +452,40 @@ impl 
CommonCollectionOperations for RefungibleHandle { TokenId(>::get(self.id)) } - fn token_owner(&self, _token: TokenId) -> Option { - None + fn token_owner(&self, token: TokenId) -> Option { + >::token_owner(self.id, token) } - fn token_property(&self, _token_id: TokenId, _key: &PropertyKey) -> Option { - None + /// Returns 10 token in no particular order. + fn token_owners(&self, token: TokenId) -> Vec { + >::token_owners(self.id, token).unwrap_or_default() } - fn token_properties( - &self, - _token_id: TokenId, - _keys: Option>, - ) -> Vec { - Vec::new() + fn token_property(&self, token_id: TokenId, key: &PropertyKey) -> Option { + >::token_properties((self.id, token_id)) + .get(key) + .cloned() + } + + fn token_properties(&self, token_id: TokenId, keys: Option>) -> Vec { + let properties = >::token_properties((self.id, token_id)); + + keys.map(|keys| { + keys.into_iter() + .filter_map(|key| { + properties.get(&key).map(|value| Property { + key, + value: value.clone(), + }) + }) + .collect() + }) + .unwrap_or_else(|| { + properties + .into_iter() + .map(|(key, value)| Property { key, value }) + .collect() + }) } fn total_supply(&self) -> u32 { @@ -411,6 +512,10 @@ impl CommonCollectionOperations for RefungibleHandle { fn refungible_extensions(&self) -> Option<&dyn RefungibleExtensions> { Some(self) } + + fn total_pieces(&self, token: TokenId) -> Option { + >::total_pieces(self.id, token) + } } impl RefungibleExtensions for RefungibleHandle { diff --git a/pallets/refungible/src/erc.rs b/pallets/refungible/src/erc.rs index 022d5c98d7..8f36ac549d 100644 --- a/pallets/refungible/src/erc.rs +++ b/pallets/refungible/src/erc.rs @@ -14,34 +14,833 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . -use up_data_structs::TokenId; -use pallet_common::erc::CommonEvmHandler; -use pallet_evm::PrecompileHandle; +//! # Refungible Pallet EVM API for tokens +//! +//! Provides ERC-721 standart support implementation and EVM API for unique extensions for Refungible Pallet. +//! Method implementations are mostly doing parameter conversion and calling Refungible Pallet methods. 
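The `token_properties` accessor above returns either the requested subset of a token's properties or, when no key filter is supplied, everything stored for the token. The same lookup in plain Rust, with `String` and `Vec<u8>` standing in for the bounded on-chain key and value types:

use std::collections::BTreeMap;

fn token_properties(
    stored: &BTreeMap<String, Vec<u8>>,
    keys: Option<Vec<String>>,
) -> Vec<(String, Vec<u8>)> {
    match keys {
        // Return only the requested keys, skipping any that are not set.
        Some(keys) => keys
            .into_iter()
            .filter_map(|key| stored.get(&key).map(|value| (key, value.clone())))
            .collect(),
        // No filter: return every stored property.
        None => stored
            .iter()
            .map(|(key, value)| (key.clone(), value.clone()))
            .collect(),
    }
}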
-use crate::{Config, RefungibleHandle}; +extern crate alloc; -impl CommonEvmHandler for RefungibleHandle { - const CODE: &'static [u8] = include_bytes!("./stubs/UniqueRefungible.raw"); +use core::{ + char::{REPLACEMENT_CHARACTER, decode_utf16}, + convert::TryInto, +}; +use evm_coder::{ToLog, execution::*, generate_stubgen, solidity, solidity_interface, types::*, weight}; +use frame_support::{BoundedBTreeMap, BoundedVec}; +use pallet_common::{ + CollectionHandle, CollectionPropertyPermissions, + erc::{CommonEvmHandler, CollectionCall, static_property::key}, +}; +use pallet_evm::{account::CrossAccountId, PrecompileHandle}; +use pallet_evm_coder_substrate::{call, dispatch_to_evm}; +use pallet_structure::{SelfWeightOf as StructureWeight, weights::WeightInfo as _}; +use sp_core::H160; +use sp_std::{collections::btree_map::BTreeMap, vec::Vec, vec}; +use up_data_structs::{ + CollectionId, CollectionPropertiesVec, mapping::TokenAddressMapping, Property, PropertyKey, + PropertyKeyPermission, PropertyPermission, TokenId, +}; - fn call( - self, - _handle: &mut impl PrecompileHandle, - ) -> Option { - // TODO: Implement RFT variant of ERC721 - None +use crate::{ + AccountBalance, Balance, Config, CreateItemData, Pallet, RefungibleHandle, SelfWeightOf, + TokenProperties, TokensMinted, TotalSupply, weights::WeightInfo, +}; + +pub const ADDRESS_FOR_PARTIALLY_OWNED_TOKENS: H160 = H160::repeat_byte(0xff); + +/// @title A contract that allows to set and delete token properties and change token property permissions. +#[solidity_interface(name = TokenProperties)] +impl RefungibleHandle { + /// @notice Set permissions for token property. + /// @dev Throws error if `msg.sender` is not admin or owner of the collection. + /// @param key Property key. + /// @param isMutable Permission to mutate property. + /// @param collectionAdmin Permission to mutate property by collection admin if property is mutable. + /// @param tokenOwner Permission to mutate property by token owner if property is mutable. + fn set_token_property_permission( + &mut self, + caller: caller, + key: string, + is_mutable: bool, + collection_admin: bool, + token_owner: bool, + ) -> Result<()> { + let caller = T::CrossAccountId::from_eth(caller); + >::set_token_property_permissions( + self, + &caller, + vec![PropertyKeyPermission { + key: >::from(key) + .try_into() + .map_err(|_| "too long key")?, + permission: PropertyPermission { + mutable: is_mutable, + collection_admin, + token_owner, + }, + }], + ) + .map_err(dispatch_to_evm::) + } + + /// @notice Set token property value. + /// @dev Throws error if `msg.sender` has no permission to edit the property. + /// @param tokenId ID of the token. + /// @param key Property key. + /// @param value Property value. + fn set_property( + &mut self, + caller: caller, + token_id: uint256, + key: string, + value: bytes, + ) -> Result<()> { + let caller = T::CrossAccountId::from_eth(caller); + let token_id: u32 = token_id.try_into().map_err(|_| "token id overflow")?; + let key = >::from(key) + .try_into() + .map_err(|_| "key too long")?; + let value = value.try_into().map_err(|_| "value too long")?; + + let nesting_budget = self + .recorder + .weight_calls_budget(>::find_parent()); + + >::set_token_property( + self, + &caller, + TokenId(token_id), + Property { key, value }, + &nesting_budget, + ) + .map_err(dispatch_to_evm::) + } + + /// @notice Delete token property value. + /// @dev Throws error if `msg.sender` has no permission to edit the property. + /// @param tokenId ID of the token. 
+ /// @param key Property key. + fn delete_property(&mut self, token_id: uint256, caller: caller, key: string) -> Result<()> { + let caller = T::CrossAccountId::from_eth(caller); + let token_id: u32 = token_id.try_into().map_err(|_| "token id overflow")?; + let key = >::from(key) + .try_into() + .map_err(|_| "key too long")?; + + let nesting_budget = self + .recorder + .weight_calls_budget(>::find_parent()); + + >::delete_token_property(self, &caller, TokenId(token_id), key, &nesting_budget) + .map_err(dispatch_to_evm::) + } + + /// @notice Get token property value. + /// @dev Throws error if key not found + /// @param tokenId ID of the token. + /// @param key Property key. + /// @return Property value bytes + fn property(&self, token_id: uint256, key: string) -> Result { + let token_id: u32 = token_id.try_into().map_err(|_| "token id overflow")?; + let key = >::from(key) + .try_into() + .map_err(|_| "key too long")?; + + let props = >::get((self.id, token_id)); + let prop = props.get(&key).ok_or("key not found")?; + + Ok(prop.to_vec()) + } +} + +#[derive(ToLog)] +pub enum ERC721Events { + /// @dev This event emits when NFTs are created (`from` == 0) and destroyed + /// (`to` == 0). Exception: during contract creation, any number of RFTs + /// may be created and assigned without emitting Transfer. + Transfer { + #[indexed] + from: address, + #[indexed] + to: address, + #[indexed] + token_id: uint256, + }, + /// @dev Not supported + Approval { + #[indexed] + owner: address, + #[indexed] + approved: address, + #[indexed] + token_id: uint256, + }, + /// @dev Not supported + #[allow(dead_code)] + ApprovalForAll { + #[indexed] + owner: address, + #[indexed] + operator: address, + approved: bool, + }, +} + +#[derive(ToLog)] +pub enum ERC721UniqueMintableEvents { + /// @dev Not supported + #[allow(dead_code)] + MintingFinished {}, +} + +#[solidity_interface(name = ERC721Metadata)] +impl RefungibleHandle { + /// @notice A descriptive name for a collection of NFTs in this contract + /// @dev real implementation of this function lies in `ERC721UniqueExtensions` + #[solidity(hide, rename_selector = "name")] + fn name_proxy(&self) -> Result { + self.name() + } + + /// @notice An abbreviated name for NFTs in this contract + /// @dev real implementation of this function lies in `ERC721UniqueExtensions` + #[solidity(hide, rename_selector = "symbol")] + fn symbol_proxy(&self) -> Result { + self.symbol() + } + + /// @notice A distinct Uniform Resource Identifier (URI) for a given asset. + /// + /// @dev If the token has a `url` property and it is not empty, it is returned. + /// Else If the collection does not have a property with key `schemaName` or its value is not equal to `ERC721Metadata`, it return an error `tokenURI not set`. + /// If the collection property `baseURI` is empty or absent, return "" (empty string) + /// otherwise, if token property `suffix` present and is non-empty, return concatenation of baseURI and suffix + /// otherwise, return concatenation of `baseURI` and stringified token id (decimal stringifying, without paddings). 
+ /// + /// @return token's const_metadata + #[solidity(rename_selector = "tokenURI")] + fn token_uri(&self, token_id: uint256) -> Result { + let token_id_u32: u32 = token_id.try_into().map_err(|_| "token id overflow")?; + + match get_token_property(self, token_id_u32, &key::url()).as_deref() { + Err(_) | Ok("") => (), + Ok(url) => { + return Ok(url.into()); + } + }; + + let base_uri = + pallet_common::Pallet::::get_collection_property(self.id, &key::base_uri()) + .map(BoundedVec::into_inner) + .map(string::from_utf8) + .transpose() + .map_err(|e| { + Error::Revert(alloc::format!( + "Can not convert value \"baseURI\" to string with error \"{}\"", + e + )) + })?; + + let base_uri = match base_uri.as_deref() { + None | Some("") => { + return Ok("".into()); + } + Some(base_uri) => base_uri.into(), + }; + + Ok( + match get_token_property(self, token_id_u32, &key::suffix()).as_deref() { + Err(_) | Ok("") => base_uri, + Ok(suffix) => base_uri + suffix, + }, + ) + } +} + +/// @title ERC-721 Non-Fungible Token Standard, optional enumeration extension +/// @dev See https://eips.ethereum.org/EIPS/eip-721 +#[solidity_interface(name = ERC721Enumerable)] +impl RefungibleHandle { + /// @notice Enumerate valid RFTs + /// @param index A counter less than `totalSupply()` + /// @return The token identifier for the `index`th NFT, + /// (sort order not specified) + fn token_by_index(&self, index: uint256) -> Result { + Ok(index) + } + + /// Not implemented + fn token_of_owner_by_index(&self, _owner: address, _index: uint256) -> Result { + // TODO: Not implemetable + Err("not implemented".into()) + } + + /// @notice Count RFTs tracked by this contract + /// @return A count of valid RFTs tracked by this contract, where each one of + /// them has an assigned and queryable owner not equal to the zero address + fn total_supply(&self) -> Result { + self.consume_store_reads(1)?; + Ok(>::total_supply(self).into()) + } +} + +/// @title ERC-721 Non-Fungible Token Standard +/// @dev See https://github.com/ethereum/EIPs/blob/master/EIPS/eip-721.md +#[solidity_interface(name = ERC721, events(ERC721Events))] +impl RefungibleHandle { + /// @notice Count all RFTs assigned to an owner + /// @dev RFTs assigned to the zero address are considered invalid, and this + /// function throws for queries about the zero address. + /// @param owner An address for whom to query the balance + /// @return The number of RFTs owned by `owner`, possibly zero + fn balance_of(&self, owner: address) -> Result { + self.consume_store_reads(1)?; + let owner = T::CrossAccountId::from_eth(owner); + let balance = >::get((self.id, owner)); + Ok(balance.into()) + } + + /// @notice Find the owner of an RFT + /// @dev RFTs assigned to zero address are considered invalid, and queries + /// about them do throw. + /// Returns special 0xffffffffffffffffffffffffffffffffffffffff address for + /// the tokens that are partially owned. + /// @param tokenId The identifier for an RFT + /// @return The address of the owner of the RFT + fn owner_of(&self, token_id: uint256) -> Result
{ + self.consume_store_reads(2)?; + let token = token_id.try_into()?; + let owner = >::token_owner(self.id, token); + Ok(owner + .map(|address| *address.as_eth()) + .unwrap_or_else(|| ADDRESS_FOR_PARTIALLY_OWNED_TOKENS)) + } + + /// @dev Not implemented + fn safe_transfer_from_with_data( + &mut self, + _from: address, + _to: address, + _token_id: uint256, + _data: bytes, + ) -> Result { + // TODO: Not implemetable + Err("not implemented".into()) + } + + /// @dev Not implemented + fn safe_transfer_from( + &mut self, + _from: address, + _to: address, + _token_id: uint256, + ) -> Result { + // TODO: Not implemetable + Err("not implemented".into()) + } + + /// @notice Transfer ownership of an RFT -- THE CALLER IS RESPONSIBLE + /// TO CONFIRM THAT `to` IS CAPABLE OF RECEIVING NFTS OR ELSE + /// THEY MAY BE PERMANENTLY LOST + /// @dev Throws unless `msg.sender` is the current owner or an authorized + /// operator for this RFT. Throws if `from` is not the current owner. Throws + /// if `to` is the zero address. Throws if `tokenId` is not a valid RFT. + /// Throws if RFT pieces have multiple owners. + /// @param from The current owner of the NFT + /// @param to The new owner + /// @param tokenId The NFT to transfer + #[weight(>::transfer_from_creating_removing())] + fn transfer_from( + &mut self, + caller: caller, + from: address, + to: address, + token_id: uint256, + ) -> Result { + let caller = T::CrossAccountId::from_eth(caller); + let from = T::CrossAccountId::from_eth(from); + let to = T::CrossAccountId::from_eth(to); + let token = token_id.try_into()?; + let budget = self + .recorder + .weight_calls_budget(>::find_parent()); + + let balance = balance(&self, token, &from)?; + ensure_single_owner(&self, token, balance)?; + + >::transfer_from(self, &caller, &from, &to, token, balance, &budget) + .map_err(dispatch_to_evm::)?; + + Ok(()) + } + + /// @dev Not implemented + fn approve(&mut self, _caller: caller, _approved: address, _token_id: uint256) -> Result { + Err("not implemented".into()) + } + + /// @dev Not implemented + fn set_approval_for_all( + &mut self, + _caller: caller, + _operator: address, + _approved: bool, + ) -> Result { + // TODO: Not implemetable + Err("not implemented".into()) + } + + /// @dev Not implemented + fn get_approved(&self, _token_id: uint256) -> Result
{ + // TODO: Not implemetable + Err("not implemented".into()) + } + + /// @dev Not implemented + fn is_approved_for_all(&self, _owner: address, _operator: address) -> Result
{ + // TODO: Not implemetable + Err("not implemented".into()) + } +} + +/// Returns amount of pieces of `token` that `owner` have +pub fn balance( + collection: &RefungibleHandle, + token: TokenId, + owner: &T::CrossAccountId, +) -> Result { + collection.consume_store_reads(1)?; + let balance = >::get((collection.id, token, &owner)); + Ok(balance) +} + +/// Throws if `owner_balance` is lower than total amount of `token` pieces +pub fn ensure_single_owner( + collection: &RefungibleHandle, + token: TokenId, + owner_balance: u128, +) -> Result<()> { + collection.consume_store_reads(1)?; + let total_supply = >::get((collection.id, token)); + if total_supply != owner_balance { + return Err("token has multiple owners".into()); + } + Ok(()) +} + +/// @title ERC721 Token that can be irreversibly burned (destroyed). +#[solidity_interface(name = ERC721Burnable)] +impl RefungibleHandle { + /// @notice Burns a specific ERC721 token. + /// @dev Throws unless `msg.sender` is the current RFT owner, or an authorized + /// operator of the current owner. + /// @param tokenId The RFT to approve + #[weight(>::burn_item_fully())] + fn burn(&mut self, caller: caller, token_id: uint256) -> Result { + let caller = T::CrossAccountId::from_eth(caller); + let token = token_id.try_into()?; + + let balance = balance(&self, token, &caller)?; + ensure_single_owner(&self, token, balance)?; + + >::burn(self, &caller, token, balance).map_err(dispatch_to_evm::)?; + Ok(()) + } +} + +/// @title ERC721 minting logic. +#[solidity_interface(name = ERC721UniqueMintable, events(ERC721UniqueMintableEvents))] +impl RefungibleHandle { + fn minting_finished(&self) -> Result { + Ok(false) + } + + /// @notice Function to mint token. + /// @param to The new owner + /// @return uint256 The id of the newly minted token + #[weight(>::create_item())] + fn mint(&mut self, caller: caller, to: address) -> Result { + let token_id: uint256 = >::get(self.id) + .checked_add(1) + .ok_or("item id overflow")? + .into(); + self.mint_check_id(caller, to, token_id)?; + Ok(token_id) + } + + /// @notice Function to mint token. + /// @dev `tokenId` should be obtained with `nextTokenId` method, + /// unlike standard, you can't specify it manually + /// @param to The new owner + /// @param tokenId ID of the minted RFT + #[solidity(hide, rename_selector = "mint")] + #[weight(>::create_item())] + fn mint_check_id(&mut self, caller: caller, to: address, token_id: uint256) -> Result { + let caller = T::CrossAccountId::from_eth(caller); + let to = T::CrossAccountId::from_eth(to); + let token_id: u32 = token_id.try_into()?; + let budget = self + .recorder + .weight_calls_budget(>::find_parent()); + + if >::get(self.id) + .checked_add(1) + .ok_or("item id overflow")? + != token_id + { + return Err("item id should be next".into()); + } + + let users = [(to.clone(), 1)] + .into_iter() + .collect::>() + .try_into() + .unwrap(); + >::create_item( + self, + &caller, + CreateItemData:: { + users, + properties: CollectionPropertiesVec::default(), + }, + &budget, + ) + .map_err(dispatch_to_evm::)?; + + Ok(true) + } + + /// @notice Function to mint token with the given tokenUri. 
+ /// @param to The new owner + /// @param tokenUri Token URI that would be stored in the NFT properties + /// @return uint256 The id of the newly minted token + #[solidity(rename_selector = "mintWithTokenURI")] + #[weight(>::create_item())] + fn mint_with_token_uri( + &mut self, + caller: caller, + to: address, + token_uri: string, + ) -> Result { + let token_id: uint256 = >::get(self.id) + .checked_add(1) + .ok_or("item id overflow")? + .into(); + self.mint_with_token_uri_check_id(caller, to, token_id, token_uri)?; + Ok(token_id) + } + + /// @notice Function to mint token with the given tokenUri. + /// @dev `tokenId` should be obtained with `nextTokenId` method, + /// unlike standard, you can't specify it manually + /// @param to The new owner + /// @param tokenId ID of the minted RFT + /// @param tokenUri Token URI that would be stored in the RFT properties + #[solidity(hide, rename_selector = "mintWithTokenURI")] + #[weight(>::create_item())] + fn mint_with_token_uri_check_id( + &mut self, + caller: caller, + to: address, + token_id: uint256, + token_uri: string, + ) -> Result { + let key = key::url(); + let permission = get_token_permission::(self.id, &key)?; + if !permission.collection_admin { + return Err("Operation is not allowed".into()); + } + + let caller = T::CrossAccountId::from_eth(caller); + let to = T::CrossAccountId::from_eth(to); + let token_id: u32 = token_id.try_into().map_err(|_| "amount overflow")?; + let budget = self + .recorder + .weight_calls_budget(>::find_parent()); + + if >::get(self.id) + .checked_add(1) + .ok_or("item id overflow")? + != token_id + { + return Err("item id should be next".into()); + } + + let mut properties = CollectionPropertiesVec::default(); + properties + .try_push(Property { + key, + value: token_uri + .into_bytes() + .try_into() + .map_err(|_| "token uri is too long")?, + }) + .map_err(|e| Error::Revert(alloc::format!("Can't add property: {:?}", e)))?; + + let users = [(to.clone(), 1)] + .into_iter() + .collect::>() + .try_into() + .unwrap(); + >::create_item( + self, + &caller, + CreateItemData:: { users, properties }, + &budget, + ) + .map_err(dispatch_to_evm::)?; + Ok(true) + } + + /// @dev Not implemented + fn finish_minting(&mut self, _caller: caller) -> Result { + Err("not implementable".into()) } } -pub struct RefungibleTokenHandle(pub RefungibleHandle, pub TokenId); +fn get_token_property( + collection: &CollectionHandle, + token_id: u32, + key: &up_data_structs::PropertyKey, +) -> Result { + collection.consume_store_reads(1)?; + let properties = >::try_get((collection.id, token_id)) + .map_err(|_| Error::Revert("Token properties not found".into()))?; + if let Some(property) = properties.get(key) { + return Ok(string::from_utf8_lossy(property).into()); + } -impl CommonEvmHandler for RefungibleTokenHandle { - const CODE: &'static [u8] = include_bytes!("./stubs/UniqueRefungibleToken.raw"); + Err("Property tokenURI not found".into()) +} +fn get_token_permission( + collection_id: CollectionId, + key: &PropertyKey, +) -> Result { + let token_property_permissions = CollectionPropertyPermissions::::try_get(collection_id) + .map_err(|_| Error::Revert("No permissions for collection".into()))?; + let a = token_property_permissions + .get(key) + .map(Clone::clone) + .ok_or_else(|| { + let key = string::from_utf8(key.clone().into_inner()).unwrap_or_default(); + Error::Revert(alloc::format!("No permission for key {}", key)) + })?; + Ok(a) +} + +/// @title Unique extensions for ERC721. 
+#[solidity_interface(name = ERC721UniqueExtensions)] +impl RefungibleHandle { + /// @notice A descriptive name for a collection of NFTs in this contract + fn name(&self) -> Result { + Ok(decode_utf16(self.name.iter().copied()) + .map(|r| r.unwrap_or(REPLACEMENT_CHARACTER)) + .collect::()) + } + + /// @notice An abbreviated name for NFTs in this contract + fn symbol(&self) -> Result { + Ok(string::from_utf8_lossy(&self.token_prefix).into()) + } + + /// @notice Transfer ownership of an RFT + /// @dev Throws unless `msg.sender` is the current owner. Throws if `to` + /// is the zero address. Throws if `tokenId` is not a valid RFT. + /// Throws if RFT pieces have multiple owners. + /// @param to The new owner + /// @param tokenId The RFT to transfer + #[weight(>::transfer_creating_removing())] + fn transfer(&mut self, caller: caller, to: address, token_id: uint256) -> Result { + let caller = T::CrossAccountId::from_eth(caller); + let to = T::CrossAccountId::from_eth(to); + let token = token_id.try_into()?; + let budget = self + .recorder + .weight_calls_budget(>::find_parent()); + + let balance = balance(&self, token, &caller)?; + ensure_single_owner(&self, token, balance)?; + + >::transfer(self, &caller, &to, token, balance, &budget) + .map_err(dispatch_to_evm::)?; + Ok(()) + } + + /// @notice Burns a specific ERC721 token. + /// @dev Throws unless `msg.sender` is the current owner or an authorized + /// operator for this RFT. Throws if `from` is not the current owner. Throws + /// if `to` is the zero address. Throws if `tokenId` is not a valid RFT. + /// Throws if RFT pieces have multiple owners. + /// @param from The current owner of the RFT + /// @param tokenId The RFT to transfer + #[weight(>::burn_from())] + fn burn_from(&mut self, caller: caller, from: address, token_id: uint256) -> Result { + let caller = T::CrossAccountId::from_eth(caller); + let from = T::CrossAccountId::from_eth(from); + let token = token_id.try_into()?; + let budget = self + .recorder + .weight_calls_budget(>::find_parent()); + + let balance = balance(&self, token, &caller)?; + ensure_single_owner(&self, token, balance)?; + + >::burn_from(self, &caller, &from, token, balance, &budget) + .map_err(dispatch_to_evm::)?; + Ok(()) + } + + /// @notice Returns next free RFT ID. + fn next_token_id(&self) -> Result { + self.consume_store_reads(1)?; + Ok(>::get(self.id) + .checked_add(1) + .ok_or("item id overflow")? + .into()) + } + + /// @notice Function to mint multiple tokens. 
+ /// @dev `tokenIds` should be an array of consecutive numbers and first number + /// should be obtained with `nextTokenId` method + /// @param to The new owner + /// @param tokenIds IDs of the minted RFTs + #[solidity(hide)] + #[weight(>::create_multiple_items(token_ids.len() as u32))] + fn mint_bulk(&mut self, caller: caller, to: address, token_ids: Vec) -> Result { + let caller = T::CrossAccountId::from_eth(caller); + let to = T::CrossAccountId::from_eth(to); + let mut expected_index = >::get(self.id) + .checked_add(1) + .ok_or("item id overflow")?; + let budget = self + .recorder + .weight_calls_budget(>::find_parent()); + + let total_tokens = token_ids.len(); + for id in token_ids.into_iter() { + let id: u32 = id.try_into().map_err(|_| "token id overflow")?; + if id != expected_index { + return Err("item id should be next".into()); + } + expected_index = expected_index.checked_add(1).ok_or("item id overflow")?; + } + let users = [(to.clone(), 1)] + .into_iter() + .collect::>() + .try_into() + .unwrap(); + let create_item_data = CreateItemData:: { + users, + properties: CollectionPropertiesVec::default(), + }; + let data = (0..total_tokens) + .map(|_| create_item_data.clone()) + .collect(); + + >::create_multiple_items(self, &caller, data, &budget) + .map_err(dispatch_to_evm::)?; + Ok(true) + } + + /// @notice Function to mint multiple tokens with the given tokenUris. + /// @dev `tokenIds` is array of pairs of token ID and token URI. Token IDs should be consecutive + /// numbers and first number should be obtained with `nextTokenId` method + /// @param to The new owner + /// @param tokens array of pairs of token ID and token URI for minted tokens + #[solidity(hide, rename_selector = "mintBulkWithTokenURI")] + #[weight(>::create_multiple_items(tokens.len() as u32))] + fn mint_bulk_with_token_uri( + &mut self, + caller: caller, + to: address, + tokens: Vec<(uint256, string)>, + ) -> Result { + let key = key::url(); + let caller = T::CrossAccountId::from_eth(caller); + let to = T::CrossAccountId::from_eth(to); + let mut expected_index = >::get(self.id) + .checked_add(1) + .ok_or("item id overflow")?; + let budget = self + .recorder + .weight_calls_budget(>::find_parent()); + + let mut data = Vec::with_capacity(tokens.len()); + let users: BoundedBTreeMap<_, _, _> = [(to.clone(), 1)] + .into_iter() + .collect::>() + .try_into() + .unwrap(); + for (id, token_uri) in tokens { + let id: u32 = id.try_into().map_err(|_| "token id overflow")?; + if id != expected_index { + return Err("item id should be next".into()); + } + expected_index = expected_index.checked_add(1).ok_or("item id overflow")?; + + let mut properties = CollectionPropertiesVec::default(); + properties + .try_push(Property { + key: key.clone(), + value: token_uri + .into_bytes() + .try_into() + .map_err(|_| "token uri is too long")?, + }) + .map_err(|e| Error::Revert(alloc::format!("Can't add property: {:?}", e)))?; + + let create_item_data = CreateItemData:: { + users: users.clone(), + properties, + }; + data.push(create_item_data); + } + + >::create_multiple_items(self, &caller, data, &budget) + .map_err(dispatch_to_evm::)?; + Ok(true) + } + + /// Returns EVM address for refungible token + /// + /// @param token ID of the token + fn token_contract_address(&self, token: uint256) -> Result
{ + Ok(T::EvmTokenAddressMapping::token_to_address( + self.id, + token.try_into().map_err(|_| "token id overflow")?, + )) + } +} + +#[solidity_interface( + name = UniqueRefungible, + is( + ERC721, + ERC721Enumerable, + ERC721UniqueExtensions, + ERC721UniqueMintable, + ERC721Burnable, + ERC721Metadata(if(this.flags.erc721metadata)), + Collection(via(common_mut returns CollectionHandle)), + TokenProperties, + ) +)] +impl RefungibleHandle where T::AccountId: From<[u8; 32]> + AsRef<[u8; 32]> {} + +// Not a tests, but code generators +generate_stubgen!(gen_impl, UniqueRefungibleCall<()>, true); +generate_stubgen!(gen_iface, UniqueRefungibleCall<()>, false); + +impl CommonEvmHandler for RefungibleHandle +where + T::AccountId: From<[u8; 32]> + AsRef<[u8; 32]>, +{ + const CODE: &'static [u8] = include_bytes!("./stubs/UniqueRefungible.raw"); fn call( self, - _handle: &mut impl PrecompileHandle, + handle: &mut impl PrecompileHandle, ) -> Option { - // TODO: Implement RFT variant of ERC20 - None + call::, _, _>(handle, self) } } diff --git a/pallets/refungible/src/erc_token.rs b/pallets/refungible/src/erc_token.rs new file mode 100644 index 0000000000..5bc182e884 --- /dev/null +++ b/pallets/refungible/src/erc_token.rs @@ -0,0 +1,276 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +//! # Refungible Pallet EVM API for token pieces +//! +//! Provides ERC-20 standart support implementation and EVM API for unique extensions for Refungible Pallet. +//! Method implementations are mostly doing parameter conversion and calling Nonfungible Pallet methods. + +extern crate alloc; + +#[cfg(not(feature = "std"))] +use alloc::format; + +use core::{ + char::{REPLACEMENT_CHARACTER, decode_utf16}, + convert::TryInto, + ops::Deref, +}; +use evm_coder::{ToLog, execution::*, generate_stubgen, solidity_interface, types::*, weight}; +use pallet_common::{ + CommonWeightInfo, + erc::{CommonEvmHandler, PrecompileResult}, + eth::collection_id_to_address, +}; +use pallet_evm::{account::CrossAccountId, PrecompileHandle}; +use pallet_evm_coder_substrate::{call, dispatch_to_evm, WithRecorder}; +use pallet_structure::{SelfWeightOf as StructureWeight, weights::WeightInfo as _}; +use sp_std::vec::Vec; +use up_data_structs::TokenId; + +use crate::{ + Allowance, Balance, common::CommonWeights, Config, Pallet, RefungibleHandle, SelfWeightOf, + TotalSupply, weights::WeightInfo, +}; + +pub struct RefungibleTokenHandle(pub RefungibleHandle, pub TokenId); + +#[solidity_interface(name = ERC1633)] +impl RefungibleTokenHandle { + fn parent_token(&self) -> Result
{ + Ok(collection_id_to_address(self.id)) + } + + fn parent_token_id(&self) -> Result { + Ok(self.1.into()) + } +} + +#[derive(ToLog)] +pub enum ERC20Events { + /// @dev This event is emitted when the amount of tokens (value) is sent + /// from the from address to the to address. In the case of minting new + /// tokens, the transfer is usually from the 0 address while in the case + /// of burning tokens the transfer is to 0. + Transfer { + #[indexed] + from: address, + #[indexed] + to: address, + value: uint256, + }, + /// @dev This event is emitted when the amount of tokens (value) is approved + /// by the owner to be used by the spender. + Approval { + #[indexed] + owner: address, + #[indexed] + spender: address, + value: uint256, + }, +} + +/// @title Standard ERC20 token +/// +/// @dev Implementation of the basic standard token. +/// https://github.com/ethereum/EIPs/blob/master/EIPS/eip-20.md +#[solidity_interface(name = ERC20, events(ERC20Events))] +impl RefungibleTokenHandle { + /// @return the name of the token. + fn name(&self) -> Result { + Ok(decode_utf16(self.name.iter().copied()) + .map(|r| r.unwrap_or(REPLACEMENT_CHARACTER)) + .collect::()) + } + + /// @return the symbol of the token. + fn symbol(&self) -> Result { + Ok(string::from_utf8_lossy(&self.token_prefix).into()) + } + + /// @dev Total number of tokens in existence + fn total_supply(&self) -> Result { + self.consume_store_reads(1)?; + Ok(>::get((self.id, self.1)).into()) + } + + /// @dev Not supported + fn decimals(&self) -> Result { + // Decimals aren't supported for refungible tokens + Ok(0) + } + + /// @dev Gets the balance of the specified address. + /// @param owner The address to query the balance of. + /// @return An uint256 representing the amount owned by the passed address. + fn balance_of(&self, owner: address) -> Result { + self.consume_store_reads(1)?; + let owner = T::CrossAccountId::from_eth(owner); + let balance = >::get((self.id, self.1, owner)); + Ok(balance.into()) + } + + /// @dev Transfer token for a specified address + /// @param to The address to transfer to. + /// @param amount The amount to be transferred. + #[weight(>::transfer())] + fn transfer(&mut self, caller: caller, to: address, amount: uint256) -> Result { + let caller = T::CrossAccountId::from_eth(caller); + let to = T::CrossAccountId::from_eth(to); + let amount = amount.try_into().map_err(|_| "amount overflow")?; + let budget = self + .recorder + .weight_calls_budget(>::find_parent()); + + >::transfer(self, &caller, &to, self.1, amount, &budget) + .map_err(dispatch_to_evm::)?; + Ok(true) + } + + /// @dev Transfer tokens from one address to another + /// @param from address The address which you want to send tokens from + /// @param to address The address which you want to transfer to + /// @param amount uint256 the amount of tokens to be transferred + #[weight(>::transfer_from())] + fn transfer_from( + &mut self, + caller: caller, + from: address, + to: address, + amount: uint256, + ) -> Result { + let caller = T::CrossAccountId::from_eth(caller); + let from = T::CrossAccountId::from_eth(from); + let to = T::CrossAccountId::from_eth(to); + let amount = amount.try_into().map_err(|_| "amount overflow")?; + let budget = self + .recorder + .weight_calls_budget(>::find_parent()); + + >::transfer_from(self, &caller, &from, &to, self.1, amount, &budget) + .map_err(dispatch_to_evm::)?; + Ok(true) + } + + /// @dev Approve the passed address to spend the specified amount of tokens on behalf of `msg.sender`. 
+ /// Beware that changing an allowance with this method brings the risk that someone may use both the old + /// and the new allowance by unfortunate transaction ordering. One possible solution to mitigate this + /// race condition is to first reduce the spender's allowance to 0 and set the desired value afterwards: + /// https://github.com/ethereum/EIPs/issues/20#issuecomment-263524729 + /// @param spender The address which will spend the funds. + /// @param amount The amount of tokens to be spent. + #[weight(>::approve())] + fn approve(&mut self, caller: caller, spender: address, amount: uint256) -> Result { + let caller = T::CrossAccountId::from_eth(caller); + let spender = T::CrossAccountId::from_eth(spender); + let amount = amount.try_into().map_err(|_| "amount overflow")?; + + >::set_allowance(self, &caller, &spender, self.1, amount) + .map_err(dispatch_to_evm::)?; + Ok(true) + } + + /// @dev Function to check the amount of tokens that an owner allowed to a spender. + /// @param owner address The address which owns the funds. + /// @param spender address The address which will spend the funds. + /// @return A uint256 specifying the amount of tokens still available for the spender. + fn allowance(&self, owner: address, spender: address) -> Result { + self.consume_store_reads(1)?; + let owner = T::CrossAccountId::from_eth(owner); + let spender = T::CrossAccountId::from_eth(spender); + + Ok(>::get((self.id, self.1, owner, spender)).into()) + } +} + +#[solidity_interface(name = ERC20UniqueExtensions)] +impl RefungibleTokenHandle { + /// @dev Function that burns an amount of the token of a given account, + /// deducting from the sender's allowance for said account. + /// @param from The account whose tokens will be burnt. + /// @param amount The amount that will be burnt. + #[weight(>::burn_from())] + fn burn_from(&mut self, caller: caller, from: address, amount: uint256) -> Result { + let caller = T::CrossAccountId::from_eth(caller); + let from = T::CrossAccountId::from_eth(from); + let amount = amount.try_into().map_err(|_| "amount overflow")?; + let budget = self + .recorder + .weight_calls_budget(>::find_parent()); + + >::burn_from(self, &caller, &from, self.1, amount, &budget) + .map_err(dispatch_to_evm::)?; + Ok(true) + } + + /// @dev Function that changes total amount of the tokens. + /// Throws if `msg.sender` doesn't owns all of the tokens. + /// @param amount New total amount of the tokens. 
+ #[weight(>::repartition_item())] + fn repartition(&mut self, caller: caller, amount: uint256) -> Result { + let caller = T::CrossAccountId::from_eth(caller); + let amount = amount.try_into().map_err(|_| "amount overflow")?; + + >::repartition(self, &caller, self.1, amount).map_err(dispatch_to_evm::)?; + Ok(true) + } +} + +impl RefungibleTokenHandle { + pub fn into_inner(self) -> RefungibleHandle { + self.0 + } + pub fn common_mut(&mut self) -> &mut RefungibleHandle { + &mut self.0 + } +} + +impl WithRecorder for RefungibleTokenHandle { + fn recorder(&self) -> &pallet_evm_coder_substrate::SubstrateRecorder { + self.0.recorder() + } + fn into_recorder(self) -> pallet_evm_coder_substrate::SubstrateRecorder { + self.0.into_recorder() + } +} + +impl Deref for RefungibleTokenHandle { + type Target = RefungibleHandle; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +#[solidity_interface( + name = UniqueRefungibleToken, + is(ERC20, ERC20UniqueExtensions, ERC1633) +)] +impl RefungibleTokenHandle where T::AccountId: From<[u8; 32]> {} + +generate_stubgen!(gen_impl, UniqueRefungibleTokenCall<()>, true); +generate_stubgen!(gen_iface, UniqueRefungibleTokenCall<()>, false); + +impl CommonEvmHandler for RefungibleTokenHandle +where + T::AccountId: From<[u8; 32]>, +{ + const CODE: &'static [u8] = include_bytes!("./stubs/UniqueRefungibleToken.raw"); + + fn call(self, handle: &mut impl PrecompileHandle) -> Option { + call::, _, _>(handle, self) + } +} diff --git a/pallets/refungible/src/lib.rs b/pallets/refungible/src/lib.rs index 3265f04e0a..1dc524a306 100644 --- a/pallets/refungible/src/lib.rs +++ b/pallets/refungible/src/lib.rs @@ -14,32 +14,130 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . +//! # Refungible Pallet +//! +//! The Refungible pallet provides functionality for handling refungible collections and tokens. +//! +//! - [`Config`] +//! - [`RefungibleHandle`] +//! - [`Pallet`] +//! - [`CommonWeights`](common::CommonWeights) +//! +//! ## Overview +//! +//! The Refungible pallet provides functions for: +//! +//! - RFT collection creation and removal +//! - Minting and burning of RFT tokens +//! - Partition and repartition of RFT tokens +//! - Retrieving number of pieces of RFT token +//! - Retrieving account balances +//! - Transfering RFT token pieces +//! - Burning RFT token pieces +//! - Setting and checking allowance for RFT tokens +//! +//! ### Terminology +//! +//! - **RFT token:** Non fungible token that was partitioned to pieces. If an account owns all +//! of the RFT token pieces than it owns the RFT token and can repartition it. +//! +//! - **RFT Collection:** A collection of RFT tokens. All RFT tokens are part of a collection. +//! Each collection has its own settings and set of permissions. +//! +//! - **RFT token piece:** A fungible part of an RFT token. +//! +//! - **Balance:** RFT token pieces owned by an account +//! +//! - **Allowance:** Maximum number of RFT token pieces that one account is allowed to +//! transfer from the balance of another account +//! +//! - **Burning:** The process of “deleting” a token from a collection or removing token pieces from +//! an account balance. +//! +//! ### Implementations +//! +//! The Refungible pallet provides implementations for the following traits. If these traits provide +//! the functionality that you need, then you can avoid coupling with the Refungible pallet. +//! +//! 
- [`CommonWeightInfo`](pallet_common::CommonWeightInfo): Functions for retrieval of transaction weight +//! - [`CommonCollectionOperations`](pallet_common::CommonCollectionOperations): Functions for dealing +//! with collections +//! - [`RefungibleExtensions`](pallet_common::RefungibleExtensions): Functions specific for refungible +//! collection +//! +//! ## Interface +//! +//! ### Dispatchable Functions +//! +//! - `init_collection` - Create RFT collection. RFT collection can be configured to allow or deny access for +//! some accounts. +//! - `destroy_collection` - Destroy exising RFT collection. There should be no tokens in the collection. +//! - `burn` - Burn some amount of RFT token pieces owned by account. Burns the RFT token if no pieces left. +//! - `transfer` - Transfer some amount of RFT token pieces. Transfers should be enabled for RFT collection. +//! Nests the RFT token if RFT token pieces are sent to another token. +//! - `create_item` - Mint RFT token in collection. Sender should have permission to mint tokens. +//! - `set_allowance` - Set allowance for another account to transfer balance from sender's account. +//! - `repartition` - Repartition token to selected number of pieces. Sender should own all existing pieces. +//! +//! ## Assumptions +//! +//! * Total number of pieces for one token shouldn't exceed `up_data_structs::MAX_REFUNGIBLE_PIECES`. +//! * Total number of tokens of all types shouldn't be greater than `up_data_structs::MAX_TOKEN_PREFIX_LENGTH`. +//! * Sender should be in collection's allow list to perform operations on tokens. + #![cfg_attr(not(feature = "std"), no_std)] -use frame_support::{ensure, BoundedVec}; -use up_data_structs::{ - AccessMode, CollectionId, CustomDataLimit, MAX_REFUNGIBLE_PIECES, TokenId, - CreateCollectionData, CreateRefungibleExData, mapping::TokenAddressMapping, budget::Budget, +use crate::erc_token::ERC20Events; +use crate::erc::ERC721Events; + +use codec::{Encode, Decode, MaxEncodedLen}; +use core::ops::Deref; +use derivative::Derivative; +use evm_coder::ToLog; +use frame_support::{ + BoundedBTreeMap, BoundedVec, ensure, fail, storage::with_transaction, transactional, + pallet_prelude::ConstU32, +}; +use pallet_evm::{account::CrossAccountId, Pallet as PalletEvm}; +use pallet_evm_coder_substrate::WithRecorder; +use pallet_common::{ + CommonCollectionOperations, Error as CommonError, eth::collection_id_to_address, + Event as CommonEvent, Pallet as PalletCommon, }; -use pallet_evm::account::CrossAccountId; -use pallet_common::{Error as CommonError, Event as CommonEvent, Pallet as PalletCommon}; use pallet_structure::Pallet as PalletStructure; -use sp_runtime::{ArithmeticError, DispatchError, DispatchResult}; -use sp_std::{vec::Vec, vec, collections::btree_map::BTreeMap}; -use core::ops::Deref; -use codec::{Encode, Decode, MaxEncodedLen}; use scale_info::TypeInfo; +use sp_core::H160; +use sp_runtime::{ArithmeticError, DispatchError, DispatchResult, TransactionOutcome}; +use sp_std::{vec::Vec, vec, collections::btree_map::BTreeMap}; +use up_data_structs::{ + AccessMode, budget::Budget, CollectionId, CollectionFlags, CollectionPropertiesVec, + CreateCollectionData, CustomDataLimit, mapping::TokenAddressMapping, MAX_ITEMS_PER_BATCH, + MAX_REFUNGIBLE_PIECES, Property, PropertyKey, PropertyKeyPermission, PropertyPermission, + PropertyScope, PropertyValue, TokenId, TrySetProperty, +}; pub use pallet::*; #[cfg(feature = "runtime-benchmarks")] pub mod benchmarking; pub mod common; pub mod erc; +pub mod erc_token; pub mod weights; + 
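As the dispatchable list above notes, `repartition` is only allowed for an account holding every existing piece, and the piece count may not exceed `up_data_structs::MAX_REFUNGIBLE_PIECES`. A toy model of that rule (the bound is illustrative; error names follow the pallet's `Error` enum):

use std::collections::BTreeMap;

// Illustrative bound; the real limit is `up_data_structs::MAX_REFUNGIBLE_PIECES`.
const MAX_PIECES: u128 = 1_000_000_000_000_000_000_000;

struct TokenModel {
    total_pieces: u128,
    balances: BTreeMap<String, u128>,
}

// Repartition succeeds only for the account that owns every piece of the token.
fn repartition(token: &mut TokenModel, who: &str, new_pieces: u128) -> Result<(), &'static str> {
    if new_pieces > MAX_PIECES {
        return Err("WrongRefungiblePieces");
    }
    if token.balances.get(who).copied().unwrap_or(0) != token.total_pieces {
        return Err("RepartitionWhileNotOwningAllPieces");
    }
    token.total_pieces = new_pieces;
    token.balances.insert(who.to_string(), new_pieces);
    Ok(())
}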
+#[derive(Derivative, Clone)] +pub struct CreateItemData { + #[derivative(Debug(format_with = "bounded::map_debug"))] + pub users: BoundedBTreeMap>, + #[derivative(Debug(format_with = "bounded::vec_debug"))] + pub properties: CollectionPropertiesVec, +} pub(crate) type SelfWeightOf = ::WeightInfo; +/// Token data, stored independently from other data used to describe it +/// for the convenience of database access. Notably contains the token metadata. #[struct_versioning::versioned(version = 2, upper)] #[derive(Encode, Decode, Default, TypeInfo, MaxEncodedLen)] +#[deprecated(since = "0.2.0", note = "ItemData is no more contains usefull data")] pub struct ItemData { pub const_data: BoundedVec, @@ -62,13 +160,13 @@ pub mod pallet { pub enum Error { /// Not Refungible item data used to mint in Refungible collection. NotRefungibleDataUsedToMintFungibleCollectionToken, - /// Maximum refungibility exceeded + /// Maximum refungibility exceeded. WrongRefungiblePieces, - /// Refungible token can't be repartitioned by user who isn't owns all pieces + /// Refungible token can't be repartitioned by user who isn't owns all pieces. RepartitionWhileNotOwningAllPieces, - /// Refungible token can't nest other tokens + /// Refungible token can't nest other tokens. RefungibleDisallowsNesting, - /// Setting item properties is not allowed + /// Setting item properties is not allowed. SettingPropertiesNotAllowed, } @@ -79,27 +177,44 @@ pub mod pallet { type WeightInfo: WeightInfo; } - const STORAGE_VERSION: StorageVersion = StorageVersion::new(1); + const STORAGE_VERSION: StorageVersion = StorageVersion::new(2); #[pallet::pallet] #[pallet::storage_version(STORAGE_VERSION)] #[pallet::generate_store(pub(super) trait Store)] pub struct Pallet(_); + /// Total amount of minted tokens in a collection. #[pallet::storage] pub type TokensMinted = StorageMap; + + /// Amount of tokens burnt in a collection. #[pallet::storage] pub type TokensBurnt = StorageMap; + /// Token data, used to partially describe a token. + // TODO: remove #[pallet::storage] + #[deprecated(since = "0.2.0", note = "ItemData is no more contains usefull data")] pub type TokenData = StorageNMap< Key = (Key, Key), Value = ItemData, QueryKind = ValueQuery, >; + /// Amount of pieces a refungible token is split into. + #[pallet::storage] + #[pallet::getter(fn token_properties)] + pub type TokenProperties = StorageNMap< + Key = (Key, Key), + Value = up_data_structs::Properties, + QueryKind = ValueQuery, + OnEmpty = up_data_structs::TokenProperties, + >; + + /// Total amount of pieces for token #[pallet::storage] pub type TotalSupply = StorageNMap< Key = (Key, Key), @@ -107,7 +222,7 @@ pub mod pallet { QueryKind = ValueQuery, >; - /// Used to enumerate tokens owned by account + /// Used to enumerate tokens owned by account. #[pallet::storage] pub type Owned = StorageNMap< Key = ( @@ -119,6 +234,7 @@ pub mod pallet { QueryKind = ValueQuery, >; + /// Amount of tokens (not pieces) partially owned by an account within a collection. #[pallet::storage] pub type AccountBalance = StorageNMap< Key = ( @@ -130,6 +246,7 @@ pub mod pallet { QueryKind = ValueQuery, >; + /// Amount of token pieces owned by account. #[pallet::storage] pub type Balance = StorageNMap< Key = ( @@ -142,6 +259,7 @@ pub mod pallet { QueryKind = ValueQuery, >; + /// Allowance set by a token owner for another user to perform one of certain transactions on a number of pieces of a token. 
#[pallet::storage] pub type Allowance = StorageNMap< Key = ( @@ -159,9 +277,13 @@ pub mod pallet { #[pallet::hooks] impl Hooks> for Pallet { fn on_runtime_upgrade() -> Weight { - StorageVersion::new(1).put::>(); + let storage_version = StorageVersion::get::>(); + if storage_version < StorageVersion::new(2) { + >::remove_all(None); + } + StorageVersion::new(2).put::>(); - 0 + Weight::zero() } } } @@ -174,7 +296,11 @@ impl RefungibleHandle { pub fn into_inner(self) -> pallet_common::CollectionHandle { self.0 } + pub fn common_mut(&mut self) -> &mut pallet_common::CollectionHandle { + &mut self.0 + } } + impl Deref for RefungibleHandle { type Target = pallet_common::CollectionHandle; @@ -183,23 +309,77 @@ impl Deref for RefungibleHandle { } } +impl WithRecorder for RefungibleHandle { + fn recorder(&self) -> &pallet_evm_coder_substrate::SubstrateRecorder { + self.0.recorder() + } + fn into_recorder(self) -> pallet_evm_coder_substrate::SubstrateRecorder { + self.0.into_recorder() + } +} + impl Pallet { + /// Get number of RFT tokens in collection pub fn total_supply(collection: &RefungibleHandle) -> u32 { >::get(collection.id) - >::get(collection.id) } + + /// Check that RFT token exists + /// + /// - `token`: Token ID. pub fn token_exists(collection: &RefungibleHandle, token: TokenId) -> bool { >::contains_key((collection.id, token)) } + + pub fn set_scoped_token_property( + collection_id: CollectionId, + token_id: TokenId, + scope: PropertyScope, + property: Property, + ) -> DispatchResult { + TokenProperties::::try_mutate((collection_id, token_id), |properties| { + properties.try_scoped_set(scope, property.key, property.value) + }) + .map_err(>::from)?; + + Ok(()) + } + + pub fn set_scoped_token_properties( + collection_id: CollectionId, + token_id: TokenId, + scope: PropertyScope, + properties: impl Iterator, + ) -> DispatchResult { + TokenProperties::::try_mutate((collection_id, token_id), |stored_properties| { + stored_properties.try_scoped_set_from_iter(scope, properties) + }) + .map_err(>::from)?; + + Ok(()) + } } // unchecked calls skips any permission checks impl Pallet { + /// Create RFT collection + /// + /// `init_collection` will take non-refundable deposit for collection creation. + /// + /// - `data`: Contains settings for collection limits and permissions. pub fn init_collection( owner: T::CrossAccountId, + payer: T::CrossAccountId, data: CreateCollectionData, + flags: CollectionFlags, ) -> Result { - >::init_collection(owner, data, false) + >::init_collection(owner, payer, data, flags) } + + /// Destroy RFT collection + /// + /// `destroy_collection` will throw error if collection contains any tokens. + /// Only owner can destroy collection. 
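+ /// On success the collection's token counters and per-collection token storage are cleared.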
pub fn destroy_collection( collection: RefungibleHandle, sender: &T::CrossAccountId, @@ -216,35 +396,56 @@ impl Pallet { >::remove(id); >::remove(id); - >::remove_prefix((id,), None); - >::remove_prefix((id,), None); - >::remove_prefix((id,), None); - >::remove_prefix((id,), None); - >::remove_prefix((id,), None); - >::remove_prefix((id,), None); + let _ = >::clear_prefix((id,), u32::MAX, None); + let _ = >::clear_prefix((id,), u32::MAX, None); + let _ = >::clear_prefix((id,), u32::MAX, None); + let _ = >::clear_prefix((id,), u32::MAX, None); + let _ = >::clear_prefix((id,), u32::MAX, None); Ok(()) } fn collection_has_tokens(collection_id: CollectionId) -> bool { - >::iter_prefix((collection_id,)) + >::iter_prefix((collection_id,)) .next() .is_some() } - pub fn burn_token(collection: &RefungibleHandle, token_id: TokenId) -> DispatchResult { + pub fn burn_token_unchecked( + collection: &RefungibleHandle, + owner: &T::CrossAccountId, + token_id: TokenId, + ) -> DispatchResult { let burnt = >::get(collection.id) .checked_add(1) .ok_or(ArithmeticError::Overflow)?; >::insert(collection.id, burnt); - >::remove((collection.id, token_id)); + >::remove((collection.id, token_id)); >::remove((collection.id, token_id)); - >::remove_prefix((collection.id, token_id), None); - >::remove_prefix((collection.id, token_id), None); - // TODO: ERC721 transfer event + let _ = >::clear_prefix((collection.id, token_id), u32::MAX, None); + let _ = >::clear_prefix((collection.id, token_id), u32::MAX, None); + >::deposit_log( + ERC721Events::Transfer { + from: *owner.as_eth(), + to: H160::default(), + token_id: token_id.into(), + } + .to_log(collection_id_to_address(collection.id)), + ); Ok(()) } + /// Burn RFT token pieces + /// + /// `burn` will decrease the total amount of token pieces and the amount owned by the sender. + /// `burn` can be called even if there are multiple owners of the RFT token. + /// If the sender has no pieces left after `burn`, then they will stop being + /// one of the owners of the token. If there is no account that owns any pieces of + /// the token, then the token will be burned too. + /// + /// - `amount`: Amount of token pieces to burn.
+ /// - `token`: Token who's pieces should be burned + /// - `collection`: Collection that contains the token pub fn burn( collection: &RefungibleHandle, owner: &T::CrossAccountId, @@ -272,7 +473,15 @@ impl Pallet { >::remove((collection.id, owner, token)); >::unnest_if_nested(owner, collection.id, token); >::insert((collection.id, owner), account_balance); - Self::burn_token(collection, token)?; + Self::burn_token_unchecked(collection, owner, token)?; + >::deposit_log( + ERC20Events::Transfer { + from: *owner.as_eth(), + to: H160::default(), + value: amount.into(), + } + .to_log(collection_id_to_address(collection.id)), + ); >::deposit_event(CommonEvent::ItemDestroyed( collection.id, token, @@ -301,11 +510,33 @@ impl Pallet { >::unnest_if_nested(owner, collection.id, token); >::remove((collection.id, token, owner)); >::insert((collection.id, owner), account_balance); + + if let Some(user) = Self::token_owner(collection.id, token) { + >::deposit_log( + ERC721Events::Transfer { + from: erc::ADDRESS_FOR_PARTIALLY_OWNED_TOKENS, + to: *user.as_eth(), + token_id: token.into(), + } + .to_log(collection_id_to_address(collection.id)), + ); + } } else { >::insert((collection.id, token, owner), balance); } >::insert((collection.id, token), total_supply); - // TODO: ERC20 transfer event + + >::deposit_log( + ERC20Events::Transfer { + from: *owner.as_eth(), + to: H160::default(), + value: amount.into(), + } + .to_log(T::EvmTokenAddressMapping::token_to_address( + collection.id, + token, + )), + ); >::deposit_event(CommonEvent::ItemDestroyed( collection.id, token, @@ -315,6 +546,179 @@ impl Pallet { Ok(()) } + #[transactional] + fn modify_token_properties( + collection: &RefungibleHandle, + sender: &T::CrossAccountId, + token_id: TokenId, + properties: impl Iterator)>, + is_token_create: bool, + nesting_budget: &dyn Budget, + ) -> DispatchResult { + let is_collection_admin = || collection.is_owner_or_admin(sender); + let is_token_owner = || -> Result { + let balance = collection.balance(sender.clone(), token_id); + let total_pieces: u128 = + Self::total_pieces(collection.id, token_id).unwrap_or(u128::MAX); + if balance != total_pieces { + return Ok(false); + } + + let is_bundle_owner = >::check_indirectly_owned( + sender.clone(), + collection.id, + token_id, + None, + nesting_budget, + )?; + + Ok(is_bundle_owner) + }; + + for (key, value) in properties { + let permission = >::property_permissions(collection.id) + .get(&key) + .cloned() + .unwrap_or_else(PropertyPermission::none); + + let is_property_exists = TokenProperties::::get((collection.id, token_id)) + .get(&key) + .is_some(); + + match permission { + PropertyPermission { mutable: false, .. } if is_property_exists => { + return Err(>::NoPermission.into()); + } + + PropertyPermission { + collection_admin, + token_owner, + .. + } => { + //TODO: investigate threats during public minting. 
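+ // A property write is allowed in one of three cases: the token is being created
+ // right now and the key is writable by a collection admin or token owner (only
+ // setting a value, not deleting one, is possible at creation time); the caller
+ // is the collection owner or an admin and the key grants admin access; or the
+ // caller holds all of the token's pieces (see the `is_token_owner` closure above)
+ // and the key grants token-owner access.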
+ let is_token_create = + is_token_create && (collection_admin || token_owner) && value.is_some(); + if !(is_token_create + || (collection_admin && is_collection_admin()) + || (token_owner && is_token_owner()?)) + { + fail!(>::NoPermission); + } + } + } + + match value { + Some(value) => { + >::try_mutate((collection.id, token_id), |properties| { + properties.try_set(key.clone(), value) + }) + .map_err(>::from)?; + + >::deposit_event(CommonEvent::TokenPropertySet( + collection.id, + token_id, + key, + )); + } + None => { + >::try_mutate((collection.id, token_id), |properties| { + properties.remove(&key) + }) + .map_err(>::from)?; + + >::deposit_event(CommonEvent::TokenPropertyDeleted( + collection.id, + token_id, + key, + )); + } + } + } + + Ok(()) + } + + pub fn set_token_properties( + collection: &RefungibleHandle, + sender: &T::CrossAccountId, + token_id: TokenId, + properties: impl Iterator, + is_token_create: bool, + nesting_budget: &dyn Budget, + ) -> DispatchResult { + Self::modify_token_properties( + collection, + sender, + token_id, + properties.map(|p| (p.key, Some(p.value))), + is_token_create, + nesting_budget, + ) + } + + pub fn set_token_property( + collection: &RefungibleHandle, + sender: &T::CrossAccountId, + token_id: TokenId, + property: Property, + nesting_budget: &dyn Budget, + ) -> DispatchResult { + let is_token_create = false; + + Self::set_token_properties( + collection, + sender, + token_id, + [property].into_iter(), + is_token_create, + nesting_budget, + ) + } + + pub fn delete_token_properties( + collection: &RefungibleHandle, + sender: &T::CrossAccountId, + token_id: TokenId, + property_keys: impl Iterator, + nesting_budget: &dyn Budget, + ) -> DispatchResult { + let is_token_create = false; + + Self::modify_token_properties( + collection, + sender, + token_id, + property_keys.into_iter().map(|key| (key, None)), + is_token_create, + nesting_budget, + ) + } + + pub fn delete_token_property( + collection: &RefungibleHandle, + sender: &T::CrossAccountId, + token_id: TokenId, + property_key: PropertyKey, + nesting_budget: &dyn Budget, + ) -> DispatchResult { + Self::delete_token_properties( + collection, + sender, + token_id, + [property_key].into_iter(), + nesting_budget, + ) + } + + /// Transfer RFT token pieces from one account to another. + /// + /// If the sender is no longer owns any pieces after the `transfer` than she stops being an owner of the token. + /// + /// - `from`: Owner of token pieces to transfer. + /// - `to`: Recepient of transfered token pieces. + /// - `amount`: Amount of token pieces to transfer. 
+ /// - `token`: Token whos pieces should be transfered + /// - `collection`: Collection that contains the token pub fn transfer( collection: &RefungibleHandle, from: &T::CrossAccountId, @@ -334,12 +738,13 @@ impl Pallet { } >::ensure_correct_receiver(to)?; - let balance_from = >::get((collection.id, token, from)) + let initial_balance_from = >::get((collection.id, token, from)); + let updated_balance_from = initial_balance_from .checked_sub(amount) .ok_or(>::TokenValueTooLow)?; let mut create_target = false; let from_to_differ = from != to; - let balance_to = if from != to { + let updated_balance_to = if from != to { let old_balance = >::get((collection.id, token, to)); if old_balance == 0 { create_target = true; @@ -353,7 +758,7 @@ impl Pallet { None }; - let account_balance_from = if balance_from == 0 { + let account_balance_from = if updated_balance_from == 0 { Some( >::get((collection.id, from)) .checked_sub(1) @@ -389,15 +794,15 @@ impl Pallet { nesting_budget, )?; - if let Some(balance_to) = balance_to { + if let Some(updated_balance_to) = updated_balance_to { // from != to - if balance_from == 0 { + if updated_balance_from == 0 { >::remove((collection.id, token, from)); >::unnest_if_nested(from, collection.id, token); } else { - >::insert((collection.id, token, from), balance_from); + >::insert((collection.id, token, from), updated_balance_from); } - >::insert((collection.id, token, to), balance_to); + >::insert((collection.id, token, to), updated_balance_to); if let Some(account_balance_from) = account_balance_from { >::insert((collection.id, from), account_balance_from); >::remove((collection.id, from, token)); @@ -408,7 +813,18 @@ impl Pallet { } } - // TODO: ERC20 transfer event + >::deposit_log( + ERC20Events::Transfer { + from: *from.as_eth(), + to: *to.as_eth(), + value: amount.into(), + } + .to_log(T::EvmTokenAddressMapping::token_to_address( + collection.id, + token, + )), + ); + >::deposit_event(CommonEvent::Transfer( collection.id, token, @@ -416,13 +832,58 @@ impl Pallet { to.clone(), amount, )); + + let total_supply = >::get((collection.id, token)); + + if amount == total_supply { + // if token was fully owned by `from` and will be fully owned by `to` after transfer + >::deposit_log( + ERC721Events::Transfer { + from: *from.as_eth(), + to: *to.as_eth(), + token_id: token.into(), + } + .to_log(collection_id_to_address(collection.id)), + ); + } else if let Some(updated_balance_to) = updated_balance_to { + // if `from` not equals `to`. This condition is needed to avoid sending event + // when `from` fully owns token and sends part of token pieces to itself. + if initial_balance_from == total_supply { + // if token was fully owned by `from` and will be only partially owned by `to` + // and `from` after transfer + >::deposit_log( + ERC721Events::Transfer { + from: *from.as_eth(), + to: erc::ADDRESS_FOR_PARTIALLY_OWNED_TOKENS, + token_id: token.into(), + } + .to_log(collection_id_to_address(collection.id)), + ); + } else if updated_balance_to == total_supply { + // if token was partially owned by `from` and will be fully owned by `to` after transfer + >::deposit_log( + ERC721Events::Transfer { + from: erc::ADDRESS_FOR_PARTIALLY_OWNED_TOKENS, + to: *to.as_eth(), + token_id: token.into(), + } + .to_log(collection_id_to_address(collection.id)), + ); + } + } + Ok(()) } + /// Batched operation to create multiple RFT tokens. + /// + /// Same as `create_item` but creates multiple tokens. + /// + /// - `data`: Same as 'data` in `create_item` but contains data for multiple tokens. 
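+ /// Each element of `data` maps the future owners of one token to the number of pieces
+ /// they will receive and carries the initial properties for that token.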
pub fn create_multiple_items( collection: &RefungibleHandle, sender: &T::CrossAccountId, - data: Vec>, + data: Vec>, nesting_budget: &dyn Budget, ) -> DispatchResult { if !collection.is_owner_or_admin(sender) { @@ -503,34 +964,87 @@ impl Pallet { // ========= + with_transaction(|| { + for (i, data) in data.iter().enumerate() { + let token_id = first_token_id + i as u32 + 1; + >::insert((collection.id, token_id), totals[i]); + + for (user, amount) in data.users.iter() { + if *amount == 0 { + continue; + } + >::insert((collection.id, token_id, &user), amount); + >::insert((collection.id, &user, TokenId(token_id)), true); + >::nest_if_sent_to_token_unchecked( + user, + collection.id, + TokenId(token_id), + ); + } + + if let Err(e) = Self::set_token_properties( + collection, + sender, + TokenId(token_id), + data.properties.clone().into_iter(), + true, + nesting_budget, + ) { + return TransactionOutcome::Rollback(Err(e)); + } + } + TransactionOutcome::Commit(Ok(())) + })?; + >::insert(collection.id, tokens_minted); + for (account, balance) in balances { >::insert((collection.id, account), balance); } + for (i, token) in data.into_iter().enumerate() { let token_id = first_token_id + i as u32 + 1; - >::insert((collection.id, token_id), totals[i]); - - >::insert( - (collection.id, token_id), - ItemData { - const_data: token.const_data, - }, - ); - for (user, amount) in token.users.into_iter() { - if amount == 0 { - continue; - } - >::insert((collection.id, token_id, &user), amount); - >::insert((collection.id, &user, TokenId(token_id)), true); - >::nest_if_sent_to_token_unchecked( - &user, - collection.id, - TokenId(token_id), + let receivers = token + .users + .into_iter() + .filter(|(_, amount)| *amount > 0) + .collect::>(); + + if let [(user, _)] = receivers.as_slice() { + // if there is exactly one receiver + >::deposit_log( + ERC721Events::Transfer { + from: H160::default(), + to: *user.as_eth(), + token_id: token_id.into(), + } + .to_log(collection_id_to_address(collection.id)), + ); + } else if let [_, ..] = receivers.as_slice() { + // if there is more than one receiver + >::deposit_log( + ERC721Events::Transfer { + from: H160::default(), + to: erc::ADDRESS_FOR_PARTIALLY_OWNED_TOKENS, + token_id: token_id.into(), + } + .to_log(collection_id_to_address(collection.id)), ); + } - // TODO: ERC20 transfer event + for (user, amount) in receivers.into_iter() { + >::deposit_log( + ERC20Events::Transfer { + from: H160::default(), + to: *user.as_eth(), + value: amount.into(), + } + .to_log(T::EvmTokenAddressMapping::token_to_address( + collection.id, + TokenId(token_id), + )), + ); >::deposit_event(CommonEvent::ItemCreated( collection.id, TokenId(token_id), @@ -554,7 +1068,18 @@ impl Pallet { } else { >::insert((collection.id, token, sender, spender), amount); } - // TODO: ERC20 approval event + + >::deposit_log( + ERC20Events::Approval { + owner: *sender.as_eth(), + spender: *spender.as_eth(), + value: amount.into(), + } + .to_log(T::EvmTokenAddressMapping::token_to_address( + collection.id, + token, + )), + ); >::deposit_event(CommonEvent::Approved( collection.id, token, @@ -564,6 +1089,9 @@ impl Pallet { )) } + /// Set allowance for the spender to `transfer` or `burn` sender's token pieces. + /// + /// - `amount`: Amount of token pieces the spender is allowed to `transfer` or `burn. pub fn set_allowance( collection: &RefungibleHandle, sender: &T::CrossAccountId, @@ -632,6 +1160,12 @@ impl Pallet { Ok(allowance) } + /// Transfer RFT token pieces from one account to another. 
+ /// + /// Same as the [`transfer`] but spender doesn't needs to be an owner of the token pieces. + /// The owner should set allowance for the spender to transfer pieces. + /// + /// [`transfer`]: struct.Pallet.html#method.transfer pub fn transfer_from( collection: &RefungibleHandle, spender: &T::CrossAccountId, @@ -653,6 +1187,12 @@ impl Pallet { Ok(()) } + /// Burn RFT token pieces from the account. + /// + /// Same as the [`burn`] but spender doesn't need to be an owner of the token pieces. The owner should + /// set allowance for the spender to burn pieces + /// + /// [`burn`]: struct.Pallet.html#method.burn pub fn burn_from( collection: &RefungibleHandle, spender: &T::CrossAccountId, @@ -673,16 +1213,29 @@ impl Pallet { Ok(()) } - /// Delegated to `create_multiple_items` + /// Create RFT token. + /// + /// The sender should be the owner/admin of the collection or collection should be configured + /// to allow public minting. + /// + /// - `data`: Contains list of users who will become the owners of the token pieces and amount + /// of token pieces they will receive. pub fn create_item( collection: &RefungibleHandle, sender: &T::CrossAccountId, - data: CreateRefungibleExData, + data: CreateItemData, nesting_budget: &dyn Budget, ) -> DispatchResult { Self::create_multiple_items(collection, sender, vec![data], nesting_budget) } + /// Repartition RFT token. + /// + /// `repartition` will set token balance of the sender and total amount of token pieces. + /// Sender should own all of the token pieces. `repartition' could be done even if some + /// token pieces were burned before. + /// + /// - `amount`: Total amount of token pieces that the token will have after `repartition`. pub fn repartition( collection: &RefungibleHandle, owner: &T::CrossAccountId, @@ -695,15 +1248,133 @@ impl Pallet { ); ensure!(amount > 0, >::TokenValueTooLow); // Ensure user owns all pieces - let total_supply = >::get((collection.id, token)); + let total_pieces = Self::total_pieces(collection.id, token).unwrap_or(u128::MAX); let balance = >::get((collection.id, token, owner)); ensure!( - total_supply == balance, + total_pieces == balance, >::RepartitionWhileNotOwningAllPieces ); >::insert((collection.id, token, owner), amount); >::insert((collection.id, token), amount); + + if amount > total_pieces { + let mint_amount = amount - total_pieces; + >::deposit_log( + ERC20Events::Transfer { + from: H160::default(), + to: *owner.as_eth(), + value: mint_amount.into(), + } + .to_log(T::EvmTokenAddressMapping::token_to_address( + collection.id, + token, + )), + ); + >::deposit_event(CommonEvent::ItemCreated( + collection.id, + token, + owner.clone(), + mint_amount, + )); + } else if total_pieces > amount { + let burn_amount = total_pieces - amount; + >::deposit_log( + ERC20Events::Transfer { + from: *owner.as_eth(), + to: H160::default(), + value: burn_amount.into(), + } + .to_log(T::EvmTokenAddressMapping::token_to_address( + collection.id, + token, + )), + ); + >::deposit_event(CommonEvent::ItemDestroyed( + collection.id, + token, + owner.clone(), + burn_amount, + )); + } + Ok(()) } + + fn token_owner(collection_id: CollectionId, token_id: TokenId) -> Option { + let mut owner = None; + let mut count = 0; + for key in Balance::::iter_key_prefix((collection_id, token_id)) { + count += 1; + if count > 1 { + return None; + } + owner = Some(key); + } + owner + } + + fn total_pieces(collection_id: CollectionId, token_id: TokenId) -> Option { + >::try_get((collection_id, token_id)).ok() + } + + pub fn set_collection_properties( 
+ collection: &RefungibleHandle, + sender: &T::CrossAccountId, + properties: Vec, + ) -> DispatchResult { + >::set_collection_properties(collection, sender, properties) + } + + pub fn delete_collection_properties( + collection: &RefungibleHandle, + sender: &T::CrossAccountId, + property_keys: Vec, + ) -> DispatchResult { + >::delete_collection_properties(collection, sender, property_keys) + } + + pub fn set_token_property_permissions( + collection: &RefungibleHandle, + sender: &T::CrossAccountId, + property_permissions: Vec, + ) -> DispatchResult { + >::set_token_property_permissions(collection, sender, property_permissions) + } + + pub fn set_scoped_token_property_permissions( + collection: &RefungibleHandle, + sender: &T::CrossAccountId, + scope: PropertyScope, + property_permissions: Vec, + ) -> DispatchResult { + >::set_scoped_token_property_permissions( + collection, + sender, + scope, + property_permissions, + ) + } + + /// Returns 10 token in no particular order. + /// + /// There is no direct way to get token holders in ascending order, + /// since `iter_prefix` returns values in no particular order. + /// Therefore, getting the 10 largest holders with a large value of holders + /// can lead to impact memory allocation + sorting with `n * log (n)`. + pub fn token_owners( + collection_id: CollectionId, + token: TokenId, + ) -> Option> { + let res: Vec = >::iter_prefix((collection_id, token)) + .map(|(owner, _amount)| owner) + .take(10) + .collect(); + + if res.is_empty() { + None + } else { + Some(res) + } + } } diff --git a/pallets/refungible/src/stubs/UniqueRefungible.raw b/pallets/refungible/src/stubs/UniqueRefungible.raw index 1333ed77b7..c0925d1e31 100644 Binary files a/pallets/refungible/src/stubs/UniqueRefungible.raw and b/pallets/refungible/src/stubs/UniqueRefungible.raw differ diff --git a/pallets/refungible/src/stubs/UniqueRefungible.sol b/pallets/refungible/src/stubs/UniqueRefungible.sol new file mode 100644 index 0000000000..7471a4c9f5 --- /dev/null +++ b/pallets/refungible/src/stubs/UniqueRefungible.sol @@ -0,0 +1,828 @@ +// SPDX-License-Identifier: OTHER +// This code is automatically generated + +pragma solidity >=0.8.0 <0.9.0; + +/// @dev common stubs holder +contract Dummy { + uint8 dummy; + string stub_error = "this contract is implemented in native"; +} + +contract ERC165 is Dummy { + function supportsInterface(bytes4 interfaceID) external view returns (bool) { + require(false, stub_error); + interfaceID; + return true; + } +} + +/// @title A contract that allows to set and delete token properties and change token property permissions. +/// @dev the ERC-165 identifier for this interface is 0x41369377 +contract TokenProperties is Dummy, ERC165 { + /// @notice Set permissions for token property. + /// @dev Throws error if `msg.sender` is not admin or owner of the collection. + /// @param key Property key. + /// @param isMutable Permission to mutate property. + /// @param collectionAdmin Permission to mutate property by collection admin if property is mutable. + /// @param tokenOwner Permission to mutate property by token owner if property is mutable. + /// @dev EVM selector for this function is: 0x222d97fa, + /// or in textual repr: setTokenPropertyPermission(string,bool,bool,bool) + function setTokenPropertyPermission( + string memory key, + bool isMutable, + bool collectionAdmin, + bool tokenOwner + ) public { + require(false, stub_error); + key; + isMutable; + collectionAdmin; + tokenOwner; + dummy = 0; + } + + /// @notice Set token property value. 
+ /// @dev Throws error if `msg.sender` has no permission to edit the property. + /// @param tokenId ID of the token. + /// @param key Property key. + /// @param value Property value. + /// @dev EVM selector for this function is: 0x1752d67b, + /// or in textual repr: setProperty(uint256,string,bytes) + function setProperty( + uint256 tokenId, + string memory key, + bytes memory value + ) public { + require(false, stub_error); + tokenId; + key; + value; + dummy = 0; + } + + /// @notice Delete token property value. + /// @dev Throws error if `msg.sender` has no permission to edit the property. + /// @param tokenId ID of the token. + /// @param key Property key. + /// @dev EVM selector for this function is: 0x066111d1, + /// or in textual repr: deleteProperty(uint256,string) + function deleteProperty(uint256 tokenId, string memory key) public { + require(false, stub_error); + tokenId; + key; + dummy = 0; + } + + /// @notice Get token property value. + /// @dev Throws error if key not found + /// @param tokenId ID of the token. + /// @param key Property key. + /// @return Property value bytes + /// @dev EVM selector for this function is: 0x7228c327, + /// or in textual repr: property(uint256,string) + function property(uint256 tokenId, string memory key) public view returns (bytes memory) { + require(false, stub_error); + tokenId; + key; + dummy; + return hex""; + } +} + +/// @title A contract that allows you to work with collections. +/// @dev the ERC-165 identifier for this interface is 0x62e22290 +contract Collection is Dummy, ERC165 { + /// Set collection property. + /// + /// @param key Property key. + /// @param value Propery value. + /// @dev EVM selector for this function is: 0x2f073f66, + /// or in textual repr: setCollectionProperty(string,bytes) + function setCollectionProperty(string memory key, bytes memory value) public { + require(false, stub_error); + key; + value; + dummy = 0; + } + + /// Delete collection property. + /// + /// @param key Property key. + /// @dev EVM selector for this function is: 0x7b7debce, + /// or in textual repr: deleteCollectionProperty(string) + function deleteCollectionProperty(string memory key) public { + require(false, stub_error); + key; + dummy = 0; + } + + /// Get collection property. + /// + /// @dev Throws error if key not found. + /// + /// @param key Property key. + /// @return bytes The property corresponding to the key. + /// @dev EVM selector for this function is: 0xcf24fd6d, + /// or in textual repr: collectionProperty(string) + function collectionProperty(string memory key) public view returns (bytes memory) { + require(false, stub_error); + key; + dummy; + return hex""; + } + + /// Set the sponsor of the collection. + /// + /// @dev In order for sponsorship to work, it must be confirmed on behalf of the sponsor. + /// + /// @param sponsor Address of the sponsor from whose account funds will be debited for operations with the contract. + /// @dev EVM selector for this function is: 0x7623402e, + /// or in textual repr: setCollectionSponsor(address) + function setCollectionSponsor(address sponsor) public { + require(false, stub_error); + sponsor; + dummy = 0; + } + + /// Whether there is a pending sponsor. + /// @dev EVM selector for this function is: 0x058ac185, + /// or in textual repr: hasCollectionPendingSponsor() + function hasCollectionPendingSponsor() public view returns (bool) { + require(false, stub_error); + dummy; + return false; + } + + /// Collection sponsorship confirmation. 
+ /// + /// @dev After setting the sponsor for the collection, it must be confirmed with this function. + /// @dev EVM selector for this function is: 0x3c50e97a, + /// or in textual repr: confirmCollectionSponsorship() + function confirmCollectionSponsorship() public { + require(false, stub_error); + dummy = 0; + } + + /// Remove collection sponsor. + /// @dev EVM selector for this function is: 0x6e0326a3, + /// or in textual repr: removeCollectionSponsor() + function removeCollectionSponsor() public { + require(false, stub_error); + dummy = 0; + } + + /// Get current sponsor. + /// + /// @return Tuble with sponsor address and his substrate mirror. If there is no confirmed sponsor error "Contract has no sponsor" throw. + /// @dev EVM selector for this function is: 0x6ec0a9f1, + /// or in textual repr: collectionSponsor() + function collectionSponsor() public view returns (Tuple17 memory) { + require(false, stub_error); + dummy; + return Tuple17(0x0000000000000000000000000000000000000000, 0); + } + + /// Set limits for the collection. + /// @dev Throws error if limit not found. + /// @param limit Name of the limit. Valid names: + /// "accountTokenOwnershipLimit", + /// "sponsoredDataSize", + /// "sponsoredDataRateLimit", + /// "tokenLimit", + /// "sponsorTransferTimeout", + /// "sponsorApproveTimeout" + /// @param value Value of the limit. + /// @dev EVM selector for this function is: 0x6a3841db, + /// or in textual repr: setCollectionLimit(string,uint32) + function setCollectionLimit(string memory limit, uint32 value) public { + require(false, stub_error); + limit; + value; + dummy = 0; + } + + /// Set limits for the collection. + /// @dev Throws error if limit not found. + /// @param limit Name of the limit. Valid names: + /// "ownerCanTransfer", + /// "ownerCanDestroy", + /// "transfersEnabled" + /// @param value Value of the limit. + /// @dev EVM selector for this function is: 0x993b7fba, + /// or in textual repr: setCollectionLimit(string,bool) + function setCollectionLimit(string memory limit, bool value) public { + require(false, stub_error); + limit; + value; + dummy = 0; + } + + /// Get contract address. + /// @dev EVM selector for this function is: 0xf6b4dfb4, + /// or in textual repr: contractAddress() + function contractAddress() public view returns (address) { + require(false, stub_error); + dummy; + return 0x0000000000000000000000000000000000000000; + } + + /// Add collection admin. + /// @param newAdmin Address of the added administrator. + /// @dev EVM selector for this function is: 0x92e462c7, + /// or in textual repr: addCollectionAdmin(address) + function addCollectionAdmin(address newAdmin) public { + require(false, stub_error); + newAdmin; + dummy = 0; + } + + /// Remove collection admin. + /// + /// @param admin Address of the removed administrator. + /// @dev EVM selector for this function is: 0xfafd7b42, + /// or in textual repr: removeCollectionAdmin(address) + function removeCollectionAdmin(address admin) public { + require(false, stub_error); + admin; + dummy = 0; + } + + /// Toggle accessibility of collection nesting. + /// + /// @param enable If "true" degenerates to nesting: 'Owner' else to nesting: 'Disabled' + /// @dev EVM selector for this function is: 0x112d4586, + /// or in textual repr: setCollectionNesting(bool) + function setCollectionNesting(bool enable) public { + require(false, stub_error); + enable; + dummy = 0; + } + + /// Toggle accessibility of collection nesting. 
+ /// + /// @param enable If "true" degenerates to nesting: {OwnerRestricted: [1, 2, 3]} else to nesting: 'Disabled' + /// @param collections Addresses of collections that will be available for nesting. + /// @dev EVM selector for this function is: 0x64872396, + /// or in textual repr: setCollectionNesting(bool,address[]) + function setCollectionNesting(bool enable, address[] memory collections) public { + require(false, stub_error); + enable; + collections; + dummy = 0; + } + + /// Set the collection access method. + /// @param mode Access mode + /// 0 for Normal + /// 1 for AllowList + /// @dev EVM selector for this function is: 0x41835d4c, + /// or in textual repr: setCollectionAccess(uint8) + function setCollectionAccess(uint8 mode) public { + require(false, stub_error); + mode; + dummy = 0; + } + + /// Checks that user allowed to operate with collection. + /// + /// @param user User address to check. + /// @dev EVM selector for this function is: 0xd63a8e11, + /// or in textual repr: allowed(address) + function allowed(address user) public view returns (bool) { + require(false, stub_error); + user; + dummy; + return false; + } + + /// Add the user to the allowed list. + /// + /// @param user Address of a trusted user. + /// @dev EVM selector for this function is: 0x67844fe6, + /// or in textual repr: addToCollectionAllowList(address) + function addToCollectionAllowList(address user) public { + require(false, stub_error); + user; + dummy = 0; + } + + /// Remove the user from the allowed list. + /// + /// @param user Address of a removed user. + /// @dev EVM selector for this function is: 0x85c51acb, + /// or in textual repr: removeFromCollectionAllowList(address) + function removeFromCollectionAllowList(address user) public { + require(false, stub_error); + user; + dummy = 0; + } + + /// Switch permission for minting. + /// + /// @param mode Enable if "true". + /// @dev EVM selector for this function is: 0x00018e84, + /// or in textual repr: setCollectionMintMode(bool) + function setCollectionMintMode(bool mode) public { + require(false, stub_error); + mode; + dummy = 0; + } + + /// Check that account is the owner or admin of the collection + /// + /// @param user account to verify + /// @return "true" if account is the owner or admin + /// @dev EVM selector for this function is: 0x9811b0c7, + /// or in textual repr: isOwnerOrAdmin(address) + function isOwnerOrAdmin(address user) public view returns (bool) { + require(false, stub_error); + user; + dummy; + return false; + } + + /// Returns collection type + /// + /// @return `Fungible` or `NFT` or `ReFungible` + /// @dev EVM selector for this function is: 0xd34b55b8, + /// or in textual repr: uniqueCollectionType() + function uniqueCollectionType() public view returns (string memory) { + require(false, stub_error); + dummy; + return ""; + } + + /// Get collection owner. + /// + /// @return Tuble with sponsor address and his substrate mirror. + /// If address is canonical then substrate mirror is zero and vice versa. 
+ /// @dev EVM selector for this function is: 0xdf727d3b, + /// or in textual repr: collectionOwner() + function collectionOwner() public view returns (Tuple17 memory) { + require(false, stub_error); + dummy; + return Tuple17(0x0000000000000000000000000000000000000000, 0); + } + + /// Changes collection owner to another account + /// + /// @dev Owner can be changed only by current owner + /// @param newOwner new owner account + /// @dev EVM selector for this function is: 0x4f53e226, + /// or in textual repr: changeCollectionOwner(address) + function changeCollectionOwner(address newOwner) public { + require(false, stub_error); + newOwner; + dummy = 0; + } +} + +/// @dev anonymous struct +struct Tuple17 { + address field_0; + uint256 field_1; +} + +/// @dev the ERC-165 identifier for this interface is 0x5b5e139f +contract ERC721Metadata is Dummy, ERC165 { + // /// @notice A descriptive name for a collection of NFTs in this contract + // /// @dev real implementation of this function lies in `ERC721UniqueExtensions` + // /// @dev EVM selector for this function is: 0x06fdde03, + // /// or in textual repr: name() + // function name() public view returns (string memory) { + // require(false, stub_error); + // dummy; + // return ""; + // } + + // /// @notice An abbreviated name for NFTs in this contract + // /// @dev real implementation of this function lies in `ERC721UniqueExtensions` + // /// @dev EVM selector for this function is: 0x95d89b41, + // /// or in textual repr: symbol() + // function symbol() public view returns (string memory) { + // require(false, stub_error); + // dummy; + // return ""; + // } + + /// @notice A distinct Uniform Resource Identifier (URI) for a given asset. + /// + /// @dev If the token has a `url` property and it is not empty, it is returned. + /// Else If the collection does not have a property with key `schemaName` or its value is not equal to `ERC721Metadata`, it return an error `tokenURI not set`. + /// If the collection property `baseURI` is empty or absent, return "" (empty string) + /// otherwise, if token property `suffix` present and is non-empty, return concatenation of baseURI and suffix + /// otherwise, return concatenation of `baseURI` and stringified token id (decimal stringifying, without paddings). + /// + /// @return token's const_metadata + /// @dev EVM selector for this function is: 0xc87b56dd, + /// or in textual repr: tokenURI(uint256) + function tokenURI(uint256 tokenId) public view returns (string memory) { + require(false, stub_error); + tokenId; + dummy; + return ""; + } +} + +/// @title ERC721 Token that can be irreversibly burned (destroyed). +/// @dev the ERC-165 identifier for this interface is 0x42966c68 +contract ERC721Burnable is Dummy, ERC165 { + /// @notice Burns a specific ERC721 token. + /// @dev Throws unless `msg.sender` is the current RFT owner, or an authorized + /// operator of the current owner. + /// @param tokenId The RFT to approve + /// @dev EVM selector for this function is: 0x42966c68, + /// or in textual repr: burn(uint256) + function burn(uint256 tokenId) public { + require(false, stub_error); + tokenId; + dummy = 0; + } +} + +/// @dev inlined interface +contract ERC721UniqueMintableEvents { + event MintingFinished(); +} + +/// @title ERC721 minting logic. 
+/// @dev the ERC-165 identifier for this interface is 0x476ff149 +contract ERC721UniqueMintable is Dummy, ERC165, ERC721UniqueMintableEvents { + /// @dev EVM selector for this function is: 0x05d2035b, + /// or in textual repr: mintingFinished() + function mintingFinished() public view returns (bool) { + require(false, stub_error); + dummy; + return false; + } + + /// @notice Function to mint token. + /// @param to The new owner + /// @return uint256 The id of the newly minted token + /// @dev EVM selector for this function is: 0x6a627842, + /// or in textual repr: mint(address) + function mint(address to) public returns (uint256) { + require(false, stub_error); + to; + dummy = 0; + return 0; + } + + // /// @notice Function to mint token. + // /// @dev `tokenId` should be obtained with `nextTokenId` method, + // /// unlike standard, you can't specify it manually + // /// @param to The new owner + // /// @param tokenId ID of the minted RFT + // /// @dev EVM selector for this function is: 0x40c10f19, + // /// or in textual repr: mint(address,uint256) + // function mint(address to, uint256 tokenId) public returns (bool) { + // require(false, stub_error); + // to; + // tokenId; + // dummy = 0; + // return false; + // } + + /// @notice Function to mint token with the given tokenUri. + /// @param to The new owner + /// @param tokenUri Token URI that would be stored in the NFT properties + /// @return uint256 The id of the newly minted token + /// @dev EVM selector for this function is: 0x45c17782, + /// or in textual repr: mintWithTokenURI(address,string) + function mintWithTokenURI(address to, string memory tokenUri) public returns (uint256) { + require(false, stub_error); + to; + tokenUri; + dummy = 0; + return 0; + } + + // /// @notice Function to mint token with the given tokenUri. + // /// @dev `tokenId` should be obtained with `nextTokenId` method, + // /// unlike standard, you can't specify it manually + // /// @param to The new owner + // /// @param tokenId ID of the minted RFT + // /// @param tokenUri Token URI that would be stored in the RFT properties + // /// @dev EVM selector for this function is: 0x50bb4e7f, + // /// or in textual repr: mintWithTokenURI(address,uint256,string) + // function mintWithTokenURI(address to, uint256 tokenId, string memory tokenUri) public returns (bool) { + // require(false, stub_error); + // to; + // tokenId; + // tokenUri; + // dummy = 0; + // return false; + // } + + /// @dev Not implemented + /// @dev EVM selector for this function is: 0x7d64bcb4, + /// or in textual repr: finishMinting() + function finishMinting() public returns (bool) { + require(false, stub_error); + dummy = 0; + return false; + } +} + +/// @title Unique extensions for ERC721. +/// @dev the ERC-165 identifier for this interface is 0xef1eaacb +contract ERC721UniqueExtensions is Dummy, ERC165 { + /// @notice A descriptive name for a collection of NFTs in this contract + /// @dev EVM selector for this function is: 0x06fdde03, + /// or in textual repr: name() + function name() public view returns (string memory) { + require(false, stub_error); + dummy; + return ""; + } + + /// @notice An abbreviated name for NFTs in this contract + /// @dev EVM selector for this function is: 0x95d89b41, + /// or in textual repr: symbol() + function symbol() public view returns (string memory) { + require(false, stub_error); + dummy; + return ""; + } + + /// @notice Transfer ownership of an RFT + /// @dev Throws unless `msg.sender` is the current owner. Throws if `to` + /// is the zero address. 
Throws if `tokenId` is not a valid RFT. + /// Throws if RFT pieces have multiple owners. + /// @param to The new owner + /// @param tokenId The RFT to transfer + /// @dev EVM selector for this function is: 0xa9059cbb, + /// or in textual repr: transfer(address,uint256) + function transfer(address to, uint256 tokenId) public { + require(false, stub_error); + to; + tokenId; + dummy = 0; + } + + /// @notice Burns a specific ERC721 token. + /// @dev Throws unless `msg.sender` is the current owner or an authorized + /// operator for this RFT. Throws if `from` is not the current owner. Throws + /// if `to` is the zero address. Throws if `tokenId` is not a valid RFT. + /// Throws if RFT pieces have multiple owners. + /// @param from The current owner of the RFT + /// @param tokenId The RFT to transfer + /// @dev EVM selector for this function is: 0x79cc6790, + /// or in textual repr: burnFrom(address,uint256) + function burnFrom(address from, uint256 tokenId) public { + require(false, stub_error); + from; + tokenId; + dummy = 0; + } + + /// @notice Returns next free RFT ID. + /// @dev EVM selector for this function is: 0x75794a3c, + /// or in textual repr: nextTokenId() + function nextTokenId() public view returns (uint256) { + require(false, stub_error); + dummy; + return 0; + } + + // /// @notice Function to mint multiple tokens. + // /// @dev `tokenIds` should be an array of consecutive numbers and first number + // /// should be obtained with `nextTokenId` method + // /// @param to The new owner + // /// @param tokenIds IDs of the minted RFTs + // /// @dev EVM selector for this function is: 0x44a9945e, + // /// or in textual repr: mintBulk(address,uint256[]) + // function mintBulk(address to, uint256[] memory tokenIds) public returns (bool) { + // require(false, stub_error); + // to; + // tokenIds; + // dummy = 0; + // return false; + // } + + // /// @notice Function to mint multiple tokens with the given tokenUris. + // /// @dev `tokenIds` is array of pairs of token ID and token URI. 
Token IDs should be consecutive + // /// numbers and first number should be obtained with `nextTokenId` method + // /// @param to The new owner + // /// @param tokens array of pairs of token ID and token URI for minted tokens + // /// @dev EVM selector for this function is: 0x36543006, + // /// or in textual repr: mintBulkWithTokenURI(address,(uint256,string)[]) + // function mintBulkWithTokenURI(address to, Tuple6[] memory tokens) public returns (bool) { + // require(false, stub_error); + // to; + // tokens; + // dummy = 0; + // return false; + // } + + /// Returns EVM address for refungible token + /// + /// @param token ID of the token + /// @dev EVM selector for this function is: 0xab76fac6, + /// or in textual repr: tokenContractAddress(uint256) + function tokenContractAddress(uint256 token) public view returns (address) { + require(false, stub_error); + token; + dummy; + return 0x0000000000000000000000000000000000000000; + } +} + +/// @dev anonymous struct +struct Tuple6 { + uint256 field_0; + string field_1; +} + +/// @title ERC-721 Non-Fungible Token Standard, optional enumeration extension +/// @dev See https://eips.ethereum.org/EIPS/eip-721 +/// @dev the ERC-165 identifier for this interface is 0x780e9d63 +contract ERC721Enumerable is Dummy, ERC165 { + /// @notice Enumerate valid RFTs + /// @param index A counter less than `totalSupply()` + /// @return The token identifier for the `index`th NFT, + /// (sort order not specified) + /// @dev EVM selector for this function is: 0x4f6ccce7, + /// or in textual repr: tokenByIndex(uint256) + function tokenByIndex(uint256 index) public view returns (uint256) { + require(false, stub_error); + index; + dummy; + return 0; + } + + /// Not implemented + /// @dev EVM selector for this function is: 0x2f745c59, + /// or in textual repr: tokenOfOwnerByIndex(address,uint256) + function tokenOfOwnerByIndex(address owner, uint256 index) public view returns (uint256) { + require(false, stub_error); + owner; + index; + dummy; + return 0; + } + + /// @notice Count RFTs tracked by this contract + /// @return A count of valid RFTs tracked by this contract, where each one of + /// them has an assigned and queryable owner not equal to the zero address + /// @dev EVM selector for this function is: 0x18160ddd, + /// or in textual repr: totalSupply() + function totalSupply() public view returns (uint256) { + require(false, stub_error); + dummy; + return 0; + } +} + +/// @dev inlined interface +contract ERC721Events { + event Transfer(address indexed from, address indexed to, uint256 indexed tokenId); + event Approval(address indexed owner, address indexed approved, uint256 indexed tokenId); + event ApprovalForAll(address indexed owner, address indexed operator, bool approved); +} + +/// @title ERC-721 Non-Fungible Token Standard +/// @dev See https://github.com/ethereum/EIPs/blob/master/EIPS/eip-721.md +/// @dev the ERC-165 identifier for this interface is 0x58800161 +contract ERC721 is Dummy, ERC165, ERC721Events { + /// @notice Count all RFTs assigned to an owner + /// @dev RFTs assigned to the zero address are considered invalid, and this + /// function throws for queries about the zero address. 
+ /// @param owner An address for whom to query the balance + /// @return The number of RFTs owned by `owner`, possibly zero + /// @dev EVM selector for this function is: 0x70a08231, + /// or in textual repr: balanceOf(address) + function balanceOf(address owner) public view returns (uint256) { + require(false, stub_error); + owner; + dummy; + return 0; + } + + /// @notice Find the owner of an RFT + /// @dev RFTs assigned to zero address are considered invalid, and queries + /// about them do throw. + /// Returns special 0xffffffffffffffffffffffffffffffffffffffff address for + /// the tokens that are partially owned. + /// @param tokenId The identifier for an RFT + /// @return The address of the owner of the RFT + /// @dev EVM selector for this function is: 0x6352211e, + /// or in textual repr: ownerOf(uint256) + function ownerOf(uint256 tokenId) public view returns (address) { + require(false, stub_error); + tokenId; + dummy; + return 0x0000000000000000000000000000000000000000; + } + + /// @dev Not implemented + /// @dev EVM selector for this function is: 0x60a11672, + /// or in textual repr: safeTransferFromWithData(address,address,uint256,bytes) + function safeTransferFromWithData( + address from, + address to, + uint256 tokenId, + bytes memory data + ) public { + require(false, stub_error); + from; + to; + tokenId; + data; + dummy = 0; + } + + /// @dev Not implemented + /// @dev EVM selector for this function is: 0x42842e0e, + /// or in textual repr: safeTransferFrom(address,address,uint256) + function safeTransferFrom( + address from, + address to, + uint256 tokenId + ) public { + require(false, stub_error); + from; + to; + tokenId; + dummy = 0; + } + + /// @notice Transfer ownership of an RFT -- THE CALLER IS RESPONSIBLE + /// TO CONFIRM THAT `to` IS CAPABLE OF RECEIVING NFTS OR ELSE + /// THEY MAY BE PERMANENTLY LOST + /// @dev Throws unless `msg.sender` is the current owner or an authorized + /// operator for this RFT. Throws if `from` is not the current owner. Throws + /// if `to` is the zero address. Throws if `tokenId` is not a valid RFT. + /// Throws if RFT pieces have multiple owners. 
+ /// @param from The current owner of the NFT + /// @param to The new owner + /// @param tokenId The NFT to transfer + /// @dev EVM selector for this function is: 0x23b872dd, + /// or in textual repr: transferFrom(address,address,uint256) + function transferFrom( + address from, + address to, + uint256 tokenId + ) public { + require(false, stub_error); + from; + to; + tokenId; + dummy = 0; + } + + /// @dev Not implemented + /// @dev EVM selector for this function is: 0x095ea7b3, + /// or in textual repr: approve(address,uint256) + function approve(address approved, uint256 tokenId) public { + require(false, stub_error); + approved; + tokenId; + dummy = 0; + } + + /// @dev Not implemented + /// @dev EVM selector for this function is: 0xa22cb465, + /// or in textual repr: setApprovalForAll(address,bool) + function setApprovalForAll(address operator, bool approved) public { + require(false, stub_error); + operator; + approved; + dummy = 0; + } + + /// @dev Not implemented + /// @dev EVM selector for this function is: 0x081812fc, + /// or in textual repr: getApproved(uint256) + function getApproved(uint256 tokenId) public view returns (address) { + require(false, stub_error); + tokenId; + dummy; + return 0x0000000000000000000000000000000000000000; + } + + /// @dev Not implemented + /// @dev EVM selector for this function is: 0xe985e9c5, + /// or in textual repr: isApprovedForAll(address,address) + function isApprovedForAll(address owner, address operator) public view returns (address) { + require(false, stub_error); + owner; + operator; + dummy; + return 0x0000000000000000000000000000000000000000; + } +} + +contract UniqueRefungible is + Dummy, + ERC165, + ERC721, + ERC721Enumerable, + ERC721UniqueExtensions, + ERC721UniqueMintable, + ERC721Burnable, + ERC721Metadata, + Collection, + TokenProperties +{} diff --git a/pallets/refungible/src/stubs/UniqueRefungibleToken.raw b/pallets/refungible/src/stubs/UniqueRefungibleToken.raw index 1333ed77b7..a0b8de43a9 100644 Binary files a/pallets/refungible/src/stubs/UniqueRefungibleToken.raw and b/pallets/refungible/src/stubs/UniqueRefungibleToken.raw differ diff --git a/pallets/refungible/src/stubs/UniqueRefungibleToken.sol b/pallets/refungible/src/stubs/UniqueRefungibleToken.sol new file mode 100644 index 0000000000..d777f067dd --- /dev/null +++ b/pallets/refungible/src/stubs/UniqueRefungibleToken.sol @@ -0,0 +1,192 @@ +// SPDX-License-Identifier: OTHER +// This code is automatically generated + +pragma solidity >=0.8.0 <0.9.0; + +/// @dev common stubs holder +contract Dummy { + uint8 dummy; + string stub_error = "this contract is implemented in native"; +} + +contract ERC165 is Dummy { + function supportsInterface(bytes4 interfaceID) external view returns (bool) { + require(false, stub_error); + interfaceID; + return true; + } +} + +/// @dev the ERC-165 identifier for this interface is 0x5755c3f2 +contract ERC1633 is Dummy, ERC165 { + /// @dev EVM selector for this function is: 0x80a54001, + /// or in textual repr: parentToken() + function parentToken() public view returns (address) { + require(false, stub_error); + dummy; + return 0x0000000000000000000000000000000000000000; + } + + /// @dev EVM selector for this function is: 0xd7f083f3, + /// or in textual repr: parentTokenId() + function parentTokenId() public view returns (uint256) { + require(false, stub_error); + dummy; + return 0; + } +} + +/// @dev the ERC-165 identifier for this interface is 0xab8deb37 +contract ERC20UniqueExtensions is Dummy, ERC165 { + /// @dev Function that burns an amount 
of the token of a given account, + /// deducting from the sender's allowance for said account. + /// @param from The account whose tokens will be burnt. + /// @param amount The amount that will be burnt. + /// @dev EVM selector for this function is: 0x79cc6790, + /// or in textual repr: burnFrom(address,uint256) + function burnFrom(address from, uint256 amount) public returns (bool) { + require(false, stub_error); + from; + amount; + dummy = 0; + return false; + } + + /// @dev Function that changes total amount of the tokens. + /// Throws if `msg.sender` doesn't owns all of the tokens. + /// @param amount New total amount of the tokens. + /// @dev EVM selector for this function is: 0xd2418ca7, + /// or in textual repr: repartition(uint256) + function repartition(uint256 amount) public returns (bool) { + require(false, stub_error); + amount; + dummy = 0; + return false; + } +} + +/// @dev inlined interface +contract ERC20Events { + event Transfer(address indexed from, address indexed to, uint256 value); + event Approval(address indexed owner, address indexed spender, uint256 value); +} + +/// @title Standard ERC20 token +/// +/// @dev Implementation of the basic standard token. +/// https://github.com/ethereum/EIPs/blob/master/EIPS/eip-20.md +/// @dev the ERC-165 identifier for this interface is 0x942e8b22 +contract ERC20 is Dummy, ERC165, ERC20Events { + /// @return the name of the token. + /// @dev EVM selector for this function is: 0x06fdde03, + /// or in textual repr: name() + function name() public view returns (string memory) { + require(false, stub_error); + dummy; + return ""; + } + + /// @return the symbol of the token. + /// @dev EVM selector for this function is: 0x95d89b41, + /// or in textual repr: symbol() + function symbol() public view returns (string memory) { + require(false, stub_error); + dummy; + return ""; + } + + /// @dev Total number of tokens in existence + /// @dev EVM selector for this function is: 0x18160ddd, + /// or in textual repr: totalSupply() + function totalSupply() public view returns (uint256) { + require(false, stub_error); + dummy; + return 0; + } + + /// @dev Not supported + /// @dev EVM selector for this function is: 0x313ce567, + /// or in textual repr: decimals() + function decimals() public view returns (uint8) { + require(false, stub_error); + dummy; + return 0; + } + + /// @dev Gets the balance of the specified address. + /// @param owner The address to query the balance of. + /// @return An uint256 representing the amount owned by the passed address. + /// @dev EVM selector for this function is: 0x70a08231, + /// or in textual repr: balanceOf(address) + function balanceOf(address owner) public view returns (uint256) { + require(false, stub_error); + owner; + dummy; + return 0; + } + + /// @dev Transfer token for a specified address + /// @param to The address to transfer to. + /// @param amount The amount to be transferred. 
+ /// @dev EVM selector for this function is: 0xa9059cbb, + /// or in textual repr: transfer(address,uint256) + function transfer(address to, uint256 amount) public returns (bool) { + require(false, stub_error); + to; + amount; + dummy = 0; + return false; + } + + /// @dev Transfer tokens from one address to another + /// @param from address The address which you want to send tokens from + /// @param to address The address which you want to transfer to + /// @param amount uint256 the amount of tokens to be transferred + /// @dev EVM selector for this function is: 0x23b872dd, + /// or in textual repr: transferFrom(address,address,uint256) + function transferFrom( + address from, + address to, + uint256 amount + ) public returns (bool) { + require(false, stub_error); + from; + to; + amount; + dummy = 0; + return false; + } + + /// @dev Approve the passed address to spend the specified amount of tokens on behalf of `msg.sender`. + /// Beware that changing an allowance with this method brings the risk that someone may use both the old + /// and the new allowance by unfortunate transaction ordering. One possible solution to mitigate this + /// race condition is to first reduce the spender's allowance to 0 and set the desired value afterwards: + /// https://github.com/ethereum/EIPs/issues/20#issuecomment-263524729 + /// @param spender The address which will spend the funds. + /// @param amount The amount of tokens to be spent. + /// @dev EVM selector for this function is: 0x095ea7b3, + /// or in textual repr: approve(address,uint256) + function approve(address spender, uint256 amount) public returns (bool) { + require(false, stub_error); + spender; + amount; + dummy = 0; + return false; + } + + /// @dev Function to check the amount of tokens that an owner allowed to a spender. + /// @param owner address The address which owns the funds. + /// @param spender address The address which will spend the funds. + /// @return A uint256 specifying the amount of tokens still available for the spender. + /// @dev EVM selector for this function is: 0xdd62ed3e, + /// or in textual repr: allowance(address,address) + function allowance(address owner, address spender) public view returns (uint256) { + require(false, stub_error); + owner; + spender; + dummy; + return 0; + } +} + +contract UniqueRefungibleToken is Dummy, ERC165, ERC20, ERC20UniqueExtensions, ERC1633 {} diff --git a/pallets/refungible/src/weights.rs b/pallets/refungible/src/weights.rs index b04d9e04f8..4b35e1fa41 100644 --- a/pallets/refungible/src/weights.rs +++ b/pallets/refungible/src/weights.rs @@ -3,7 +3,7 @@ //! Autogenerated weights for pallet_refungible //! //! THIS FILE WAS AUTO-GENERATED USING THE SUBSTRATE BENCHMARK CLI VERSION 4.0.0-dev -//! DATE: 2022-06-27, STEPS: `50`, REPEAT: 80, LOW RANGE: `[]`, HIGH RANGE: `[]` +//! DATE: 2022-08-15, STEPS: `50`, REPEAT: 80, LOW RANGE: `[]`, HIGH RANGE: `[]` //! EXECUTION: None, WASM-EXECUTION: Compiled, CHAIN: None, DB CACHE: 1024 // Executed Command: @@ -49,7 +49,11 @@ pub trait WeightInfo { fn transfer_from_removing() -> Weight; fn transfer_from_creating_removing() -> Weight; fn burn_from() -> Weight; + fn set_token_property_permissions(b: u32, ) -> Weight; + fn set_token_properties(b: u32, ) -> Weight; + fn delete_token_properties(b: u32, ) -> Weight; fn repartition_item() -> Weight; + fn token_owner() -> Weight; } /// Weights for pallet_refungible using the Substrate node and recommended hardware. 
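The weights.rs hunks that follow migrate every formula from the old scalar arithmetic (`(X as Weight)`, `reads(n as Weight)`) to the newer two-part `Weight` API (`Weight::from_ref_time(X)`, `reads(n as u64)`), and they pick up the extra storage reads/writes introduced by token properties. As a rough sketch of the shape these autogenerated formulas take (the function name is hypothetical; the constants are copied from `create_multiple_items` in the hunks below purely for illustration):

use frame_support::weights::Weight;

/// Illustrative only: a benchmarked call over `b` items costs a base ref-time,
/// plus a per-item ref-time, plus the configured DB read/write weights.
fn example_item_weight<T: frame_system::Config>(b: u32) -> Weight {
	Weight::from_ref_time(28_541_000)
		// per-item execution cost measured by the benchmark
		.saturating_add(Weight::from_ref_time(6_671_000).saturating_mul(b as u64))
		// fixed storage accesses, independent of `b`
		.saturating_add(T::DbWeight::get().reads(2))
		.saturating_add(T::DbWeight::get().writes(2))
		// three additional writes per created item
		.saturating_add(T::DbWeight::get().writes((3 as u64).saturating_mul(b as u64)))
}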
@@ -59,166 +63,201 @@ impl WeightInfo for SubstrateWeight { // Storage: Refungible AccountBalance (r:1 w:1) // Storage: Refungible Balance (r:0 w:1) // Storage: Refungible TotalSupply (r:0 w:1) - // Storage: Refungible TokenData (r:0 w:1) // Storage: Refungible Owned (r:0 w:1) fn create_item() -> Weight { - (17_553_000 as Weight) - .saturating_add(T::DbWeight::get().reads(2 as Weight)) - .saturating_add(T::DbWeight::get().writes(6 as Weight)) + Weight::from_ref_time(29_527_000) + .saturating_add(T::DbWeight::get().reads(2 as u64)) + .saturating_add(T::DbWeight::get().writes(5 as u64)) } // Storage: Refungible TokensMinted (r:1 w:1) // Storage: Refungible AccountBalance (r:1 w:1) // Storage: Refungible Balance (r:0 w:4) // Storage: Refungible TotalSupply (r:0 w:4) - // Storage: Refungible TokenData (r:0 w:4) // Storage: Refungible Owned (r:0 w:4) fn create_multiple_items(b: u32, ) -> Weight { - (10_654_000 as Weight) - // Standard Error: 1_000 - .saturating_add((5_114_000 as Weight).saturating_mul(b as Weight)) - .saturating_add(T::DbWeight::get().reads(2 as Weight)) - .saturating_add(T::DbWeight::get().writes(2 as Weight)) - .saturating_add(T::DbWeight::get().writes((4 as Weight).saturating_mul(b as Weight))) + Weight::from_ref_time(28_541_000) + // Standard Error: 4_000 + .saturating_add(Weight::from_ref_time(6_671_000).saturating_mul(b as u64)) + .saturating_add(T::DbWeight::get().reads(2 as u64)) + .saturating_add(T::DbWeight::get().writes(2 as u64)) + .saturating_add(T::DbWeight::get().writes((3 as u64).saturating_mul(b as u64))) } // Storage: Refungible TokensMinted (r:1 w:1) // Storage: Refungible AccountBalance (r:4 w:4) // Storage: Refungible Balance (r:0 w:4) // Storage: Refungible TotalSupply (r:0 w:4) - // Storage: Refungible TokenData (r:0 w:4) // Storage: Refungible Owned (r:0 w:4) fn create_multiple_items_ex_multiple_items(b: u32, ) -> Weight { - (3_587_000 as Weight) - // Standard Error: 2_000 - .saturating_add((7_931_000 as Weight).saturating_mul(b as Weight)) - .saturating_add(T::DbWeight::get().reads(1 as Weight)) - .saturating_add(T::DbWeight::get().reads((1 as Weight).saturating_mul(b as Weight))) - .saturating_add(T::DbWeight::get().writes(1 as Weight)) - .saturating_add(T::DbWeight::get().writes((5 as Weight).saturating_mul(b as Weight))) + Weight::from_ref_time(24_366_000) + // Standard Error: 5_000 + .saturating_add(Weight::from_ref_time(9_338_000).saturating_mul(b as u64)) + .saturating_add(T::DbWeight::get().reads(1 as u64)) + .saturating_add(T::DbWeight::get().reads((1 as u64).saturating_mul(b as u64))) + .saturating_add(T::DbWeight::get().writes(1 as u64)) + .saturating_add(T::DbWeight::get().writes((4 as u64).saturating_mul(b as u64))) } // Storage: Refungible TokensMinted (r:1 w:1) // Storage: Refungible TotalSupply (r:0 w:1) - // Storage: Refungible TokenData (r:0 w:1) // Storage: Refungible AccountBalance (r:4 w:4) // Storage: Refungible Balance (r:0 w:4) // Storage: Refungible Owned (r:0 w:4) fn create_multiple_items_ex_multiple_owners(b: u32, ) -> Weight { - (1_980_000 as Weight) - // Standard Error: 2_000 - .saturating_add((6_305_000 as Weight).saturating_mul(b as Weight)) - .saturating_add(T::DbWeight::get().reads(1 as Weight)) - .saturating_add(T::DbWeight::get().reads((1 as Weight).saturating_mul(b as Weight))) - .saturating_add(T::DbWeight::get().writes(3 as Weight)) - .saturating_add(T::DbWeight::get().writes((3 as Weight).saturating_mul(b as Weight))) + Weight::from_ref_time(27_574_000) + // Standard Error: 4_000 + 
.saturating_add(Weight::from_ref_time(7_193_000).saturating_mul(b as u64)) + .saturating_add(T::DbWeight::get().reads(1 as u64)) + .saturating_add(T::DbWeight::get().reads((1 as u64).saturating_mul(b as u64))) + .saturating_add(T::DbWeight::get().writes(2 as u64)) + .saturating_add(T::DbWeight::get().writes((3 as u64).saturating_mul(b as u64))) } // Storage: Refungible TotalSupply (r:1 w:1) - // Storage: Refungible Balance (r:1 w:1) + // Storage: Refungible Balance (r:3 w:1) // Storage: Refungible AccountBalance (r:1 w:1) // Storage: Refungible Owned (r:0 w:1) fn burn_item_partial() -> Weight { - (21_010_000 as Weight) - .saturating_add(T::DbWeight::get().reads(3 as Weight)) - .saturating_add(T::DbWeight::get().writes(4 as Weight)) + Weight::from_ref_time(42_943_000) + .saturating_add(T::DbWeight::get().reads(5 as u64)) + .saturating_add(T::DbWeight::get().writes(4 as u64)) } // Storage: Refungible TotalSupply (r:1 w:1) // Storage: Refungible Balance (r:1 w:1) // Storage: Refungible AccountBalance (r:1 w:1) // Storage: Refungible TokensBurnt (r:1 w:1) - // Storage: Refungible TokenData (r:0 w:1) // Storage: Refungible Owned (r:0 w:1) + // Storage: Refungible TokenProperties (r:0 w:1) fn burn_item_fully() -> Weight { - (28_413_000 as Weight) - .saturating_add(T::DbWeight::get().reads(4 as Weight)) - .saturating_add(T::DbWeight::get().writes(6 as Weight)) + Weight::from_ref_time(36_861_000) + .saturating_add(T::DbWeight::get().reads(4 as u64)) + .saturating_add(T::DbWeight::get().writes(6 as u64)) } // Storage: Refungible Balance (r:2 w:2) + // Storage: Refungible TotalSupply (r:1 w:0) fn transfer_normal() -> Weight { - (17_513_000 as Weight) - .saturating_add(T::DbWeight::get().reads(2 as Weight)) - .saturating_add(T::DbWeight::get().writes(2 as Weight)) + Weight::from_ref_time(27_789_000) + .saturating_add(T::DbWeight::get().reads(3 as u64)) + .saturating_add(T::DbWeight::get().writes(2 as u64)) } // Storage: Refungible Balance (r:2 w:2) // Storage: Refungible AccountBalance (r:1 w:1) + // Storage: Refungible TotalSupply (r:1 w:0) // Storage: Refungible Owned (r:0 w:1) fn transfer_creating() -> Weight { - (20_469_000 as Weight) - .saturating_add(T::DbWeight::get().reads(3 as Weight)) - .saturating_add(T::DbWeight::get().writes(4 as Weight)) + Weight::from_ref_time(32_893_000) + .saturating_add(T::DbWeight::get().reads(4 as u64)) + .saturating_add(T::DbWeight::get().writes(4 as u64)) } // Storage: Refungible Balance (r:2 w:2) // Storage: Refungible AccountBalance (r:1 w:1) + // Storage: Refungible TotalSupply (r:1 w:0) // Storage: Refungible Owned (r:0 w:1) fn transfer_removing() -> Weight { - (22_472_000 as Weight) - .saturating_add(T::DbWeight::get().reads(3 as Weight)) - .saturating_add(T::DbWeight::get().writes(4 as Weight)) + Weight::from_ref_time(34_703_000) + .saturating_add(T::DbWeight::get().reads(4 as u64)) + .saturating_add(T::DbWeight::get().writes(4 as u64)) } // Storage: Refungible Balance (r:2 w:2) // Storage: Refungible AccountBalance (r:2 w:2) + // Storage: Refungible TotalSupply (r:1 w:0) // Storage: Refungible Owned (r:0 w:2) fn transfer_creating_removing() -> Weight { - (24_866_000 as Weight) - .saturating_add(T::DbWeight::get().reads(4 as Weight)) - .saturating_add(T::DbWeight::get().writes(6 as Weight)) + Weight::from_ref_time(37_547_000) + .saturating_add(T::DbWeight::get().reads(5 as u64)) + .saturating_add(T::DbWeight::get().writes(6 as u64)) } // Storage: Refungible Balance (r:1 w:0) // Storage: Refungible Allowance (r:0 w:1) fn approve() -> Weight { - (13_475_000 
as Weight) - .saturating_add(T::DbWeight::get().reads(1 as Weight)) - .saturating_add(T::DbWeight::get().writes(1 as Weight)) + Weight::from_ref_time(20_039_000) + .saturating_add(T::DbWeight::get().reads(1 as u64)) + .saturating_add(T::DbWeight::get().writes(1 as u64)) } // Storage: Refungible Allowance (r:1 w:1) // Storage: Refungible Balance (r:2 w:2) + // Storage: Refungible TotalSupply (r:1 w:0) fn transfer_from_normal() -> Weight { - (24_707_000 as Weight) - .saturating_add(T::DbWeight::get().reads(3 as Weight)) - .saturating_add(T::DbWeight::get().writes(3 as Weight)) + Weight::from_ref_time(37_628_000) + .saturating_add(T::DbWeight::get().reads(4 as u64)) + .saturating_add(T::DbWeight::get().writes(3 as u64)) } // Storage: Refungible Allowance (r:1 w:1) // Storage: Refungible Balance (r:2 w:2) // Storage: Refungible AccountBalance (r:1 w:1) + // Storage: Refungible TotalSupply (r:1 w:0) // Storage: Refungible Owned (r:0 w:1) fn transfer_from_creating() -> Weight { - (27_812_000 as Weight) - .saturating_add(T::DbWeight::get().reads(4 as Weight)) - .saturating_add(T::DbWeight::get().writes(5 as Weight)) + Weight::from_ref_time(42_072_000) + .saturating_add(T::DbWeight::get().reads(5 as u64)) + .saturating_add(T::DbWeight::get().writes(5 as u64)) } // Storage: Refungible Allowance (r:1 w:1) // Storage: Refungible Balance (r:2 w:2) // Storage: Refungible AccountBalance (r:1 w:1) + // Storage: Refungible TotalSupply (r:1 w:0) // Storage: Refungible Owned (r:0 w:1) fn transfer_from_removing() -> Weight { - (29_966_000 as Weight) - .saturating_add(T::DbWeight::get().reads(4 as Weight)) - .saturating_add(T::DbWeight::get().writes(5 as Weight)) + Weight::from_ref_time(43_024_000) + .saturating_add(T::DbWeight::get().reads(5 as u64)) + .saturating_add(T::DbWeight::get().writes(5 as u64)) } // Storage: Refungible Allowance (r:1 w:1) // Storage: Refungible Balance (r:2 w:2) // Storage: Refungible AccountBalance (r:2 w:2) + // Storage: Refungible TotalSupply (r:1 w:0) // Storage: Refungible Owned (r:0 w:2) fn transfer_from_creating_removing() -> Weight { - (31_660_000 as Weight) - .saturating_add(T::DbWeight::get().reads(5 as Weight)) - .saturating_add(T::DbWeight::get().writes(7 as Weight)) + Weight::from_ref_time(45_910_000) + .saturating_add(T::DbWeight::get().reads(6 as u64)) + .saturating_add(T::DbWeight::get().writes(7 as u64)) } // Storage: Refungible Allowance (r:1 w:1) // Storage: Refungible TotalSupply (r:1 w:1) // Storage: Refungible Balance (r:1 w:1) // Storage: Refungible AccountBalance (r:1 w:1) // Storage: Refungible TokensBurnt (r:1 w:1) - // Storage: Refungible TokenData (r:0 w:1) // Storage: Refungible Owned (r:0 w:1) + // Storage: Refungible TokenProperties (r:0 w:1) fn burn_from() -> Weight { - (36_248_000 as Weight) - .saturating_add(T::DbWeight::get().reads(5 as Weight)) - .saturating_add(T::DbWeight::get().writes(7 as Weight)) + Weight::from_ref_time(48_584_000) + .saturating_add(T::DbWeight::get().reads(5 as u64)) + .saturating_add(T::DbWeight::get().writes(7 as u64)) + } + // Storage: Common CollectionPropertyPermissions (r:1 w:1) + fn set_token_property_permissions(b: u32, ) -> Weight { + (Weight::from_ref_time(0)) + // Standard Error: 60_000 + .saturating_add(Weight::from_ref_time(15_533_000).saturating_mul(b as u64)) + .saturating_add(T::DbWeight::get().reads(1 as u64)) + .saturating_add(T::DbWeight::get().writes(1 as u64)) + } + // Storage: Common CollectionPropertyPermissions (r:1 w:0) + // Storage: Refungible TokenProperties (r:1 w:1) + fn set_token_properties(b: 
u32, ) -> Weight { + (Weight::from_ref_time(0)) + // Standard Error: 3_609_000 + .saturating_add(Weight::from_ref_time(590_204_000).saturating_mul(b as u64)) + .saturating_add(T::DbWeight::get().reads(2 as u64)) + .saturating_add(T::DbWeight::get().writes(1 as u64)) + } + // Storage: Common CollectionPropertyPermissions (r:1 w:0) + // Storage: Refungible TokenProperties (r:1 w:1) + fn delete_token_properties(b: u32, ) -> Weight { + (Weight::from_ref_time(0)) + // Standard Error: 3_637_000 + .saturating_add(Weight::from_ref_time(603_468_000).saturating_mul(b as u64)) + .saturating_add(T::DbWeight::get().reads(2 as u64)) + .saturating_add(T::DbWeight::get().writes(1 as u64)) } // Storage: Refungible TotalSupply (r:1 w:1) // Storage: Refungible Balance (r:1 w:1) fn repartition_item() -> Weight { - (8_226_000 as Weight) - .saturating_add(T::DbWeight::get().reads(2 as Weight)) - .saturating_add(T::DbWeight::get().writes(2 as Weight)) + Weight::from_ref_time(22_356_000) + .saturating_add(T::DbWeight::get().reads(2 as u64)) + .saturating_add(T::DbWeight::get().writes(2 as u64)) + } + // Storage: Refungible Balance (r:2 w:0) + fn token_owner() -> Weight { + Weight::from_ref_time(9_431_000) + .saturating_add(T::DbWeight::get().reads(2 as u64)) } } @@ -228,165 +267,200 @@ impl WeightInfo for () { // Storage: Refungible AccountBalance (r:1 w:1) // Storage: Refungible Balance (r:0 w:1) // Storage: Refungible TotalSupply (r:0 w:1) - // Storage: Refungible TokenData (r:0 w:1) // Storage: Refungible Owned (r:0 w:1) fn create_item() -> Weight { - (17_553_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(2 as Weight)) - .saturating_add(RocksDbWeight::get().writes(6 as Weight)) + Weight::from_ref_time(29_527_000) + .saturating_add(RocksDbWeight::get().reads(2 as u64)) + .saturating_add(RocksDbWeight::get().writes(5 as u64)) } // Storage: Refungible TokensMinted (r:1 w:1) // Storage: Refungible AccountBalance (r:1 w:1) // Storage: Refungible Balance (r:0 w:4) // Storage: Refungible TotalSupply (r:0 w:4) - // Storage: Refungible TokenData (r:0 w:4) // Storage: Refungible Owned (r:0 w:4) fn create_multiple_items(b: u32, ) -> Weight { - (10_654_000 as Weight) - // Standard Error: 1_000 - .saturating_add((5_114_000 as Weight).saturating_mul(b as Weight)) - .saturating_add(RocksDbWeight::get().reads(2 as Weight)) - .saturating_add(RocksDbWeight::get().writes(2 as Weight)) - .saturating_add(RocksDbWeight::get().writes((4 as Weight).saturating_mul(b as Weight))) + Weight::from_ref_time(28_541_000) + // Standard Error: 4_000 + .saturating_add(Weight::from_ref_time(6_671_000).saturating_mul(b as u64)) + .saturating_add(RocksDbWeight::get().reads(2 as u64)) + .saturating_add(RocksDbWeight::get().writes(2 as u64)) + .saturating_add(RocksDbWeight::get().writes((3 as u64).saturating_mul(b as u64))) } // Storage: Refungible TokensMinted (r:1 w:1) // Storage: Refungible AccountBalance (r:4 w:4) // Storage: Refungible Balance (r:0 w:4) // Storage: Refungible TotalSupply (r:0 w:4) - // Storage: Refungible TokenData (r:0 w:4) // Storage: Refungible Owned (r:0 w:4) fn create_multiple_items_ex_multiple_items(b: u32, ) -> Weight { - (3_587_000 as Weight) - // Standard Error: 2_000 - .saturating_add((7_931_000 as Weight).saturating_mul(b as Weight)) - .saturating_add(RocksDbWeight::get().reads(1 as Weight)) - .saturating_add(RocksDbWeight::get().reads((1 as Weight).saturating_mul(b as Weight))) - .saturating_add(RocksDbWeight::get().writes(1 as Weight)) - .saturating_add(RocksDbWeight::get().writes((5 as 
Weight).saturating_mul(b as Weight))) + Weight::from_ref_time(24_366_000) + // Standard Error: 5_000 + .saturating_add(Weight::from_ref_time(9_338_000).saturating_mul(b as u64)) + .saturating_add(RocksDbWeight::get().reads(1 as u64)) + .saturating_add(RocksDbWeight::get().reads((1 as u64).saturating_mul(b as u64))) + .saturating_add(RocksDbWeight::get().writes(1 as u64)) + .saturating_add(RocksDbWeight::get().writes((4 as u64).saturating_mul(b as u64))) } // Storage: Refungible TokensMinted (r:1 w:1) // Storage: Refungible TotalSupply (r:0 w:1) - // Storage: Refungible TokenData (r:0 w:1) // Storage: Refungible AccountBalance (r:4 w:4) // Storage: Refungible Balance (r:0 w:4) // Storage: Refungible Owned (r:0 w:4) fn create_multiple_items_ex_multiple_owners(b: u32, ) -> Weight { - (1_980_000 as Weight) - // Standard Error: 2_000 - .saturating_add((6_305_000 as Weight).saturating_mul(b as Weight)) - .saturating_add(RocksDbWeight::get().reads(1 as Weight)) - .saturating_add(RocksDbWeight::get().reads((1 as Weight).saturating_mul(b as Weight))) - .saturating_add(RocksDbWeight::get().writes(3 as Weight)) - .saturating_add(RocksDbWeight::get().writes((3 as Weight).saturating_mul(b as Weight))) + Weight::from_ref_time(27_574_000) + // Standard Error: 4_000 + .saturating_add(Weight::from_ref_time(7_193_000).saturating_mul(b as u64)) + .saturating_add(RocksDbWeight::get().reads(1 as u64)) + .saturating_add(RocksDbWeight::get().reads((1 as u64).saturating_mul(b as u64))) + .saturating_add(RocksDbWeight::get().writes(2 as u64)) + .saturating_add(RocksDbWeight::get().writes((3 as u64).saturating_mul(b as u64))) } // Storage: Refungible TotalSupply (r:1 w:1) - // Storage: Refungible Balance (r:1 w:1) + // Storage: Refungible Balance (r:3 w:1) // Storage: Refungible AccountBalance (r:1 w:1) // Storage: Refungible Owned (r:0 w:1) fn burn_item_partial() -> Weight { - (21_010_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(3 as Weight)) - .saturating_add(RocksDbWeight::get().writes(4 as Weight)) + Weight::from_ref_time(42_943_000) + .saturating_add(RocksDbWeight::get().reads(5 as u64)) + .saturating_add(RocksDbWeight::get().writes(4 as u64)) } // Storage: Refungible TotalSupply (r:1 w:1) // Storage: Refungible Balance (r:1 w:1) // Storage: Refungible AccountBalance (r:1 w:1) // Storage: Refungible TokensBurnt (r:1 w:1) - // Storage: Refungible TokenData (r:0 w:1) // Storage: Refungible Owned (r:0 w:1) + // Storage: Refungible TokenProperties (r:0 w:1) fn burn_item_fully() -> Weight { - (28_413_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(4 as Weight)) - .saturating_add(RocksDbWeight::get().writes(6 as Weight)) + Weight::from_ref_time(36_861_000) + .saturating_add(RocksDbWeight::get().reads(4 as u64)) + .saturating_add(RocksDbWeight::get().writes(6 as u64)) } // Storage: Refungible Balance (r:2 w:2) + // Storage: Refungible TotalSupply (r:1 w:0) fn transfer_normal() -> Weight { - (17_513_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(2 as Weight)) - .saturating_add(RocksDbWeight::get().writes(2 as Weight)) + Weight::from_ref_time(27_789_000) + .saturating_add(RocksDbWeight::get().reads(3 as u64)) + .saturating_add(RocksDbWeight::get().writes(2 as u64)) } // Storage: Refungible Balance (r:2 w:2) // Storage: Refungible AccountBalance (r:1 w:1) + // Storage: Refungible TotalSupply (r:1 w:0) // Storage: Refungible Owned (r:0 w:1) fn transfer_creating() -> Weight { - (20_469_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(3 as Weight)) - 
.saturating_add(RocksDbWeight::get().writes(4 as Weight)) + Weight::from_ref_time(32_893_000) + .saturating_add(RocksDbWeight::get().reads(4 as u64)) + .saturating_add(RocksDbWeight::get().writes(4 as u64)) } // Storage: Refungible Balance (r:2 w:2) // Storage: Refungible AccountBalance (r:1 w:1) + // Storage: Refungible TotalSupply (r:1 w:0) // Storage: Refungible Owned (r:0 w:1) fn transfer_removing() -> Weight { - (22_472_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(3 as Weight)) - .saturating_add(RocksDbWeight::get().writes(4 as Weight)) + Weight::from_ref_time(34_703_000) + .saturating_add(RocksDbWeight::get().reads(4 as u64)) + .saturating_add(RocksDbWeight::get().writes(4 as u64)) } // Storage: Refungible Balance (r:2 w:2) // Storage: Refungible AccountBalance (r:2 w:2) + // Storage: Refungible TotalSupply (r:1 w:0) // Storage: Refungible Owned (r:0 w:2) fn transfer_creating_removing() -> Weight { - (24_866_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(4 as Weight)) - .saturating_add(RocksDbWeight::get().writes(6 as Weight)) + Weight::from_ref_time(37_547_000) + .saturating_add(RocksDbWeight::get().reads(5 as u64)) + .saturating_add(RocksDbWeight::get().writes(6 as u64)) } // Storage: Refungible Balance (r:1 w:0) // Storage: Refungible Allowance (r:0 w:1) fn approve() -> Weight { - (13_475_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(1 as Weight)) - .saturating_add(RocksDbWeight::get().writes(1 as Weight)) + Weight::from_ref_time(20_039_000) + .saturating_add(RocksDbWeight::get().reads(1 as u64)) + .saturating_add(RocksDbWeight::get().writes(1 as u64)) } // Storage: Refungible Allowance (r:1 w:1) // Storage: Refungible Balance (r:2 w:2) + // Storage: Refungible TotalSupply (r:1 w:0) fn transfer_from_normal() -> Weight { - (24_707_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(3 as Weight)) - .saturating_add(RocksDbWeight::get().writes(3 as Weight)) + Weight::from_ref_time(37_628_000) + .saturating_add(RocksDbWeight::get().reads(4 as u64)) + .saturating_add(RocksDbWeight::get().writes(3 as u64)) } // Storage: Refungible Allowance (r:1 w:1) // Storage: Refungible Balance (r:2 w:2) // Storage: Refungible AccountBalance (r:1 w:1) + // Storage: Refungible TotalSupply (r:1 w:0) // Storage: Refungible Owned (r:0 w:1) fn transfer_from_creating() -> Weight { - (27_812_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(4 as Weight)) - .saturating_add(RocksDbWeight::get().writes(5 as Weight)) + Weight::from_ref_time(42_072_000) + .saturating_add(RocksDbWeight::get().reads(5 as u64)) + .saturating_add(RocksDbWeight::get().writes(5 as u64)) } // Storage: Refungible Allowance (r:1 w:1) // Storage: Refungible Balance (r:2 w:2) // Storage: Refungible AccountBalance (r:1 w:1) + // Storage: Refungible TotalSupply (r:1 w:0) // Storage: Refungible Owned (r:0 w:1) fn transfer_from_removing() -> Weight { - (29_966_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(4 as Weight)) - .saturating_add(RocksDbWeight::get().writes(5 as Weight)) + Weight::from_ref_time(43_024_000) + .saturating_add(RocksDbWeight::get().reads(5 as u64)) + .saturating_add(RocksDbWeight::get().writes(5 as u64)) } // Storage: Refungible Allowance (r:1 w:1) // Storage: Refungible Balance (r:2 w:2) // Storage: Refungible AccountBalance (r:2 w:2) + // Storage: Refungible TotalSupply (r:1 w:0) // Storage: Refungible Owned (r:0 w:2) fn transfer_from_creating_removing() -> Weight { - (31_660_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(5 as Weight)) - 
.saturating_add(RocksDbWeight::get().writes(7 as Weight)) + Weight::from_ref_time(45_910_000) + .saturating_add(RocksDbWeight::get().reads(6 as u64)) + .saturating_add(RocksDbWeight::get().writes(7 as u64)) } // Storage: Refungible Allowance (r:1 w:1) // Storage: Refungible TotalSupply (r:1 w:1) // Storage: Refungible Balance (r:1 w:1) // Storage: Refungible AccountBalance (r:1 w:1) // Storage: Refungible TokensBurnt (r:1 w:1) - // Storage: Refungible TokenData (r:0 w:1) // Storage: Refungible Owned (r:0 w:1) + // Storage: Refungible TokenProperties (r:0 w:1) fn burn_from() -> Weight { - (36_248_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(5 as Weight)) - .saturating_add(RocksDbWeight::get().writes(7 as Weight)) + Weight::from_ref_time(48_584_000) + .saturating_add(RocksDbWeight::get().reads(5 as u64)) + .saturating_add(RocksDbWeight::get().writes(7 as u64)) + } + // Storage: Common CollectionPropertyPermissions (r:1 w:1) + fn set_token_property_permissions(b: u32, ) -> Weight { + (Weight::from_ref_time(0)) + // Standard Error: 60_000 + .saturating_add(Weight::from_ref_time(15_533_000).saturating_mul(b as u64)) + .saturating_add(RocksDbWeight::get().reads(1 as u64)) + .saturating_add(RocksDbWeight::get().writes(1 as u64)) + } + // Storage: Common CollectionPropertyPermissions (r:1 w:0) + // Storage: Refungible TokenProperties (r:1 w:1) + fn set_token_properties(b: u32, ) -> Weight { + (Weight::from_ref_time(0)) + // Standard Error: 3_609_000 + .saturating_add(Weight::from_ref_time(590_204_000).saturating_mul(b as u64)) + .saturating_add(RocksDbWeight::get().reads(2 as u64)) + .saturating_add(RocksDbWeight::get().writes(1 as u64)) + } + // Storage: Common CollectionPropertyPermissions (r:1 w:0) + // Storage: Refungible TokenProperties (r:1 w:1) + fn delete_token_properties(b: u32, ) -> Weight { + (Weight::from_ref_time(0)) + // Standard Error: 3_637_000 + .saturating_add(Weight::from_ref_time(603_468_000).saturating_mul(b as u64)) + .saturating_add(RocksDbWeight::get().reads(2 as u64)) + .saturating_add(RocksDbWeight::get().writes(1 as u64)) } // Storage: Refungible TotalSupply (r:1 w:1) // Storage: Refungible Balance (r:1 w:1) fn repartition_item() -> Weight { - (8_226_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(2 as Weight)) - .saturating_add(RocksDbWeight::get().writes(2 as Weight)) + Weight::from_ref_time(22_356_000) + .saturating_add(RocksDbWeight::get().reads(2 as u64)) + .saturating_add(RocksDbWeight::get().writes(2 as u64)) + } + // Storage: Refungible Balance (r:2 w:0) + fn token_owner() -> Weight { + Weight::from_ref_time(9_431_000) + .saturating_add(RocksDbWeight::get().reads(2 as u64)) } } diff --git a/pallets/scheduler/CHANGELOG.md b/pallets/scheduler/CHANGELOG.md new file mode 100644 index 0000000000..732ab15dc7 --- /dev/null +++ b/pallets/scheduler/CHANGELOG.md @@ -0,0 +1,10 @@ + +## [v0.1.1] 2022-08-16 + +### Other changes + +- build: Upgrade polkadot to v0.9.27 2c498572636f2b34d53b1c51b7283a761a7dc90a + +- build: Upgrade polkadot to v0.9.26 85515e54c4ca1b82a2630034e55dcc804c643bf8 + +- build: Upgrade polkadot to v0.9.25 cdfb9bdc7b205ff1b5134f034ef9973d769e5e6b \ No newline at end of file diff --git a/pallets/scheduler/Cargo.toml b/pallets/scheduler/Cargo.toml index dc4b651590..5e9cff8247 100644 --- a/pallets/scheduler/Cargo.toml +++ b/pallets/scheduler/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "pallet-unique-scheduler" -version = "0.1.0" +version = "0.1.1" authors = ["Unique Network "] edition = "2021" license = "GPLv3" @@ -16,20 +16,20 @@ 
scale-info = { version = "2.0.1", default-features = false, features = [ "derive", ] } -frame-support = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -frame-system = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-runtime = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-std = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-io = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-core = { default-features = false, git = 'https://github.com/paritytech/substrate.git', branch = 'polkadot-v0.9.24' } -frame-benchmarking = { default-features = false, optional = true, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } +frame-support = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +frame-system = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-runtime = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-std = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-io = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-core = { default-features = false, git = 'https://github.com/paritytech/substrate.git', branch = 'polkadot-v0.9.30' } +frame-benchmarking = { default-features = false, optional = true, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } -up-sponsorship = { version = "0.1.0", default-features = false, git = "https://github.com/uniquenetwork/pallet-sponsoring", branch = "polkadot-v0.9.24" } -log = { version = "0.4.14", default-features = false } +up-sponsorship = { version = "0.1.0", default-features = false, git = "https://github.com/uniquenetwork/pallet-sponsoring", branch = "polkadot-v0.9.30" } +log = { version = "0.4.16", default-features = false } [dev-dependencies] -sp-core = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -substrate-test-utils = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } +sp-core = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +substrate-test-utils = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } [features] default = ["std"] @@ -50,4 +50,4 @@ runtime-benchmarks = [ "frame-support/runtime-benchmarks", "frame-system/runtime-benchmarks", ] -#try-runtime = ["frame-support/try-runtime"] +try-runtime = ["frame-support/try-runtime"] diff --git a/pallets/scheduler/src/benchmarking.rs b/pallets/scheduler/src/benchmarking.rs index 9906998512..eb60ce2791 100644 --- a/pallets/scheduler/src/benchmarking.rs +++ b/pallets/scheduler/src/benchmarking.rs @@ -93,9 +93,9 @@ fn fill_schedule( Ok(()) } -fn call_and_hash(i: u32) -> (::Call, T::Hash) { +fn call_and_hash(i: u32) -> (::RuntimeCall, T::Hash) { // Essentially a no-op call. 
-	let call: ::Call = frame_system::Call::remark { remark: i.encode() }.into();
+	let call: ::RuntimeCall = frame_system::Call::remark { remark: i.encode() }.into();
 	let hash = T::Hashing::hash_of(&call);
 	(call, hash)
 }
diff --git a/pallets/scheduler/src/lib.rs b/pallets/scheduler/src/lib.rs
index 7813bc0d4e..57bbf00bfb 100644
--- a/pallets/scheduler/src/lib.rs
+++ b/pallets/scheduler/src/lib.rs
@@ -24,7 +24,7 @@
 // you may not use this file except in compliance with the License.
 // You may obtain a copy of the License at
 //
-// http://www.apache.org/licenses/LICENSE-2.0
+//
 //
 // Unless required by applicable law or agreed to in writing, software
 // distributed under the License is distributed on an "AS IS" BASIS,
@@ -32,27 +32,36 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

-//! # Schedulerdo_reschedule
+//! # Unique scheduler
+//! A Pallet for scheduling dispatches.
+//!
+//! - [`Config`]
+//! - [`Call`]
+//! - [`Pallet`]
+//!
+//! ## Overview
 //!
 //! This Pallet exposes capabilities for scheduling dispatches to occur at a
 //! specified block number or at a specified period. These scheduled dispatches
-//! may be named or anonymous and may be canceled.
+//! must be named and may be canceled.
 //!
-//! **NOTE:** The scheduled calls will be dispatched with the default filter
-//! for the origin: namely `frame_system::Config::BaseCallFilter` for all origin
-//! except root which will get no filter. And not the filter contained in origin
-//! use to call `fn schedule`.
+//! **NOTE:** The unique scheduler is designed for deferring dispatch calls to a given block number.
+//! Any user can book a call of a certain transaction for a specific block number.
+//! It is also possible to book a call that repeats with a certain frequency.
 //!
-//! If a call is scheduled using proxy or whatever mecanism which adds filter,
-//! then those filter will not be used when dispatching the schedule call.
+//! Key differences from the original pallet:
+//!
+//! The schedule id, which identifies a booked call, is restricted to 16 bytes.
+//! Priority is limited by `schedule::HARD_DEADLINE` (<= 63); calls that exceed the maximum weight are not included in the block.
+//! `MaximumWeight` caps the weight that may be scheduled per block for dispatchables of lower priority than `schedule::HARD_DEADLINE`.
+//! The `maybe_periodic` repetition count is limited to 100 calls; the limit is reserved for future sponsored transaction support.
+//! With at most 100 calls the reserved amount stays small, which avoids potential problems with balance locks.
+//! Any account is allowed to schedule any call; the account is charged through the default transaction logic.
 //!
 //! ## Interface
 //!
 //! ### Dispatchable Functions
 //!
-//! * `schedule` - schedule a dispatch, which may be periodic, to occur at a specified block and
-//!   with a specified priority.
-//! * `cancel` - cancel a scheduled dispatch, specified by block number and index.
 //! * `schedule_named` - augments the `schedule` interface with an additional `Vec` parameter
 //!   that can be used for identification.
 //! * `cancel_named` - the named complement to the cancel function.
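Since every booked call is identified by a fixed 16-byte id (the `ScheduledId` alias declared further down in this file), callers of `schedule_named` and `cancel_named` must pack their identifier into exactly 16 bytes. A minimal standalone sketch of one way to do that; the helper itself is hypothetical and not part of the pallet:

/// Mirrors the constants declared in the pallet.
pub const MAX_TASK_ID_LENGTH_IN_BYTES: u8 = 16;
type ScheduledId = [u8; MAX_TASK_ID_LENGTH_IN_BYTES as usize];

/// Hypothetical helper: pack a human-readable name into the fixed-size id,
/// truncating longer names and zero-padding shorter ones.
fn scheduled_id_from_name(name: &str) -> ScheduledId {
	let mut id = [0u8; MAX_TASK_ID_LENGTH_IN_BYTES as usize];
	let bytes = name.as_bytes();
	let len = bytes.len().min(id.len());
	id[..len].copy_from_slice(&bytes[..len]);
	id
}

fn main() {
	// "unlock-funds" is 12 bytes long, so the remaining 4 bytes stay zeroed.
	let id = scheduled_id_from_name("unlock-funds");
	assert_eq!(&id[..12], &b"unlock-funds"[..]);
	assert!(id[12..].iter().all(|&b| b == 0));
}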
@@ -77,13 +86,13 @@ use sp_runtime::{ use sp_std::{borrow::Borrow, cmp::Ordering, marker::PhantomData, prelude::*}; use frame_support::{ - dispatch::{DispatchError, DispatchResult, Dispatchable, Parameter}, + dispatch::{DispatchError, DispatchResult, Dispatchable, Parameter, GetDispatchInfo}, traits::{ schedule::{self, DispatchTime, MaybeHashed}, NamedReservableCurrency, EnsureOrigin, Get, IsType, OriginTrait, PrivilegeCmp, StorageVersion, }, - weights::{GetDispatchInfo, Weight}, + weights::{Weight}, }; pub use weights::WeightInfo; @@ -95,7 +104,8 @@ pub type TaskAddress = (BlockNumber, u32); pub const MAX_TASK_ID_LENGTH_IN_BYTES: u8 = 16; type ScheduledId = [u8; MAX_TASK_ID_LENGTH_IN_BYTES as usize]; -pub type CallOrHashOf = MaybeHashed<::Call, ::Hash>; +pub type CallOrHashOf = + MaybeHashed<::RuntimeCall, ::Hash>; /// Information regarding an item to be executed in the future. #[cfg_attr(any(feature = "std", test), derive(PartialEq, Eq))] @@ -200,12 +210,12 @@ pub mod pallet { #[pallet::config] pub trait Config: frame_system::Config { /// The overarching event type. - type Event: From> + IsType<::Event>; + type RuntimeEvent: From> + IsType<::RuntimeEvent>; /// The aggregated origin which the dispatch will take. - type Origin: OriginTrait + type RuntimeOrigin: OriginTrait + From - + IsType<::Origin>; + + IsType<::RuntimeOrigin>; /// The caller origin, overarching type of all pallets origins. type PalletsOrigin: From> + Codec + Clone + Eq + TypeInfo; @@ -213,9 +223,11 @@ pub mod pallet { type Currency: NamedReservableCurrency; /// The aggregated call type. - type Call: Parameter - + Dispatchable::Origin, PostInfo = PostDispatchInfo> - + GetDispatchInfo + type RuntimeCall: Parameter + + Dispatchable< + RuntimeOrigin = ::RuntimeOrigin, + PostInfo = PostDispatchInfo, + > + GetDispatchInfo + From>; /// The maximum weight that may be scheduled per block for any dispatchables of less @@ -224,7 +236,7 @@ pub mod pallet { type MaximumWeight: Get; /// Required origin to schedule or cancel calls. - type ScheduleOrigin: EnsureOrigin<::Origin>; + type ScheduleOrigin: EnsureOrigin<::RuntimeOrigin>; /// Compare the privileges of origins. /// @@ -258,28 +270,31 @@ pub mod pallet { /// A Scheduler-Runtime interface for finer payment handling. pub trait DispatchCall { + /// Reserve (lock) the maximum spendings on a call, calculated from its weight and the repetition count. fn reserve_balance( id: ScheduledId, sponsor: ::AccountId, - call: ::Call, + call: ::RuntimeCall, count: u32, ) -> Result<(), DispatchError>; + /// Unreserve (unlock) a certain amount from the payer's reserved funds, returning the change. fn pay_for_call( id: ScheduledId, sponsor: ::AccountId, - call: ::Call, + call: ::RuntimeCall, ) -> Result; /// Resolve the call dispatch, including any post-dispatch operations. fn dispatch_call( signer: T::AccountId, - function: ::Call, + function: ::RuntimeCall, ) -> Result< Result>, TransactionValidityError, >; + /// Release unspent reserved funds in case of a schedule cancel. 
fn cancel_reserve( id: ScheduledId, sponsor: ::AccountId, @@ -392,9 +407,10 @@ pub mod pallet { let periodic = s.maybe_periodic.is_some(); let call_weight = call.get_dispatch_info().weight; let mut item_weight = T::WeightInfo::item(periodic, named, Some(resolved)); - let origin = - <::Origin as From>::from(s.origin.clone()) - .into(); + let origin = <::RuntimeOrigin as From>::from( + s.origin.clone(), + ) + .into(); if ensure_signed(origin).is_ok() { // Weights of Signed dispatches expect their signing account to be whitelisted. item_weight.saturating_accrue(T::DbWeight::get().reads_writes(1, 1)); @@ -408,7 +424,7 @@ pub mod pallet { let test_weight = total_weight .saturating_add(call_weight) .saturating_add(item_weight); - if !hard_deadline && order > 0 && test_weight > limit { + if !hard_deadline && order > 0 && test_weight.all_gt(limit) { // Cannot be scheduled this block - postpone until next. total_weight.saturating_accrue(T::WeightInfo::item(false, named, None)); if let Some(ref id) = s.maybe_id { @@ -424,8 +440,10 @@ pub mod pallet { } let sender = ensure_signed( - <::Origin as From>::from(s.origin.clone()) - .into(), + <::RuntimeOrigin as From>::from( + s.origin.clone(), + ) + .into(), ) .unwrap(); @@ -438,6 +456,8 @@ pub mod pallet { // ); // } + // Execute transaction via chain default pipeline + // That means dispatch will be processed like any user's extrinsic e.g. transaction fees will be taken let r = T::CallExecutor::dispatch_call(sender, call.clone()); let mut actual_call_weight: Weight = item_weight; @@ -482,8 +502,8 @@ pub mod pallet { Agenda::::append(wake, Some(s)); } } - 0 - //total_weight + // Total weight should be 0, because the transaction is already paid for + Weight::zero() } } @@ -500,7 +520,7 @@ pub mod pallet { call: Box>, ) -> DispatchResult { T::ScheduleOrigin::ensure_origin(origin.clone())?; - let origin = ::Origin::from(origin); + let origin = ::RuntimeOrigin::from(origin); Self::do_schedule_named( id, DispatchTime::At(when), @@ -516,7 +536,7 @@ pub mod pallet { #[pallet::weight(::WeightInfo::cancel_named(T::MaxScheduledPerBlock::get()))] pub fn cancel_named(origin: OriginFor, id: ScheduledId) -> DispatchResult { T::ScheduleOrigin::ensure_origin(origin.clone())?; - let origin = ::Origin::from(origin); + let origin = ::RuntimeOrigin::from(origin); Self::do_cancel_named(Some(origin.caller().clone()), id)?; Ok(()) } @@ -536,7 +556,7 @@ pub mod pallet { call: Box>, ) -> DispatchResult { T::ScheduleOrigin::ensure_origin(origin.clone())?; - let origin = ::Origin::from(origin); + let origin = ::RuntimeOrigin::from(origin); Self::do_schedule_named( id, DispatchTime::After(after), diff --git a/pallets/scheduler/src/weights.rs b/pallets/scheduler/src/weights.rs index 98df961492..646ccf74f8 100644 --- a/pallets/scheduler/src/weights.rs +++ b/pallets/scheduler/src/weights.rs @@ -3,7 +3,7 @@ //! Autogenerated weights for pallet_unique_scheduler //! //! THIS FILE WAS AUTO-GENERATED USING THE SUBSTRATE BENCHMARK CLI VERSION 4.0.0-dev -//! DATE: 2022-06-15, STEPS: `50`, REPEAT: 80, LOW RANGE: `[]`, HIGH RANGE: `[]` +//! DATE: 2022-08-15, STEPS: `50`, REPEAT: 80, LOW RANGE: `[]`, HIGH RANGE: `[]` //! 
EXECUTION: None, WASM-EXECUTION: Compiled, CHAIN: None, DB CACHE: 1024 // Executed Command: @@ -55,13 +55,13 @@ impl WeightInfo for SubstrateWeight { // Storage: System BlockWeight (r:1 w:1) // Storage: Scheduler Lookup (r:0 w:1) fn on_initialize_periodic_named_resolved(s: u32, ) -> Weight { - (27_374_000 as Weight) - // Standard Error: 7_000 - .saturating_add((9_673_000 as Weight).saturating_mul(s as Weight)) - .saturating_add(T::DbWeight::get().reads(4 as Weight)) - .saturating_add(T::DbWeight::get().reads((1 as Weight).saturating_mul(s as Weight))) - .saturating_add(T::DbWeight::get().writes(4 as Weight)) - .saturating_add(T::DbWeight::get().writes((2 as Weight).saturating_mul(s as Weight))) + Weight::from_ref_time(26_641_000) + // Standard Error: 9_000 + .saturating_add(Weight::from_ref_time(8_547_000).saturating_mul(s as u64)) + .saturating_add(T::DbWeight::get().reads(4 as u64)) + .saturating_add(T::DbWeight::get().reads((1 as u64).saturating_mul(s as u64))) + .saturating_add(T::DbWeight::get().writes(4 as u64)) + .saturating_add(T::DbWeight::get().writes((2 as u64).saturating_mul(s as u64))) } // Storage: Scheduler Agenda (r:1 w:1) // Storage: System Account (r:1 w:1) @@ -69,12 +69,12 @@ impl WeightInfo for SubstrateWeight { // Storage: System BlockWeight (r:1 w:1) // Storage: Scheduler Lookup (r:0 w:1) fn on_initialize_named_resolved(s: u32, ) -> Weight { - (25_967_000 as Weight) - // Standard Error: 6_000 - .saturating_add((5_916_000 as Weight).saturating_mul(s as Weight)) - .saturating_add(T::DbWeight::get().reads(4 as Weight)) - .saturating_add(T::DbWeight::get().writes(4 as Weight)) - .saturating_add(T::DbWeight::get().writes((1 as Weight).saturating_mul(s as Weight))) + Weight::from_ref_time(23_941_000) + // Standard Error: 17_000 + .saturating_add(Weight::from_ref_time(5_282_000).saturating_mul(s as u64)) + .saturating_add(T::DbWeight::get().reads(4 as u64)) + .saturating_add(T::DbWeight::get().writes(4 as u64)) + .saturating_add(T::DbWeight::get().writes((1 as u64).saturating_mul(s as u64))) } // Storage: Scheduler Agenda (r:2 w:2) // Storage: System Account (r:1 w:1) @@ -82,13 +82,13 @@ impl WeightInfo for SubstrateWeight { // Storage: System BlockWeight (r:1 w:1) // Storage: Scheduler Lookup (r:0 w:1) fn on_initialize_periodic(s: u32, ) -> Weight { - (27_097_000 as Weight) - // Standard Error: 5_000 - .saturating_add((9_652_000 as Weight).saturating_mul(s as Weight)) - .saturating_add(T::DbWeight::get().reads(4 as Weight)) - .saturating_add(T::DbWeight::get().reads((1 as Weight).saturating_mul(s as Weight))) - .saturating_add(T::DbWeight::get().writes(4 as Weight)) - .saturating_add(T::DbWeight::get().writes((2 as Weight).saturating_mul(s as Weight))) + Weight::from_ref_time(24_858_000) + // Standard Error: 7_000 + .saturating_add(Weight::from_ref_time(8_657_000).saturating_mul(s as u64)) + .saturating_add(T::DbWeight::get().reads(4 as u64)) + .saturating_add(T::DbWeight::get().reads((1 as u64).saturating_mul(s as u64))) + .saturating_add(T::DbWeight::get().writes(4 as u64)) + .saturating_add(T::DbWeight::get().writes((2 as u64).saturating_mul(s as u64))) } // Storage: Scheduler Agenda (r:2 w:2) // Storage: System Account (r:1 w:1) @@ -96,23 +96,23 @@ impl WeightInfo for SubstrateWeight { // Storage: System BlockWeight (r:1 w:1) // Storage: Scheduler Lookup (r:0 w:1) fn on_initialize_periodic_resolved(s: u32, ) -> Weight { - (43_116_000 as Weight) - // Standard Error: 18_000 - .saturating_add((8_352_000 as Weight).saturating_mul(s as Weight)) - 
.saturating_add(T::DbWeight::get().reads(4 as Weight)) - .saturating_add(T::DbWeight::get().reads((1 as Weight).saturating_mul(s as Weight))) - .saturating_add(T::DbWeight::get().writes(4 as Weight)) - .saturating_add(T::DbWeight::get().writes((2 as Weight).saturating_mul(s as Weight))) + Weight::from_ref_time(25_515_000) + // Standard Error: 14_000 + .saturating_add(Weight::from_ref_time(8_656_000).saturating_mul(s as u64)) + .saturating_add(T::DbWeight::get().reads(4 as u64)) + .saturating_add(T::DbWeight::get().reads((1 as u64).saturating_mul(s as u64))) + .saturating_add(T::DbWeight::get().writes(4 as u64)) + .saturating_add(T::DbWeight::get().writes((2 as u64).saturating_mul(s as u64))) } // Storage: Scheduler Agenda (r:2 w:2) // Storage: Scheduler Lookup (r:0 w:1) fn on_initialize_aborted(s: u32, ) -> Weight { - (4_921_000 as Weight) - // Standard Error: 4_000 - .saturating_add((2_249_000 as Weight).saturating_mul(s as Weight)) - .saturating_add(T::DbWeight::get().reads(2 as Weight)) - .saturating_add(T::DbWeight::get().writes(2 as Weight)) - .saturating_add(T::DbWeight::get().writes((1 as Weight).saturating_mul(s as Weight))) + Weight::from_ref_time(7_584_000) + // Standard Error: 1_000 + .saturating_add(Weight::from_ref_time(2_065_000).saturating_mul(s as u64)) + .saturating_add(T::DbWeight::get().reads(2 as u64)) + .saturating_add(T::DbWeight::get().writes(2 as u64)) + .saturating_add(T::DbWeight::get().writes((1 as u64).saturating_mul(s as u64))) } // Storage: Scheduler Agenda (r:1 w:1) // Storage: System Account (r:1 w:1) @@ -120,22 +120,22 @@ impl WeightInfo for SubstrateWeight { // Storage: System BlockWeight (r:1 w:1) // Storage: Scheduler Lookup (r:0 w:1) fn on_initialize_named_aborted(s: u32, ) -> Weight { - (26_934_000 as Weight) - // Standard Error: 7_000 - .saturating_add((5_819_000 as Weight).saturating_mul(s as Weight)) - .saturating_add(T::DbWeight::get().reads(4 as Weight)) - .saturating_add(T::DbWeight::get().writes(4 as Weight)) - .saturating_add(T::DbWeight::get().writes((1 as Weight).saturating_mul(s as Weight))) + Weight::from_ref_time(25_552_000) + // Standard Error: 4_000 + .saturating_add(Weight::from_ref_time(5_187_000).saturating_mul(s as u64)) + .saturating_add(T::DbWeight::get().reads(4 as u64)) + .saturating_add(T::DbWeight::get().writes(4 as u64)) + .saturating_add(T::DbWeight::get().writes((1 as u64).saturating_mul(s as u64))) } // Storage: Scheduler Agenda (r:2 w:2) // Storage: Scheduler Lookup (r:0 w:1) fn on_initialize_named(s: u32, ) -> Weight { - (6_423_000 as Weight) - // Standard Error: 1_000 - .saturating_add((2_141_000 as Weight).saturating_mul(s as Weight)) - .saturating_add(T::DbWeight::get().reads(2 as Weight)) - .saturating_add(T::DbWeight::get().writes(2 as Weight)) - .saturating_add(T::DbWeight::get().writes((1 as Weight).saturating_mul(s as Weight))) + Weight::from_ref_time(8_980_000) + // Standard Error: 12_000 + .saturating_add(Weight::from_ref_time(2_050_000).saturating_mul(s as u64)) + .saturating_add(T::DbWeight::get().reads(2 as u64)) + .saturating_add(T::DbWeight::get().writes(2 as u64)) + .saturating_add(T::DbWeight::get().writes((1 as u64).saturating_mul(s as u64))) } // Storage: Scheduler Agenda (r:1 w:1) // Storage: System Account (r:1 w:1) @@ -143,12 +143,12 @@ impl WeightInfo for SubstrateWeight { // Storage: System BlockWeight (r:1 w:1) // Storage: Scheduler Lookup (r:0 w:1) fn on_initialize(s: u32, ) -> Weight { - (27_586_000 as Weight) - // Standard Error: 11_000 - .saturating_add((5_264_000 as Weight).saturating_mul(s 
as Weight)) - .saturating_add(T::DbWeight::get().reads(4 as Weight)) - .saturating_add(T::DbWeight::get().writes(4 as Weight)) - .saturating_add(T::DbWeight::get().writes((1 as Weight).saturating_mul(s as Weight))) + Weight::from_ref_time(24_482_000) + // Standard Error: 4_000 + .saturating_add(Weight::from_ref_time(5_249_000).saturating_mul(s as u64)) + .saturating_add(T::DbWeight::get().reads(4 as u64)) + .saturating_add(T::DbWeight::get().writes(4 as u64)) + .saturating_add(T::DbWeight::get().writes((1 as u64).saturating_mul(s as u64))) } // Storage: Scheduler Agenda (r:1 w:1) // Storage: System Account (r:1 w:1) @@ -156,30 +156,30 @@ impl WeightInfo for SubstrateWeight { // Storage: System BlockWeight (r:1 w:1) // Storage: Scheduler Lookup (r:0 w:1) fn on_initialize_resolved(s: u32, ) -> Weight { - (24_356_000 as Weight) + Weight::from_ref_time(25_187_000) // Standard Error: 4_000 - .saturating_add((5_301_000 as Weight).saturating_mul(s as Weight)) - .saturating_add(T::DbWeight::get().reads(4 as Weight)) - .saturating_add(T::DbWeight::get().writes(4 as Weight)) - .saturating_add(T::DbWeight::get().writes((1 as Weight).saturating_mul(s as Weight))) + .saturating_add(Weight::from_ref_time(5_216_000).saturating_mul(s as u64)) + .saturating_add(T::DbWeight::get().reads(4 as u64)) + .saturating_add(T::DbWeight::get().writes(4 as u64)) + .saturating_add(T::DbWeight::get().writes((1 as u64).saturating_mul(s as u64))) } // Storage: Scheduler Lookup (r:1 w:1) // Storage: Scheduler Agenda (r:1 w:1) fn schedule_named(s: u32, ) -> Weight { - (14_871_000 as Weight) - // Standard Error: 1_000 - .saturating_add((183_000 as Weight).saturating_mul(s as Weight)) - .saturating_add(T::DbWeight::get().reads(2 as Weight)) - .saturating_add(T::DbWeight::get().writes(2 as Weight)) + Weight::from_ref_time(17_316_000) + // Standard Error: 3_000 + .saturating_add(Weight::from_ref_time(82_000).saturating_mul(s as u64)) + .saturating_add(T::DbWeight::get().reads(2 as u64)) + .saturating_add(T::DbWeight::get().writes(2 as u64)) } // Storage: Scheduler Lookup (r:1 w:1) // Storage: Scheduler Agenda (r:1 w:1) fn cancel_named(s: u32, ) -> Weight { - (16_676_000 as Weight) + Weight::from_ref_time(15_652_000) // Standard Error: 1_000 - .saturating_add((500_000 as Weight).saturating_mul(s as Weight)) - .saturating_add(T::DbWeight::get().reads(2 as Weight)) - .saturating_add(T::DbWeight::get().writes(2 as Weight)) + .saturating_add(Weight::from_ref_time(436_000).saturating_mul(s as u64)) + .saturating_add(T::DbWeight::get().reads(2 as u64)) + .saturating_add(T::DbWeight::get().writes(2 as u64)) } } @@ -191,13 +191,13 @@ impl WeightInfo for () { // Storage: System BlockWeight (r:1 w:1) // Storage: Scheduler Lookup (r:0 w:1) fn on_initialize_periodic_named_resolved(s: u32, ) -> Weight { - (27_374_000 as Weight) - // Standard Error: 7_000 - .saturating_add((9_673_000 as Weight).saturating_mul(s as Weight)) - .saturating_add(RocksDbWeight::get().reads(4 as Weight)) - .saturating_add(RocksDbWeight::get().reads((1 as Weight).saturating_mul(s as Weight))) - .saturating_add(RocksDbWeight::get().writes(4 as Weight)) - .saturating_add(RocksDbWeight::get().writes((2 as Weight).saturating_mul(s as Weight))) + Weight::from_ref_time(26_641_000) + // Standard Error: 9_000 + .saturating_add(Weight::from_ref_time(8_547_000).saturating_mul(s as u64)) + .saturating_add(RocksDbWeight::get().reads(4 as u64)) + .saturating_add(RocksDbWeight::get().reads((1 as u64).saturating_mul(s as u64))) + .saturating_add(RocksDbWeight::get().writes(4 as 
u64)) + .saturating_add(RocksDbWeight::get().writes((2 as u64).saturating_mul(s as u64))) } // Storage: Scheduler Agenda (r:1 w:1) // Storage: System Account (r:1 w:1) @@ -205,12 +205,12 @@ impl WeightInfo for () { // Storage: System BlockWeight (r:1 w:1) // Storage: Scheduler Lookup (r:0 w:1) fn on_initialize_named_resolved(s: u32, ) -> Weight { - (25_967_000 as Weight) - // Standard Error: 6_000 - .saturating_add((5_916_000 as Weight).saturating_mul(s as Weight)) - .saturating_add(RocksDbWeight::get().reads(4 as Weight)) - .saturating_add(RocksDbWeight::get().writes(4 as Weight)) - .saturating_add(RocksDbWeight::get().writes((1 as Weight).saturating_mul(s as Weight))) + Weight::from_ref_time(23_941_000) + // Standard Error: 17_000 + .saturating_add(Weight::from_ref_time(5_282_000).saturating_mul(s as u64)) + .saturating_add(RocksDbWeight::get().reads(4 as u64)) + .saturating_add(RocksDbWeight::get().writes(4 as u64)) + .saturating_add(RocksDbWeight::get().writes((1 as u64).saturating_mul(s as u64))) } // Storage: Scheduler Agenda (r:2 w:2) // Storage: System Account (r:1 w:1) @@ -218,13 +218,13 @@ impl WeightInfo for () { // Storage: System BlockWeight (r:1 w:1) // Storage: Scheduler Lookup (r:0 w:1) fn on_initialize_periodic(s: u32, ) -> Weight { - (27_097_000 as Weight) - // Standard Error: 5_000 - .saturating_add((9_652_000 as Weight).saturating_mul(s as Weight)) - .saturating_add(RocksDbWeight::get().reads(4 as Weight)) - .saturating_add(RocksDbWeight::get().reads((1 as Weight).saturating_mul(s as Weight))) - .saturating_add(RocksDbWeight::get().writes(4 as Weight)) - .saturating_add(RocksDbWeight::get().writes((2 as Weight).saturating_mul(s as Weight))) + Weight::from_ref_time(24_858_000) + // Standard Error: 7_000 + .saturating_add(Weight::from_ref_time(8_657_000).saturating_mul(s as u64)) + .saturating_add(RocksDbWeight::get().reads(4 as u64)) + .saturating_add(RocksDbWeight::get().reads((1 as u64).saturating_mul(s as u64))) + .saturating_add(RocksDbWeight::get().writes(4 as u64)) + .saturating_add(RocksDbWeight::get().writes((2 as u64).saturating_mul(s as u64))) } // Storage: Scheduler Agenda (r:2 w:2) // Storage: System Account (r:1 w:1) @@ -232,23 +232,23 @@ impl WeightInfo for () { // Storage: System BlockWeight (r:1 w:1) // Storage: Scheduler Lookup (r:0 w:1) fn on_initialize_periodic_resolved(s: u32, ) -> Weight { - (43_116_000 as Weight) - // Standard Error: 18_000 - .saturating_add((8_352_000 as Weight).saturating_mul(s as Weight)) - .saturating_add(RocksDbWeight::get().reads(4 as Weight)) - .saturating_add(RocksDbWeight::get().reads((1 as Weight).saturating_mul(s as Weight))) - .saturating_add(RocksDbWeight::get().writes(4 as Weight)) - .saturating_add(RocksDbWeight::get().writes((2 as Weight).saturating_mul(s as Weight))) + Weight::from_ref_time(25_515_000) + // Standard Error: 14_000 + .saturating_add(Weight::from_ref_time(8_656_000).saturating_mul(s as u64)) + .saturating_add(RocksDbWeight::get().reads(4 as u64)) + .saturating_add(RocksDbWeight::get().reads((1 as u64).saturating_mul(s as u64))) + .saturating_add(RocksDbWeight::get().writes(4 as u64)) + .saturating_add(RocksDbWeight::get().writes((2 as u64).saturating_mul(s as u64))) } // Storage: Scheduler Agenda (r:2 w:2) // Storage: Scheduler Lookup (r:0 w:1) fn on_initialize_aborted(s: u32, ) -> Weight { - (4_921_000 as Weight) - // Standard Error: 4_000 - .saturating_add((2_249_000 as Weight).saturating_mul(s as Weight)) - .saturating_add(RocksDbWeight::get().reads(2 as Weight)) - 
.saturating_add(RocksDbWeight::get().writes(2 as Weight)) - .saturating_add(RocksDbWeight::get().writes((1 as Weight).saturating_mul(s as Weight))) + Weight::from_ref_time(7_584_000) + // Standard Error: 1_000 + .saturating_add(Weight::from_ref_time(2_065_000).saturating_mul(s as u64)) + .saturating_add(RocksDbWeight::get().reads(2 as u64)) + .saturating_add(RocksDbWeight::get().writes(2 as u64)) + .saturating_add(RocksDbWeight::get().writes((1 as u64).saturating_mul(s as u64))) } // Storage: Scheduler Agenda (r:1 w:1) // Storage: System Account (r:1 w:1) @@ -256,22 +256,22 @@ impl WeightInfo for () { // Storage: System BlockWeight (r:1 w:1) // Storage: Scheduler Lookup (r:0 w:1) fn on_initialize_named_aborted(s: u32, ) -> Weight { - (26_934_000 as Weight) - // Standard Error: 7_000 - .saturating_add((5_819_000 as Weight).saturating_mul(s as Weight)) - .saturating_add(RocksDbWeight::get().reads(4 as Weight)) - .saturating_add(RocksDbWeight::get().writes(4 as Weight)) - .saturating_add(RocksDbWeight::get().writes((1 as Weight).saturating_mul(s as Weight))) + Weight::from_ref_time(25_552_000) + // Standard Error: 4_000 + .saturating_add(Weight::from_ref_time(5_187_000).saturating_mul(s as u64)) + .saturating_add(RocksDbWeight::get().reads(4 as u64)) + .saturating_add(RocksDbWeight::get().writes(4 as u64)) + .saturating_add(RocksDbWeight::get().writes((1 as u64).saturating_mul(s as u64))) } // Storage: Scheduler Agenda (r:2 w:2) // Storage: Scheduler Lookup (r:0 w:1) fn on_initialize_named(s: u32, ) -> Weight { - (6_423_000 as Weight) - // Standard Error: 1_000 - .saturating_add((2_141_000 as Weight).saturating_mul(s as Weight)) - .saturating_add(RocksDbWeight::get().reads(2 as Weight)) - .saturating_add(RocksDbWeight::get().writes(2 as Weight)) - .saturating_add(RocksDbWeight::get().writes((1 as Weight).saturating_mul(s as Weight))) + Weight::from_ref_time(8_980_000) + // Standard Error: 12_000 + .saturating_add(Weight::from_ref_time(2_050_000).saturating_mul(s as u64)) + .saturating_add(RocksDbWeight::get().reads(2 as u64)) + .saturating_add(RocksDbWeight::get().writes(2 as u64)) + .saturating_add(RocksDbWeight::get().writes((1 as u64).saturating_mul(s as u64))) } // Storage: Scheduler Agenda (r:1 w:1) // Storage: System Account (r:1 w:1) @@ -279,12 +279,12 @@ impl WeightInfo for () { // Storage: System BlockWeight (r:1 w:1) // Storage: Scheduler Lookup (r:0 w:1) fn on_initialize(s: u32, ) -> Weight { - (27_586_000 as Weight) - // Standard Error: 11_000 - .saturating_add((5_264_000 as Weight).saturating_mul(s as Weight)) - .saturating_add(RocksDbWeight::get().reads(4 as Weight)) - .saturating_add(RocksDbWeight::get().writes(4 as Weight)) - .saturating_add(RocksDbWeight::get().writes((1 as Weight).saturating_mul(s as Weight))) + Weight::from_ref_time(24_482_000) + // Standard Error: 4_000 + .saturating_add(Weight::from_ref_time(5_249_000).saturating_mul(s as u64)) + .saturating_add(RocksDbWeight::get().reads(4 as u64)) + .saturating_add(RocksDbWeight::get().writes(4 as u64)) + .saturating_add(RocksDbWeight::get().writes((1 as u64).saturating_mul(s as u64))) } // Storage: Scheduler Agenda (r:1 w:1) // Storage: System Account (r:1 w:1) @@ -292,29 +292,29 @@ impl WeightInfo for () { // Storage: System BlockWeight (r:1 w:1) // Storage: Scheduler Lookup (r:0 w:1) fn on_initialize_resolved(s: u32, ) -> Weight { - (24_356_000 as Weight) + Weight::from_ref_time(25_187_000) // Standard Error: 4_000 - .saturating_add((5_301_000 as Weight).saturating_mul(s as Weight)) - 
.saturating_add(RocksDbWeight::get().reads(4 as Weight)) - .saturating_add(RocksDbWeight::get().writes(4 as Weight)) - .saturating_add(RocksDbWeight::get().writes((1 as Weight).saturating_mul(s as Weight))) + .saturating_add(Weight::from_ref_time(5_216_000).saturating_mul(s as u64)) + .saturating_add(RocksDbWeight::get().reads(4 as u64)) + .saturating_add(RocksDbWeight::get().writes(4 as u64)) + .saturating_add(RocksDbWeight::get().writes((1 as u64).saturating_mul(s as u64))) } // Storage: Scheduler Lookup (r:1 w:1) // Storage: Scheduler Agenda (r:1 w:1) fn schedule_named(s: u32, ) -> Weight { - (14_871_000 as Weight) - // Standard Error: 1_000 - .saturating_add((183_000 as Weight).saturating_mul(s as Weight)) - .saturating_add(RocksDbWeight::get().reads(2 as Weight)) - .saturating_add(RocksDbWeight::get().writes(2 as Weight)) + Weight::from_ref_time(17_316_000) + // Standard Error: 3_000 + .saturating_add(Weight::from_ref_time(82_000).saturating_mul(s as u64)) + .saturating_add(RocksDbWeight::get().reads(2 as u64)) + .saturating_add(RocksDbWeight::get().writes(2 as u64)) } // Storage: Scheduler Lookup (r:1 w:1) // Storage: Scheduler Agenda (r:1 w:1) fn cancel_named(s: u32, ) -> Weight { - (16_676_000 as Weight) + Weight::from_ref_time(15_652_000) // Standard Error: 1_000 - .saturating_add((500_000 as Weight).saturating_mul(s as Weight)) - .saturating_add(RocksDbWeight::get().reads(2 as Weight)) - .saturating_add(RocksDbWeight::get().writes(2 as Weight)) + .saturating_add(Weight::from_ref_time(436_000).saturating_mul(s as u64)) + .saturating_add(RocksDbWeight::get().reads(2 as u64)) + .saturating_add(RocksDbWeight::get().writes(2 as u64)) } } diff --git a/pallets/structure/CHANGELOG.md b/pallets/structure/CHANGELOG.md new file mode 100644 index 0000000000..830ad22333 --- /dev/null +++ b/pallets/structure/CHANGELOG.md @@ -0,0 +1,10 @@ + +## [v0.1.2] 2022-08-16 + +### Other changes + +- build: Upgrade polkadot to v0.9.27 2c498572636f2b34d53b1c51b7283a761a7dc90a + +- build: Upgrade polkadot to v0.9.26 85515e54c4ca1b82a2630034e55dcc804c643bf8 + +- build: Upgrade polkadot to v0.9.25 cdfb9bdc7b205ff1b5134f034ef9973d769e5e6b \ No newline at end of file diff --git a/pallets/structure/Cargo.toml b/pallets/structure/Cargo.toml index c7b5902527..167c9e1962 100644 --- a/pallets/structure/Cargo.toml +++ b/pallets/structure/Cargo.toml @@ -1,13 +1,13 @@ [package] name = "pallet-structure" -version = "0.1.0" +version = "0.1.2" edition = "2021" [dependencies] -frame-support = { default-features = false, git = 'https://github.com/paritytech/substrate', branch = 'polkadot-v0.9.24' } -frame-system = { default-features = false, git = 'https://github.com/paritytech/substrate', branch = 'polkadot-v0.9.24' } -frame-benchmarking = { default-features = false, optional = true, git = 'https://github.com/paritytech/substrate', branch = 'polkadot-v0.9.24' } -sp-std = { default-features = false, git = 'https://github.com/paritytech/substrate', branch = 'polkadot-v0.9.24' } +frame-support = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +frame-system = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +frame-benchmarking = { default-features = false, optional = true, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-std = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } pallet-common = { path = "../common", 
default-features = false } parity-scale-codec = { version = "3.1.2", default-features = false, features = [ "derive", @@ -16,7 +16,7 @@ scale-info = { version = "2.0.1", default-features = false, features = [ "derive", ] } up-data-structs = { path = "../../primitives/data-structs", default-features = false } -pallet-evm = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } +pallet-evm = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } [features] default = ["std"] @@ -32,3 +32,4 @@ std = [ "pallet-evm/std", ] runtime-benchmarks = ['frame-benchmarking', 'pallet-common/runtime-benchmarks'] +try-runtime = ["frame-support/try-runtime"] diff --git a/pallets/structure/src/benchmarking.rs b/pallets/structure/src/benchmarking.rs index 26d8155436..022ddd024d 100644 --- a/pallets/structure/src/benchmarking.rs +++ b/pallets/structure/src/benchmarking.rs @@ -1,9 +1,26 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + use super::*; use frame_benchmarking::{benchmarks, account}; use frame_support::traits::{Currency, Get}; use up_data_structs::{ - CreateCollectionData, CollectionMode, CreateItemData, CreateNftData, budget::Unlimited, + CreateCollectionData, CollectionMode, CreateItemData, CollectionFlags, CreateNftData, + budget::Unlimited, }; use pallet_common::Config as CommonConfig; use pallet_evm::account::CrossAccountId; @@ -16,10 +33,15 @@ benchmarks! { let caller_cross = T::CrossAccountId::from_sub(caller.clone()); ::Currency::deposit_creating(&caller, T::CollectionCreationPrice::get()); - T::CollectionDispatch::create(caller_cross.clone(), CreateCollectionData { - mode: CollectionMode::NFT, - ..Default::default() - })?; + T::CollectionDispatch::create( + caller_cross.clone(), + caller_cross.clone(), + CreateCollectionData { + mode: CollectionMode::NFT, + ..Default::default() + }, + CollectionFlags::default(), + )?; let dispatch = T::CollectionDispatch::dispatch(CollectionHandle::try_get(CollectionId(1))?); let dispatch = dispatch.as_dyn(); diff --git a/pallets/structure/src/lib.rs b/pallets/structure/src/lib.rs index 58defa2929..043d7bd453 100644 --- a/pallets/structure/src/lib.rs +++ b/pallets/structure/src/lib.rs @@ -1,3 +1,56 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +//! # Structure Pallet +//! +//! The Structure pallet provides functionality for handling token nesting and unnesting. +//! +//! - [`Config`] +//! - [`Pallet`] +//! +//! ## Overview +//! +//! The Structure pallet provides functions for: +//! +//! - Searching for token parents, children and owners. The actual implementation of searching for +//! parent/child is done by pallets corresponding to the token's collection type. +//! - Nesting and unnesting tokens. The actual implementation of nesting is done by pallets corresponding +//! to the token's collection type. +//! +//! ### Terminology +//! +//! - **Nesting:** Setting up a parent-child relationship between tokens. Nested tokens inherit their +//! owner from their parent. There could be multiple levels of nesting. A token cannot be nested in +//! its own child token, i.e. the parent-child relationship graph must not contain cycles. +//! +//! - **Parent:** Token that the current token is nested in. +//! +//! - **Owner:** Account that owns the token and all nested tokens. +//! +//! ## Interface +//! +//! ### Available Functions +//! +//! - `find_parent` - Find the parent of the token. It could be an account or another token. +//! - `parent_chain` - Find the chain of parents of the token. +//! - `find_topmost_owner` - Find the account or token at the end of the chain of parents. +//! - `check_nesting` - Check if the token could be nested in the other token. +//! - `nest_if_sent_to_token` - Nest the token in the other token. +//! - `unnest_if_nested` - Unnest the token from the other token. + #![cfg_attr(not(feature = "std"), no_std)] use pallet_common::CommonCollectionOperations; @@ -25,27 +78,29 @@ pub mod pallet { #[pallet::error] pub enum Error { - /// While searched for owner, got already checked account + /// While nesting, encountered an already checked account, detecting a loop. OuroborosDetected, - /// While searched for owner, encountered depth limit + /// While nesting, reached the depth limit of nesting, exceeding the provided budget. DepthLimit, - /// While iterating over children, encountered breadth limit + /// While nesting, reached the breadth limit of nesting, exceeding the provided budget. BreadthLimit, - /// While searched for owner, found token owner by not-yet-existing token + /// Couldn't find the token owner that is itself a token. TokenNotFound, } #[pallet::event] pub enum Event { - /// Executed call on behalf of token + /// Executed call on behalf of the token. Executed(DispatchResult), } #[pallet::config] pub trait Config: frame_system::Config + pallet_common::Config { type WeightInfo: weights::WeightInfo; - type Event: IsType<::Event> + From>; - type Call: Parameter + UnfilteredDispatchable + GetDispatchInfo; + type RuntimeEvent: IsType<::RuntimeEvent> + From>; + type RuntimeCall: Parameter + + UnfilteredDispatchable + + GetDispatchInfo; } #[pallet::pallet] @@ -73,15 +128,21 @@ pub mod pallet { #[derive(PartialEq)] pub enum Parent { - /// Token owned by normal account + /// Token owned by a normal account. User(CrossAccountId), - /// Passed token not found + /// Could not find the token provided as the owner. TokenNotFound, - /// Token owner is another token (target token still may not exist) + /// Token owner is another token (still, the target token may not exist). Token(CollectionId, TokenId), } impl Pallet { + /// Find the account owning the `token` or the token that the `token` is nested in.
+ /// + /// Returns an enum that has three variants: + /// - [`User`](crate::Parent::User): Contains the account. + /// - [`Token`](crate::Parent::Token): Contains the token id and collection id. + /// - [`TokenNotFound`](crate::Parent::TokenNotFound): Indicates that the parent was not found. pub fn find_parent( collection: CollectionId, token: TokenId, @@ -103,6 +164,11 @@ impl Pallet { }) } + /// Get the chain of parents of a token in the nesting hierarchy. + /// + /// Returns an iterator of addresses of the owning tokens and the owning account, + /// starting from the immediate parent token, ending with the account. + /// Returns an error if a cycle is detected. pub fn parent_chain( mut collection: CollectionId, mut token: TokenId, @@ -133,6 +199,8 @@ impl Pallet { /// Try to dereference address, until finding top level owner /// /// May return token address if parent token not yet exists + /// + /// - `budget`: Limit for searching parents in depth. pub fn find_topmost_owner( collection: CollectionId, token: TokenId, @@ -149,6 +217,10 @@ impl Pallet { }) } + /// Find the topmost parent and check that assigning the `for_nest` token as a child for + /// `token` wouldn't create a cycle. + /// + /// - `budget`: Limit for searching parents in depth. pub fn get_checked_topmost_owner( collection: CollectionId, token: TokenId, @@ -177,6 +249,10 @@ impl Pallet { Err(>::DepthLimit.into()) } + /// Burn the token and all of its nested tokens. + /// + /// - `self_budget`: Limit for searching children in depth. + /// - `breadth_budget`: Limit on the breadth of searching children. pub fn burn_item_recursively( from: T::CrossAccountId, collection: CollectionId, @@ -190,7 +266,13 @@ impl Pallet { dispatch.burn_item_recursively(from.clone(), token, self_budget, breadth_budget) } - /// Check if token indirectly owned by specified user + /// Check if `token` is indirectly owned by `user`. + /// + /// Returns `true` if `user` is the `token`'s owner or, if a token is provided as `user`, + /// if `user` and `token` have the same owner. + /// Checks that assigning the `for_nest` token as a child for `token` wouldn't create a cycle. + /// + /// - `budget`: Limit for searching parents in depth. pub fn check_indirectly_owned( user: T::CrossAccountId, collection: CollectionId, @@ -207,6 +289,12 @@ impl Pallet { .map(|indirect_owner| indirect_owner == target_parent) } + /// Checks that `under` is a valid token, that `token_id` could be nested under it, + /// and that `from` is `under`'s owner. + /// + /// Returns `Ok` if `under` is not a token. + /// + /// - `nesting_budget`: Limit for searching parents in depth. pub fn check_nesting( from: T::CrossAccountId, under: &T::CrossAccountId, @@ -219,6 +307,11 @@ impl Pallet { }) } + /// Nests `token_id` under the `under` token. + /// + /// Returns `Ok` if `under` is not a token. Checks that nesting is possible. + /// + /// - `nesting_budget`: Limit for searching parents in depth. pub fn nest_if_sent_to_token( from: T::CrossAccountId, under: &T::CrossAccountId, @@ -235,6 +328,9 @@ impl Pallet { }) } + /// Nests `token_id` under the `owner` token. + /// + /// The caller should check that nesting wouldn't create a cycle. pub fn nest_if_sent_to_token_unchecked( owner: &T::CrossAccountId, collection_id: CollectionId, @@ -245,6 +341,7 @@ impl Pallet { }); } + /// Unnests `token_id` from `owner`.
pub fn unnest_if_nested( owner: &T::CrossAccountId, collection_id: CollectionId, diff --git a/pallets/structure/src/weights.rs b/pallets/structure/src/weights.rs index 1c290a32b5..160e386fb5 100644 --- a/pallets/structure/src/weights.rs +++ b/pallets/structure/src/weights.rs @@ -3,7 +3,7 @@ //! Autogenerated weights for pallet_structure //! //! THIS FILE WAS AUTO-GENERATED USING THE SUBSTRATE BENCHMARK CLI VERSION 4.0.0-dev -//! DATE: 2022-06-15, STEPS: `50`, REPEAT: 200, LOW RANGE: `[]`, HIGH RANGE: `[]` +//! DATE: 2022-08-15, STEPS: `50`, REPEAT: 80, LOW RANGE: `[]`, HIGH RANGE: `[]` //! EXECUTION: None, WASM-EXECUTION: Compiled, CHAIN: None, DB CACHE: 1024 // Executed Command: @@ -19,7 +19,7 @@ // --template // .maintain/frame-weight-template.hbs // --steps=50 -// --repeat=200 +// --repeat=80 // --heap-pages=4096 // --output=./pallets/structure/src/weights.rs @@ -42,8 +42,8 @@ impl WeightInfo for SubstrateWeight { // Storage: Common CollectionById (r:1 w:0) // Storage: Nonfungible TokenData (r:1 w:0) fn find_parent() -> Weight { - (7_013_000 as Weight) - .saturating_add(T::DbWeight::get().reads(2 as Weight)) + Weight::from_ref_time(7_180_000) + .saturating_add(T::DbWeight::get().reads(2 as u64)) } } @@ -52,7 +52,7 @@ impl WeightInfo for () { // Storage: Common CollectionById (r:1 w:0) // Storage: Nonfungible TokenData (r:1 w:0) fn find_parent() -> Weight { - (7_013_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(2 as Weight)) + Weight::from_ref_time(7_180_000) + .saturating_add(RocksDbWeight::get().reads(2 as u64)) } } diff --git a/pallets/unique/CHANGELOG.md b/pallets/unique/CHANGELOG.md new file mode 100644 index 0000000000..1a70f7d02d --- /dev/null +++ b/pallets/unique/CHANGELOG.md @@ -0,0 +1,50 @@ +# Change Log + +All notable changes to this project will be documented in this file. + + + +## [v0.2.0] 2022-09-13 + +### Changes +- Change the **collectionHelper** method `createRefungibleCollection` to `createRFTCollection`. + +## [v0.1.4] 2022-09-05 + +### Added + +- Methods `force_set_sponsor` and `force_remove_collection_sponsor`, allowing sponsorships to be administered by other pallets. Added to implement the `AppPromotion` pallet logic. + +## [v0.1.3] 2022-08-16 + +### Other changes + +- build: Upgrade polkadot to v0.9.27 2c498572636f2b34d53b1c51b7283a761a7dc90a + +- build: Upgrade polkadot to v0.9.26 85515e54c4ca1b82a2630034e55dcc804c643bf8 + +- refactor: Remove `#[transactional]` from extrinsics 7fd36cea2f6e00c02c67ccc1de9649ae404efd31 + +Every extrinsic now runs in a transaction implicitly, and +`#[transactional]` on a pallet dispatchable is now meaningless + +Upstream-Change: https://github.com/paritytech/substrate/issues/10806 + +- refactor: Switch to new prefix removal methods 26734e9567589d75cdd99e404eabf11d5a97d975 + +The new methods allow calling `remove_prefix` with a limit multiple times +in the same block. +However, we don't use prefix removal limits, so the upgrade is +straightforward. + +Upstream-Change: https://github.com/paritytech/substrate/pull/11490 + +- build: Upgrade polkadot to v0.9.25 cdfb9bdc7b205ff1b5134f034ef9973d769e5e6b + +## [v0.1.1] - 2022-07-25 + +### Added + +- Method for creating `ERC721Metadata` compatible NFT collection. +- Method for creating `ERC721Metadata` compatible ReFungible collection. +- Method for creating ReFungible collection.
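The weight functions regenerated throughout this diff follow the Substrate polkadot-v0.9.30 API, where `Weight` is no longer a bare `u64`: base costs are built with `Weight::from_ref_time`, per-item components are multiplied in as `u64`, and database read/write counts also take `u64`. A minimal sketch of the before/after pattern follows; the `example_weight` function and its constants are illustrative only and not taken from any benchmark output in this diff.

// Old style (pre-upgrade), where `Weight` was a plain u64:
//     (7_013_000 as Weight)
//         .saturating_add(RocksDbWeight::get().reads(2 as Weight))
use frame_support::weights::{constants::RocksDbWeight, Weight};

fn example_weight(s: u32) -> Weight {
    // New style: base ref-time, a per-item component scaled by `s`, and two DB reads.
    Weight::from_ref_time(7_000_000)
        .saturating_add(Weight::from_ref_time(5_000_000).saturating_mul(s as u64))
        .saturating_add(RocksDbWeight::get().reads(2_u64))
}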
diff --git a/pallets/unique/Cargo.toml b/pallets/unique/Cargo.toml index 0ebb7be390..2a4580876f 100644 --- a/pallets/unique/Cargo.toml +++ b/pallets/unique/Cargo.toml @@ -9,7 +9,7 @@ homepage = 'https://unique.network' license = 'GPLv3' name = 'pallet-unique' repository = 'https://github.com/UniqueNetwork/unique-chain' -version = '0.1.0' +version = "0.2.0" [package.metadata.docs.rs] targets = ['x86_64-unknown-linux-gnu'] @@ -32,6 +32,7 @@ std = [ 'pallet-evm-coder-substrate/std', 'pallet-nonfungible/std', ] +try-runtime = ["frame-support/try-runtime"] limit-testing = ["up-data-structs/limit-testing"] ################################################################################ @@ -59,37 +60,37 @@ version = '3.1.2' default-features = false optional = true git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.frame-support] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.frame-system] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-std] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-runtime] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-core] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-io] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" ################################################################################ # Local Dependencies @@ -98,8 +99,9 @@ up-data-structs = { default-features = false, path = "../../primitives/data-stru scale-info = { version = "2.0.1", default-features = false, features = [ "derive", ] } -pallet-evm = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } +pallet-evm = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } pallet-common = { default-features = false, path = "../common" } evm-coder = { default-features = false, path = '../../crates/evm-coder' } pallet-evm-coder-substrate = { default-features = false, path = '../../pallets/evm-coder-substrate' } pallet-nonfungible = { default-features = false, path = '../../pallets/nonfungible' } +pallet-refungible = { default-features = false, path = '../../pallets/refungible' } diff --git a/pallets/unique/src/benchmarking.rs b/pallets/unique/src/benchmarking.rs index eb7e7cf6d5..f679247427 100644 --- a/pallets/unique/src/benchmarking.rs +++ b/pallets/unique/src/benchmarking.rs @@ -46,7 +46,9 @@ fn create_collection_helper( )?; Ok(>::get()) } -fn create_nft_collection(owner: T::AccountId) -> Result { +pub fn create_nft_collection( + owner: T::AccountId, +) -> Result { create_collection_helper::(owner, CollectionMode::NFT) } diff --git a/pallets/unique/src/eth/mod.rs b/pallets/unique/src/eth/mod.rs index c6276f154f..b5248d0a37 100644 --- a/pallets/unique/src/eth/mod.rs +++ b/pallets/unique/src/eth/mod.rs @@ -14,26 +14,36 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . +//! 
Implementation of CollectionHelpers contract. + use core::marker::PhantomData; -use evm_coder::{execution::*, generate_stubgen, solidity_interface, weight, types::*}; use ethereum as _; -use pallet_evm_coder_substrate::{SubstrateRecorder, WithRecorder}; -use pallet_evm::{OnMethodCall, PrecompileResult, account::CrossAccountId, PrecompileHandle}; -use up_data_structs::{ - CreateCollectionData, MAX_COLLECTION_DESCRIPTION_LENGTH, MAX_TOKEN_PREFIX_LENGTH, - MAX_COLLECTION_NAME_LENGTH, -}; +use evm_coder::{execution::*, generate_stubgen, solidity_interface, solidity, weight, types::*}; use frame_support::traits::Get; use pallet_common::{ CollectionById, - erc::{token_uri_key, CollectionHelpersEvents}, + dispatch::CollectionDispatch, + erc::{ + CollectionHelpersEvents, + static_property::{key}, + }, + Pallet as PalletCommon, +}; +use pallet_evm_coder_substrate::{dispatch_to_evm, SubstrateRecorder, WithRecorder}; +use pallet_evm::{account::CrossAccountId, OnMethodCall, PrecompileHandle, PrecompileResult}; +use sp_std::vec; +use up_data_structs::{ + CollectionName, CollectionDescription, CollectionTokenPrefix, CreateCollectionData, + CollectionMode, PropertyValue, CollectionFlags, }; -use crate::{SelfWeightOf, Config, weights::WeightInfo}; + +use crate::{Config, SelfWeightOf, weights::WeightInfo}; use sp_std::vec::Vec; use alloc::format; -struct EvmCollectionHelpers(SubstrateRecorder); +/// See [`CollectionHelpersCall`] +pub struct EvmCollectionHelpers(SubstrateRecorder); impl WithRecorder for EvmCollectionHelpers { fn recorder(&self) -> &SubstrateRecorder { &self.0 @@ -44,62 +54,251 @@ impl WithRecorder for EvmCollectionHelpers { } } -#[solidity_interface(name = "CollectionHelpers", events(CollectionHelpersEvents))] -impl EvmCollectionHelpers { +fn convert_data( + caller: caller, + name: string, + description: string, + token_prefix: string, +) -> Result<( + T::CrossAccountId, + CollectionName, + CollectionDescription, + CollectionTokenPrefix, +)> { + let caller = T::CrossAccountId::from_eth(caller); + let name = name + .encode_utf16() + .collect::>() + .try_into() + .map_err(|_| error_field_too_long(stringify!(name), CollectionName::bound()))?; + let description = description + .encode_utf16() + .collect::>() + .try_into() + .map_err(|_| { + error_field_too_long(stringify!(description), CollectionDescription::bound()) + })?; + let token_prefix = token_prefix.into_bytes().try_into().map_err(|_| { + error_field_too_long(stringify!(token_prefix), CollectionTokenPrefix::bound()) + })?; + Ok((caller, name, description, token_prefix)) +} + +fn create_refungible_collection_internal< + T: Config + pallet_nonfungible::Config + pallet_refungible::Config, +>( + caller: caller, + value: value, + name: string, + description: string, + token_prefix: string, +) -> Result
{ + let (caller, name, description, token_prefix) = + convert_data::(caller, name, description, token_prefix)?; + let data = CreateCollectionData { + name, + mode: CollectionMode::ReFungible, + description, + token_prefix, + ..Default::default() + }; + check_sent_amount_equals_collection_creation_price::(value)?; + let collection_helpers_address = + T::CrossAccountId::from_eth(::ContractAddress::get()); + + let collection_id = T::CollectionDispatch::create( + caller.clone(), + collection_helpers_address, + data, + Default::default(), + ) + .map_err(pallet_evm_coder_substrate::dispatch_to_evm::)?; + let address = pallet_common::eth::collection_id_to_address(collection_id); + Ok(address) +} + +fn check_sent_amount_equals_collection_creation_price(value: value) -> Result<()> { + let value = value.as_u128(); + let creation_price: u128 = T::CollectionCreationPrice::get() + .try_into() + .map_err(|_| ()) // workaround for `expect` requiring `Debug` trait + .expect("Collection creation price should be convertible to u128"); + if value != creation_price { + return Err(format!( + "Sent amount not equals to collection creation price ({0})", + creation_price + ) + .into()); + } + Ok(()) +} + +/// @title Contract, which allows users to operate with collections +#[solidity_interface(name = CollectionHelpers, events(CollectionHelpersEvents))] +impl EvmCollectionHelpers +where + T: Config + pallet_common::Config + pallet_nonfungible::Config + pallet_refungible::Config, +{ + /// Create an NFT collection + /// @param name Name of the collection + /// @param description Informative description of the collection + /// @param tokenPrefix Token prefix to represent the collection tokens in UI and user applications + /// @return address Address of the newly created collection #[weight(>::create_collection())] - fn create_nonfungible_collection( + #[solidity(rename_selector = "createNFTCollection")] + fn create_nft_collection( &mut self, caller: caller, + value: value, name: string, description: string, token_prefix: string, ) -> Result
{ - let caller = T::CrossAccountId::from_eth(caller); - let name = name - .encode_utf16() - .collect::>() - .try_into() - .map_err(|_| error_feild_too_long(stringify!(name), MAX_COLLECTION_NAME_LENGTH))?; - let description = description - .encode_utf16() - .collect::>() - .try_into() - .map_err(|_| { - error_feild_too_long(stringify!(description), MAX_COLLECTION_DESCRIPTION_LENGTH) - })?; - let token_prefix = token_prefix - .into_bytes() - .try_into() - .map_err(|_| error_feild_too_long(stringify!(token_prefix), MAX_TOKEN_PREFIX_LENGTH))?; - - let key = token_uri_key(); - let permission = up_data_structs::PropertyPermission { - mutable: true, - collection_admin: true, - token_owner: false, - }; - let mut token_property_permissions = - up_data_structs::CollectionPropertiesPermissionsVec::default(); - token_property_permissions - .try_push(up_data_structs::PropertyKeyPermission { key, permission }) - .map_err(|e| Error::Revert(format!("{:?}", e)))?; - + let (caller, name, description, token_prefix) = + convert_data::(caller, name, description, token_prefix)?; let data = CreateCollectionData { name, + mode: CollectionMode::NFT, description, token_prefix, - token_property_permissions, ..Default::default() }; - - let collection_id = - >::init_collection(caller.clone(), data, false) - .map_err(pallet_evm_coder_substrate::dispatch_to_evm::)?; + check_sent_amount_equals_collection_creation_price::(value)?; + let collection_helpers_address = + T::CrossAccountId::from_eth(::ContractAddress::get()); + let collection_id = T::CollectionDispatch::create( + caller, + collection_helpers_address, + data, + Default::default(), + ) + .map_err(dispatch_to_evm::)?; let address = pallet_common::eth::collection_id_to_address(collection_id); Ok(address) } + /// Create an NFT collection + /// @param name Name of the collection + /// @param description Informative description of the collection + /// @param tokenPrefix Token prefix to represent the collection tokens in UI and user applications + /// @return address Address of the newly created collection + #[weight(>::create_collection())] + #[deprecated(note = "mathod was renamed to `create_nft_collection`, prefer it instead")] + #[solidity(hide)] + fn create_nonfungible_collection( + &mut self, + caller: caller, + value: value, + name: string, + description: string, + token_prefix: string, + ) -> Result
{ + self.create_nft_collection(caller, value, name, description, token_prefix) + } + + #[weight(>::create_collection())] + #[solidity(rename_selector = "createRFTCollection")] + fn create_rft_collection( + &mut self, + caller: caller, + value: value, + name: string, + description: string, + token_prefix: string, + ) -> Result
{ + create_refungible_collection_internal::(caller, value, name, description, token_prefix) + } + + #[solidity(rename_selector = "makeCollectionERC721MetadataCompatible")] + fn make_collection_metadata_compatible( + &mut self, + caller: caller, + collection: address, + base_uri: string, + ) -> Result<()> { + let caller = T::CrossAccountId::from_eth(caller); + let collection = + pallet_common::eth::map_eth_to_id(&collection).ok_or("not a collection address")?; + let mut collection = + >::new(collection).ok_or("collection not found")?; + + if !matches!( + collection.mode, + CollectionMode::NFT | CollectionMode::ReFungible + ) { + return Err("target collection should be either NFT or Refungible".into()); + } + + self.recorder().consume_sstore()?; + collection + .check_is_owner_or_admin(&caller) + .map_err(dispatch_to_evm::)?; + + if collection.flags.erc721metadata { + return Err("target collection is already Erc721Metadata compatible".into()); + } + collection.flags.erc721metadata = true; + let all_permissions = >::get(collection.id); + if all_permissions.get(&key::url()).is_none() { + self.recorder().consume_sstore()?; + >::set_property_permission( + &collection, + &caller, + up_data_structs::PropertyKeyPermission { + key: key::url(), + permission: up_data_structs::PropertyPermission { + mutable: true, + collection_admin: true, + token_owner: false, + }, + }, + ) + .map_err(dispatch_to_evm::)?; + } + if all_permissions.get(&key::suffix()).is_none() { + self.recorder().consume_sstore()?; + >::set_property_permission( + &collection, + &caller, + up_data_structs::PropertyKeyPermission { + key: key::suffix(), + permission: up_data_structs::PropertyPermission { + mutable: true, + collection_admin: true, + token_owner: false, + }, + }, + ) + .map_err(dispatch_to_evm::)?; + } + + let all_properties = >::get(collection.id); + if all_properties.get(&key::base_uri()).is_none() && !base_uri.is_empty() { + self.recorder().consume_sstore()?; + >::set_collection_properties( + &collection, + &caller, + vec![up_data_structs::Property { + key: key::base_uri(), + value: base_uri + .into_bytes() + .try_into() + .map_err(|_| "base uri is too large")?, + }], + ) + .map_err(dispatch_to_evm::)?; + } + + self.recorder().consume_sstore()?; + collection.save().map_err(dispatch_to_evm::)?; + + Ok(()) + } + + /// Check if a collection exists + /// @param collectionAddress Address of the collection in question + /// @return bool Does the collection exist? 
fn is_collection_exist(&self, _caller: caller, collection_address: address) -> Result { if let Some(id) = pallet_common::eth::map_eth_to_id(&collection_address) { let collection_id = id; @@ -108,10 +307,21 @@ impl EvmCollectionHelpers { Ok(false) } + + fn collection_creation_fee(&self) -> Result { + let price: u128 = T::CollectionCreationPrice::get() + .try_into() + .map_err(|_| ()) // workaround for `expect` requiring `Debug` trait + .expect("Collection creation price should be convertible to u128"); + Ok(price.into()) + } } +/// Implements [`OnMethodCall`], which delegates call to [`EvmCollectionHelpers`] pub struct CollectionHelpersOnMethodCall(PhantomData<*const T>); -impl OnMethodCall for CollectionHelpersOnMethodCall { +impl OnMethodCall + for CollectionHelpersOnMethodCall +{ fn is_reserved(contract: &sp_core::H160) -> bool { contract == &T::ContractAddress::get() } @@ -139,6 +349,6 @@ impl OnMethodCall for CollectionHelpe generate_stubgen!(collection_helper_impl, CollectionHelpersCall<()>, true); generate_stubgen!(collection_helper_iface, CollectionHelpersCall<()>, false); -fn error_feild_too_long(feild: &str, bound: u32) -> Error { +fn error_field_too_long(feild: &str, bound: usize) -> Error { Error::Revert(format!("{} is too long. Max length is {}.", feild, bound)) } diff --git a/pallets/unique/src/eth/stubs/CollectionHelpers.raw b/pallets/unique/src/eth/stubs/CollectionHelpers.raw index 549328051a..7d9a32abc1 100644 Binary files a/pallets/unique/src/eth/stubs/CollectionHelpers.raw and b/pallets/unique/src/eth/stubs/CollectionHelpers.raw differ diff --git a/pallets/unique/src/eth/stubs/CollectionHelpers.sol b/pallets/unique/src/eth/stubs/CollectionHelpers.sol index e46a36e37e..12a23169da 100644 --- a/pallets/unique/src/eth/stubs/CollectionHelpers.sol +++ b/pallets/unique/src/eth/stubs/CollectionHelpers.sol @@ -3,40 +3,40 @@ pragma solidity >=0.8.0 <0.9.0; -// Common stubs holder +/// @dev common stubs holder contract Dummy { uint8 dummy; string stub_error = "this contract is implemented in native"; } contract ERC165 is Dummy { - function supportsInterface(bytes4 interfaceID) - external - view - returns (bool) - { + function supportsInterface(bytes4 interfaceID) external view returns (bool) { require(false, stub_error); interfaceID; return true; } } -// Inline +/// @dev inlined interface contract CollectionHelpersEvents { - event CollectionCreated( - address indexed owner, - address indexed collectionId - ); + event CollectionCreated(address indexed owner, address indexed collectionId); } -// Selector: 20947cd0 +/// @title Contract, which allows users to operate with collections +/// @dev the ERC-165 identifier for this interface is 0x58918631 contract CollectionHelpers is Dummy, ERC165, CollectionHelpersEvents { - // Selector: createNonfungibleCollection(string,string,string) e34a6844 - function createNonfungibleCollection( + /// Create an NFT collection + /// @param name Name of the collection + /// @param description Informative description of the collection + /// @param tokenPrefix Token prefix to represent the collection tokens in UI and user applications + /// @return address Address of the newly created collection + /// @dev EVM selector for this function is: 0x844af658, + /// or in textual repr: createNFTCollection(string,string,string) + function createNFTCollection( string memory name, string memory description, string memory tokenPrefix - ) public returns (address) { + ) public payable returns (address) { require(false, stub_error); name; description; @@ -45,15 +45,63 @@ 
contract CollectionHelpers is Dummy, ERC165, CollectionHelpersEvents { return 0x0000000000000000000000000000000000000000; } - // Selector: isCollectionExist(address) c3de1494 - function isCollectionExist(address collectionAddress) - public - view - returns (bool) - { + // /// Create an NFT collection + // /// @param name Name of the collection + // /// @param description Informative description of the collection + // /// @param tokenPrefix Token prefix to represent the collection tokens in UI and user applications + // /// @return address Address of the newly created collection + // /// @dev EVM selector for this function is: 0xe34a6844, + // /// or in textual repr: createNonfungibleCollection(string,string,string) + // function createNonfungibleCollection(string memory name, string memory description, string memory tokenPrefix) public payable returns (address) { + // require(false, stub_error); + // name; + // description; + // tokenPrefix; + // dummy = 0; + // return 0x0000000000000000000000000000000000000000; + // } + + /// @dev EVM selector for this function is: 0xab173450, + /// or in textual repr: createRFTCollection(string,string,string) + function createRFTCollection( + string memory name, + string memory description, + string memory tokenPrefix + ) public payable returns (address) { + require(false, stub_error); + name; + description; + tokenPrefix; + dummy = 0; + return 0x0000000000000000000000000000000000000000; + } + + /// @dev EVM selector for this function is: 0x85624258, + /// or in textual repr: makeCollectionERC721MetadataCompatible(address,string) + function makeCollectionERC721MetadataCompatible(address collection, string memory baseUri) public { + require(false, stub_error); + collection; + baseUri; + dummy = 0; + } + + /// Check if a collection exists + /// @param collectionAddress Address of the collection in question + /// @return bool Does the collection exist? + /// @dev EVM selector for this function is: 0xc3de1494, + /// or in textual repr: isCollectionExist(address) + function isCollectionExist(address collectionAddress) public view returns (bool) { require(false, stub_error); collectionAddress; dummy; return false; } + + /// @dev EVM selector for this function is: 0xd23a7ab1, + /// or in textual repr: collectionCreationFee() + function collectionCreationFee() public view returns (uint256) { + require(false, stub_error); + dummy; + return 0; + } } diff --git a/pallets/unique/src/lib.rs b/pallets/unique/src/lib.rs index 204e335b68..496aebb416 100644 --- a/pallets/unique/src/lib.rs +++ b/pallets/unique/src/lib.rs @@ -14,6 +14,55 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . +//! # Unique Pallet +//! +//! A pallet governing Unique transactions. +//! +//! - [`Config`] +//! - [`Call`] +//! - [`Pallet`] +//! +//! ## Overview +//! +//! The Unique pallet's purpose is to be the primary interface between +//! external users and the inner structure of the Unique chains. +//! +//! It also contains an implementation of [`CollectionHelpers`][`eth`], +//! an Ethereum contract dealing with collection operations. +//! +//! ## Interface +//! +//! ### Dispatchables +//! +//! - `create_collection` - Create a collection of tokens. **Deprecated**, use `create_collection_ex`. +//! - `create_collection_ex` - Create a collection of tokens with explicit parameters. +//! - `destroy_collection` - Destroy a collection if no tokens exist within. +//! - `add_to_allow_list` - Add an address to allow list. +//! 
- `remove_from_allow_list` - Remove an address from allow list. +//! - `change_collection_owner` - Change the owner of the collection. +//! - `add_collection_admin` - Add an admin to a collection. +//! - `remove_collection_admin` - Remove admin of a collection. +//! - `set_collection_sponsor` - Invite a new collection sponsor. +//! - `confirm_sponsorship` - Confirm own sponsorship of a collection, becoming the sponsor. +//! - `remove_collection_sponsor` - Remove a sponsor from a collection. +//! - `create_item` - Create an item within a collection. +//! - `create_multiple_items` - Create multiple items within a collection. +//! - `set_collection_properties` - Add or change collection properties. +//! - `delete_collection_properties` - Delete specified collection properties. +//! - `set_token_properties` - Add or change token properties. +//! - `delete_token_properties` - Delete token properties. +//! - `set_token_property_permissions` - Add or change token property permissions of a collection. +//! - `create_multiple_items_ex` - Create multiple items within a collection with explicitly specified initial parameters. +//! - `set_transfers_enabled_flag` - Completely allow or disallow transfers for a particular collection. +//! - `burn_item` - Destroy an item. +//! - `burn_from` - Destroy an item on behalf of the owner as a non-owner account. +//! - `transfer` - Change ownership of the token. +//! - `transfer_from` - Change ownership of the token on behalf of the owner as a non-owner account. +//! - `approve` - Allow a non-permissioned address to transfer or burn an item. +//! - `set_collection_limits` - Set specific limits of a collection. +//! - `set_collection_permissions` - Set specific permissions of a collection. +//! - `repartition` - Re-partition a refungible token, while owning all of its parts. + #![recursion_limit = "1024"] #![cfg_attr(not(feature = "std"), no_std)] #![allow( @@ -29,13 +78,12 @@ use frame_support::{ dispatch::DispatchResult, ensure, fail, weights::{Weight}, - transactional, pallet_prelude::{DispatchResultWithPostInfo, ConstU32}, BoundedVec, }; use scale_info::TypeInfo; use frame_system::{self as system, ensure_signed}; -use sp_runtime::{sp_std::prelude::Vec}; +use sp_std::{vec, vec::Vec}; use up_data_structs::{ MAX_COLLECTION_NAME_LENGTH, MAX_COLLECTION_DESCRIPTION_LENGTH, MAX_TOKEN_PREFIX_LENGTH, CreateItemData, CollectionLimits, CollectionPermissions, CollectionId, CollectionMode, TokenId, @@ -50,32 +98,39 @@ use pallet_common::{ pub mod eth; #[cfg(feature = "runtime-benchmarks")] -mod benchmarking; +pub mod benchmarking; pub mod weights; use weights::WeightInfo; -const NESTING_BUDGET: u32 = 5; +/// Maximum number of levels of depth in the token nesting tree. +pub const NESTING_BUDGET: u32 = 5; decl_error! { - /// Error for non-fungible-token module. + /// Errors for the common Unique transactions. pub enum Error for Module { - /// Decimal_points parameter must be lower than MAX_DECIMAL_POINTS constant, currently it is 30. + /// Decimal_points parameter must be lower than [`up_data_structs::MAX_DECIMAL_POINTS`]. CollectionDecimalPointLimitExceeded, /// This address is not set as sponsor, use setCollectionSponsor first. ConfirmUnsetSponsorFail, /// Length of items properties must be greater than 0. EmptyArgument, - /// Repertition is only supported by refungible collection + /// Repertition is only supported by refungible collection. RepartitionCalledOnNonRefungibleCollection, } } +/// Configuration trait of this pallet. 
pub trait Config: system::Config + pallet_common::Config + Sized + TypeInfo { - type Event: From> + Into<::Event>; + /// Overarching event type. + type RuntimeEvent: From> + Into<::RuntimeEvent>; /// Weight information for extrinsics in this pallet. type WeightInfo: WeightInfo; + + /// Weight information for common pallet operations. type CommonWeightInfo: CommonWeightInfo; + + /// Weight info information for extra refungible pallet operations. type RefungibleExtensionsWeightInfo: RefungibleExtensionsWeightInfo; } @@ -88,80 +143,68 @@ decl_event! { /// Collection sponsor was removed /// /// # Arguments - /// - /// * collection_id: Globally unique collection identifier. + /// * collection_id: ID of the affected collection. CollectionSponsorRemoved(CollectionId), /// Collection admin was added /// /// # Arguments - /// - /// * collection_id: Globally unique collection identifier. - /// - /// * admin: Admin address. + /// * collection_id: ID of the affected collection. + /// * admin: Admin address. CollectionAdminAdded(CollectionId, CrossAccountId), - /// Collection owned was change + /// Collection owned was changed /// /// # Arguments - /// - /// * collection_id: Globally unique collection identifier. - /// - /// * owner: New owner address. + /// * collection_id: ID of the affected collection. + /// * owner: New owner address. CollectionOwnedChanged(CollectionId, AccountId), /// Collection sponsor was set /// /// # Arguments - /// - /// * collection_id: Globally unique collection identifier. - /// - /// * owner: New sponsor address. + /// * collection_id: ID of the affected collection. + /// * owner: New sponsor address. CollectionSponsorSet(CollectionId, AccountId), /// New sponsor was confirm /// /// # Arguments - /// - /// * collection_id: Globally unique collection identifier. - /// - /// * sponsor: New sponsor address. + /// * collection_id: ID of the affected collection. + /// * sponsor: New sponsor address. SponsorshipConfirmed(CollectionId, AccountId), /// Collection admin was removed /// /// # Arguments - /// - /// * collection_id: Globally unique collection identifier. - /// - /// * admin: Admin address. + /// * collection_id: ID of the affected collection. + /// * admin: Removed admin address. CollectionAdminRemoved(CollectionId, CrossAccountId), - /// Address was remove from allow list + /// Address was removed from the allow list /// /// # Arguments - /// - /// * collection_id: Globally unique collection identifier. - /// - /// * user: Address. + /// * collection_id: ID of the affected collection. + /// * user: Address of the removed account. AllowListAddressRemoved(CollectionId, CrossAccountId), - /// Address was add to allow list + /// Address was added to the allow list /// /// # Arguments - /// - /// * collection_id: Globally unique collection identifier. - /// - /// * user: Address. + /// * collection_id: ID of the affected collection. + /// * user: Address of the added account. AllowListAddressAdded(CollectionId, CrossAccountId), - /// Collection limits was set + /// Collection limits were set /// /// # Arguments - /// - /// * collection_id: Globally unique collection identifier. + /// * collection_id: ID of the affected collection. CollectionLimitSet(CollectionId), + /// Collection permissions were set + /// + /// # Arguments + /// * collection_id: ID of the affected collection. CollectionPermissionSet(CollectionId), } } @@ -198,7 +241,7 @@ decl_storage! 
{ ChainVersion: u64; //#endregion - //#region Tokens transfer rate limit baskets + //#region Tokens transfer sponosoring rate limit baskets /// (Collection id (controlled?2), who created (real)) /// TODO: Off chain worker should remove from this map when collection gets removed pub CreateItemBasket get(fn create_item_basket): map hasher(blake2_128_concat) (CollectionId, T::AccountId) => Option; @@ -214,56 +257,67 @@ decl_storage! { /// Collection id (controlled?2), token id (controlled?2) #[deprecated] pub VariableMetaDataBasket get(fn variable_meta_data_basket): double_map hasher(blake2_128_concat) CollectionId, hasher(blake2_128_concat) TokenId => Option; + /// Last sponsoring of token property setting // todo:doc rephrase this and the following pub TokenPropertyBasket get(fn token_property_basket): double_map hasher(blake2_128_concat) CollectionId, hasher(blake2_128_concat) TokenId => Option; - /// Approval sponsoring + /// Last sponsoring of NFT approval in a collection pub NftApproveBasket get(fn nft_approve_basket): double_map hasher(blake2_128_concat) CollectionId, hasher(blake2_128_concat) TokenId => Option; + /// Last sponsoring of fungible tokens approval in a collection pub FungibleApproveBasket get(fn fungible_approve_basket): double_map hasher(blake2_128_concat) CollectionId, hasher(twox_64_concat) T::AccountId => Option; + /// Last sponsoring of RFT approval in a collection pub RefungibleApproveBasket get(fn refungible_approve_basket): nmap hasher(blake2_128_concat) CollectionId, hasher(blake2_128_concat) TokenId, hasher(twox_64_concat) T::AccountId => Option; } } decl_module! { + /// Type alias to Pallet, to be used by construct_runtime. pub struct Module for enum Call where - origin: T::Origin + origin: T::RuntimeOrigin { type Error = Error; - fn deposit_event() = default; + pub fn deposit_event() = default; fn on_initialize(_now: T::BlockNumber) -> Weight { - 0 + Weight::zero() } fn on_runtime_upgrade() -> Weight { - 0 + Weight::zero() } - /// This method creates a Collection of NFTs. Each Token may have multiple properties encoded as an array of bytes of certain length. The initial owner of the collection is set to the address that signed the transaction and can be changed later. + /// Create a collection of tokens. /// - /// # Permissions + /// Each Token may have multiple properties encoded as an array of bytes + /// of certain length. The initial owner of the collection is set + /// to the address that signed the transaction and can be changed later. /// - /// * Anyone. - /// - /// # Arguments + /// Prefer the more advanced [`create_collection_ex`][`Pallet::create_collection_ex`] instead. /// - /// * collection_name: UTF-16 string with collection name (limit 64 characters), will be stored as zero-terminated. + /// # Permissions /// - /// * collection_description: UTF-16 string with collection description (limit 256 characters), will be stored as zero-terminated. + /// * Anyone - becomes the owner of the new collection. /// - /// * token_prefix: UTF-8 string with token prefix. + /// # Arguments /// - /// * mode: [CollectionMode] collection type and type dependent data. + /// * `collection_name`: Wide-character string with collection name + /// (limit [`MAX_COLLECTION_NAME_LENGTH`]). + /// * `collection_description`: Wide-character string with collection description + /// (limit [`MAX_COLLECTION_DESCRIPTION_LENGTH`]). + /// * `token_prefix`: Byte string containing the token prefix to mark a collection + /// to which a token belongs (limit [`MAX_TOKEN_PREFIX_LENGTH`]). 
+ /// * `mode`: Type of items stored in the collection and type dependent data. // returns collection ID #[weight = >::create_collection()] - #[transactional] - #[deprecated] - pub fn create_collection(origin, - collection_name: BoundedVec>, - collection_description: BoundedVec>, - token_prefix: BoundedVec>, - mode: CollectionMode) -> DispatchResult { + #[deprecated(note = "`create_collection_ex` is more up-to-date and advanced, prefer it instead")] + pub fn create_collection( + origin, + collection_name: BoundedVec>, + collection_description: BoundedVec>, + token_prefix: BoundedVec>, + mode: CollectionMode + ) -> DispatchResult { let data: CreateCollectionData = CreateCollectionData { name: collection_name, description: collection_description, @@ -274,32 +328,38 @@ decl_module! { Self::create_collection_ex(origin, data) } - /// This method creates a collection + /// Create a collection with explicit parameters. /// - /// Prefer it to deprecated [`created_collection`] method + /// Prefer it to the deprecated [`create_collection`][`Pallet::create_collection`] method. + /// + /// # Permissions + /// + /// * Anyone - becomes the owner of the new collection. + /// + /// # Arguments + /// + /// * `data`: Explicit data of a collection used for its creation. #[weight = >::create_collection()] - #[transactional] pub fn create_collection_ex(origin, data: CreateCollectionData) -> DispatchResult { let sender = ensure_signed(origin)?; // ========= - - T::CollectionDispatch::create(T::CrossAccountId::from_sub(sender), data)?; + let sender = T::CrossAccountId::from_sub(sender); + let _id = T::CollectionDispatch::create(sender.clone(), sender, data, Default::default())?; Ok(()) } - /// Destroys collection if no tokens within this collection + /// Destroy a collection if no tokens exist within. /// /// # Permissions /// - /// * Collection Owner. + /// * Collection owner /// /// # Arguments /// - /// * collection_id: collection to destroy. + /// * `collection_id`: Collection to destroy. #[weight = >::destroy_collection()] - #[transactional] pub fn destroy_collection(origin, collection_id: CollectionId) -> DispatchResult { let sender = T::CrossAccountId::from_sub(ensure_signed(origin)?); let collection = >::try_get(collection_id)?; @@ -309,13 +369,16 @@ decl_module! { T::CollectionDispatch::destroy(sender, collection)?; - >::remove_prefix(collection_id, None); - >::remove_prefix(collection_id, None); - >::remove_prefix((collection_id,), None); + // TODO: basket cleanup should be moved elsewhere + // Maybe runtime dispatch.rs should perform it? - >::remove_prefix(collection_id, None); - >::remove_prefix(collection_id, None); - >::remove_prefix((collection_id,), None); + let _ = >::clear_prefix(collection_id, u32::MAX, None); + let _ = >::clear_prefix(collection_id, u32::MAX, None); + let _ = >::clear_prefix((collection_id,), u32::MAX, None); + + let _ = >::clear_prefix(collection_id, u32::MAX, None); + let _ = >::clear_prefix(collection_id, u32::MAX, None); + let _ = >::clear_prefix((collection_id,), u32::MAX, None); Ok(()) } @@ -324,16 +387,14 @@ decl_module! { /// /// # Permissions /// - /// * Collection Owner - /// * Collection Admin + /// * Collection owner + /// * Collection admin /// /// # Arguments /// - /// * collection_id. - /// - /// * address. + /// * `collection_id`: ID of the modified collection. + /// * `address`: ID of the address to be added to the allowlist. 
#[weight = >::add_to_allow_list()] - #[transactional] pub fn add_to_allow_list(origin, collection_id: CollectionId, address: T::CrossAccountId) -> DispatchResult{ let sender = T::CrossAccountId::from_sub(ensure_signed(origin)?); @@ -359,16 +420,14 @@ decl_module! { /// /// # Permissions /// - /// * Collection Owner - /// * Collection Admin + /// * Collection owner + /// * Collection admin /// /// # Arguments /// - /// * collection_id. - /// - /// * address. + /// * `collection_id`: ID of the modified collection. + /// * `address`: ID of the address to be removed from the allowlist. #[weight = >::remove_from_allow_list()] - #[transactional] pub fn remove_from_allow_list(origin, collection_id: CollectionId, address: T::CrossAccountId) -> DispatchResult{ let sender = T::CrossAccountId::from_sub(ensure_signed(origin)?); @@ -394,15 +453,13 @@ decl_module! { /// /// # Permissions /// - /// * Collection Owner. + /// * Collection owner /// /// # Arguments /// - /// * collection_id. - /// - /// * new_owner. + /// * `collection_id`: ID of the modified collection. + /// * `new_owner`: ID of the account that will become the owner. #[weight = >::change_collection_owner()] - #[transactional] pub fn change_collection_owner(origin, collection_id: CollectionId, new_owner: T::AccountId) -> DispatchResult { let sender = T::CrossAccountId::from_sub(ensure_signed(origin)?); @@ -420,21 +477,23 @@ decl_module! { target_collection.save() } - /// Adds an admin of the Collection. - /// NFT Collection can be controlled by multiple admin addresses (some which can also be servers, for example). Admins can issue and burn NFTs, as well as add and remove other admins, but cannot change NFT or Collection ownership. + /// Add an admin to a collection. + /// + /// NFT Collection can be controlled by multiple admin addresses + /// (some which can also be servers, for example). Admins can issue + /// and burn NFTs, as well as add and remove other admins, + /// but cannot change NFT or Collection ownership. /// /// # Permissions /// - /// * Collection Owner. - /// * Collection Admin. + /// * Collection owner + /// * Collection admin /// /// # Arguments /// - /// * collection_id: ID of the Collection to add admin for. - /// - /// * new_admin_id: Address of new admin to add. + /// * `collection_id`: ID of the Collection to add an admin for. + /// * `new_admin`: Address of new admin to add. #[weight = >::add_collection_admin()] - #[transactional] pub fn add_collection_admin(origin, collection_id: CollectionId, new_admin_id: T::CrossAccountId) -> DispatchResult { let sender = T::CrossAccountId::from_sub(ensure_signed(origin)?); let collection = >::try_get(collection_id)?; @@ -448,20 +507,21 @@ decl_module! { >::toggle_admin(&collection, &sender, &new_admin_id, true) } - /// Remove admin address of the Collection. An admin address can remove itself. List of admins may become empty, in which case only Collection Owner will be able to add an Admin. + /// Remove admin of a collection. + /// + /// An admin address can remove itself. List of admins may become empty, + /// in which case only Collection Owner will be able to add an Admin. /// /// # Permissions /// - /// * Collection Owner. - /// * Collection Admin. + /// * Collection owner + /// * Collection admin /// /// # Arguments /// - /// * collection_id: ID of the Collection to remove admin for. - /// - /// * account_id: Address of admin to remove. + /// * `collection_id`: ID of the collection to remove the admin for. + /// * `account_id`: Address of the admin to remove. 
#[weight = >::remove_collection_admin()] - #[transactional] pub fn remove_collection_admin(origin, collection_id: CollectionId, account_id: T::CrossAccountId) -> DispatchResult { let sender = T::CrossAccountId::from_sub(ensure_signed(origin)?); let collection = >::try_get(collection_id)?; @@ -475,17 +535,20 @@ decl_module! { >::toggle_admin(&collection, &sender, &account_id, false) } + /// Set (invite) a new collection sponsor. + /// + /// If successful, confirmation from the sponsor-to-be will be pending. + /// /// # Permissions /// - /// * Collection Owner + /// * Collection owner + /// * Collection admin /// /// # Arguments /// - /// * collection_id. - /// - /// * new_sponsor. + /// * `collection_id`: ID of the modified collection. + /// * `new_sponsor`: ID of the account of the sponsor-to-be. #[weight = >::set_collection_sponsor()] - #[transactional] pub fn set_collection_sponsor(origin, collection_id: CollectionId, new_sponsor: T::AccountId) -> DispatchResult { let sender = T::CrossAccountId::from_sub(ensure_signed(origin)?); @@ -503,15 +566,20 @@ decl_module! { target_collection.save() } + /// Confirm own sponsorship of a collection, becoming the sponsor. + /// + /// An invitation must be pending, see [`set_collection_sponsor`][`Pallet::set_collection_sponsor`]. + /// Sponsor can pay the fees of a transaction instead of the sender, + /// but only within specified limits. + /// /// # Permissions /// - /// * Sponsor. + /// * Sponsor-to-be /// /// # Arguments /// - /// * collection_id. + /// * `collection_id`: ID of the collection with the pending sponsor. #[weight = >::confirm_sponsorship()] - #[transactional] pub fn confirm_sponsorship(origin, collection_id: CollectionId) -> DispatchResult { let sender = ensure_signed(origin)?; @@ -530,17 +598,16 @@ decl_module! { target_collection.save() } - /// Switch back to pay-per-own-transaction model. + /// Remove a collection's a sponsor, making everyone pay for their own transactions. /// /// # Permissions /// - /// * Collection owner. + /// * Collection owner /// /// # Arguments /// - /// * collection_id. + /// * `collection_id`: ID of the collection with the sponsor to remove. #[weight = >::remove_collection_sponsor()] - #[transactional] pub fn remove_collection_sponsor(origin, collection_id: CollectionId) -> DispatchResult { let sender = T::CrossAccountId::from_sub(ensure_signed(origin)?); @@ -556,26 +623,25 @@ decl_module! { target_collection.save() } - /// This method creates a concrete instance of NFT Collection created with CreateCollection method. + /// Mint an item within a collection. + /// + /// A collection must exist first, see [`create_collection_ex`][`Pallet::create_collection_ex`]. /// /// # Permissions /// - /// * Collection Owner. - /// * Collection Admin. + /// * Collection owner + /// * Collection admin /// * Anyone if /// * Allow List is enabled, and /// * Address is added to allow list, and - /// * MintPermission is enabled (see SetMintPermission method) + /// * MintPermission is enabled (see [`set_collection_permissions`][`Pallet::set_collection_permissions`]) /// /// # Arguments /// - /// * collection_id: ID of the collection. - /// - /// * owner: Address, initial owner of the NFT. - /// - /// * data: Token data to store on chain. + /// * `collection_id`: ID of the collection to which an item would belong. + /// * `owner`: Address of the initial owner of the item. + /// * `data`: Token data describing the item to store on chain. 
#[weight = T::CommonWeightInfo::create_item()] - #[transactional] pub fn create_item(origin, collection_id: CollectionId, owner: T::CrossAccountId, data: CreateItemData) -> DispatchResultWithPostInfo { let sender = T::CrossAccountId::from_sub(ensure_signed(origin)?); let budget = budget::Value::new(NESTING_BUDGET); @@ -583,26 +649,25 @@ decl_module! { dispatch_tx::(collection_id, |d| d.create_item(sender, owner, data, &budget)) } - /// This method creates multiple items in a collection created with CreateCollection method. + /// Create multiple items within a collection. + /// + /// A collection must exist first, see [`create_collection_ex`][`Pallet::create_collection_ex`]. /// /// # Permissions /// - /// * Collection Owner. - /// * Collection Admin. + /// * Collection owner + /// * Collection admin /// * Anyone if /// * Allow List is enabled, and - /// * Address is added to allow list, and - /// * MintPermission is enabled (see SetMintPermission method) + /// * Address is added to the allow list, and + /// * MintPermission is enabled (see [`set_collection_permissions`][`Pallet::set_collection_permissions`]) /// /// # Arguments /// - /// * collection_id: ID of the collection. - /// - /// * itemsData: Array items properties. Each property is an array of bytes itself, see [create_item]. - /// - /// * owner: Address, initial owner of the NFT. + /// * `collection_id`: ID of the collection to which the tokens would belong. + /// * `owner`: Address of the initial owner of the tokens. + /// * `items_data`: Vector of data describing each item to be created. #[weight = T::CommonWeightInfo::create_multiple_items(&items_data)] - #[transactional] pub fn create_multiple_items(origin, collection_id: CollectionId, owner: T::CrossAccountId, items_data: Vec) -> DispatchResultWithPostInfo { ensure!(!items_data.is_empty(), Error::::EmptyArgument); let sender = T::CrossAccountId::from_sub(ensure_signed(origin)?); @@ -611,8 +676,19 @@ decl_module! { dispatch_tx::(collection_id, |d| d.create_multiple_items(sender, owner, items_data, &budget)) } + /// Add or change collection properties. + /// + /// # Permissions + /// + /// * Collection owner + /// * Collection admin + /// + /// # Arguments + /// + /// * `collection_id`: ID of the modified collection. + /// * `properties`: Vector of key-value pairs stored as the collection's metadata. + /// Keys support Latin letters, `-`, `_`, and `.` as symbols. #[weight = T::CommonWeightInfo::set_collection_properties(properties.len() as u32)] - #[transactional] pub fn set_collection_properties( origin, collection_id: CollectionId, @@ -625,8 +701,19 @@ decl_module! { dispatch_tx::(collection_id, |d| d.set_collection_properties(sender, properties)) } + /// Delete specified collection properties. + /// + /// # Permissions + /// + /// * Collection Owner + /// * Collection Admin + /// + /// # Arguments + /// + /// * `collection_id`: ID of the modified collection. + /// * `property_keys`: Vector of keys of the properties to be deleted. + /// Keys support Latin letters, `-`, `_`, and `.` as symbols. #[weight = T::CommonWeightInfo::delete_collection_properties(property_keys.len() as u32)] - #[transactional] pub fn delete_collection_properties( origin, collection_id: CollectionId, @@ -639,8 +726,25 @@ decl_module! { dispatch_tx::(collection_id, |d| d.delete_collection_properties(&sender, property_keys)) } + /// Add or change token properties according to collection's permissions. + /// Currently properties only work with NFTs. 
+ /// + /// # Permissions + /// + /// * Depends on collection's token property permissions and specified property mutability: + /// * Collection owner + /// * Collection admin + /// * Token owner + /// + /// See [`set_token_property_permissions`][`Pallet::set_token_property_permissions`]. + /// + /// # Arguments + /// + /// * `collection_id: ID of the collection to which the token belongs. + /// * `token_id`: ID of the modified token. + /// * `properties`: Vector of key-value pairs stored as the token's metadata. + /// Keys support Latin letters, `-`, `_`, and `.` as symbols. #[weight = T::CommonWeightInfo::set_token_properties(properties.len() as u32)] - #[transactional] pub fn set_token_properties( origin, collection_id: CollectionId, @@ -655,8 +759,22 @@ decl_module! { dispatch_tx::(collection_id, |d| d.set_token_properties(sender, token_id, properties, &budget)) } + /// Delete specified token properties. Currently properties only work with NFTs. + /// + /// # Permissions + /// + /// * Depends on collection's token property permissions and specified property mutability: + /// * Collection owner + /// * Collection admin + /// * Token owner + /// + /// # Arguments + /// + /// * `collection_id`: ID of the collection to which the token belongs. + /// * `token_id`: ID of the modified token. + /// * `property_keys`: Vector of keys of the properties to be deleted. + /// Keys support Latin letters, `-`, `_`, and `.` as symbols. #[weight = T::CommonWeightInfo::delete_token_properties(property_keys.len() as u32)] - #[transactional] pub fn delete_token_properties( origin, collection_id: CollectionId, @@ -671,8 +789,22 @@ decl_module! { dispatch_tx::(collection_id, |d| d.delete_token_properties(sender, token_id, property_keys, &budget)) } + /// Add or change token property permissions of a collection. + /// + /// Without a permission for a particular key, a property with that key + /// cannot be created in a token. + /// + /// # Permissions + /// + /// * Collection owner + /// * Collection admin + /// + /// # Arguments + /// + /// * `collection_id`: ID of the modified collection. + /// * `property_permissions`: Vector of permissions for property keys. + /// Keys support Latin letters, `-`, `_`, and `.` as symbols. #[weight = T::CommonWeightInfo::set_token_property_permissions(property_permissions.len() as u32)] - #[transactional] pub fn set_token_property_permissions( origin, collection_id: CollectionId, @@ -685,8 +817,22 @@ decl_module! { dispatch_tx::(collection_id, |d| d.set_token_property_permissions(&sender, property_permissions)) } + /// Create multiple items within a collection with explicitly specified initial parameters. + /// + /// # Permissions + /// + /// * Collection owner + /// * Collection admin + /// * Anyone if + /// * Allow List is enabled, and + /// * Address is added to allow list, and + /// * MintPermission is enabled (see [`set_collection_permissions`][`Pallet::set_collection_permissions`]) + /// + /// # Arguments + /// + /// * `collection_id`: ID of the collection to which the tokens would belong. + /// * `data`: Explicit item creation data. #[weight = T::CommonWeightInfo::create_multiple_items_ex(&data)] - #[transactional] pub fn create_multiple_items_ex(origin, collection_id: CollectionId, data: CreateItemExData) -> DispatchResultWithPostInfo { let sender = T::CrossAccountId::from_sub(ensure_signed(origin)?); let budget = budget::Value::new(NESTING_BUDGET); @@ -694,19 +840,17 @@ decl_module! 
{ dispatch_tx::(collection_id, |d| d.create_multiple_items_ex(sender, data, &budget)) } - /// Set transfers_enabled value for particular collection + /// Completely allow or disallow transfers for a particular collection. /// /// # Permissions /// - /// * Collection Owner. + /// * Collection owner /// /// # Arguments /// - /// * collection_id: ID of the collection. - /// - /// * value: New flag value. + /// * `collection_id`: ID of the collection. + /// * `value`: New value of the flag, are transfers allowed? #[weight = >::set_transfers_enabled_flag()] - #[transactional] pub fn set_transfers_enabled_flag(origin, collection_id: CollectionId, value: bool) -> DispatchResult { let sender = T::CrossAccountId::from_sub(ensure_signed(origin)?); let mut target_collection = >::try_get(collection_id)?; @@ -719,21 +863,23 @@ decl_module! { target_collection.save() } - /// Destroys a concrete instance of NFT. + /// Destroy an item. /// /// # Permissions /// - /// * Collection Owner. - /// * Collection Admin. - /// * Current NFT Owner. + /// * Collection owner + /// * Collection admin + /// * Current item owner /// /// # Arguments /// - /// * collection_id: ID of the collection. - /// - /// * item_id: ID of NFT to burn. + /// * `collection_id`: ID of the collection to which the item belongs. + /// * `item_id`: ID of item to burn. + /// * `value`: Number of pieces of the item to destroy. + /// * Non-Fungible Mode: An NFT is indivisible, there is always 1 corresponding to an ID. + /// * Fungible Mode: The desired number of pieces to burn. + /// * Re-Fungible Mode: The desired number of pieces to burn. #[weight = T::CommonWeightInfo::burn_item()] - #[transactional] pub fn burn_item(origin, collection_id: CollectionId, item_id: TokenId, value: u128) -> DispatchResultWithPostInfo { let sender = T::CrossAccountId::from_sub(ensure_signed(origin)?); @@ -748,24 +894,30 @@ decl_module! { Ok(post_info) } - /// Destroys a concrete instance of NFT on behalf of the owner - /// See also: [`approve`] + /// Destroy a token on behalf of the owner as a non-owner account. /// - /// # Permissions + /// See also: [`approve`][`Pallet::approve`]. /// - /// * Collection Owner. - /// * Collection Admin. - /// * Current NFT Owner. + /// After this method executes, one approval is removed from the total so that + /// the approved address will not be able to transfer this item again from this owner. /// - /// # Arguments + /// # Permissions /// - /// * collection_id: ID of the collection. + /// * Collection owner + /// * Collection admin + /// * Current token owner + /// * Address approved by current item owner /// - /// * item_id: ID of NFT to burn. + /// # Arguments /// - /// * from: owner of item + /// * `from`: The owner of the burning item. + /// * `collection_id`: ID of the collection to which the item belongs. + /// * `item_id`: ID of item to burn. + /// * `value`: Number of pieces to burn. + /// * Non-Fungible Mode: An NFT is indivisible, there is always 1 corresponding to an ID. + /// * Fungible Mode: The desired number of pieces to burn. + /// * Re-Fungible Mode: The desired number of pieces to burn. #[weight = T::CommonWeightInfo::burn_from()] - #[transactional] pub fn burn_from(origin, collection_id: CollectionId, from: T::CrossAccountId, item_id: TokenId, value: u128) -> DispatchResultWithPostInfo { let sender = T::CrossAccountId::from_sub(ensure_signed(origin)?); let budget = budget::Value::new(NESTING_BUDGET); @@ -777,27 +929,24 @@ decl_module! 
{ /// /// # Permissions /// - /// * Collection Owner - /// * Collection Admin - /// * Current NFT owner + /// * Collection owner + /// * Collection admin + /// * Current token owner /// /// # Arguments /// - /// * recipient: Address of token recipient. - /// - /// * collection_id. - /// - /// * item_id: ID of the item + /// * `recipient`: Address of token recipient. + /// * `collection_id`: ID of the collection the item belongs to. + /// * `item_id`: ID of the item. /// * Non-Fungible Mode: Required. /// * Fungible Mode: Ignored. /// * Re-Fungible Mode: Required. /// - /// * value: Amount to transfer. - /// * Non-Fungible Mode: Ignored - /// * Fungible Mode: Must specify transferred amount - /// * Re-Fungible Mode: Must specify transferred portion (between 0 and 1) + /// * `value`: Amount to transfer. + /// * Non-Fungible Mode: An NFT is indivisible, there is always 1 corresponding to an ID. + /// * Fungible Mode: The desired number of pieces to transfer. + /// * Re-Fungible Mode: The desired number of pieces to transfer. #[weight = T::CommonWeightInfo::transfer()] - #[transactional] pub fn transfer(origin, recipient: T::CrossAccountId, collection_id: CollectionId, item_id: TokenId, value: u128) -> DispatchResultWithPostInfo { let sender = T::CrossAccountId::from_sub(ensure_signed(origin)?); let budget = budget::Value::new(NESTING_BUDGET); @@ -805,50 +954,53 @@ decl_module! { dispatch_tx::(collection_id, |d| d.transfer(sender, recipient, item_id, value, &budget)) } - /// Set, change, or remove approved address to transfer the ownership of the NFT. + /// Allow a non-permissioned address to transfer or burn an item. /// /// # Permissions /// - /// * Collection Owner - /// * Collection Admin - /// * Current NFT owner + /// * Collection owner + /// * Collection admin + /// * Current item owner /// /// # Arguments /// - /// * approved: Address that is approved to transfer this NFT or zero (if needed to remove approval). - /// - /// * collection_id. - /// - /// * item_id: ID of the item. + /// * `spender`: Account to be approved to make specific transactions on non-owned tokens. + /// * `collection_id`: ID of the collection the item belongs to. + /// * `item_id`: ID of the item transactions on which are now approved. + /// * `amount`: Number of pieces of the item approved for a transaction (maximum of 1 for NFTs). + /// Set to 0 to revoke the approval. #[weight = T::CommonWeightInfo::approve()] - #[transactional] pub fn approve(origin, spender: T::CrossAccountId, collection_id: CollectionId, item_id: TokenId, amount: u128) -> DispatchResultWithPostInfo { let sender = T::CrossAccountId::from_sub(ensure_signed(origin)?); dispatch_tx::(collection_id, |d| d.approve(sender, spender, item_id, amount)) } - /// Change ownership of a NFT on behalf of the owner. See Approve method for additional information. After this method executes, the approval is removed so that the approved address will not be able to transfer this NFT again from this owner. + /// Change ownership of an item on behalf of the owner as a non-owner account. /// - /// # Permissions - /// * Collection Owner - /// * Collection Admin - /// * Current NFT owner - /// * Address approved by current NFT owner + /// See the [`approve`][`Pallet::approve`] method for additional information. /// - /// # Arguments + /// After this method executes, one approval is removed from the total so that + /// the approved address will not be able to transfer this item again from this owner. /// - /// * from: Address that owns token. 
- /// - /// * recipient: Address of token recipient. + /// # Permissions /// - /// * collection_id. + /// * Collection owner + /// * Collection admin + /// * Current item owner + /// * Address approved by current item owner /// - /// * item_id: ID of the item. + /// # Arguments /// - /// * value: Amount to transfer. + /// * `from`: Address that currently owns the token. + /// * `recipient`: Address of the new token-owner-to-be. + /// * `collection_id`: ID of the collection the item. + /// * `item_id`: ID of the item to be transferred. + /// * `value`: Amount to transfer. + /// * Non-Fungible Mode: An NFT is indivisible, there is always 1 corresponding to an ID. + /// * Fungible Mode: The desired number of pieces to transfer. + /// * Re-Fungible Mode: The desired number of pieces to transfer. #[weight = T::CommonWeightInfo::transfer_from()] - #[transactional] pub fn transfer_from(origin, from: T::CrossAccountId, recipient: T::CrossAccountId, collection_id: CollectionId, item_id: TokenId, value: u128 ) -> DispatchResultWithPostInfo { let sender = T::CrossAccountId::from_sub(ensure_signed(origin)?); let budget = budget::Value::new(NESTING_BUDGET); @@ -856,8 +1008,19 @@ decl_module! { dispatch_tx::(collection_id, |d| d.transfer_from(sender, from, recipient, item_id, value, &budget)) } + /// Set specific limits of a collection. Empty, or None fields mean chain default. + /// + /// # Permissions + /// + /// * Collection owner + /// * Collection admin + /// + /// # Arguments + /// + /// * `collection_id`: ID of the modified collection. + /// * `new_limit`: New limits of the collection. Fields that are not set (None) + /// will not overwrite the old ones. #[weight = >::set_collection_limits()] - #[transactional] pub fn set_collection_limits( origin, collection_id: CollectionId, @@ -878,12 +1041,23 @@ decl_module! { target_collection.save() } + /// Set specific permissions of a collection. Empty, or None fields mean chain default. + /// + /// # Permissions + /// + /// * Collection owner + /// * Collection admin + /// + /// # Arguments + /// + /// * `collection_id`: ID of the modified collection. + /// * `new_permission`: New permissions of the collection. Fields that are not set (None) + /// will not overwrite the old ones. #[weight = >::set_collection_limits()] - #[transactional] pub fn set_collection_permissions( origin, collection_id: CollectionId, - new_limit: CollectionPermissions, + new_permission: CollectionPermissions, ) -> DispatchResult { let sender = T::CrossAccountId::from_sub(ensure_signed(origin)?); let mut target_collection = >::try_get(collection_id)?; @@ -891,7 +1065,7 @@ decl_module! { target_collection.check_is_owner_or_admin(&sender)?; let old_limit = &target_collection.permissions; - target_collection.permissions = >::clamp_permissions(target_collection.mode.clone(), &old_limit, new_limit)?; + target_collection.permissions = >::clamp_permissions(target_collection.mode.clone(), &old_limit, new_permission)?; >::deposit_event(Event::::CollectionPermissionSet( collection_id @@ -900,18 +1074,28 @@ decl_module! { target_collection.save() } + /// Re-partition a refungible token, while owning all of its parts/pieces. + /// + /// # Permissions + /// + /// * Token owner (must own every part) + /// + /// # Arguments + /// + /// * `collection_id`: ID of the collection the RFT belongs to. + /// * `token_id`: ID of the RFT. + /// * `amount`: New number of parts/pieces into which the token shall be partitioned. 
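// A simplified sketch of the repartition semantics documented above: the caller must hold
// every piece of the refungible token, after which the total piece count is replaced with
// the requested amount. Types here are hypothetical, the zero-amount guard is an assumption
// for illustration only, and the real logic lives behind `refungible_extensions()`.
struct RefungibleToken {
    total_pieces: u128,
    caller_pieces: u128,
}

fn repartition(token: &mut RefungibleToken, new_amount: u128) -> Result<(), &'static str> {
    if token.caller_pieces != token.total_pieces {
        // Matches the "must own every part" permission above.
        return Err("caller must own all pieces to repartition");
    }
    if new_amount == 0 {
        // Assumed guard, for illustration only.
        return Err("token must keep at least one piece");
    }
    token.total_pieces = new_amount;
    token.caller_pieces = new_amount;
    Ok(())
}

fn main() {
    let mut token = RefungibleToken { total_pieces: 10, caller_pieces: 10 };
    assert!(repartition(&mut token, 100).is_ok());
    assert_eq!(token.total_pieces, 100);

    let mut shared = RefungibleToken { total_pieces: 10, caller_pieces: 4 };
    assert!(repartition(&mut shared, 2).is_err());
}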
#[weight = T::RefungibleExtensionsWeightInfo::repartition()] - #[transactional] pub fn repartition( origin, collection_id: CollectionId, - token: TokenId, + token_id: TokenId, amount: u128, ) -> DispatchResultWithPostInfo { let sender = T::CrossAccountId::from_sub(ensure_signed(origin)?); dispatch_tx::(collection_id, |d| { if let Some(refungible_extensions) = d.refungible_extensions() { - refungible_extensions.repartition(&sender, token, amount) + refungible_extensions.repartition(&sender, token_id, amount) } else { fail!(>::RepartitionCalledOnNonRefungibleCollection) } @@ -919,3 +1103,52 @@ decl_module! { } } } + +impl Pallet { + /// Force set `sponsor` for `collection`. + /// + /// Differs from [`set_collection_sponsor`][`Pallet::set_collection_sponsor`] in that confirmation + /// from the `sponsor` is not required. + /// + /// # Arguments + /// + /// * `sponsor`: ID of the account of the sponsor-to-be. + /// * `collection_id`: ID of the modified collection. + pub fn force_set_sponsor(sponsor: T::AccountId, collection_id: CollectionId) -> DispatchResult { + let mut target_collection = >::try_get(collection_id)?; + target_collection.check_is_internal()?; + target_collection.set_sponsor(sponsor.clone())?; + + Self::deposit_event(Event::::CollectionSponsorSet( + collection_id, + sponsor.clone(), + )); + + ensure!( + target_collection.confirm_sponsorship(&sponsor)?, + Error::::ConfirmUnsetSponsorFail + ); + + Self::deposit_event(Event::::SponsorshipConfirmed(collection_id, sponsor)); + + target_collection.save() + } + + /// Force remove `sponsor` for `collection`. + /// + /// Differs from `remove_sponsor` in that + /// it doesn't require consent from the `owner` of the collection. + /// + /// # Arguments + /// + /// * `collection_id`: ID of the modified collection. + pub fn force_remove_collection_sponsor(collection_id: CollectionId) -> DispatchResult { + let mut target_collection = >::try_get(collection_id)?; + target_collection.check_is_internal()?; + target_collection.sponsorship = SponsorshipState::Disabled; + + Self::deposit_event(Event::::CollectionSponsorRemoved(collection_id)); + + target_collection.save() + } +} diff --git a/pallets/unique/src/weights.rs b/pallets/unique/src/weights.rs index f13ae907fd..5a141355e2 100644 --- a/pallets/unique/src/weights.rs +++ b/pallets/unique/src/weights.rs @@ -3,7 +3,7 @@ //! Autogenerated weights for pallet_unique //! //! THIS FILE WAS AUTO-GENERATED USING THE SUBSTRATE BENCHMARK CLI VERSION 4.0.0-dev -//! DATE: 2022-06-28, STEPS: `50`, REPEAT: 80, LOW RANGE: `[]`, HIGH RANGE: `[]` +//! DATE: 2022-08-15, STEPS: `50`, REPEAT: 80, LOW RANGE: `[]`, HIGH RANGE: `[]` //! 
EXECUTION: None, WASM-EXECUTION: Compiled, CHAIN: None, DB CACHE: 1024 // Executed Command: @@ -57,9 +57,9 @@ impl WeightInfo for SubstrateWeight { // Storage: Common CollectionProperties (r:0 w:1) // Storage: Common CollectionById (r:0 w:1) fn create_collection() -> Weight { - (53_511_000 as Weight) - .saturating_add(T::DbWeight::get().reads(4 as Weight)) - .saturating_add(T::DbWeight::get().writes(6 as Weight)) + Weight::from_ref_time(43_143_000) + .saturating_add(T::DbWeight::get().reads(4 as u64)) + .saturating_add(T::DbWeight::get().writes(6 as u64)) } // Storage: Common CollectionById (r:1 w:1) // Storage: Nonfungible TokenData (r:1 w:0) @@ -69,75 +69,75 @@ impl WeightInfo for SubstrateWeight { // Storage: Common AdminAmount (r:0 w:1) // Storage: Common CollectionProperties (r:0 w:1) fn destroy_collection() -> Weight { - (69_481_000 as Weight) - .saturating_add(T::DbWeight::get().reads(3 as Weight)) - .saturating_add(T::DbWeight::get().writes(6 as Weight)) + Weight::from_ref_time(50_188_000) + .saturating_add(T::DbWeight::get().reads(3 as u64)) + .saturating_add(T::DbWeight::get().writes(6 as u64)) } // Storage: Common CollectionById (r:1 w:0) // Storage: Common Allowlist (r:0 w:1) fn add_to_allow_list() -> Weight { - (22_892_000 as Weight) - .saturating_add(T::DbWeight::get().reads(1 as Weight)) - .saturating_add(T::DbWeight::get().writes(1 as Weight)) + Weight::from_ref_time(18_238_000) + .saturating_add(T::DbWeight::get().reads(1 as u64)) + .saturating_add(T::DbWeight::get().writes(1 as u64)) } // Storage: Common CollectionById (r:1 w:0) // Storage: Common Allowlist (r:0 w:1) fn remove_from_allow_list() -> Weight { - (22_973_000 as Weight) - .saturating_add(T::DbWeight::get().reads(1 as Weight)) - .saturating_add(T::DbWeight::get().writes(1 as Weight)) + Weight::from_ref_time(18_084_000) + .saturating_add(T::DbWeight::get().reads(1 as u64)) + .saturating_add(T::DbWeight::get().writes(1 as u64)) } // Storage: Common CollectionById (r:1 w:1) fn change_collection_owner() -> Weight { - (22_392_000 as Weight) - .saturating_add(T::DbWeight::get().reads(1 as Weight)) - .saturating_add(T::DbWeight::get().writes(1 as Weight)) + Weight::from_ref_time(18_265_000) + .saturating_add(T::DbWeight::get().reads(1 as u64)) + .saturating_add(T::DbWeight::get().writes(1 as u64)) } // Storage: Common CollectionById (r:1 w:0) // Storage: Common IsAdmin (r:1 w:1) // Storage: Common AdminAmount (r:1 w:1) fn add_collection_admin() -> Weight { - (30_298_000 as Weight) - .saturating_add(T::DbWeight::get().reads(3 as Weight)) - .saturating_add(T::DbWeight::get().writes(2 as Weight)) + Weight::from_ref_time(23_558_000) + .saturating_add(T::DbWeight::get().reads(3 as u64)) + .saturating_add(T::DbWeight::get().writes(2 as u64)) } // Storage: Common CollectionById (r:1 w:0) // Storage: Common IsAdmin (r:1 w:1) // Storage: Common AdminAmount (r:1 w:1) fn remove_collection_admin() -> Weight { - (32_842_000 as Weight) - .saturating_add(T::DbWeight::get().reads(3 as Weight)) - .saturating_add(T::DbWeight::get().writes(2 as Weight)) + Weight::from_ref_time(25_285_000) + .saturating_add(T::DbWeight::get().reads(3 as u64)) + .saturating_add(T::DbWeight::get().writes(2 as u64)) } // Storage: Common CollectionById (r:1 w:1) fn set_collection_sponsor() -> Weight { - (22_613_000 as Weight) - .saturating_add(T::DbWeight::get().reads(1 as Weight)) - .saturating_add(T::DbWeight::get().writes(1 as Weight)) + Weight::from_ref_time(17_885_000) + .saturating_add(T::DbWeight::get().reads(1 as u64)) + 
.saturating_add(T::DbWeight::get().writes(1 as u64)) } // Storage: Common CollectionById (r:1 w:1) fn confirm_sponsorship() -> Weight { - (22_462_000 as Weight) - .saturating_add(T::DbWeight::get().reads(1 as Weight)) - .saturating_add(T::DbWeight::get().writes(1 as Weight)) + Weight::from_ref_time(17_897_000) + .saturating_add(T::DbWeight::get().reads(1 as u64)) + .saturating_add(T::DbWeight::get().writes(1 as u64)) } // Storage: Common CollectionById (r:1 w:1) fn remove_collection_sponsor() -> Weight { - (21_730_000 as Weight) - .saturating_add(T::DbWeight::get().reads(1 as Weight)) - .saturating_add(T::DbWeight::get().writes(1 as Weight)) + Weight::from_ref_time(17_836_000) + .saturating_add(T::DbWeight::get().reads(1 as u64)) + .saturating_add(T::DbWeight::get().writes(1 as u64)) } // Storage: Common CollectionById (r:1 w:1) fn set_transfers_enabled_flag() -> Weight { - (10_941_000 as Weight) - .saturating_add(T::DbWeight::get().reads(1 as Weight)) - .saturating_add(T::DbWeight::get().writes(1 as Weight)) + Weight::from_ref_time(9_714_000) + .saturating_add(T::DbWeight::get().reads(1 as u64)) + .saturating_add(T::DbWeight::get().writes(1 as u64)) } // Storage: Common CollectionById (r:1 w:1) fn set_collection_limits() -> Weight { - (22_363_000 as Weight) - .saturating_add(T::DbWeight::get().reads(1 as Weight)) - .saturating_add(T::DbWeight::get().writes(1 as Weight)) + Weight::from_ref_time(18_166_000) + .saturating_add(T::DbWeight::get().reads(1 as u64)) + .saturating_add(T::DbWeight::get().writes(1 as u64)) } } @@ -150,9 +150,9 @@ impl WeightInfo for () { // Storage: Common CollectionProperties (r:0 w:1) // Storage: Common CollectionById (r:0 w:1) fn create_collection() -> Weight { - (53_511_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(4 as Weight)) - .saturating_add(RocksDbWeight::get().writes(6 as Weight)) + Weight::from_ref_time(43_143_000) + .saturating_add(RocksDbWeight::get().reads(4 as u64)) + .saturating_add(RocksDbWeight::get().writes(6 as u64)) } // Storage: Common CollectionById (r:1 w:1) // Storage: Nonfungible TokenData (r:1 w:0) @@ -162,74 +162,74 @@ impl WeightInfo for () { // Storage: Common AdminAmount (r:0 w:1) // Storage: Common CollectionProperties (r:0 w:1) fn destroy_collection() -> Weight { - (69_481_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(3 as Weight)) - .saturating_add(RocksDbWeight::get().writes(6 as Weight)) + Weight::from_ref_time(50_188_000) + .saturating_add(RocksDbWeight::get().reads(3 as u64)) + .saturating_add(RocksDbWeight::get().writes(6 as u64)) } // Storage: Common CollectionById (r:1 w:0) // Storage: Common Allowlist (r:0 w:1) fn add_to_allow_list() -> Weight { - (22_892_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(1 as Weight)) - .saturating_add(RocksDbWeight::get().writes(1 as Weight)) + Weight::from_ref_time(18_238_000) + .saturating_add(RocksDbWeight::get().reads(1 as u64)) + .saturating_add(RocksDbWeight::get().writes(1 as u64)) } // Storage: Common CollectionById (r:1 w:0) // Storage: Common Allowlist (r:0 w:1) fn remove_from_allow_list() -> Weight { - (22_973_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(1 as Weight)) - .saturating_add(RocksDbWeight::get().writes(1 as Weight)) + Weight::from_ref_time(18_084_000) + .saturating_add(RocksDbWeight::get().reads(1 as u64)) + .saturating_add(RocksDbWeight::get().writes(1 as u64)) } // Storage: Common CollectionById (r:1 w:1) fn change_collection_owner() -> Weight { - (22_392_000 as Weight) - 
.saturating_add(RocksDbWeight::get().reads(1 as Weight)) - .saturating_add(RocksDbWeight::get().writes(1 as Weight)) + Weight::from_ref_time(18_265_000) + .saturating_add(RocksDbWeight::get().reads(1 as u64)) + .saturating_add(RocksDbWeight::get().writes(1 as u64)) } // Storage: Common CollectionById (r:1 w:0) // Storage: Common IsAdmin (r:1 w:1) // Storage: Common AdminAmount (r:1 w:1) fn add_collection_admin() -> Weight { - (30_298_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(3 as Weight)) - .saturating_add(RocksDbWeight::get().writes(2 as Weight)) + Weight::from_ref_time(23_558_000) + .saturating_add(RocksDbWeight::get().reads(3 as u64)) + .saturating_add(RocksDbWeight::get().writes(2 as u64)) } // Storage: Common CollectionById (r:1 w:0) // Storage: Common IsAdmin (r:1 w:1) // Storage: Common AdminAmount (r:1 w:1) fn remove_collection_admin() -> Weight { - (32_842_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(3 as Weight)) - .saturating_add(RocksDbWeight::get().writes(2 as Weight)) + Weight::from_ref_time(25_285_000) + .saturating_add(RocksDbWeight::get().reads(3 as u64)) + .saturating_add(RocksDbWeight::get().writes(2 as u64)) } // Storage: Common CollectionById (r:1 w:1) fn set_collection_sponsor() -> Weight { - (22_613_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(1 as Weight)) - .saturating_add(RocksDbWeight::get().writes(1 as Weight)) + Weight::from_ref_time(17_885_000) + .saturating_add(RocksDbWeight::get().reads(1 as u64)) + .saturating_add(RocksDbWeight::get().writes(1 as u64)) } // Storage: Common CollectionById (r:1 w:1) fn confirm_sponsorship() -> Weight { - (22_462_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(1 as Weight)) - .saturating_add(RocksDbWeight::get().writes(1 as Weight)) + Weight::from_ref_time(17_897_000) + .saturating_add(RocksDbWeight::get().reads(1 as u64)) + .saturating_add(RocksDbWeight::get().writes(1 as u64)) } // Storage: Common CollectionById (r:1 w:1) fn remove_collection_sponsor() -> Weight { - (21_730_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(1 as Weight)) - .saturating_add(RocksDbWeight::get().writes(1 as Weight)) + Weight::from_ref_time(17_836_000) + .saturating_add(RocksDbWeight::get().reads(1 as u64)) + .saturating_add(RocksDbWeight::get().writes(1 as u64)) } // Storage: Common CollectionById (r:1 w:1) fn set_transfers_enabled_flag() -> Weight { - (10_941_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(1 as Weight)) - .saturating_add(RocksDbWeight::get().writes(1 as Weight)) + Weight::from_ref_time(9_714_000) + .saturating_add(RocksDbWeight::get().reads(1 as u64)) + .saturating_add(RocksDbWeight::get().writes(1 as u64)) } // Storage: Common CollectionById (r:1 w:1) fn set_collection_limits() -> Weight { - (22_363_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(1 as Weight)) - .saturating_add(RocksDbWeight::get().writes(1 as Weight)) + Weight::from_ref_time(18_166_000) + .saturating_add(RocksDbWeight::get().reads(1 as u64)) + .saturating_add(RocksDbWeight::get().writes(1 as u64)) } } diff --git a/primitives/app_promotion_rpc/CHANGELOG.md b/primitives/app_promotion_rpc/CHANGELOG.md new file mode 100644 index 0000000000..7c3c8c0c96 --- /dev/null +++ b/primitives/app_promotion_rpc/CHANGELOG.md @@ -0,0 +1,5 @@ +# Change Log + +All notable changes to this project will be documented in this file. 
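// The `pallets/unique/src/weights.rs` diff above follows the Substrate 0.9.30 change to the
// `Weight` API: bare `(N as Weight)` values become `Weight::from_ref_time(N)`, and the counts
// passed to `reads`/`writes` are plain `u64` instead of `Weight`. The stubs below only mimic
// that shape so the before/after pattern is clear without pulling in frame_support; the
// per-operation database costs in `main` are placeholders, while the ref-time value and the
// 4-reads/6-writes shape are taken from `create_collection` above.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Weight {
    ref_time: u64,
}

impl Weight {
    // Stand-in for `Weight::from_ref_time` from the new API.
    fn from_ref_time(ref_time: u64) -> Self {
        Weight { ref_time }
    }

    fn saturating_add(self, other: Weight) -> Self {
        Weight {
            ref_time: self.ref_time.saturating_add(other.ref_time),
        }
    }
}

// Stand-in for `T::DbWeight::get()`: the per-operation cost of database reads and writes.
struct DbWeight {
    read: u64,
    write: u64,
}

impl DbWeight {
    fn reads(&self, n: u64) -> Weight {
        Weight::from_ref_time(self.read.saturating_mul(n))
    }
    fn writes(&self, n: u64) -> Weight {
        Weight::from_ref_time(self.write.saturating_mul(n))
    }
}

fn main() {
    let db = DbWeight { read: 25_000_000, write: 100_000_000 }; // placeholder costs
    let create_collection = Weight::from_ref_time(43_143_000)
        .saturating_add(db.reads(4))
        .saturating_add(db.writes(6));
    println!("create_collection ref_time: {}", create_collection.ref_time);
}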
+ + diff --git a/primitives/app_promotion_rpc/Cargo.toml b/primitives/app_promotion_rpc/Cargo.toml new file mode 100644 index 0000000000..13189ea573 --- /dev/null +++ b/primitives/app_promotion_rpc/Cargo.toml @@ -0,0 +1,29 @@ +[package] +name = "app-promotion-rpc" +version = "0.1.0" +license = "GPLv3" +edition = "2021" + +[dependencies] +pallet-common = { default-features = false, path = '../../pallets/common' } +up-data-structs = { default-features = false, path = '../data-structs' } +codec = { package = "parity-scale-codec", version = "3.1.2", default-features = false, features = [ + "derive", +] } +sp-core = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-std = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-api = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-runtime = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +pallet-evm = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } + +[features] +default = ["std"] +std = [ + "codec/std", + "sp-core/std", + "sp-std/std", + "sp-api/std", + "sp-runtime/std", + "pallet-common/std", + "up-data-structs/std", +] diff --git a/primitives/app_promotion_rpc/src/lib.rs b/primitives/app_promotion_rpc/src/lib.rs new file mode 100644 index 0000000000..e381ed36d4 --- /dev/null +++ b/primitives/app_promotion_rpc/src/lib.rs @@ -0,0 +1,41 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +#![cfg_attr(not(feature = "std"), no_std)] + +use sp_std::vec::Vec; +use codec::Decode; +use sp_runtime::{ + DispatchError, + traits::{AtLeast32BitUnsigned, Member}, +}; + +type Result = core::result::Result; + +sp_api::decl_runtime_apis! { + #[api_version(2)] + /// Trait for generate rpc. + pub trait AppPromotionApi where + BlockNumber: Decode + Member + AtLeast32BitUnsigned, + AccountId: Decode, + CrossAccountId: pallet_evm::account::CrossAccountId, + { + fn total_staked(staker: Option) -> Result; + fn total_staked_per_block(staker: CrossAccountId) -> Result>; + fn pending_unstake(staker: Option) -> Result; + fn pending_unstake_per_block(staker: CrossAccountId) -> Result>; + } +} diff --git a/primitives/common/CHANGELOG.md b/primitives/common/CHANGELOG.md new file mode 100644 index 0000000000..44524383d6 --- /dev/null +++ b/primitives/common/CHANGELOG.md @@ -0,0 +1,13 @@ + +## [v0.9.27] 2022-09-08 + +### Added +- Relay block constants. In particular, it is necessary to add the `AppPromotion` pallet at runtime. 
+ +## [v0.9.25] 2022-08-16 + +### Other changes + +- build: Upgrade polkadot to v0.9.27 2c498572636f2b34d53b1c51b7283a761a7dc90a + +- build: Upgrade up-common to polkadot-v0.9.26 3df3531cadd6d2ed23afe838d3a71321b0f12c2e \ No newline at end of file diff --git a/primitives/common/Cargo.toml b/primitives/common/Cargo.toml new file mode 100644 index 0000000000..08dcd7ab3a --- /dev/null +++ b/primitives/common/Cargo.toml @@ -0,0 +1,56 @@ +[package] +authors = ['Unique Network '] +description = 'Unique Runtime Common Primitives' +edition = '2021' +homepage = 'https://unique.network' +license = 'All Rights Reserved' +name = 'up-common' +repository = 'https://github.com/UniqueNetwork/unique-chain' +version = "0.9.30" + +[features] +default = ['std'] +std = [ + 'sp-std/std', + 'frame-support/std', + 'sp-runtime/std', + 'sp-core/std', + 'sp-consensus-aura/std', + 'fp-rpc/std', + 'pallet-evm/std', +] + +[dependencies.sp-std] +default-features = false +git = "https://github.com/paritytech/substrate" +branch = "polkadot-v0.9.30" + +[dependencies.frame-support] +default-features = false +git = "https://github.com/paritytech/substrate" +branch = "polkadot-v0.9.30" + +[dependencies.sp-runtime] +default-features = false +git = "https://github.com/paritytech/substrate" +branch = "polkadot-v0.9.30" + +[dependencies.sp-core] +default-features = false +git = "https://github.com/paritytech/substrate" +branch = "polkadot-v0.9.30" + +[dependencies.sp-consensus-aura] +default-features = false +git = "https://github.com/paritytech/substrate" +branch = "polkadot-v0.9.30" + +[dependencies.fp-rpc] +default-features = false +git = "https://github.com/uniquenetwork/frontier" +branch = "unique-polkadot-v0.9.30" + +[dependencies.pallet-evm] +default-features = false +git = "https://github.com/uniquenetwork/frontier" +branch = "unique-polkadot-v0.9.30" diff --git a/runtime/common/src/constants.rs b/primitives/common/src/constants.rs similarity index 78% rename from runtime/common/src/constants.rs rename to primitives/common/src/constants.rs index e1241c5dc4..5ff5c661e9 100644 --- a/runtime/common/src/constants.rs +++ b/primitives/common/src/constants.rs @@ -22,6 +22,7 @@ use frame_support::{ use crate::types::{BlockNumber, Balance}; pub const MILLISECS_PER_BLOCK: u64 = 12000; +pub const MILLISECS_PER_RELAY_BLOCK: u64 = 6000; pub const SLOT_DURATION: u64 = MILLISECS_PER_BLOCK; @@ -30,16 +31,21 @@ pub const MINUTES: BlockNumber = 60_000 / (MILLISECS_PER_BLOCK as BlockNumber); pub const HOURS: BlockNumber = MINUTES * 60; pub const DAYS: BlockNumber = HOURS * 24; +// These time units are defined in number of relay blocks. +pub const RELAY_MINUTES: BlockNumber = 60_000 / (MILLISECS_PER_RELAY_BLOCK as BlockNumber); +pub const RELAY_HOURS: BlockNumber = RELAY_MINUTES * 60; +pub const RELAY_DAYS: BlockNumber = RELAY_HOURS * 24; + pub const MICROUNIQUE: Balance = 1_000_000_000_000; pub const MILLIUNIQUE: Balance = 1_000 * MICROUNIQUE; pub const CENTIUNIQUE: Balance = 10 * MILLIUNIQUE; pub const UNIQUE: Balance = 100 * CENTIUNIQUE; // Targeting 0.1 UNQ per transfer -pub const WEIGHT_TO_FEE_COEFF: u32 = 207_890_902; +pub const WEIGHT_TO_FEE_COEFF: u32 = /**/207_163_598/**/; // Targeting 0.15 UNQ per transfer via ETH -pub const MIN_GAS_PRICE: u64 = 1_019_493_469_850; +pub const MIN_GAS_PRICE: u64 = /**/1_019_483_274_941/**/; /// We assume that ~10% of the block weight is consumed by `on_initalize` handlers. /// This is used to limit the maximal weight of a single extrinsic. 
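// The relay-chain time constants added to `constants.rs` above are derived the same way as
// the existing parachain ones, only from the 6-second relay block time instead of the
// 12-second parachain block time. A standalone check of the resulting values:
type BlockNumber = u32;

const MILLISECS_PER_RELAY_BLOCK: u64 = 6000;
const RELAY_MINUTES: BlockNumber = 60_000 / (MILLISECS_PER_RELAY_BLOCK as BlockNumber);
const RELAY_HOURS: BlockNumber = RELAY_MINUTES * 60;
const RELAY_DAYS: BlockNumber = RELAY_HOURS * 24;

fn main() {
    assert_eq!(RELAY_MINUTES, 10); // 10 relay blocks per minute
    assert_eq!(RELAY_HOURS, 600); // 600 relay blocks per hour
    assert_eq!(RELAY_DAYS, 14_400); // 14 400 relay blocks per day
    println!("relay blocks per day: {}", RELAY_DAYS);
}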
@@ -48,10 +54,8 @@ pub const AVERAGE_ON_INITIALIZE_RATIO: Perbill = Perbill::from_percent(10); /// by Operational extrinsics. pub const NORMAL_DISPATCH_RATIO: Perbill = Perbill::from_percent(75); /// We allow for 2 seconds of compute with a 6 second average block time. -pub const MAXIMUM_BLOCK_WEIGHT: Weight = WEIGHT_PER_SECOND / 2; +pub const MAXIMUM_BLOCK_WEIGHT: Weight = WEIGHT_PER_SECOND.saturating_div(2); parameter_types! { - pub const DefaultSponsoringRateLimit: BlockNumber = 1 * DAYS; - pub const TransactionByteFee: Balance = 501 * MICROUNIQUE; } diff --git a/runtime/common/src/lib.rs b/primitives/common/src/lib.rs similarity index 89% rename from runtime/common/src/lib.rs rename to primitives/common/src/lib.rs index dfdc9f5562..0ddacf9643 100644 --- a/runtime/common/src/lib.rs +++ b/primitives/common/src/lib.rs @@ -17,9 +17,4 @@ #![cfg_attr(not(feature = "std"), no_std)] pub mod constants; -pub mod dispatch; -pub mod eth_sponsoring; -pub mod runtime_apis; -pub mod sponsoring; pub mod types; -pub mod weights; diff --git a/runtime/common/src/types.rs b/primitives/common/src/types.rs similarity index 60% rename from runtime/common/src/types.rs rename to primitives/common/src/types.rs index f3241515ac..93bfebed0a 100644 --- a/runtime/common/src/types.rs +++ b/primitives/common/src/types.rs @@ -15,17 +15,40 @@ // along with Unique Network. If not, see . use sp_runtime::{ - traits::{Verify, IdentifyAccount, BlakeTwo256}, - generic, MultiSignature, + generic, + traits::{Verify, IdentifyAccount}, + MultiSignature, }; -pub use sp_runtime::OpaqueExtrinsic as UncheckedExtrinsic; +/// Opaque types. These are used by the CLI to instantiate machinery that don't need to know +/// the specifics of the runtime. They can then be made to be agnostic over specific formats +/// of data like extrinsics, allowing for them to continue syncing the network through upgrades +/// to even the core data structures. +pub mod opaque { + pub use sp_runtime::{generic, traits::BlakeTwo256, OpaqueExtrinsic as UncheckedExtrinsic}; -/// Opaque block header type. -pub type Header = generic::Header; + pub use super::{BlockNumber, Signature, AccountId, Balance, Index, Hash, AuraId}; -/// Opaque block type. -pub type Block = generic::Block; + /// Opaque block header type. + pub type Header = generic::Header; + + /// Opaque block type. + pub type Block = generic::Block; + + pub trait RuntimeInstance { + type CrossAccountId: pallet_evm::account::CrossAccountId + + Send + + Sync + + 'static; + + type TransactionConverter: fp_rpc::ConvertTransaction + + Send + + Sync + + 'static; + + fn get_transaction_converter() -> Self::TransactionConverter; + } +} pub type SessionHandlers = (); @@ -56,17 +79,3 @@ pub type Hash = sp_core::H256; pub type DigestItem = generic::DigestItem; pub use sp_consensus_aura::sr25519::AuthorityId as AuraId; - -pub trait RuntimeInstance { - type CrossAccountId: pallet_evm::account::CrossAccountId - + Send - + Sync - + 'static; - - type TransactionConverter: fp_rpc::ConvertTransaction - + Send - + Sync - + 'static; - - fn get_transaction_converter() -> Self::TransactionConverter; -} diff --git a/primitives/data-structs/CHANGELOG.md b/primitives/data-structs/CHANGELOG.md new file mode 100644 index 0000000000..03bded8ed1 --- /dev/null +++ b/primitives/data-structs/CHANGELOG.md @@ -0,0 +1,39 @@ +# Change Log + +All notable changes to this project will be documented in this file. 
+ + +## [v0.2.2] 2022-08-16 + +### Other changes + +- build: Upgrade polkadot to v0.9.27 2c498572636f2b34d53b1c51b7283a761a7dc90a + +- build: Upgrade polkadot to v0.9.26 85515e54c4ca1b82a2630034e55dcc804c643bf8 + +- build: Upgrade polkadot to v0.9.25 cdfb9bdc7b205ff1b5134f034ef9973d769e5e6b + +## [v0.2.1] 2022-08-04 + +### Product changes + +- Now RefungibleMultipleItems may only receive single user on type level. + +### Other changes + +- refactor: Disallow invalid bulk mints 53fec71cf728dddd012257b407ea30441e699f88 + +`create_multiple_items_ex` was allowing invalid (that will be always +rejected at runtime level) refungible mint extrinsics, by passing +multiple users into `RefungibleMultipleItems` call. + +## [v0.2.0] - 2022-08-01 +### Deprecated +- `CreateReFungibleData::const_data` + +## [v0.1.2] - 2022-07-25 +### Added +- Type aliases `CollectionName`, `CollectionDescription`, `CollectionTokenPrefix` +## [v0.1.1] - 2022-07-22 +### Added +- Аields with properties to `CreateReFungibleData` and `CreateRefungibleExData`. \ No newline at end of file diff --git a/primitives/data-structs/Cargo.toml b/primitives/data-structs/Cargo.toml index a63ba3d076..5c01ad43fe 100644 --- a/primitives/data-structs/Cargo.toml +++ b/primitives/data-structs/Cargo.toml @@ -6,7 +6,7 @@ edition = "2021" license = 'GPLv3' homepage = "https://unique.network" repository = 'https://github.com/UniqueNetwork/unique-chain' -version = '0.1.0' +version = "0.2.2" [dependencies] scale-info = { version = "2.0.1", default-features = false, features = [ @@ -18,15 +18,16 @@ codec = { package = "parity-scale-codec", version = "3.1.2", default-features = serde = { version = "1.0.130", features = [ 'derive', ], default-features = false, optional = true } -frame-support = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -frame-system = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-core = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-std = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-runtime = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } +frame-support = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +frame-system = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-core = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-std = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-runtime = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } derivative = { version = "2.2.0", features = ["use_core"] } struct-versioning = { path = "../../crates/struct-versioning" } -pallet-evm = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } +pallet-evm = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } rmrk-traits = { default-features = false, path = "../rmrk-traits" } +bondrewd = { version = "0.1.14", features = ["derive"], default-features = false } [features] default = ["std"] diff --git 
a/primitives/data-structs/src/bondrewd_codec.rs b/primitives/data-structs/src/bondrewd_codec.rs new file mode 100644 index 0000000000..94bd4655e2 --- /dev/null +++ b/primitives/data-structs/src/bondrewd_codec.rs @@ -0,0 +1,31 @@ +//! Integration between bondrewd and parity scale codec +//! Maybe we can move it to scale-codec itself in future? + +#[macro_export] +macro_rules! bondrewd_codec { + ($T:ty) => { + impl Encode for $T { + fn encode_to(&self, dest: &mut O) { + dest.write(&self.into_bytes()) + } + } + impl codec::Decode for $T { + fn decode(from: &mut I) -> Result { + let mut bytes = [0; Self::BYTE_SIZE]; + from.read(&mut bytes)?; + Ok(Self::from_bytes(bytes)) + } + } + impl MaxEncodedLen for $T { + fn max_encoded_len() -> usize { + Self::BYTE_SIZE + } + } + impl TypeInfo for $T { + type Identity = [u8; Self::BYTE_SIZE]; + fn type_info() -> scale_info::Type { + <[u8; Self::BYTE_SIZE] as TypeInfo>::type_info() + } + } + }; +} diff --git a/primitives/data-structs/src/bounded.rs b/primitives/data-structs/src/bounded.rs index 244b383854..2049dea618 100644 --- a/primitives/data-structs/src/bounded.rs +++ b/primitives/data-structs/src/bounded.rs @@ -1,3 +1,21 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +//! This module contins implementations for support bounded structures ([`BoundedVec`], [`BoundedBTreeMap`], [`BoundedBTreeSet`]) in [`serde`]. + use core::fmt; use sp_std::collections::{btree_map::BTreeMap, btree_set::BTreeSet}; use sp_std::vec::Vec; @@ -7,7 +25,7 @@ use frame_support::{ storage::{bounded_btree_map::BoundedBTreeMap, bounded_btree_set::BoundedBTreeSet}, }; -/// BoundedVec doesn't supports serde +/// [`serde`] implementations for [`BoundedVec`]. #[cfg(feature = "serde1")] pub mod vec_serde { use core::convert::TryFrom; @@ -39,6 +57,7 @@ pub mod vec_serde { } } +/// Format [`BoundedVec`] for debug output. pub fn vec_debug(v: &BoundedVec, f: &mut fmt::Formatter) -> Result<(), fmt::Error> where V: fmt::Debug, @@ -49,6 +68,7 @@ where #[cfg(feature = "serde1")] #[allow(dead_code)] +/// [`serde`] implementations for [`BoundedBTreeMap`]. pub mod map_serde { use core::convert::TryFrom; use sp_std::collections::btree_map::BTreeMap; @@ -84,6 +104,7 @@ pub mod map_serde { } } +/// Format [`BoundedBTreeMap`] for debug output. pub fn map_debug( v: &BoundedBTreeMap, f: &mut fmt::Formatter, @@ -98,6 +119,7 @@ where #[cfg(feature = "serde1")] #[allow(dead_code)] +/// [`serde`] implementations for [`BoundedBTreeSet`]. pub mod set_serde { use core::convert::TryFrom; use sp_std::collections::btree_set::BTreeSet; @@ -129,6 +151,7 @@ pub mod set_serde { } } +/// Format [`BoundedBTreeSet`] for debug output. 
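// The `bondrewd_codec!` macro added in `bondrewd_codec.rs` above bridges bondrewd bitfields
// and SCALE: the `Bitfields` derive generates `from_bytes`, `into_bytes`, and `BYTE_SIZE`,
// and the macro forwards those packed bytes through Encode/Decode/MaxEncodedLen/TypeInfo.
// Below is a sketch of the kind of struct it targets, in the spirit of the `CollectionFlags`
// defined later in this diff; `ExampleFlags` is hypothetical and the snippet assumes the
// `bondrewd` 0.1 derive crate is available.
use bondrewd::Bitfields;

#[derive(Bitfields, Clone, Copy, PartialEq, Eq, Debug, Default)]
#[bondrewd(enforce_bytes = 1)]
struct ExampleFlags {
    #[bondrewd(bits = "0..1")]
    foreign: bool,
    #[bondrewd(bits = "1..2")]
    erc721metadata: bool,
    #[bondrewd(reserve, bits = "2..8")]
    reserved: u8,
}

fn main() {
    let flags = ExampleFlags { foreign: true, erc721metadata: false, reserved: 0 };
    // The whole struct round-trips through its packed byte form, which is exactly what the
    // generated Encode/Decode impls would move over the wire.
    let bytes = flags.into_bytes();
    assert_eq!(bytes.len(), ExampleFlags::BYTE_SIZE);
    assert_eq!(ExampleFlags::from_bytes(bytes), flags);
    println!("packed flags byte: {:#010b}", bytes[0]);
}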
pub fn set_debug(v: &BoundedBTreeSet, f: &mut fmt::Formatter) -> Result<(), fmt::Error> where K: fmt::Debug + Ord, diff --git a/primitives/data-structs/src/lib.rs b/primitives/data-structs/src/lib.rs index 2e507606f9..6ea7ce09c7 100644 --- a/primitives/data-structs/src/lib.rs +++ b/primitives/data-structs/src/lib.rs @@ -14,6 +14,10 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . +//! # Primitives crate. +//! +//! This crate contains types, traits and constants. + #![cfg_attr(not(feature = "std"), no_std)] use core::{ @@ -32,6 +36,7 @@ use serde::{Serialize, Deserialize}; use sp_core::U256; use sp_runtime::{ArithmeticError, sp_std::prelude::Vec, Permill}; use codec::{Decode, Encode, EncodeLike, MaxEncodedLen}; +use bondrewd::Bitfields; use frame_support::{BoundedVec, traits::ConstU32}; use derivative::Derivative; use scale_info::TypeInfo; @@ -50,43 +55,61 @@ pub use rmrk_traits::{ FixedPart as RmrkFixedPart, SlotPart as RmrkSlotPart, }; +mod bondrewd_codec; mod bounded; pub mod budget; pub mod mapping; mod migration; +/// Maximum of decimal points. pub const MAX_DECIMAL_POINTS: DecimalPoints = 30; + +/// Maximum pieces for refungible token. pub const MAX_REFUNGIBLE_PIECES: u128 = 1_000_000_000_000_000_000_000; pub const MAX_SPONSOR_TIMEOUT: u32 = 10_368_000; +/// Maximum tokens for user. pub const MAX_TOKEN_OWNERSHIP: u32 = if cfg!(not(feature = "limit-testing")) { 100_000 } else { 10 }; + +/// Maximum for collections can be created. pub const COLLECTION_NUMBER_LIMIT: u32 = if cfg!(not(feature = "limit-testing")) { 100_000 } else { 10 }; + +/// Maximum for various custom data of token. pub const CUSTOM_DATA_LIMIT: u32 = if cfg!(not(feature = "limit-testing")) { 2048 } else { 10 }; + +/// Maximum admins per collection. pub const COLLECTION_ADMINS_LIMIT: u32 = 5; + +/// Maximum tokens per collection. pub const COLLECTION_TOKEN_LIMIT: u32 = u32::MAX; + +/// Maximum tokens per account. pub const ACCOUNT_TOKEN_OWNERSHIP_LIMIT: u32 = if cfg!(not(feature = "limit-testing")) { 1_000_000 } else { 10 }; -// Timeouts for item types in passed blocks +/// Default timeout for transfer sponsoring NFT item. pub const NFT_SPONSOR_TRANSFER_TIMEOUT: u32 = 5; +/// Default timeout for transfer sponsoring fungible item. pub const FUNGIBLE_SPONSOR_TRANSFER_TIMEOUT: u32 = 5; +/// Default timeout for transfer sponsoring refungible item. pub const REFUNGIBLE_SPONSOR_TRANSFER_TIMEOUT: u32 = 5; +/// Default timeout for sponsored approving. pub const SPONSOR_APPROVE_TIMEOUT: u32 = 5; // Schema limits @@ -94,27 +117,44 @@ pub const OFFCHAIN_SCHEMA_LIMIT: u32 = 8192; pub const VARIABLE_ON_CHAIN_SCHEMA_LIMIT: u32 = 8192; pub const CONST_ON_CHAIN_SCHEMA_LIMIT: u32 = 32768; +// TODO: not used. Delete? pub const COLLECTION_FIELD_LIMIT: u32 = CONST_ON_CHAIN_SCHEMA_LIMIT; +/// Maximum length for collection name. pub const MAX_COLLECTION_NAME_LENGTH: u32 = 64; + +/// Maximum length for collection description. pub const MAX_COLLECTION_DESCRIPTION_LENGTH: u32 = 256; + +/// Maximal token prefix length. pub const MAX_TOKEN_PREFIX_LENGTH: u32 = 16; +/// Maximal lenght of property key. pub const MAX_PROPERTY_KEY_LENGTH: u32 = 256; + +/// Maximal lenght of property value. pub const MAX_PROPERTY_VALUE_LENGTH: u32 = 32768; + +/// Maximum properties that can be assigned to token. pub const MAX_PROPERTIES_PER_ITEM: u32 = 64; +/// Maximal lenght of extended property value. pub const MAX_AUX_PROPERTY_VALUE_LENGTH: u32 = 2048; +/// Maximum size for all collection properties. 
pub const MAX_COLLECTION_PROPERTIES_SIZE: u32 = 40960; + +/// Maximum size for all token properties. pub const MAX_TOKEN_PROPERTIES_SIZE: u32 = 32768; /// How much items can be created per single -/// create_many call +/// create_many call. pub const MAX_ITEMS_PER_BATCH: u32 = 200; +/// Used for limit bounded types of token custom data. pub type CustomDataLimit = ConstU32; +/// Collection id. #[derive( Encode, Decode, @@ -134,6 +174,7 @@ pub struct CollectionId(pub u32); impl EncodeLike for CollectionId {} impl EncodeLike for u32 {} +/// Token id. #[derive( Encode, Decode, @@ -154,6 +195,9 @@ impl EncodeLike for TokenId {} impl EncodeLike for u32 {} impl TokenId { + /// Try to get next token id. + /// + /// If next id cause overflow, then [`ArithmeticError::Overflow`] returned. pub fn try_next(self) -> Result { self.0 .checked_add(1) @@ -176,13 +220,23 @@ impl TryFrom for TokenId { } } +/// Token data. +#[struct_versioning::versioned(version = 2, upper)] #[derive(Encode, Decode, Clone, PartialEq, TypeInfo)] #[cfg_attr(feature = "serde1", derive(Serialize, Deserialize))] pub struct TokenData { + /// Properties of token. pub properties: Vec, + + /// Token owner. pub owner: Option, + + /// Token pieces. + #[version(2.., upper(0))] + pub pieces: u128, } +// TODO: unused type pub struct OverflowError; impl From for &'static str { fn from(_: OverflowError) -> Self { @@ -190,18 +244,27 @@ impl From for &'static str { } } +/// Alias for decimal points type. pub type DecimalPoints = u8; +/// Collection mode. +/// +/// Collection can represent various types of tokens. +/// Each collection can contain only one type of tokens at a time. +/// This type helps to understand which tokens the collection contains. #[derive(Encode, Decode, Eq, Debug, Clone, PartialEq, TypeInfo, MaxEncodedLen)] #[cfg_attr(feature = "serde1", derive(Serialize, Deserialize))] pub enum CollectionMode { + /// Non fungible tokens. NFT, - // decimal points + /// Fungible tokens. Fungible(DecimalPoints), + /// Refungible tokens. ReFungible, } impl CollectionMode { + /// Get collection mod as number. pub fn id(&self) -> u8 { match self { CollectionMode::NFT => 1, @@ -211,14 +274,18 @@ impl CollectionMode { } } +// TODO: unused trait pub trait SponsoringResolve { fn resolve(who: &AccountId, call: &Call) -> Option; } +/// Access mode for some token operations. #[derive(Encode, Decode, Eq, Debug, Clone, Copy, PartialEq, TypeInfo, MaxEncodedLen)] #[cfg_attr(feature = "serde1", derive(Serialize, Deserialize))] pub enum AccessMode { + /// Access grant for owner and admins. Used as default. Normal, + /// Like a [`Normal`](AccessMode::Normal) but also users in allow list. AllowList, } impl Default for AccessMode { @@ -227,6 +294,7 @@ impl Default for AccessMode { } } +// TODO: remove in future. #[derive(Encode, Decode, Eq, Debug, Clone, PartialEq, TypeInfo, MaxEncodedLen)] #[cfg_attr(feature = "serde1", derive(Serialize, Deserialize))] pub enum SchemaVersion { @@ -239,6 +307,7 @@ impl Default for SchemaVersion { } } +// TODO: unused type #[derive(Encode, Decode, Default, Debug, Clone, PartialEq, TypeInfo)] #[cfg_attr(feature = "serde1", derive(Serialize, Deserialize))] pub struct Ownership { @@ -246,17 +315,21 @@ pub struct Ownership { pub fraction: u128, } +/// The state of collection sponsorship. 
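// A standalone illustration of the `TokenId::try_next` contract shown above: incrementing
// past `u32::MAX` must fail with an overflow error rather than wrap. The error type is
// simplified to a string here; the real method returns `ArithmeticError::Overflow`.
#[derive(Debug, Clone, Copy, PartialEq)]
struct TokenId(u32);

impl TokenId {
    fn try_next(self) -> Result<TokenId, &'static str> {
        self.0.checked_add(1).ok_or("Overflow").map(TokenId)
    }
}

fn main() {
    assert_eq!(TokenId(1).try_next(), Ok(TokenId(2)));
    assert!(TokenId(u32::MAX).try_next().is_err());
}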
#[derive(Encode, Decode, Debug, Clone, PartialEq, TypeInfo, MaxEncodedLen)] #[cfg_attr(feature = "serde1", derive(Serialize, Deserialize))] pub enum SponsorshipState { - /// The fees are applied to the transaction sender + /// The fees are applied to the transaction sender. Disabled, + /// The sponsor is under consideration. Until the sponsor gives his consent, + /// the fee will still be charged to sender. Unconfirmed(AccountId), - /// Transactions are sponsored by specified account + /// Transactions are sponsored by specified account. Confirmed(AccountId), } impl SponsorshipState { + /// Get a sponsor of the collection who has confirmed his status. pub fn sponsor(&self) -> Option<&AccountId> { match self { Self::Confirmed(sponsor) => Some(sponsor), @@ -264,6 +337,7 @@ impl SponsorshipState { } } + /// Get a sponsor of the collection who has pending or confirmed status. pub fn pending_sponsor(&self) -> Option<&AccountId> { match self { Self::Unconfirmed(sponsor) | Self::Confirmed(sponsor) => Some(sponsor), @@ -271,6 +345,7 @@ impl SponsorshipState { } } + /// Whether the sponsorship is confirmed. pub fn confirmed(&self) -> bool { matches!(self, Self::Confirmed(_)) } @@ -282,17 +357,55 @@ impl Default for SponsorshipState { } } -/// Used in storage +pub type CollectionName = BoundedVec>; +pub type CollectionDescription = BoundedVec>; +pub type CollectionTokenPrefix = BoundedVec>; + +#[derive(Bitfields, Clone, Copy, PartialEq, Eq, Debug, Default)] +#[bondrewd(enforce_bytes = 1)] +pub struct CollectionFlags { + /// Tokens in foreign collections can be transferred, but not burnt + #[bondrewd(bits = "0..1")] + pub foreign: bool, + /// Supports ERC721Metadata + #[bondrewd(bits = "1..2")] + pub erc721metadata: bool, + /// External collections can't be managed using `unique` api + #[bondrewd(bits = "7..8")] + pub external: bool, + + #[bondrewd(reserve, bits = "2..7")] + pub reserved: u8, +} +bondrewd_codec!(CollectionFlags); + +/// Base structure for represent collection. +/// +/// Used to provide basic functionality for all types of collections. +/// +/// #### Note +/// Collection parameters, used in storage (see [`RpcCollection`] for the RPC version). #[struct_versioning::versioned(version = 2, upper)] #[derive(Encode, Decode, Clone, PartialEq, TypeInfo, MaxEncodedLen)] pub struct Collection { + /// Collection owner account. pub owner: AccountId, + + /// Collection mode. pub mode: CollectionMode, + + /// Access mode. #[version(..2)] pub access: AccessMode, - pub name: BoundedVec>, - pub description: BoundedVec>, - pub token_prefix: BoundedVec>, + + /// Collection name. + pub name: CollectionName, + + /// Collection description. + pub description: CollectionDescription, + + /// Token prefix. + pub token_prefix: CollectionTokenPrefix, #[version(..2)] pub mint_mode: bool, @@ -302,16 +415,19 @@ pub struct Collection { #[version(..2)] pub schema_version: SchemaVersion, + + /// The state of sponsorship of the collection. pub sponsorship: SponsorshipState, + /// Collection limits. pub limits: CollectionLimits, + /// Collection permissions. #[version(2.., upper(Default::default()))] pub permissions: CollectionPermissions, - /// Marks that this collection is not "unique", and managed from external. 
- #[version(2.., upper(false))] - pub external_collection: bool, + #[version(2.., upper(Default::default()))] + pub flags: CollectionFlags, #[version(..2)] pub variable_on_chain_schema: BoundedVec>, @@ -323,105 +439,233 @@ pub struct Collection { pub meta_update_permission: MetaUpdatePermission, } -/// Used in RPC calls +#[derive(Encode, Decode, Clone, PartialEq, TypeInfo)] +#[cfg_attr(feature = "serde1", derive(Serialize, Deserialize))] +pub struct RpcCollectionFlags { + /// Is collection is foreign. + pub foreign: bool, + /// Collection supports ERC721Metadata. + pub erc721metadata: bool, +} + +/// Collection parameters, used in RPC calls (see [`Collection`] for the storage version). +#[struct_versioning::versioned(version = 2, upper)] #[derive(Encode, Decode, Clone, PartialEq, TypeInfo)] #[cfg_attr(feature = "serde1", derive(Serialize, Deserialize))] pub struct RpcCollection { + /// Collection owner account. pub owner: AccountId, + + /// Collection mode. pub mode: CollectionMode, + + /// Collection name. pub name: Vec, + + /// Collection description. pub description: Vec, + + /// Token prefix. pub token_prefix: Vec, + + /// The state of sponsorship of the collection. pub sponsorship: SponsorshipState, + + /// Collection limits. pub limits: CollectionLimits, + + /// Collection permissions. pub permissions: CollectionPermissions, + + /// Token property permissions. pub token_property_permissions: Vec, + + /// Collection properties. pub properties: Vec, + + /// Is collection read only. pub read_only: bool, + + /// Extra collection flags + #[version(2.., upper(RpcCollectionFlags {foreign: false, erc721metadata: false}))] + pub flags: RpcCollectionFlags, } +/// Data used for create collection. +/// +/// All fields are wrapped in [`Option`], where `None` means chain default. #[derive(Encode, Decode, Clone, PartialEq, TypeInfo, Derivative, MaxEncodedLen)] #[derivative(Debug, Default(bound = ""))] pub struct CreateCollectionData { + /// Collection mode. #[derivative(Default(value = "CollectionMode::NFT"))] pub mode: CollectionMode, + + /// Access mode. pub access: Option, - pub name: BoundedVec>, - pub description: BoundedVec>, - pub token_prefix: BoundedVec>, + + /// Collection name. + pub name: CollectionName, + + /// Collection description. + pub description: CollectionDescription, + + /// Token prefix. + pub token_prefix: CollectionTokenPrefix, + + /// Pending collection sponsor. pub pending_sponsor: Option, + + /// Collection limits. pub limits: Option, + + /// Collection permissions. pub permissions: Option, + + /// Token property permissions. pub token_property_permissions: CollectionPropertiesPermissionsVec, + + /// Collection properties. pub properties: CollectionPropertiesVec, } +/// Bounded vector of properties permissions. Max length is [`MAX_PROPERTIES_PER_ITEM`]. +// TODO: maybe rename to PropertiesPermissionsVec pub type CollectionPropertiesPermissionsVec = BoundedVec>; +/// Bounded vector of properties. Max length is [`MAX_PROPERTIES_PER_ITEM`]. pub type CollectionPropertiesVec = BoundedVec>; -/// All fields are wrapped in `Option`s, where None means chain default -// When adding/removing fields from this struct - don't forget to also update clamp_limits +/// Limits and restrictions of a collection. +/// +/// All fields are wrapped in [`Option`], where `None` means chain default. +/// +/// Update with `pallet_common::Pallet::clamp_limits`. 
+// IMPORTANT: When adding/removing fields from this struct - don't forget to also update `pallet_common::Pallet::clamp_limits`. #[derive(Encode, Decode, Debug, Default, Clone, PartialEq, TypeInfo, MaxEncodedLen)] #[cfg_attr(feature = "serde1", derive(Serialize, Deserialize))] +// When adding/removing fields from this struct - don't forget to also update with `pallet_common::Pallet::clamp_limits`. +// TODO: move `pallet_common::Pallet::clamp_limits` into `impl CollectionLimits`. +// TODO: may be remove [`Option`] and **pub** from fields and create struct with default values. pub struct CollectionLimits { + /// How many tokens can a user have on one account. + /// * Default - [`ACCOUNT_TOKEN_OWNERSHIP_LIMIT`]. + /// * Limit - [`MAX_TOKEN_OWNERSHIP`]. pub account_token_ownership_limit: Option, + + /// How many bytes of data are available for sponsorship. + /// * Default - [`CUSTOM_DATA_LIMIT`]. + /// * Limit - [`CUSTOM_DATA_LIMIT`]. pub sponsored_data_size: Option, - /// FIXME should we delete this or repurpose it? - /// None - setVariableMetadata is not sponsored - /// Some(v) - setVariableMetadata is sponsored - /// if there is v block between txs + // FIXME should we delete this or repurpose it? + /// Defines how often (in blocks) sponsoring of data transactions can occur. + /// + /// If set to `Some(v)`, then **setVariableMetadata** is sponsored if there are `v` blocks between transactions. + /// + /// * Default - [`SponsoringDisabled`](SponsoringRateLimit::SponsoringDisabled). + /// * Limit - [`MAX_SPONSOR_TIMEOUT`]. + /// + /// In any case, chain default: [`SponsoringRateLimit::SponsoringDisabled`] pub sponsored_data_rate_limit: Option, + /// Maximum amount of tokens inside the collection. Chain default: [`COLLECTION_TOKEN_LIMIT`] + + /// How many tokens can be minted into this collection. + /// + /// * Default - [`COLLECTION_TOKEN_LIMIT`]. + /// * Limit - [`COLLECTION_TOKEN_LIMIT`]. pub token_limit: Option, - // Timeouts for item types in passed blocks + /// Timeouts for transfer sponsoring. + /// + /// * Default + /// - **Fungible** - [`FUNGIBLE_SPONSOR_TRANSFER_TIMEOUT`] + /// - **NFT** - [`NFT_SPONSOR_TRANSFER_TIMEOUT`] + /// - **Refungible** - [`REFUNGIBLE_SPONSOR_TRANSFER_TIMEOUT`] + /// * Limit - [`MAX_SPONSOR_TIMEOUT`]. pub sponsor_transfer_timeout: Option, + + /// Timeout (in blocks) for sponsoring an approval. + /// + /// * Default - [`SPONSOR_APPROVE_TIMEOUT`]. + /// * Limit - [`MAX_SPONSOR_TIMEOUT`]. pub sponsor_approve_timeout: Option, + + /// Whether the collection owner can send tokens that belong to other users. + /// + /// * Default - **false**. pub owner_can_transfer: Option, + + /// Can the collection owner burn other people's tokens. + /// + /// * Default - **true**. pub owner_can_destroy: Option, + + /// Whether tokens from this collection can be sent between users. + /// + /// * Default - **true**. pub transfers_enabled: Option, } impl CollectionLimits { + /// Get effective value for [`account_token_ownership_limit`](self.account_token_ownership_limit). pub fn account_token_ownership_limit(&self) -> u32 { self.account_token_ownership_limit .unwrap_or(ACCOUNT_TOKEN_OWNERSHIP_LIMIT) .min(MAX_TOKEN_OWNERSHIP) } + + /// Get effective value for [`sponsored_data_size`](self.sponsored_data_size). pub fn sponsored_data_size(&self) -> u32 { self.sponsored_data_size .unwrap_or(CUSTOM_DATA_LIMIT) .min(CUSTOM_DATA_LIMIT) } + + /// Get effective value for [`token_limit`](self.token_limit).
pub fn token_limit(&self) -> u32 { self.token_limit .unwrap_or(COLLECTION_TOKEN_LIMIT) .min(COLLECTION_TOKEN_LIMIT) } + + // TODO: maybe replace u32 with mode? + /// Get effective value for [`sponsor_transfer_timeout`](self.sponsor_transfer_timeout). pub fn sponsor_transfer_timeout(&self, default: u32) -> u32 { self.sponsor_transfer_timeout .unwrap_or(default) .min(MAX_SPONSOR_TIMEOUT) } + + /// Get effective value for [`sponsor_approve_timeout`](self.sponsor_approve_timeout). pub fn sponsor_approve_timeout(&self) -> u32 { self.sponsor_approve_timeout .unwrap_or(SPONSOR_APPROVE_TIMEOUT) .min(MAX_SPONSOR_TIMEOUT) } + + /// Get effective value for [`owner_can_transfer`](self.owner_can_transfer). pub fn owner_can_transfer(&self) -> bool { self.owner_can_transfer.unwrap_or(false) } + + /// Whether [`owner_can_transfer`](self.owner_can_transfer) is explicitly set. pub fn owner_can_transfer_instaled(&self) -> bool { self.owner_can_transfer.is_some() } + + /// Get effective value for [`owner_can_destroy`](self.owner_can_destroy). pub fn owner_can_destroy(&self) -> bool { self.owner_can_destroy.unwrap_or(true) } + + /// Get effective value for [`transfers_enabled`](self.transfers_enabled). pub fn transfers_enabled(&self) -> bool { self.transfers_enabled.unwrap_or(true) } + + /// Get effective value for [`sponsored_data_rate_limit`](self.sponsored_data_rate_limit). pub fn sponsored_data_rate_limit(&self) -> Option { match self .sponsored_data_rate_limit @@ -433,22 +677,47 @@ impl CollectionLimits { } } -// When adding/removing fields from this struct - don't forget to also update clamp_limits +/// Permissions on certain operations within a collection. +/// +/// Some fields are wrapped in [`Option`], where `None` means chain default. +/// +/// Update with `pallet_common::Pallet::clamp_permissions`. #[derive(Encode, Decode, Debug, Default, Clone, PartialEq, TypeInfo, MaxEncodedLen)] #[cfg_attr(feature = "serde1", derive(Serialize, Deserialize))] +// When adding/removing fields from this struct - don't forget to also update `pallet_common::Pallet::clamp_permissions`. +// TODO: move `pallet_common::Pallet::clamp_permissions` into `impl CollectionPermissions`. pub struct CollectionPermissions { + /// Access mode. + /// + /// * Default - [`AccessMode::Normal`]. pub access: Option, + + /// Minting allowance. + /// + /// * Default - **false**. pub mint_mode: Option, + + /// Permissions for nesting. + /// + /// * Default + /// - `token_owner` - **false** + /// - `collection_admin` - **false** + /// - `restricted` - **None** pub nesting: Option, } impl CollectionPermissions { + /// Get effective value for [`access`](self.access). pub fn access(&self) -> AccessMode { self.access.unwrap_or(AccessMode::Normal) } + + /// Get effective value for [`mint_mode`](self.mint_mode). pub fn mint_mode(&self) -> bool { self.mint_mode.unwrap_or(false) } + + /// Get effective value for [`nesting`](self.nesting). pub fn nesting(&self) -> &NestingPermissions { static DEFAULT: NestingPermissions = NestingPermissions { token_owner: false, @@ -461,8 +730,10 @@ impl CollectionPermissions { } } +/// Inner set of collections that are allowed for nesting. type OwnerRestrictedSetInner = BoundedBTreeSet>;
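All of these getters share one pattern: fall back to the chain default with `unwrap_or`, then clamp to the hard limit with `min`, so callers never see an out-of-range value even if storage contains one. A small sketch of the observable behaviour (the constant and type names are the ones referenced in the doc comments above; it assumes they are publicly exported by the crate):

```rust
use up_data_structs::{AccessMode, CollectionLimits, CollectionPermissions, COLLECTION_TOKEN_LIMIT};

fn main() {
    // Unset fields resolve to the chain defaults...
    let limits = CollectionLimits::default();
    assert_eq!(limits.token_limit(), COLLECTION_TOKEN_LIMIT);
    assert!(limits.owner_can_destroy());
    assert!(!limits.owner_can_transfer());

    // ...and explicitly set values are clamped to the hard limit.
    let greedy = CollectionLimits {
        token_limit: Some(u32::MAX),
        ..Default::default()
    };
    assert_eq!(greedy.token_limit(), COLLECTION_TOKEN_LIMIT);

    // CollectionPermissions resolves effective values the same way.
    let permissions = CollectionPermissions::default();
    assert_eq!(permissions.access(), AccessMode::Normal);
    assert!(!permissions.mint_mode());
}
```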
+/// Wrapper around the set of collections allowed for nesting. #[derive(Encode, Decode, Clone, PartialEq, TypeInfo, MaxEncodedLen, Derivative)] #[cfg_attr(feature = "serde1", derive(Serialize, Deserialize))] #[derivative(Debug)] @@ -471,7 +742,9 @@ pub struct OwnerRestrictedSet( #[derivative(Debug(format_with = "bounded::set_debug"))] pub OwnerRestrictedSetInner, ); + impl OwnerRestrictedSet { + /// Create a new set. pub fn new() -> Self { Self(Default::default()) } @@ -488,54 +761,70 @@ impl core::ops::DerefMut for OwnerRestrictedSet { } } +/// Part of collection permissions; if set, defines who is able to nest tokens into other tokens. #[derive(Encode, Decode, Clone, PartialEq, TypeInfo, MaxEncodedLen, Derivative)] #[cfg_attr(feature = "serde1", derive(Serialize, Deserialize))] #[derivative(Debug)] pub struct NestingPermissions { - /// Owner of token can nest tokens under it + /// Owner of token can nest tokens under it. pub token_owner: bool, - /// Admin of token collection can nest tokens under token + /// Admin of token collection can nest tokens under token. pub collection_admin: bool, - /// If set - only tokens from specified collections can be nested + /// If set - only tokens from specified collections can be nested. pub restricted: Option, #[cfg(feature = "runtime-benchmarks")] - /// Anyone can nest tokens, mutually exclusive with `token_owner`, `admin` + /// Anyone can nest tokens, mutually exclusive with `token_owner`, `admin`. pub permissive: bool, } +/// Enum denominating how often sponsoring can occur if it is enabled. +/// +/// Used for [`collection limits`](CollectionLimits). #[derive(Encode, Decode, Debug, Clone, Copy, PartialEq, TypeInfo, MaxEncodedLen)] #[cfg_attr(feature = "serde1", derive(Serialize, Deserialize))] pub enum SponsoringRateLimit { + /// Sponsoring is disabled, and the collection sponsor will not pay for transactions. SponsoringDisabled, + /// Sponsorship of a transaction type can occur once per the given number of blocks. Blocks(u32), } +/// Data used to describe an NFT at creation. #[derive(Encode, Decode, MaxEncodedLen, Default, PartialEq, Clone, Derivative, TypeInfo)] #[cfg_attr(feature = "serde1", derive(Serialize, Deserialize))] #[derivative(Debug)] pub struct CreateNftData { + /// Key-value pairs used to describe the token as metadata. #[cfg_attr(feature = "serde1", serde(with = "bounded::vec_serde"))] #[derivative(Debug(format_with = "bounded::vec_debug"))] + /// Properties that will be assigned to the created item. pub properties: CollectionPropertiesVec, } +/// Data used to describe a Fungible token at creation. #[derive(Encode, Decode, MaxEncodedLen, Default, Debug, Clone, PartialEq, TypeInfo)] #[cfg_attr(feature = "serde1", derive(Serialize, Deserialize))] pub struct CreateFungibleData { + /// Number of fungible coins minted. pub value: u128, } +/// Data used to describe a Refungible token at creation. #[derive(Encode, Decode, MaxEncodedLen, Default, PartialEq, Clone, Derivative, TypeInfo)] #[cfg_attr(feature = "serde1", derive(Serialize, Deserialize))] #[derivative(Debug)] pub struct CreateReFungibleData { + /// Number of pieces the RFT is split into. + pub pieces: u128, + + /// Key-value pairs used to describe the token as metadata. #[cfg_attr(feature = "serde1", serde(with = "bounded::vec_serde"))] #[derivative(Debug(format_with = "bounded::vec_debug"))] - pub const_data: BoundedVec, - pub pieces: u128, + pub properties: CollectionPropertiesVec, } +// TODO: remove this.
#[derive(Encode, Decode, Debug, Clone, PartialEq, TypeInfo, MaxEncodedLen)] #[cfg_attr(feature = "serde1", derive(Serialize, Deserialize))] pub enum MetaUpdatePermission { @@ -544,58 +833,77 @@ pub enum MetaUpdatePermission { None, } +/// Enum holding the data used for the creation of all three item types. +/// Unified data for creating an item. #[derive(Encode, Decode, MaxEncodedLen, PartialEq, Clone, Debug, TypeInfo)] #[cfg_attr(feature = "serde1", derive(Serialize, Deserialize))] pub enum CreateItemData { + /// Data for creating an NFT. NFT(CreateNftData), + /// Data for creating a Fungible item. Fungible(CreateFungibleData), + /// Data for creating a ReFungible item. ReFungible(CreateReFungibleData), } +/// Extended data for creating an NFT. #[derive(Encode, Decode, MaxEncodedLen, PartialEq, Clone, TypeInfo, Derivative)] #[derivative(Debug)] pub struct CreateNftExData { + /// Properties that will be assigned to the created item. #[derivative(Debug(format_with = "bounded::vec_debug"))] pub properties: CollectionPropertiesVec, + + /// Owner of the created item. pub owner: CrossAccountId, } +/// Extended data for creating a ReFungible item with multiple owners. #[derive(Encode, Decode, MaxEncodedLen, PartialEq, Clone, TypeInfo, Derivative)] #[derivative(Debug(bound = "CrossAccountId: fmt::Debug + Ord"))] -pub struct CreateRefungibleExData { - #[derivative(Debug(format_with = "bounded::vec_debug"))] - pub const_data: BoundedVec, +pub struct CreateRefungibleExMultipleOwners { #[derivative(Debug(format_with = "bounded::map_debug"))] pub users: BoundedBTreeMap>, + #[derivative(Debug(format_with = "bounded::vec_debug"))] + pub properties: CollectionPropertiesVec, +} + +/// Extended data for creating a ReFungible item with a single owner. +#[derive(Encode, Decode, MaxEncodedLen, PartialEq, Clone, TypeInfo, Derivative)] +#[derivative(Debug(bound = "CrossAccountId: fmt::Debug"))] +pub struct CreateRefungibleExSingleOwner { + pub user: CrossAccountId, + pub pieces: u128, + #[derivative(Debug(format_with = "bounded::vec_debug"))] + pub properties: CollectionPropertiesVec, } +/// Unified extended data for creating an item. #[derive(Encode, Decode, MaxEncodedLen, PartialEq, Clone, TypeInfo, Derivative)] #[derivative(Debug(bound = "CrossAccountId: fmt::Debug + Ord"))] pub enum CreateItemExData { + /// Extended data for creating NFTs. NFT( #[derivative(Debug(format_with = "bounded::vec_debug"))] BoundedVec, ConstU32>, ), + + /// Extended data for creating a Fungible item. Fungible( #[derivative(Debug(format_with = "bounded::map_debug"))] BoundedBTreeMap>, ), - /// Many tokens, each may have only one owner + + /// Extended data for creating ReFungible items in the case of + /// many tokens, each of which may have only one owner. RefungibleMultipleItems( #[derivative(Debug(format_with = "bounded::vec_debug"))] - BoundedVec, ConstU32>, + BoundedVec, ConstU32>, ), - /// Single token, which may have many owners - RefungibleMultipleOwners(CreateRefungibleExData), -} -impl CreateItemData { - pub fn data_size(&self) -> usize { - match self { - CreateItemData::ReFungible(data) => data.const_data.len(), - _ => 0, - } - } + /// Extended data for creating a ReFungible item in the case of + /// a single token, which may have many owners. + RefungibleMultipleOwners(CreateRefungibleExMultipleOwners), } impl From for CreateItemData { @@ -616,22 +924,33 @@ impl From for CreateItemData { } }
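Taken together, the hunk above replaces the old `const_data`-carrying `CreateRefungibleExData` with two shapes: `CreateRefungibleExSingleOwner` (used for `RefungibleMultipleItems`, many tokens with one owner each) and `CreateRefungibleExMultipleOwners` (one token whose pieces are split across owners), both carrying the same bounded `properties` vector. A rough construction sketch, assuming the `From` conversions shown above and that the bounded vectors are built via `try_into`:

```rust
use up_data_structs::{CollectionPropertiesVec, CreateItemData, CreateReFungibleData, Property};

// Hypothetical helper: build a one-element bounded properties vector.
fn one_prop(key: &[u8], value: &[u8]) -> CollectionPropertiesVec {
    vec![Property {
        key: key.to_vec().try_into().expect("key fits the length bound"),
        value: value.to_vec().try_into().expect("value fits the length bound"),
    }]
    .try_into()
    .expect("fits MAX_PROPERTIES_PER_ITEM")
}

fn main() {
    // A refungible token split into 100 pieces, described by one property.
    let data: CreateItemData = CreateReFungibleData {
        pieces: 100,
        properties: one_prop(b"tokenData", b"{}"),
    }
    .into();
    assert!(matches!(data, CreateItemData::ReFungible(_)));
}
```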
+/// Token's address, dictated by its collection and token IDs. #[derive(Encode, Decode, MaxEncodedLen, PartialEq, Clone, Debug, TypeInfo)] #[cfg_attr(feature = "serde1", derive(Serialize, Deserialize))] // todo possibly rename to be used generally as an address pair pub struct TokenChild { + /// Token id. pub token: TokenId, + + /// Collection id. pub collection: CollectionId, } +/// Collection statistics. #[derive(Encode, Decode, MaxEncodedLen, PartialEq, Clone, Debug, TypeInfo)] #[cfg_attr(feature = "serde1", derive(Serialize, Deserialize))] pub struct CollectionStats { + /// Number of created items. pub created: u32, + + /// Number of burned items. pub destroyed: u32, + + /// Number of current items. pub alive: u32, } +/// This type works like [`PhantomData`] but supports generating _scale-info_ descriptions used in node metadata. #[derive(Encode, Decode, Clone, Debug)] #[cfg_attr(feature = "std", derive(PartialEq))] pub struct PhantomType(core::marker::PhantomData); @@ -657,22 +976,36 @@ impl MaxEncodedLen for PhantomType { } } +/// Bounded vector of bytes. pub type BoundedBytes = BoundedVec; +/// Extra properties for external collections. pub type AuxPropertyValue = BoundedBytes>; +/// Property key. pub type PropertyKey = BoundedBytes>; + +/// Property value. pub type PropertyValue = BoundedBytes>; +/// Property permission. #[derive(Encode, Decode, TypeInfo, Debug, MaxEncodedLen, PartialEq, Clone)] #[cfg_attr(feature = "serde1", derive(Serialize, Deserialize))] pub struct PropertyPermission { + /// Permission to change the property and property permission. + /// + /// If it is **false**, then the corresponding property cannot be changed even if [`collection_admin`] and [`token_owner`] are **true**. pub mutable: bool, + + /// Change permission for the collection administrator. pub collection_admin: bool, + + /// Permission to change the property for the owner of the token. pub token_owner: bool, } impl PropertyPermission { + /// Creates a property permission that is mutable but restricts changes by the collection admin and the token owner. pub fn none() -> Self { Self { mutable: true, @@ -682,12 +1015,15 @@ impl PropertyPermission { } } +/// A property is a simple key-value record. #[derive(Encode, Decode, Debug, TypeInfo, Clone, PartialEq, MaxEncodedLen)] #[cfg_attr(feature = "serde1", derive(Serialize, Deserialize))] pub struct Property { + /// Property key. #[cfg_attr(feature = "serde1", serde(with = "bounded::vec_serde"))] pub key: PropertyKey, + /// Property value. #[cfg_attr(feature = "serde1", serde(with = "bounded::vec_serde"))] pub value: PropertyValue, } @@ -698,12 +1034,15 @@ impl Into<(PropertyKey, PropertyValue)> for Property { } } +/// Record for a property key permission. #[derive(Encode, Decode, TypeInfo, Debug, MaxEncodedLen, PartialEq, Clone)] #[cfg_attr(feature = "serde1", derive(Serialize, Deserialize))] pub struct PropertyKeyPermission { + /// Key. #[cfg_attr(feature = "serde1", serde(with = "bounded::vec_serde"))] pub key: PropertyKey, + /// Permission. pub permission: PropertyPermission, } @@ -713,15 +1052,35 @@ impl Into<(PropertyKey, PropertyPermission)> for PropertyKeyPermission { } } +/// Errors for property actions. #[derive(Debug)] pub enum PropertiesError { + /// The space allocated for properties has run out. + /// + /// * Limit for a collection - [`MAX_COLLECTION_PROPERTIES_SIZE`]. + /// * Limit for a token - [`MAX_TOKEN_PROPERTIES_SIZE`]. NoSpaceForProperty, + + /// The property limit has been reached. + /// + /// * Limit - [`MAX_PROPERTIES_PER_ITEM`]. PropertyLimitReached, + + /// Property key contains a disallowed character.
InvalidCharacterInPropertyKey, + + /// Property key is too long. + /// + /// * Limit - [`MAX_PROPERTY_KEY_LENGTH`]. PropertyKeyIsTooLong, + + /// Property key is empty. EmptyPropertyKey, } +/// Marker for the scope of a property. +/// +/// A scoped property can't be changed by the user. Used for external collections. #[derive(Encode, Decode, MaxEncodedLen, TypeInfo, PartialEq, Clone, Copy)] pub enum PropertyScope { None, @@ -729,6 +1088,7 @@ pub enum PropertyScope { } impl PropertyScope { + /// Apply the scope to a property key. pub fn apply(self, key: PropertyKey) -> Result { let scope_str: &[u8] = match self { Self::None => return Ok(key), @@ -742,9 +1102,11 @@ impl PropertyScope { } } +/// Trait for operating on properties. pub trait TrySetProperty: Sized { type Value; + /// Try to set a property with the given scope. fn try_scoped_set( &mut self, scope: PropertyScope, @@ -752,6 +1114,7 @@ pub trait TrySetProperty: Sized { value: Self::Value, ) -> Result<(), PropertiesError>; + /// Try to set properties with the given scope from an iterator. fn try_scoped_set_from_iter( &mut self, scope: PropertyScope, @@ -769,10 +1132,12 @@ pub trait TrySetProperty: Sized { Ok(()) } + /// Try to set a property. fn try_set(&mut self, key: PropertyKey, value: Self::Value) -> Result<(), PropertiesError> { self.try_scoped_set(PropertyScope::None, key, value) } + /// Try to set properties from an iterator. fn try_set_from_iter(&mut self, iter: I) -> Result<(), PropertiesError> where I: Iterator, @@ -782,6 +1147,7 @@ pub trait TrySetProperty: Sized { } } +/// Wrapped map for storing properties. #[derive(Encode, Decode, TypeInfo, Derivative, Clone, PartialEq, MaxEncodedLen)] #[derivative(Default(bound = ""))] pub struct PropertiesMap( @@ -789,24 +1155,29 @@ pub struct PropertiesMap( ); impl PropertiesMap { + /// Create a new property map. pub fn new() -> Self { Self(BoundedBTreeMap::new()) } + /// Remove a property from the map. pub fn remove(&mut self, key: &PropertyKey) -> Result, PropertiesError> { Self::check_property_key(key)?; Ok(self.0.remove(key)) } + /// Get the property with the given key from the map. pub fn get(&self, key: &PropertyKey) -> Option<&Value> { self.0.get(key) } + /// Check if the map contains a key. pub fn contains_key(&self, key: &PropertyKey) -> bool { self.0.contains_key(key) } + /// Validate a property key. fn check_property_key(key: &PropertyKey) -> Result<(), PropertiesError> { if key.is_empty() { return Err(PropertiesError::EmptyPropertyKey); @@ -859,8 +1230,10 @@ impl TrySetProperty for PropertiesMap { } } +/// Alias for the property permissions map. pub type PropertiesPermissionMap = PropertiesMap; +/// Wrapper around the properties map that tracks consumed space. #[derive(Encode, Decode, TypeInfo, Clone, PartialEq, MaxEncodedLen)] pub struct Properties { map: PropertiesMap, @@ -869,6 +1242,7 @@ pub struct Properties { } impl Properties { + /// Create a new properties container. pub fn new(space_limit: u32) -> Self { Self { map: PropertiesMap::new(), @@ -877,6 +1251,7 @@ impl Properties { } } + /// Remove the property with the given key. pub fn remove(&mut self, key: &PropertyKey) -> Result, PropertiesError> { let value = self.map.remove(key)?; @@ -888,6 +1263,7 @@ impl Properties { Ok(value) } + /// Get the property with the given key. pub fn get(&self, key: &PropertyKey) -> Option<&PropertyValue> { self.map.get(key) } @@ -927,6 +1303,7 @@ impl TrySetProperty for Properties { } } +/// Utility struct for using in `StorageMap`.
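`Properties` layers byte-space accounting on top of `PropertiesMap`: setting a property charges the key and value sizes against the `space_limit` passed to `new` (surfacing `NoSpaceForProperty` when the budget is exhausted), while `remove` releases the space a property consumed. A small usage sketch, assuming the bounded key/value types can be built with `try_into` as elsewhere in this file:

```rust
use up_data_structs::{
    Properties, PropertiesError, PropertyKey, PropertyValue, TrySetProperty,
    MAX_TOKEN_PROPERTIES_SIZE,
};

fn main() -> Result<(), PropertiesError> {
    let mut props = Properties::new(MAX_TOKEN_PROPERTIES_SIZE);

    let key: PropertyKey = b"royalty".to_vec().try_into().expect("short key");
    let value: PropertyValue = b"5%".to_vec().try_into().expect("short value");

    // `try_set` comes from the `TrySetProperty` trait; invalid keys or an
    // exhausted space budget surface as `PropertiesError` variants.
    props.try_set(key.clone(), value)?;
    assert!(props.get(&key).is_some());

    // Removing the property releases the space it consumed.
    props.remove(&key)?;
    assert!(props.get(&key).is_none());
    Ok(())
}
```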
pub struct CollectionProperties; impl Get for CollectionProperties { @@ -935,6 +1312,7 @@ impl Get for CollectionProperties { } } +/// Utility struct for using in `StorageMap`. pub struct TokenProperties; impl Get for TokenProperties { diff --git a/primitives/data-structs/src/mapping.rs b/primitives/data-structs/src/mapping.rs index 857eec4c70..23d95a3dd2 100644 --- a/primitives/data-structs/src/mapping.rs +++ b/primitives/data-structs/src/mapping.rs @@ -1,3 +1,21 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +//! This module contains mapping between different addresses. + use core::marker::PhantomData; use sp_core::H160; @@ -5,12 +23,19 @@ use sp_core::H160; use crate::{CollectionId, TokenId}; use pallet_evm::account::CrossAccountId; +/// Trait for mapping between token id and some `Address`. pub trait TokenAddressMapping
{ + /// Map token id to `Address`. fn token_to_address(collection: CollectionId, token: TokenId) -> Address; + + /// Map `Address` to token id. fn address_to_token(address: &Address) -> Option<(CollectionId, TokenId)>; + + /// Check is address for token. fn is_token_address(address: &Address) -> bool; } +/// Unit struct for mapping token id to/from *Evm address* represented by [`H160`]. pub struct EvmTokenAddressMapping; /// 0xf8238ccfff8ed887463fd5e00000000100000002 - collection 1, token 2 @@ -46,6 +71,7 @@ impl TokenAddressMapping for EvmTokenAddressMapping { } } +/// Unit struct for mapping token id to/from [`CrossAccountId`]. pub struct CrossTokenAddressMapping(PhantomData); impl> TokenAddressMapping for CrossTokenAddressMapping { diff --git a/primitives/data-structs/src/migration.rs b/primitives/data-structs/src/migration.rs index ab9fdc689f..cbcdbf43cc 100644 --- a/primitives/data-structs/src/migration.rs +++ b/primitives/data-structs/src/migration.rs @@ -38,3 +38,26 @@ fn sponsoring_rate_limit_has_same_encoding_as_option_u32() { test_to_option(SponsoringRateLimit::SponsoringDisabled); test_to_option(SponsoringRateLimit::Blocks(10)); } + +#[test] +fn collection_flags_have_same_encoding_as_bool() { + use crate::CollectionFlags; + use codec::Encode; + + assert_eq!( + true.encode(), + CollectionFlags { + external: true, + ..Default::default() + } + .encode() + ); + assert_eq!( + false.encode(), + CollectionFlags { + external: false, + ..Default::default() + } + .encode() + ); +} diff --git a/primitives/rmrk-rpc/CHANGELOG.md b/primitives/rmrk-rpc/CHANGELOG.md new file mode 100644 index 0000000000..b6bf56646d --- /dev/null +++ b/primitives/rmrk-rpc/CHANGELOG.md @@ -0,0 +1,10 @@ + +## [v0.0.2] 2022-08-16 + +### Other changes + +- build: Upgrade polkadot to v0.9.27 2c498572636f2b34d53b1c51b7283a761a7dc90a + +- build: Upgrade polkadot to v0.9.26 85515e54c4ca1b82a2630034e55dcc804c643bf8 + +- build: Upgrade polkadot to v0.9.25 cdfb9bdc7b205ff1b5134f034ef9973d769e5e6b \ No newline at end of file diff --git a/primitives/rmrk-rpc/Cargo.toml b/primitives/rmrk-rpc/Cargo.toml index 0aeac55339..874fccdc61 100644 --- a/primitives/rmrk-rpc/Cargo.toml +++ b/primitives/rmrk-rpc/Cargo.toml @@ -1,15 +1,17 @@ [package] name = "rmrk-rpc" -version = "0.0.1" +version = "0.0.2" license = "" edition = "2021" [dependencies] -codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -sp-core = { default-features = false, git = 'https://github.com/paritytech/substrate', branch = 'polkadot-v0.9.24' } -sp-std = { default-features = false, git = 'https://github.com/paritytech/substrate', branch = 'polkadot-v0.9.24' } -sp-api = { default-features = false, git = 'https://github.com/paritytech/substrate', branch = 'polkadot-v0.9.24' } -sp-runtime = { default-features = false, git = 'https://github.com/paritytech/substrate', branch = 'polkadot-v0.9.24' } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ + "derive", +] } +sp-core = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-std = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-api = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-runtime = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } serde = { version = "1.0.130", 
default-features = false, features = ["derive"] } rmrk-traits = { default-features = false, path = "../rmrk-traits" } diff --git a/primitives/rmrk-traits/Cargo.toml b/primitives/rmrk-traits/Cargo.toml index bce6d5fee5..6ce88878d9 100644 --- a/primitives/rmrk-traits/Cargo.toml +++ b/primitives/rmrk-traits/Cargo.toml @@ -6,7 +6,7 @@ edition = "2021" license = 'GPLv3' homepage = "https://unique.network" repository = 'https://github.com/UniqueNetwork/unique-chain' -version = '0.1.0' +version = "0.1.0" [dependencies] scale-info = { version = "2.0.1", default-features = false, features = ["derive"] } diff --git a/primitives/rmrk-traits/src/resource.rs b/primitives/rmrk-traits/src/resource.rs index 80dae24e0d..bd75ef03c9 100644 --- a/primitives/rmrk-traits/src/resource.rs +++ b/primitives/rmrk-traits/src/resource.rs @@ -151,13 +151,13 @@ pub enum ResourceTypes { "#) )] pub struct ResourceInfo { - /// id is a 5-character string of reasonable uniqueness. - /// The combination of base ID and resource id should be unique across the entire RMRK - /// ecosystem which + /// ID is a unique identifier for a resource across all those of a single NFT. + /// The combination of a collection ID, an NFT ID, and the resource ID must be + /// unique across the entire RMRK ecosystem. //#[cfg_attr(feature = "std", serde(with = "serialize::vec"))] pub id: ResourceId, - /// Resource + /// Resource type and the accordingly structured data stored pub resource: ResourceTypes, /// If resource is sent to non-rootowned NFT, pending will be false and need to be accepted diff --git a/primitives/rpc/CHANGELOG.md b/primitives/rpc/CHANGELOG.md new file mode 100644 index 0000000000..59bdd2c127 --- /dev/null +++ b/primitives/rpc/CHANGELOG.md @@ -0,0 +1,21 @@ +# Change Log + +All notable changes to this project will be documented in this file. + + +## [v0.1.3] 2022-08-16 + +### Other changes + +- build: Upgrade polkadot to v0.9.27 2c498572636f2b34d53b1c51b7283a761a7dc90a + +- build: Upgrade polkadot to v0.9.26 85515e54c4ca1b82a2630034e55dcc804c643bf8 + +- build: Upgrade polkadot to v0.9.25 cdfb9bdc7b205ff1b5134f034ef9973d769e5e6b + +## [0.1.1] - 2022-07-14 + +### Added + + - Implementation of RPC method `token_owners` returning 10 owners in no particular order. + This was an internal request to improve the web interface and support fractionalization event. 
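Returning to `mapping.rs` above: the comment `0xf8238ccfff8ed887463fd5e00000000100000002 - collection 1, token 2` documents the address layout `EvmTokenAddressMapping` works with, namely a fixed 12-byte prefix followed by the collection id and the token id as big-endian `u32`s. The sketch below only decodes that documented example under this assumption; it does not call the mapping itself, and `hex-literal` is assumed to be available:

```rust
use sp_core::H160;

fn main() {
    // The example address from the doc comment in mapping.rs.
    let address = H160(hex_literal::hex!("f8238ccfff8ed887463fd5e00000000100000002"));
    let bytes = address.as_bytes();

    // Assumed layout: 12-byte prefix | 4-byte collection id (BE) | 4-byte token id (BE).
    let prefix = &bytes[0..12];
    let collection = u32::from_be_bytes(bytes[12..16].try_into().unwrap());
    let token = u32::from_be_bytes(bytes[16..20].try_into().unwrap());

    assert_eq!(prefix, &hex_literal::hex!("f8238ccfff8ed887463fd5e0")[..]);
    assert_eq!(collection, 1);
    assert_eq!(token, 2);
}
```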
diff --git a/primitives/rpc/Cargo.toml b/primitives/rpc/Cargo.toml index 52dcce4e3e..43b647aeff 100644 --- a/primitives/rpc/Cargo.toml +++ b/primitives/rpc/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "up-rpc" -version = "0.1.0" +version = "0.1.3" license = "GPLv3" edition = "2021" @@ -10,11 +10,11 @@ up-data-structs = { default-features = false, path = '../data-structs' } codec = { package = "parity-scale-codec", version = "3.1.2", default-features = false, features = [ "derive", ] } -sp-core = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-std = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-api = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -sp-runtime = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.24" } -pallet-evm = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } +sp-core = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-std = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-api = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-runtime = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +pallet-evm = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } [features] default = ["std"] diff --git a/primitives/rpc/src/lib.rs b/primitives/rpc/src/lib.rs index 9375a545ff..21bf5fefcc 100644 --- a/primitives/rpc/src/lib.rs +++ b/primitives/rpc/src/lib.rs @@ -16,10 +16,13 @@ #![cfg_attr(not(feature = "std"), no_std)] +extern crate alloc; + use up_data_structs::{ CollectionId, TokenId, RpcCollection, CollectionStats, CollectionLimits, Property, - PropertyKeyPermission, TokenData, TokenChild, + PropertyKeyPermission, TokenData, TokenChild, RpcCollectionVersion1, TokenDataVersion1, }; + use sp_std::vec::Vec; use codec::Decode; use sp_runtime::DispatchError; @@ -27,44 +30,70 @@ use sp_runtime::DispatchError; type Result = core::result::Result; sp_api::decl_runtime_apis! { - #[api_version(2)] + #[api_version(3)] + /// Trait for generating the RPC interface. pub trait UniqueApi where AccountId: Decode, CrossAccountId: pallet_evm::account::CrossAccountId, { - #[changed_in(2)] - fn token_owner(collection: CollectionId, token: TokenId) -> Result; - + /// Get tokens owned by an account in a collection. fn account_tokens(collection: CollectionId, account: CrossAccountId) -> Result>; + + /// Get all tokens that exist in a collection. fn collection_tokens(collection: CollectionId) -> Result>; + + /// Check whether a token exists. fn token_exists(collection: CollectionId, token: TokenId) -> Result; + /// Get token owner. fn token_owner(collection: CollectionId, token: TokenId) -> Result>; + + /// Get the real (topmost) owner of a nested token. fn topmost_token_owner(collection: CollectionId, token: TokenId) -> Result>; + + /// Get nested tokens for the specified item. fn token_children(collection: CollectionId, token: TokenId) -> Result>; + /// Get collection properties. fn collection_properties(collection: CollectionId, properties: Option>>) -> Result>; + /// Get token properties.
fn token_properties( collection: CollectionId, token_id: TokenId, properties: Option>> ) -> Result>; + /// Get permissions for token properties. fn property_permissions( collection: CollectionId, properties: Option>> ) -> Result>; + /// Get token data. fn token_data( collection: CollectionId, token_id: TokenId, keys: Option>> ) -> Result>; + #[changed_in(3)] + fn token_data( + collection: CollectionId, + token_id: TokenId, + keys: Option>> + ) -> Result>; + + /// Total number of tokens in collection. fn total_supply(collection: CollectionId) -> Result; + + /// Get account balance for collection (sum of token pieces). fn account_balance(collection: CollectionId, account: CrossAccountId) -> Result; + + /// Get the account balance for the specified token. fn balance(collection: CollectionId, account: CrossAccountId, token: TokenId) -> Result; + + /// Amount of token pieces allowed to be spent from the granting account. fn allowance( collection: CollectionId, sender: CrossAccountId, @@ -72,13 +101,36 @@ sp_api::decl_runtime_apis! { token: TokenId, ) -> Result; + /// Get list of collection admins. fn adminlist(collection: CollectionId) -> Result>; + + /// Get the list of users allowed to mint tokens in the collection. fn allowlist(collection: CollectionId) -> Result>; + + /// Check whether the user is in the allow list (see [`allowlist`]). fn allowed(collection: CollectionId, user: CrossAccountId) -> Result; + + /// Last minted token id. fn last_token_id(collection: CollectionId) -> Result; + + /// Get collection by id. fn collection_by_id(collection: CollectionId) -> Result>>; + + #[changed_in(3)] + fn collection_by_id(collection: CollectionId) -> Result>>; + + /// Get collection stats. fn collection_stats() -> Result; + + /// Get the number of blocks until sponsoring is available. fn next_sponsored(collection: CollectionId, account: CrossAccountId, token: TokenId) -> Result>; + + /// Get effective collection limits. fn effective_collection_limits(collection_id: CollectionId) -> Result>; + + /// Get the total number of pieces of a token.
+ fn total_pieces(collection_id: CollectionId, token_id: TokenId) -> Result>; + + fn token_owners(collection: CollectionId, token: TokenId) -> Result>; } } diff --git a/runtime/common/Cargo.toml b/runtime/common/Cargo.toml deleted file mode 100644 index 2992413835..0000000000 --- a/runtime/common/Cargo.toml +++ /dev/null @@ -1,115 +0,0 @@ -[package] -authors = ['Unique Network '] -description = 'Unique Runtime Common' -edition = '2021' -homepage = 'https://unique.network' -license = 'All Rights Reserved' -name = 'unique-runtime-common' -repository = 'https://github.com/UniqueNetwork/unique-chain' -version = '0.9.24' - -[features] -default = ['std'] -std = [ - 'sp-core/std', - 'sp-std/std', - 'sp-runtime/std', - 'codec/std', - 'frame-support/std', - 'frame-system/std', - 'sp-consensus-aura/std', - 'pallet-common/std', - 'pallet-unique/std', - 'pallet-fungible/std', - 'pallet-nonfungible/std', - 'pallet-refungible/std', - 'up-data-structs/std', - 'pallet-evm/std', - 'fp-rpc/std', -] -runtime-benchmarks = [ - 'sp-runtime/runtime-benchmarks', - 'frame-support/runtime-benchmarks', - 'frame-system/runtime-benchmarks', -] - -[dependencies.sp-core] -default-features = false -git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" - -[dependencies.sp-std] -default-features = false -git = 'https://github.com/paritytech/substrate' -branch = 'polkadot-v0.9.24' - -[dependencies.sp-runtime] -default-features = false -git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" - -[dependencies.codec] -default-features = false -features = ['derive'] -package = 'parity-scale-codec' -version = '3.1.2' - -[dependencies.scale-info] -default-features = false -features = ["derive"] -version = "2.0.1" - -[dependencies.frame-support] -default-features = false -git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" - -[dependencies.frame-system] -default-features = false -git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" - -[dependencies.pallet-common] -default-features = false -path = "../../pallets/common" - -[dependencies.pallet-unique] -default-features = false -path = "../../pallets/unique" - -[dependencies.pallet-fungible] -default-features = false -path = "../../pallets/fungible" - -[dependencies.pallet-nonfungible] -default-features = false -path = "../../pallets/nonfungible" - -[dependencies.pallet-refungible] -default-features = false -path = "../../pallets/refungible" - -[dependencies.pallet-unique-scheduler] -default-features = false -path = "../../pallets/scheduler" - -[dependencies.up-data-structs] -default-features = false -path = "../../primitives/data-structs" - -[dependencies.sp-consensus-aura] -default-features = false -git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" - -[dependencies.fp-rpc] -default-features = false -git = "https://github.com/uniquenetwork/frontier" -branch = "unique-polkadot-v0.9.24" - -[dependencies] -pallet-evm = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } -evm-coder = { default-features = false, path = '../../crates/evm-coder' } -up-sponsorship = { default-features = false, git = "https://github.com/UniqueNetwork/pallet-sponsoring", branch = 'polkadot-v0.9.24' } - -rmrk-rpc = { default-features = false, path = "../../primitives/rmrk-rpc" } diff --git a/runtime/common/config/ethereum.rs b/runtime/common/config/ethereum.rs new file mode 100644 index 0000000000..8ee74a9711 --- /dev/null +++ 
b/runtime/common/config/ethereum.rs @@ -0,0 +1,124 @@ +use sp_core::{U256, H160}; +use frame_support::{ + weights::{Weight, constants::WEIGHT_PER_SECOND}, + traits::{FindAuthor}, + parameter_types, ConsensusEngineId, +}; +use sp_runtime::{RuntimeAppPublic, Perbill}; +use crate::{ + runtime_common::{ + dispatch::CollectionDispatchT, ethereum::sponsoring::EvmSponsorshipHandler, + config::sponsoring::DefaultSponsoringRateLimit, DealWithFees, + }, + Runtime, Aura, Balances, RuntimeEvent, ChainId, +}; +use pallet_evm::{EnsureAddressTruncated, HashedAddressMapping}; +use up_common::constants::*; + +pub type CrossAccountId = pallet_evm::account::BasicCrossAccountId; + +impl pallet_evm::account::Config for Runtime { + type CrossAccountId = CrossAccountId; + type EvmAddressMapping = pallet_evm::HashedAddressMapping; + type EvmBackwardsAddressMapping = fp_evm_mapping::MapBackwardsAddressTruncated; +} + +// Assuming slowest ethereum opcode is SSTORE, with gas price of 20000 as our worst case +// (contract, which only writes a lot of data), +// approximating on top of our real store write weight +parameter_types! { + pub const WritesPerSecond: u64 = WEIGHT_PER_SECOND.ref_time() / ::DbWeight::get().write; + pub const GasPerSecond: u64 = WritesPerSecond::get() * 20000; + pub const WeightPerGas: u64 = WEIGHT_PER_SECOND.ref_time() / GasPerSecond::get(); +} + +/// Limiting EVM execution to 50% of block for substrate users and management tasks +/// EVM transaction consumes more weight than substrate's, so we can't rely on them being +/// scheduled fairly +const EVM_DISPATCH_RATIO: Perbill = Perbill::from_percent(50); +parameter_types! { + pub BlockGasLimit: U256 = U256::from((NORMAL_DISPATCH_RATIO * EVM_DISPATCH_RATIO * MAXIMUM_BLOCK_WEIGHT / WeightPerGas::get()).ref_time()); +} + +pub enum FixedGasWeightMapping {} +impl pallet_evm::GasWeightMapping for FixedGasWeightMapping { + fn gas_to_weight(gas: u64) -> Weight { + Weight::from_ref_time(gas).saturating_mul(WeightPerGas::get()) + } + fn weight_to_gas(weight: Weight) -> u64 { + (weight / WeightPerGas::get()).ref_time() + } +} + +pub struct EthereumFindAuthor(core::marker::PhantomData); +impl> FindAuthor for EthereumFindAuthor { + fn find_author<'a, I>(digests: I) -> Option + where + I: 'a + IntoIterator, + { + if let Some(author_index) = F::find_author(digests) { + let authority_id = Aura::authorities()[author_index as usize].clone(); + return Some(H160::from_slice(&authority_id.to_raw_vec()[4..24])); + } + None + } +} + +impl pallet_evm::Config for Runtime { + type BlockGasLimit = BlockGasLimit; + type FeeCalculator = pallet_configuration::FeeCalculator; + type GasWeightMapping = FixedGasWeightMapping; + type BlockHashMapping = pallet_ethereum::EthereumBlockHashMapping; + type CallOrigin = EnsureAddressTruncated; + type WithdrawOrigin = EnsureAddressTruncated; + type AddressMapping = HashedAddressMapping; + type PrecompilesType = (); + type PrecompilesValue = (); + type Currency = Balances; + type RuntimeEvent = RuntimeEvent; + type OnMethodCall = ( + pallet_evm_migration::OnMethodCall, + pallet_evm_contract_helpers::HelpersOnMethodCall, + CollectionDispatchT, + pallet_unique::eth::CollectionHelpersOnMethodCall, + ); + type OnCreate = pallet_evm_contract_helpers::HelpersOnCreate; + type ChainId = ChainId; + type Runner = pallet_evm::runner::stack::Runner; + type OnChargeTransaction = pallet_evm::EVMCurrencyAdapter; + type TransactionValidityHack = pallet_evm_transaction_payment::TransactionValidityHack; + type FindAuthor = EthereumFindAuthor; +} + +impl 
pallet_evm_migration::Config for Runtime { + type WeightInfo = pallet_evm_migration::weights::SubstrateWeight; +} + +impl pallet_ethereum::Config for Runtime { + type RuntimeEvent = RuntimeEvent; + type StateRoot = pallet_ethereum::IntermediateStateRoot; +} + +parameter_types! { + // 0x842899ECF380553E8a4de75bF534cdf6fBF64049 + pub const HelpersContractAddress: H160 = H160([ + 0x84, 0x28, 0x99, 0xec, 0xf3, 0x80, 0x55, 0x3e, 0x8a, 0x4d, 0xe7, 0x5b, 0xf5, 0x34, 0xcd, 0xf6, 0xfb, 0xf6, 0x40, 0x49, + ]); + + // 0x6c4e9fe1ae37a41e93cee429e8e1881abdcbb54f + pub const EvmCollectionHelpersAddress: H160 = H160([ + 0x6c, 0x4e, 0x9f, 0xe1, 0xae, 0x37, 0xa4, 0x1e, 0x93, 0xce, 0xe4, 0x29, 0xe8, 0xe1, 0x88, 0x1a, 0xbd, 0xcb, 0xb5, 0x4f, + ]); +} + +impl pallet_evm_contract_helpers::Config for Runtime { + type RuntimeEvent = RuntimeEvent; + type ContractAddress = HelpersContractAddress; + type DefaultSponsoringRateLimit = DefaultSponsoringRateLimit; +} + +impl pallet_evm_coder_substrate::Config for Runtime {} + +impl pallet_evm_transaction_payment::Config for Runtime { + type EvmSponsorshipHandler = EvmSponsorshipHandler; +} diff --git a/runtime/common/config/mod.rs b/runtime/common/config/mod.rs new file mode 100644 index 0000000000..ebebc43c32 --- /dev/null +++ b/runtime/common/config/mod.rs @@ -0,0 +1,23 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +pub mod ethereum; +pub mod orml; +pub mod pallets; +pub mod parachain; +pub mod sponsoring; +pub mod substrate; +pub mod xcm; diff --git a/runtime/common/config/orml.rs b/runtime/common/config/orml.rs new file mode 100644 index 0000000000..28b96b0b16 --- /dev/null +++ b/runtime/common/config/orml.rs @@ -0,0 +1,144 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . 
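Looking back at `ethereum.rs` above: the three `parameter_types!` constants tie EVM gas directly to benchmarked storage-write weight, so `WeightPerGas` is simply `WEIGHT_PER_SECOND / (WritesPerSecond * 20000)`, and `FixedGasWeightMapping` multiplies or divides by that one factor. A worked sketch of the arithmetic with made-up figures (the real values come from the runtime's benchmarked `DbWeight`):

```rust
fn main() {
    // Illustrative numbers only, not the runtime's benchmarked constants.
    let weight_per_second: u64 = 1_000_000_000_000; // reference-time units per second
    let db_write_weight: u64 = 100_000_000; // cost of one storage write

    // Mirrors the parameter_types! block in ethereum.rs.
    let writes_per_second = weight_per_second / db_write_weight; // 10_000
    let gas_per_second = writes_per_second * 20_000; // SSTORE worst case: ~20_000 gas
    let weight_per_gas = weight_per_second / gas_per_second; // 5_000

    // FixedGasWeightMapping is then a plain scale factor in both directions.
    let gas: u64 = 21_000;
    let weight = gas * weight_per_gas;
    assert_eq!(weight / weight_per_gas, gas);

    println!("1 gas = {weight_per_gas} weight, 21000 gas = {weight} weight");
}
```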
+ +use frame_support::{ + parameter_types, + traits::{Contains, Everything}, +}; +use frame_system::EnsureSigned; +use orml_traits::{location::AbsoluteReserveProvider, parameter_type_with_key}; +use sp_runtime::traits::Convert; +use xcm::v1::{Junction::*, Junctions::*, MultiLocation, NetworkId}; +use xcm::latest::Weight; +use xcm_builder::LocationInverter; +use xcm_executor::XcmExecutor; +use sp_std::{vec, vec::Vec}; +use pallet_foreign_assets::{CurrencyId, NativeCurrency}; +use crate::{ + Runtime, RuntimeEvent, RelayChainBlockNumberProvider, + runtime_common::config::{ + xcm::{ + SelfLocation, Weigher, XcmConfig, Ancestry, + xcm_assets::{CurrencyIdConvert}, + }, + pallets::TreasuryAccountId, + substrate::{MaxLocks, MaxReserves}, + }, +}; + +use up_common::{ + types::{AccountId, Balance}, + constants::*, +}; + +// Signed version of balance +pub type Amount = i128; + +parameter_types! { + pub const MinVestedTransfer: Balance = 10 * UNIQUE; + pub const MaxVestingSchedules: u32 = 28; + + pub const BaseXcmWeight: Weight = 100_000_000; // TODO: recheck this + pub const MaxAssetsForTransfer: usize = 2; +} + +parameter_type_with_key! { + pub ParachainMinFee: |_location: MultiLocation| -> Option { + Some(100_000_000_000) + }; +} + +parameter_type_with_key! { + pub ExistentialDeposits: |currency_id: CurrencyId| -> Balance { + match currency_id { + CurrencyId::NativeAssetId(symbol) => match symbol { + NativeCurrency::Here => 0, + NativeCurrency::Parent=> 0, + }, + _ => 100_000 + } + }; +} + +pub fn get_all_module_accounts() -> Vec { + vec![TreasuryAccountId::get()] +} + +pub struct DustRemovalWhitelist; +impl Contains for DustRemovalWhitelist { + fn contains(a: &AccountId) -> bool { + get_all_module_accounts().contains(a) + } +} + +pub struct AccountIdToMultiLocation; +impl Convert for AccountIdToMultiLocation { + fn convert(account: AccountId) -> MultiLocation { + X1(AccountId32 { + network: NetworkId::Any, + id: account.into(), + }) + .into() + } +} + +impl orml_vesting::Config for Runtime { + type RuntimeEvent = RuntimeEvent; + type Currency = pallet_balances::Pallet; + type MinVestedTransfer = MinVestedTransfer; + type VestedTransferOrigin = EnsureSigned; + type WeightInfo = (); + type MaxVestingSchedules = MaxVestingSchedules; + type BlockNumberProvider = RelayChainBlockNumberProvider; +} + +impl orml_tokens::Config for Runtime { + type RuntimeEvent = RuntimeEvent; + type Balance = Balance; + type Amount = Amount; + type CurrencyId = CurrencyId; + type WeightInfo = (); + type ExistentialDeposits = ExistentialDeposits; + type OnDust = orml_tokens::TransferDust; + type OnSlash = (); + type OnTransfer = (); + type OnDeposit = (); + type MaxLocks = MaxLocks; + type MaxReserves = MaxReserves; + // TODO: Add all module accounts + type DustRemovalWhitelist = DustRemovalWhitelist; + /// The id type for named reserves. 
+ type ReserveIdentifier = (); + type OnNewTokenAccount = (); + type OnKilledTokenAccount = (); +} + +impl orml_xtokens::Config for Runtime { + type RuntimeEvent = RuntimeEvent; + type Balance = Balance; + type CurrencyId = CurrencyId; + type CurrencyIdConvert = CurrencyIdConvert; + type AccountIdToMultiLocation = AccountIdToMultiLocation; + type SelfLocation = SelfLocation; + type XcmExecutor = XcmExecutor>; + type Weigher = Weigher; + type BaseXcmWeight = BaseXcmWeight; + type LocationInverter = LocationInverter; + type MaxAssetsForTransfer = MaxAssetsForTransfer; + type MinXcmFee = ParachainMinFee; + type MultiLocationsFilter = Everything; + type ReserveProvider = AbsoluteReserveProvider; +} diff --git a/runtime/common/config/pallets/app_promotion.rs b/runtime/common/config/pallets/app_promotion.rs new file mode 100644 index 0000000000..e59bb1743f --- /dev/null +++ b/runtime/common/config/pallets/app_promotion.rs @@ -0,0 +1,63 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +use crate::{ + runtime_common::config::pallets::{TreasuryAccountId, RelayChainBlockNumberProvider}, + Runtime, Balances, BlockNumber, Unique, RuntimeEvent, EvmContractHelpers, +}; + +use frame_support::{parameter_types, PalletId}; +use sp_arithmetic::Perbill; +use up_common::{ + constants::{UNIQUE, RELAY_DAYS}, + types::Balance, +}; + +#[cfg(all(not(feature = "unique-runtime"), not(feature = "quartz-runtime")))] +parameter_types! { + pub const AppPromotionId: PalletId = PalletId(*b"appstake"); + pub const RecalculationInterval: BlockNumber = 20; + pub const PendingInterval: BlockNumber = 10; + pub const Nominal: Balance = UNIQUE; + // pub const Day: BlockNumber = DAYS; + pub IntervalIncome: Perbill = Perbill::from_rational(RecalculationInterval::get(), RELAY_DAYS) * Perbill::from_rational(5u32, 10_000); +} + +#[cfg(any(feature = "unique-runtime", feature = "quartz-runtime"))] +parameter_types! 
{ + pub const AppPromotionId: PalletId = PalletId(*b"appstake"); + pub const RecalculationInterval: BlockNumber = RELAY_DAYS; + pub const PendingInterval: BlockNumber = 7 * RELAY_DAYS; + pub const Nominal: Balance = UNIQUE; + // pub const Day: BlockNumber = RELAY_DAYS; + pub IntervalIncome: Perbill = Perbill::from_rational(5u32, 10_000); +} + +impl pallet_app_promotion::Config for Runtime { + type PalletId = AppPromotionId; + type CollectionHandler = Unique; + type ContractHandler = EvmContractHelpers; + type Currency = Balances; + type WeightInfo = pallet_app_promotion::weights::SubstrateWeight; + type TreasuryAccountId = TreasuryAccountId; + type RelayBlockNumberProvider = RelayChainBlockNumberProvider; + type RecalculationInterval = RecalculationInterval; + type PendingInterval = PendingInterval; + // type Day = Day; + type Nominal = Nominal; + type IntervalIncome = IntervalIncome; + type RuntimeEvent = RuntimeEvent; +} diff --git a/runtime/common/config/pallets/foreign_asset.rs b/runtime/common/config/pallets/foreign_asset.rs new file mode 100644 index 0000000000..a93e3679fc --- /dev/null +++ b/runtime/common/config/pallets/foreign_asset.rs @@ -0,0 +1,9 @@ +use crate::{Runtime, RuntimeEvent, Balances}; +use up_common::types::AccountId; + +impl pallet_foreign_assets::Config for Runtime { + type RuntimeEvent = RuntimeEvent; + type Currency = Balances; + type RegisterOrigin = frame_system::EnsureRoot; + type WeightInfo = pallet_foreign_assets::weights::SubstrateWeight; +} diff --git a/runtime/common/config/pallets/mod.rs b/runtime/common/config/pallets/mod.rs new file mode 100644 index 0000000000..21efae6b35 --- /dev/null +++ b/runtime/common/config/pallets/mod.rs @@ -0,0 +1,110 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +use frame_support::parameter_types; +use sp_runtime::traits::AccountIdConversion; +use crate::{ + runtime_common::{ + dispatch::CollectionDispatchT, + config::{substrate::TreasuryModuleId, ethereum::EvmCollectionHelpersAddress}, + weights::CommonWeights, + RelayChainBlockNumberProvider, + }, + Runtime, RuntimeEvent, RuntimeCall, Balances, +}; +use frame_support::traits::{ConstU32, ConstU64}; +use up_common::{ + types::{AccountId, Balance, BlockNumber}, + constants::*, +}; +use up_data_structs::{ + mapping::{EvmTokenAddressMapping, CrossTokenAddressMapping}, +}; + +#[cfg(feature = "rmrk")] +pub mod rmrk; + +#[cfg(feature = "scheduler")] +pub mod scheduler; + +#[cfg(feature = "foreign-assets")] +pub mod foreign_asset; + +#[cfg(feature = "app-promotion")] +pub mod app_promotion; + +parameter_types! 
{ + pub TreasuryAccountId: AccountId = TreasuryModuleId::get().into_account_truncating(); + pub const CollectionCreationPrice: Balance = 2 * UNIQUE; +} + +impl pallet_common::Config for Runtime { + type WeightInfo = pallet_common::weights::SubstrateWeight; + type RuntimeEvent = RuntimeEvent; + type Currency = Balances; + type CollectionCreationPrice = CollectionCreationPrice; + type TreasuryAccountId = TreasuryAccountId; + type CollectionDispatch = CollectionDispatchT; + + type EvmTokenAddressMapping = EvmTokenAddressMapping; + type CrossTokenAddressMapping = CrossTokenAddressMapping; + type ContractAddress = EvmCollectionHelpersAddress; +} + +impl pallet_structure::Config for Runtime { + type RuntimeEvent = RuntimeEvent; + type RuntimeCall = RuntimeCall; + type WeightInfo = pallet_structure::weights::SubstrateWeight; +} + +impl pallet_fungible::Config for Runtime { + type WeightInfo = pallet_fungible::weights::SubstrateWeight; +} +impl pallet_refungible::Config for Runtime { + type WeightInfo = pallet_refungible::weights::SubstrateWeight; +} +impl pallet_nonfungible::Config for Runtime { + type WeightInfo = pallet_nonfungible::weights::SubstrateWeight; +} + +parameter_types! { + pub const InflationBlockInterval: BlockNumber = 100; // every time per how many blocks inflation is applied +} + +/// Used for the pallet inflation +impl pallet_inflation::Config for Runtime { + type Currency = Balances; + type TreasuryAccountId = TreasuryAccountId; + type InflationBlockInterval = InflationBlockInterval; + type BlockNumberProvider = RelayChainBlockNumberProvider; +} + +impl pallet_unique::Config for Runtime { + type RuntimeEvent = RuntimeEvent; + type WeightInfo = pallet_unique::weights::SubstrateWeight; + type CommonWeightInfo = CommonWeights; + type RefungibleExtensionsWeightInfo = CommonWeights; +} + +impl pallet_configuration::Config for Runtime { + type DefaultWeightToFeeCoefficient = ConstU32<{ up_common::constants::WEIGHT_TO_FEE_COEFF }>; + type DefaultMinGasPrice = ConstU64<{ up_common::constants::MIN_GAS_PRICE }>; +} + +impl pallet_maintenance::Config for Runtime { + type RuntimeEvent = RuntimeEvent; + type WeightInfo = pallet_maintenance::weights::SubstrateWeight; +} diff --git a/runtime/common/config/pallets/rmrk.rs b/runtime/common/config/pallets/rmrk.rs new file mode 100644 index 0000000000..0f5cfcd066 --- /dev/null +++ b/runtime/common/config/pallets/rmrk.rs @@ -0,0 +1,27 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . 
+ +use crate::{Runtime, RuntimeEvent}; + +impl pallet_proxy_rmrk_core::Config for Runtime { + type WeightInfo = pallet_proxy_rmrk_core::weights::SubstrateWeight; + type RuntimeEvent = RuntimeEvent; +} + +impl pallet_proxy_rmrk_equip::Config for Runtime { + type WeightInfo = pallet_proxy_rmrk_equip::weights::SubstrateWeight; + type RuntimeEvent = RuntimeEvent; +} diff --git a/runtime/common/config/pallets/scheduler.rs b/runtime/common/config/pallets/scheduler.rs new file mode 100644 index 0000000000..fb5dd293a3 --- /dev/null +++ b/runtime/common/config/pallets/scheduler.rs @@ -0,0 +1,59 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +use frame_support::{traits::PrivilegeCmp, weights::Weight, parameter_types}; +use frame_system::EnsureSigned; +use sp_runtime::Perbill; +use sp_std::cmp::Ordering; +use crate::{ + runtime_common::{scheduler::SchedulerPaymentExecutor, config::substrate::RuntimeBlockWeights}, + Runtime, Call, Event, Origin, OriginCaller, Balances, +}; +use up_common::types::AccountId; + +parameter_types! { + pub MaximumSchedulerWeight: Weight = Perbill::from_percent(50) * + RuntimeBlockWeights::get().max_block; + pub const MaxScheduledPerBlock: u32 = 50; + + pub const NoPreimagePostponement: Option = Some(10); + pub const Preimage: Option = Some(10); +} + +/// Used the compare the privilege of an origin inside the scheduler. +pub struct OriginPrivilegeCmp; + +impl PrivilegeCmp for OriginPrivilegeCmp { + fn cmp_privilege(_left: &OriginCaller, _right: &OriginCaller) -> Option { + Some(Ordering::Equal) + } +} + +impl pallet_unique_scheduler::Config for Runtime { + type RuntimeEvent = RuntimeEvent; + type RuntimeOrigin = RuntimeOrigin; + type Currency = Balances; + type PalletsOrigin = OriginCaller; + type RuntimeCall = RuntimeCall; + type MaximumWeight = MaximumSchedulerWeight; + type ScheduleOrigin = EnsureSigned; + type MaxScheduledPerBlock = MaxScheduledPerBlock; + type WeightInfo = (); + type CallExecutor = SchedulerPaymentExecutor; + type OriginPrivilegeCmp = OriginPrivilegeCmp; + type PreimageProvider = (); + type NoPreimagePostponement = NoPreimagePostponement; +} diff --git a/runtime/common/config/parachain.rs b/runtime/common/config/parachain.rs new file mode 100644 index 0000000000..5618e4bb17 --- /dev/null +++ b/runtime/common/config/parachain.rs @@ -0,0 +1,45 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. 
+ +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +use frame_support::{weights::Weight, parameter_types}; +use crate::{Runtime, RuntimeEvent, XcmpQueue, DmpQueue}; +use up_common::constants::*; + +parameter_types! { + pub const ReservedDmpWeight: Weight = MAXIMUM_BLOCK_WEIGHT.saturating_div(4); + pub const ReservedXcmpWeight: Weight = MAXIMUM_BLOCK_WEIGHT.saturating_div(4); +} + +impl cumulus_pallet_parachain_system::Config for Runtime { + type RuntimeEvent = RuntimeEvent; + type SelfParaId = parachain_info::Pallet; + type OnSystemEvent = (); + // type DownwardMessageHandlers = cumulus_primitives_utility::UnqueuedDmpAsParent< + // MaxDownwardMessageWeight, + // XcmExecutor, + // Call, + // >; + type OutboundXcmpMessageSource = XcmpQueue; + type DmpMessageHandler = DmpQueue; + type ReservedDmpWeight = ReservedDmpWeight; + type ReservedXcmpWeight = ReservedXcmpWeight; + type XcmpMessageHandler = XcmpQueue; + type CheckAssociatedRelayNumber = cumulus_pallet_parachain_system::RelayNumberStrictlyIncreases; +} + +impl parachain_info::Config for Runtime {} + +impl cumulus_pallet_aura_ext::Config for Runtime {} diff --git a/runtime/common/config/sponsoring.rs b/runtime/common/config/sponsoring.rs new file mode 100644 index 0000000000..be421b40d3 --- /dev/null +++ b/runtime/common/config/sponsoring.rs @@ -0,0 +1,37 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +use crate::{ + runtime_common::{sponsoring::UniqueSponsorshipHandler}, + Runtime, +}; +use frame_support::parameter_types; +use sp_core::U256; +use up_common::{constants::*, types::BlockNumber}; + +parameter_types! { + pub const DefaultSponsoringRateLimit: BlockNumber = 1 * DAYS; + pub const DefaultSponsoringFeeLimit: U256 = U256::MAX; +} + +type SponsorshipHandler = ( + UniqueSponsorshipHandler, + pallet_evm_transaction_payment::BridgeSponsorshipHandler, +); + +impl pallet_charge_transaction::Config for Runtime { + type SponsorshipHandler = SponsorshipHandler; +} diff --git a/runtime/common/config/substrate.rs b/runtime/common/config/substrate.rs new file mode 100644 index 0000000000..0b4362e391 --- /dev/null +++ b/runtime/common/config/substrate.rs @@ -0,0 +1,218 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. 
+ +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +use frame_support::{ + traits::{Everything, ConstU32, NeverEnsureOrigin}, + weights::{ + constants::{BlockExecutionWeight, ExtrinsicBaseWeight, RocksDbWeight}, + ConstantMultiplier, + }, + dispatch::DispatchClass, + parameter_types, PalletId, +}; +use sp_runtime::{ + generic, + traits::{BlakeTwo256, AccountIdLookup}, + Perbill, Permill, Percent, +}; +use frame_system::{ + limits::{BlockLength, BlockWeights}, + EnsureRoot, +}; +use crate::{ + runtime_common::DealWithFees, Runtime, RuntimeEvent, RuntimeCall, RuntimeOrigin, PalletInfo, + System, Balances, Treasury, SS58Prefix, Version, +}; +use up_common::{types::*, constants::*}; + +parameter_types! { + pub const BlockHashCount: BlockNumber = 2400; + pub RuntimeBlockLength: BlockLength = + BlockLength::max_with_normal_ratio(5 * 1024 * 1024, NORMAL_DISPATCH_RATIO); + pub const AvailableBlockRatio: Perbill = Perbill::from_percent(75); + pub const MaximumBlockLength: u32 = 5 * 1024 * 1024; + pub RuntimeBlockWeights: BlockWeights = BlockWeights::builder() + .base_block(BlockExecutionWeight::get()) + .for_class(DispatchClass::all(), |weights| { + weights.base_extrinsic = ExtrinsicBaseWeight::get(); + }) + .for_class(DispatchClass::Normal, |weights| { + weights.max_total = Some(NORMAL_DISPATCH_RATIO * MAXIMUM_BLOCK_WEIGHT); + }) + .for_class(DispatchClass::Operational, |weights| { + weights.max_total = Some(MAXIMUM_BLOCK_WEIGHT); + // Operational transactions have some extra reserved space, so that they + // are included even if block reached `MAXIMUM_BLOCK_WEIGHT`. + weights.reserved = Some( + MAXIMUM_BLOCK_WEIGHT - NORMAL_DISPATCH_RATIO * MAXIMUM_BLOCK_WEIGHT + ); + }) + .avg_block_initialization(AVERAGE_ON_INITIALIZE_RATIO) + .build_or_panic(); +} + +impl frame_system::Config for Runtime { + /// The data to be stored in an account. + type AccountData = pallet_balances::AccountData; + /// The identifier used to distinguish between accounts. + type AccountId = AccountId; + /// The basic call filter to use in dispatchable. + type BaseCallFilter = Everything; + /// Maximum number of block number to block hash mappings to keep (oldest pruned first). + type BlockHashCount = BlockHashCount; + /// The maximum length of a block (in bytes). + type BlockLength = RuntimeBlockLength; + /// The index type for blocks. + type BlockNumber = BlockNumber; + /// The weight of the overhead invoked on the block import process, independent of the extrinsics included in that block. + type BlockWeights = RuntimeBlockWeights; + /// The aggregated dispatch type that is available for extrinsics. + type RuntimeCall = RuntimeCall; + /// The weight of database operations that the runtime can invoke. + type DbWeight = RocksDbWeight; + /// The ubiquitous event type. + type RuntimeEvent = RuntimeEvent; + /// The type for hashing blocks and tries. + type Hash = Hash; + /// The hashing algorithm used. + type Hashing = BlakeTwo256; + /// The header type. + type Header = generic::Header; + /// The index type for storing how many extrinsics an account has signed. + type Index = Index; + /// The lookup mechanism to get account ID from whatever is passed in dispatchers. 
+ type Lookup = AccountIdLookup; + /// What to do if an account is fully reaped from the system. + type OnKilledAccount = (); + /// What to do if a new account is created. + type OnNewAccount = (); + type OnSetCode = cumulus_pallet_parachain_system::ParachainSetCode; + /// The ubiquitous origin type. + type RuntimeOrigin = RuntimeOrigin; + /// This type is being generated by `construct_runtime!`. + type PalletInfo = PalletInfo; + /// This is used as an identifier of the chain. 42 is the generic substrate prefix. + type SS58Prefix = SS58Prefix; + /// Weight information for the extrinsics of this pallet. + type SystemWeightInfo = frame_system::weights::SubstrateWeight; + /// Version of the runtime. + type Version = Version; + type MaxConsumers = ConstU32<16>; +} + +impl pallet_randomness_collective_flip::Config for Runtime {} + +parameter_types! { + pub const MinimumPeriod: u64 = SLOT_DURATION / 2; +} + +impl pallet_timestamp::Config for Runtime { + /// A timestamp: milliseconds since the unix epoch. + type Moment = u64; + type OnTimestampSet = (); + type MinimumPeriod = MinimumPeriod; + type WeightInfo = (); +} + +parameter_types! { + // pub const ExistentialDeposit: u128 = 500; + pub const ExistentialDeposit: u128 = 0; + pub const MaxLocks: u32 = 50; + pub const MaxReserves: u32 = 50; +} + +impl pallet_balances::Config for Runtime { + type MaxLocks = MaxLocks; + type MaxReserves = MaxReserves; + type ReserveIdentifier = [u8; 16]; + /// The type for recording an account's balance. + type Balance = Balance; + /// The ubiquitous event type. + type RuntimeEvent = RuntimeEvent; + type DustRemoval = Treasury; + type ExistentialDeposit = ExistentialDeposit; + type AccountStore = System; + type WeightInfo = pallet_balances::weights::SubstrateWeight; +} + +parameter_types! { + /// This value increases the priority of `Operational` transactions by adding + /// a "virtual tip" that's equal to the `OperationalFeeMultiplier * final_fee`. + pub const OperationalFeeMultiplier: u8 = 5; +} + +impl pallet_transaction_payment::Config for Runtime { + type RuntimeEvent = RuntimeEvent; + type OnChargeTransaction = pallet_transaction_payment::CurrencyAdapter; + type LengthToFee = ConstantMultiplier; + type OperationalFeeMultiplier = OperationalFeeMultiplier; + type WeightToFee = pallet_configuration::WeightToFee; + type FeeMultiplierUpdate = (); +} + +parameter_types! 
{ + pub const ProposalBond: Permill = Permill::from_percent(5); + pub const ProposalBondMinimum: Balance = 1 * UNIQUE; + pub const ProposalBondMaximum: Balance = 1000 * UNIQUE; + pub const SpendPeriod: BlockNumber = 5 * MINUTES; + pub const Burn: Permill = Permill::from_percent(0); + pub const TipCountdown: BlockNumber = 1 * DAYS; + pub const TipFindersFee: Percent = Percent::from_percent(20); + pub const TipReportDepositBase: Balance = 1 * UNIQUE; + pub const DataDepositPerByte: Balance = 1 * CENTIUNIQUE; + pub const BountyDepositBase: Balance = 1 * UNIQUE; + pub const BountyDepositPayoutDelay: BlockNumber = 1 * DAYS; + pub const TreasuryModuleId: PalletId = PalletId(*b"py/trsry"); + pub const BountyUpdatePeriod: BlockNumber = 14 * DAYS; + pub const MaximumReasonLength: u32 = 16384; + pub const BountyCuratorDeposit: Permill = Permill::from_percent(50); + pub const BountyValueMinimum: Balance = 5 * UNIQUE; + pub const MaxApprovals: u32 = 100; +} + +impl pallet_treasury::Config for Runtime { + type PalletId = TreasuryModuleId; + type Currency = Balances; + type ApproveOrigin = EnsureRoot; + type RejectOrigin = EnsureRoot; + type SpendOrigin = NeverEnsureOrigin; + type RuntimeEvent = RuntimeEvent; + type OnSlash = (); + type ProposalBond = ProposalBond; + type ProposalBondMinimum = ProposalBondMinimum; + type ProposalBondMaximum = ProposalBondMaximum; + type SpendPeriod = SpendPeriod; + type Burn = Burn; + type BurnDestination = (); + type SpendFunds = (); + type WeightInfo = pallet_treasury::weights::SubstrateWeight; + type MaxApprovals = MaxApprovals; +} + +impl pallet_sudo::Config for Runtime { + type RuntimeEvent = RuntimeEvent; + type RuntimeCall = RuntimeCall; +} + +parameter_types! { + pub const MaxAuthorities: u32 = 100_000; +} + +impl pallet_aura::Config for Runtime { + type AuthorityId = AuraId; + type DisabledValidators = (); + type MaxAuthorities = MaxAuthorities; +} diff --git a/runtime/common/config/xcm/foreignassets.rs b/runtime/common/config/xcm/foreignassets.rs new file mode 100644 index 0000000000..4e6a299a02 --- /dev/null +++ b/runtime/common/config/xcm/foreignassets.rs @@ -0,0 +1,198 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . 
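The treasury configuration above derives its account from `TreasuryModuleId` rather than from any keypair; `TreasuryAccountId` in `config/pallets/mod.rs` performs the same derivation. A self-contained sketch, using `AccountId32` in place of the runtime's `AccountId` alias:

```rust
// Sketch only: a PalletId is turned into a key-less account by encoding the
// PalletId type-id b"modl" plus the 8-byte id, zero-padded and truncated to 32 bytes.
use frame_support::PalletId;
use sp_runtime::{traits::AccountIdConversion, AccountId32};

fn treasury_account() -> AccountId32 {
    PalletId(*b"py/trsry").into_account_truncating()
}
```

Because the input bytes are fixed, every node computes the same treasury address deterministically.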
+ +use frame_support::{ + traits::{Contains, Get, fungibles}, + parameter_types, +}; +use sp_runtime::traits::{Zero, Convert}; +use xcm::v1::{Junction::*, MultiLocation, Junctions::*}; +use xcm::latest::MultiAsset; +use xcm_builder::{FungiblesAdapter, ConvertedConcreteAssetId}; +use xcm_executor::traits::{Convert as ConvertXcm, JustTry, FilterAssetLocation}; +use pallet_foreign_assets::{ + AssetIds, AssetIdMapping, XcmForeignAssetIdMapping, NativeCurrency, FreeForAll, TryAsForeign, + ForeignAssetId, CurrencyId, +}; +use sp_std::{borrow::Borrow, marker::PhantomData}; +use crate::{Runtime, Balances, ParachainInfo, PolkadotXcm, ForeignAssets}; + +use super::{LocationToAccountId, RelayLocation}; + +use up_common::types::{AccountId, Balance}; + +parameter_types! { + pub CheckingAccount: AccountId = PolkadotXcm::check_account(); +} + +/// Allow checking in assets that have issuance > 0. +pub struct NonZeroIssuance(PhantomData<(AccountId, ForeignAssets)>); + +impl Contains<>::AssetId> + for NonZeroIssuance +where + ForeignAssets: fungibles::Inspect, +{ + fn contains(id: &>::AssetId) -> bool { + !ForeignAssets::total_issuance(*id).is_zero() + } +} + +pub struct AsInnerId(PhantomData<(AssetId, ConvertAssetId)>); +impl> + ConvertXcm for AsInnerId +where + AssetId: Borrow, + AssetId: TryAsForeign, + AssetIds: Borrow, +{ + fn convert_ref(id: impl Borrow) -> Result { + let id = id.borrow(); + + log::trace!( + target: "xcm::AsInnerId::Convert", + "AsInnerId {:?}", + id + ); + + let parent = MultiLocation::parent(); + let here = MultiLocation::here(); + let self_location = MultiLocation::new(1, X1(Parachain(ParachainInfo::get().into()))); + + if *id == parent { + return ConvertAssetId::convert_ref(AssetIds::NativeAssetId(NativeCurrency::Parent)); + } + + if *id == here || *id == self_location { + return ConvertAssetId::convert_ref(AssetIds::NativeAssetId(NativeCurrency::Here)); + } + + match XcmForeignAssetIdMapping::::get_currency_id(id.clone()) { + Some(AssetIds::ForeignAssetId(foreign_asset_id)) => { + ConvertAssetId::convert_ref(AssetIds::ForeignAssetId(foreign_asset_id)) + } + _ => ConvertAssetId::convert_ref(AssetIds::ForeignAssetId(0)), + } + } + + fn reverse_ref(what: impl Borrow) -> Result { + log::trace!( + target: "xcm::AsInnerId::Reverse", + "AsInnerId", + ); + + let asset_id = what.borrow(); + + let parent_id = + ConvertAssetId::convert_ref(AssetIds::NativeAssetId(NativeCurrency::Parent)).unwrap(); + let here_id = + ConvertAssetId::convert_ref(AssetIds::NativeAssetId(NativeCurrency::Here)).unwrap(); + + if asset_id.clone() == parent_id { + return Ok(MultiLocation::parent()); + } + + if asset_id.clone() == here_id { + return Ok(MultiLocation::new( + 1, + X1(Parachain(ParachainInfo::get().into())), + )); + } + + match >::try_as_foreign(asset_id.clone()) { + Some(fid) => match XcmForeignAssetIdMapping::::get_multi_location(fid) { + Some(location) => Ok(location), + None => Err(()), + }, + None => Err(()), + } + } +} + +/// Means for transacting assets besides the native currency on this chain. +pub type FungiblesTransactor = FungiblesAdapter< + // Use this fungibles implementation: + ForeignAssets, + // Use this currency when it is a fungible asset matching the given location or name: + ConvertedConcreteAssetId, JustTry>, + // Convert an XCM MultiLocation into a local account id: + LocationToAccountId, + // Our chain's account ID type (we can't get away without mentioning it explicitly): + AccountId, + // We only want to allow teleports of known assets. 
We use non-zero issuance as an indication + // that this asset is known. + NonZeroIssuance, + // The account to use for tracking teleports. + CheckingAccount, +>; + +/// Means for transacting assets on this chain. +pub type AssetTransactors = FungiblesTransactor; + +pub struct AllAsset; +impl FilterAssetLocation for AllAsset { + fn filter_asset_location(_asset: &MultiAsset, _origin: &MultiLocation) -> bool { + true + } +} + +pub type IsReserve = AllAsset; + +pub type Trader = FreeForAll< + pallet_configuration::WeightToFee, + RelayLocation, + AccountId, + Balances, + (), +>; + +pub struct CurrencyIdConvert; +impl Convert> for CurrencyIdConvert { + fn convert(id: AssetIds) -> Option { + match id { + AssetIds::NativeAssetId(NativeCurrency::Here) => Some(MultiLocation::new( + 1, + X1(Parachain(ParachainInfo::get().into())), + )), + AssetIds::NativeAssetId(NativeCurrency::Parent) => Some(MultiLocation::parent()), + AssetIds::ForeignAssetId(foreign_asset_id) => { + XcmForeignAssetIdMapping::::get_multi_location(foreign_asset_id) + } + } + } +} + +impl Convert> for CurrencyIdConvert { + fn convert(location: MultiLocation) -> Option { + if location == MultiLocation::here() + || location == MultiLocation::new(1, X1(Parachain(ParachainInfo::get().into()))) + { + return Some(AssetIds::NativeAssetId(NativeCurrency::Here)); + } + + if location == MultiLocation::parent() { + return Some(AssetIds::NativeAssetId(NativeCurrency::Parent)); + } + + if let Some(currency_id) = + XcmForeignAssetIdMapping::::get_currency_id(location.clone()) + { + return Some(currency_id); + } + + None + } +} diff --git a/runtime/common/config/xcm/mod.rs b/runtime/common/config/xcm/mod.rs new file mode 100644 index 0000000000..199fdac373 --- /dev/null +++ b/runtime/common/config/xcm/mod.rs @@ -0,0 +1,267 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . 
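`AsInnerId` and `CurrencyIdConvert` above both rely on the chain's native asset having two equivalent `MultiLocation` spellings: `Here` from the local point of view and `../Parachain(id)` from the relay chain or a sibling. A small sketch of the two forms (the parachain id is supplied by the caller here; in the runtime it comes from `ParachainInfo`):

```rust
// Sketch only: the two MultiLocation forms that foreignassets.rs above treats
// as "our own currency".
use xcm::v1::{Junction::Parachain, Junctions::{Here, X1}, MultiLocation};

fn native_asset_locations(para_id: u32) -> (MultiLocation, MultiLocation) {
    // How this chain's own currency looks locally...
    let as_seen_locally = MultiLocation::new(0, Here);
    // ...and how the very same asset is addressed from the relay or a sibling chain.
    let as_seen_by_siblings = MultiLocation::new(1, X1(Parachain(para_id)));
    (as_seen_locally, as_seen_by_siblings)
}
```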
+ +use frame_support::{ + traits::{Everything, Get}, + parameter_types, +}; +use frame_system::EnsureRoot; +use pallet_xcm::XcmPassthrough; +use polkadot_parachain::primitives::Sibling; +use xcm::v1::{Junction::*, MultiLocation, NetworkId}; +use xcm::latest::{prelude::*, Weight}; +use xcm_builder::{ + AccountId32Aliases, EnsureXcmOrigin, FixedWeightBounds, LocationInverter, ParentAsSuperuser, + RelayChainAsNative, SiblingParachainAsNative, SiblingParachainConvertsVia, + SignedAccountId32AsNative, SignedToAccountId32, SovereignSignedViaLocation, ParentIsPreset, +}; +use xcm_executor::{Config, XcmExecutor, traits::ShouldExecute}; +use sp_std::{marker::PhantomData, vec::Vec}; +use crate::{ + Runtime, RuntimeCall, RuntimeEvent, RuntimeOrigin, ParachainInfo, ParachainSystem, PolkadotXcm, + XcmpQueue, xcm_barrier::Barrier, +}; + +use up_common::types::AccountId; + +#[cfg(feature = "foreign-assets")] +pub mod foreignassets; + +#[cfg(not(feature = "foreign-assets"))] +pub mod nativeassets; + +#[cfg(feature = "foreign-assets")] +pub use foreignassets as xcm_assets; + +#[cfg(not(feature = "foreign-assets"))] +pub use nativeassets as xcm_assets; + +use xcm_assets::{AssetTransactors, IsReserve, Trader}; + +parameter_types! { + pub const RelayLocation: MultiLocation = MultiLocation::parent(); + pub const RelayNetwork: NetworkId = NetworkId::Polkadot; + pub RelayOrigin: RuntimeOrigin = cumulus_pallet_xcm::Origin::Relay.into(); + pub Ancestry: MultiLocation = Parachain(ParachainInfo::parachain_id().into()).into(); + pub SelfLocation: MultiLocation = MultiLocation::new(1, X1(Parachain(ParachainInfo::get().into()))); + + // One XCM operation is 1_000_000 weight - almost certainly a conservative estimate. + pub UnitWeightCost: Weight = 1_000_000; + pub const MaxInstructions: u32 = 100; +} + +/// Type for specifying how a `MultiLocation` can be converted into an `AccountId`. This is used +/// when determining ownership of accounts for asset transacting and when attempting to use XCM +/// `Transact` in order to determine the dispatch Origin. +pub type LocationToAccountId = ( + // The parent (Relay-chain) origin converts to the default `AccountId`. + ParentIsPreset, + // Sibling parachain origins convert to AccountId via the `ParaId::into`. + SiblingParachainConvertsVia, + // Straight up local `AccountId32` origins just alias directly to `AccountId`. + AccountId32Aliases, +); + +/// No local origins on this chain are allowed to dispatch XCM sends/executions. +pub type LocalOriginToLocation = (SignedToAccountId32,); + +/// The means for routing XCM messages which are not for local execution into the right message +/// queues. +pub type XcmRouter = ( + // Two routers - use UMP to communicate with the relay chain: + cumulus_primitives_utility::ParentAsUmp, + // ..and XCMP to communicate with the sibling chains. + XcmpQueue, +); + +/// This is the type we use to convert an (incoming) XCM origin into a local `Origin` instance, +/// ready for dispatching a transaction with Xcm's `Transact`. There is an `OriginKind` which can +/// biases the kind of local `Origin` it will become. +pub type XcmOriginToTransactDispatchOrigin = ( + // Sovereign account converter; this attempts to derive an `AccountId` from the origin location + // using `LocationToAccountId` and then turn that into the usual `Signed` origin. Useful for + // foreign chains who want to have a local sovereign account on this chain which they control. 
+ SovereignSignedViaLocation, + // Native converter for Relay-chain (Parent) location; will converts to a `Relay` origin when + // recognised. + RelayChainAsNative, + // Native converter for sibling Parachains; will convert to a `SiblingPara` origin when + // recognised. + SiblingParachainAsNative, + // Superuser converter for the Relay-chain (Parent) location. This will allow it to issue a + // transaction from the Root origin. + ParentAsSuperuser, + // Native signed account converter; this just converts an `AccountId32` origin into a normal + // `Origin::Signed` origin of the same 32-byte value. + SignedAccountId32AsNative, + // Xcm origins can be represented natively under the Xcm pallet's Xcm origin. + XcmPassthrough, +); + +pub trait TryPass { + fn try_pass(origin: &MultiLocation, message: &mut Xcm) -> Result<(), ()>; +} + +#[impl_trait_for_tuples::impl_for_tuples(30)] +impl TryPass for Tuple { + fn try_pass(origin: &MultiLocation, message: &mut Xcm) -> Result<(), ()> { + for_tuples!( #( + Tuple::try_pass(origin, message)?; + )* ); + + Ok(()) + } +} + +pub struct DenyTransact; +impl TryPass for DenyTransact { + fn try_pass(_origin: &MultiLocation, message: &mut Xcm) -> Result<(), ()> { + let transact_inst = message + .0 + .iter() + .find(|inst| matches![inst, Instruction::Transact { .. }]); + + if transact_inst.is_some() { + log::warn!( + target: "xcm::barrier", + "transact XCM rejected" + ); + + Err(()) + } else { + Ok(()) + } + } +} + +/// Deny executing the XCM if it matches any of the Deny filter regardless of anything else. +/// If it passes the Deny, and matches one of the Allow cases then it is let through. +pub struct DenyThenTry(PhantomData, PhantomData) +where + Deny: TryPass, + Allow: ShouldExecute; + +impl ShouldExecute for DenyThenTry +where + Deny: TryPass, + Allow: ShouldExecute, +{ + fn should_execute( + origin: &MultiLocation, + message: &mut Xcm, + max_weight: Weight, + weight_credit: &mut Weight, + ) -> Result<(), ()> { + Deny::try_pass(origin, message)?; + Allow::should_execute(origin, message, max_weight, weight_credit) + } +} + +// Allow xcm exchange only with locations in list +pub struct DenyExchangeWithUnknownLocation(PhantomData); +impl>> TryPass for DenyExchangeWithUnknownLocation { + fn try_pass(origin: &MultiLocation, message: &mut Xcm) -> Result<(), ()> { + let allowed_locations = T::get(); + + // Check if deposit or transfer belongs to allowed parachains + let mut allowed = allowed_locations.contains(origin); + + message.0.iter().for_each(|inst| match inst { + DepositReserveAsset { dest: dst, .. } => { + allowed |= allowed_locations.contains(dst); + } + TransferReserveAsset { dest: dst, .. } => { + allowed |= allowed_locations.contains(dst); + } + _ => {} + }); + + if allowed { + return Ok(()); + } + + log::warn!( + target: "xcm::barrier", + "Unexpected deposit or transfer location" + ); + // Deny + Err(()) + } +} + +pub type Weigher = FixedWeightBounds; + +pub struct XcmConfig(PhantomData); +impl Config for XcmConfig +where + T: pallet_configuration::Config, +{ + type RuntimeCall = RuntimeCall; + type XcmSender = XcmRouter; + // How to withdraw and deposit an asset. + type AssetTransactor = AssetTransactors; + type OriginConverter = XcmOriginToTransactDispatchOrigin; + type IsReserve = IsReserve; + type IsTeleporter = (); // Teleportation is disabled + type LocationInverter = LocationInverter; + type Barrier = Barrier; + type Weigher = Weigher; + type Trader = Trader; + type ResponseHandler = (); // Don't handle responses for now. 
+ type SubscriptionService = PolkadotXcm; + + type AssetTrap = PolkadotXcm; + type AssetClaims = PolkadotXcm; +} + +impl pallet_xcm::Config for Runtime { + type RuntimeEvent = RuntimeEvent; + type SendXcmOrigin = EnsureXcmOrigin; + type XcmRouter = XcmRouter; + type ExecuteXcmOrigin = EnsureXcmOrigin; + type XcmExecuteFilter = Everything; + type XcmExecutor = XcmExecutor>; + type XcmTeleportFilter = Everything; + type XcmReserveTransferFilter = Everything; + type Weigher = FixedWeightBounds; + type LocationInverter = LocationInverter; + type RuntimeOrigin = RuntimeOrigin; + type RuntimeCall = RuntimeCall; + const VERSION_DISCOVERY_QUEUE_SIZE: u32 = 100; + type AdvertisedXcmVersion = pallet_xcm::CurrentXcmVersion; +} + +impl cumulus_pallet_xcm::Config for Runtime { + type RuntimeEvent = RuntimeEvent; + type XcmExecutor = XcmExecutor>; +} + +impl cumulus_pallet_xcmp_queue::Config for Runtime { + type WeightInfo = (); + type RuntimeEvent = RuntimeEvent; + type XcmExecutor = XcmExecutor>; + type ChannelInfo = ParachainSystem; + type VersionWrapper = (); + type ExecuteOverweightOrigin = frame_system::EnsureRoot; + type ControllerOrigin = EnsureRoot; + type ControllerOriginConverter = XcmOriginToTransactDispatchOrigin; +} + +impl cumulus_pallet_dmp_queue::Config for Runtime { + type RuntimeEvent = RuntimeEvent; + type XcmExecutor = XcmExecutor>; + type ExecuteOverweightOrigin = frame_system::EnsureRoot; +} diff --git a/runtime/common/config/xcm/nativeassets.rs b/runtime/common/config/xcm/nativeassets.rs new file mode 100644 index 0000000000..2fecd60058 --- /dev/null +++ b/runtime/common/config/xcm/nativeassets.rs @@ -0,0 +1,143 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . 
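`DenyTransact`, `DenyThenTry` and `DenyExchangeWithUnknownLocation` above are building blocks for the runtime's `Barrier`, which lives in `xcm_barrier` and is not part of this hunk. A hedged sketch of how such a barrier could be composed; the allow side uses standard `xcm_builder` barriers chosen for illustration, not necessarily the production combination:

```rust
// Sketch only: DenyThenTry evaluates the Deny side first, then the Allow side.
// DenyThenTry and DenyTransact are the types introduced in xcm/mod.rs above;
// the allow-side tuple is an illustrative choice, not the runtime's actual Barrier.
use frame_support::traits::Everything;
use xcm_builder::{AllowTopLevelPaidExecutionFrom, TakeWeightCredit};

pub type ExampleBarrier = DenyThenTry<
    DenyTransact,
    (TakeWeightCredit, AllowTopLevelPaidExecutionFrom<Everything>),
>;
```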
+ +use frame_support::{ + traits::{tokens::currency::Currency as CurrencyT, OnUnbalanced as OnUnbalancedT, Get}, + weights::WeightToFeePolynomial, +}; +use sp_runtime::traits::{CheckedConversion, Zero, Convert}; +use xcm::v1::{Junction::*, MultiLocation, Junctions::*}; +use xcm::latest::{ + AssetId::{Concrete}, + Fungibility::Fungible as XcmFungible, + MultiAsset, Error as XcmError, Weight, +}; +use xcm_builder::{CurrencyAdapter, NativeAsset}; +use xcm_executor::{ + Assets, + traits::{MatchesFungible, WeightTrader}, +}; +use pallet_foreign_assets::{AssetIds, NativeCurrency}; +use sp_std::marker::PhantomData; +use crate::{Balances, ParachainInfo}; +use super::{LocationToAccountId, RelayLocation}; + +use up_common::types::{AccountId, Balance}; + +pub struct OnlySelfCurrency; +impl> MatchesFungible for OnlySelfCurrency { + fn matches_fungible(a: &MultiAsset) -> Option { + let paraid = Parachain(ParachainInfo::parachain_id().into()); + match (&a.id, &a.fun) { + ( + Concrete(MultiLocation { + parents: 1, + interior: X1(loc), + }), + XcmFungible(ref amount), + ) if paraid == *loc => CheckedConversion::checked_from(*amount), + ( + Concrete(MultiLocation { + parents: 0, + interior: Here, + }), + XcmFungible(ref amount), + ) => CheckedConversion::checked_from(*amount), + _ => None, + } + } +} + +/// Means for transacting assets on this chain. +pub type LocalAssetTransactor = CurrencyAdapter< + // Use this currency: + Balances, + // Use this currency when it is a fungible asset matching the given location or name: + OnlySelfCurrency, + // Do a simple punn to convert an AccountId32 MultiLocation into a native chain account ID: + LocationToAccountId, + // Our chain's account ID type (we can't get away without mentioning it explicitly): + AccountId, + // We don't track any teleports. + (), +>; + +pub type AssetTransactors = LocalAssetTransactor; + +pub type IsReserve = NativeAsset; + +pub struct UsingOnlySelfCurrencyComponents< + WeightToFee: WeightToFeePolynomial, + AssetId: Get, + AccountId, + Currency: CurrencyT, + OnUnbalanced: OnUnbalancedT, +>( + Weight, + Currency::Balance, + PhantomData<(WeightToFee, AssetId, AccountId, Currency, OnUnbalanced)>, +); +impl< + WeightToFee: WeightToFeePolynomial, + AssetId: Get, + AccountId, + Currency: CurrencyT, + OnUnbalanced: OnUnbalancedT, + > WeightTrader + for UsingOnlySelfCurrencyComponents +{ + fn new() -> Self { + Self(0, Zero::zero(), PhantomData) + } + + fn buy_weight(&mut self, _weight: Weight, payment: Assets) -> Result { + Ok(payment) + } +} +impl< + WeightToFee: WeightToFeePolynomial, + AssetId: Get, + AccountId, + Currency: CurrencyT, + OnUnbalanced: OnUnbalancedT, + > Drop + for UsingOnlySelfCurrencyComponents +{ + fn drop(&mut self) { + OnUnbalanced::on_unbalanced(Currency::issue(self.1)); + } +} + +pub type Trader = UsingOnlySelfCurrencyComponents< + pallet_configuration::WeightToFee, + RelayLocation, + AccountId, + Balances, + (), +>; + +pub struct CurrencyIdConvert; +impl Convert> for CurrencyIdConvert { + fn convert(id: AssetIds) -> Option { + match id { + AssetIds::NativeAssetId(NativeCurrency::Here) => Some(MultiLocation::new( + 1, + X1(Parachain(ParachainInfo::get().into())), + )), + _ => None, + } + } +} diff --git a/runtime/common/construct_runtime/mod.rs b/runtime/common/construct_runtime/mod.rs new file mode 100644 index 0000000000..db32f7d245 --- /dev/null +++ b/runtime/common/construct_runtime/mod.rs @@ -0,0 +1,101 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. 
+ +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +mod util; + +#[macro_export] +macro_rules! construct_runtime { + ($select_runtime:ident) => { + $crate::construct_runtime_impl! { + select_runtime($select_runtime); + + pub enum Runtime where + Block = Block, + NodeBlock = opaque::Block, + UncheckedExtrinsic = UncheckedExtrinsic + { + System: frame_system = 0, + + ParachainSystem: cumulus_pallet_parachain_system::{Pallet, Call, Config, Storage, Inherent, Event, ValidateUnsigned} = 20, + ParachainInfo: parachain_info::{Pallet, Storage, Config} = 21, + + Aura: pallet_aura::{Pallet, Config} = 22, + AuraExt: cumulus_pallet_aura_ext::{Pallet, Config} = 23, + + Balances: pallet_balances::{Pallet, Call, Storage, Config, Event} = 30, + RandomnessCollectiveFlip: pallet_randomness_collective_flip::{Pallet, Storage} = 31, + Timestamp: pallet_timestamp::{Pallet, Call, Storage, Inherent} = 32, + TransactionPayment: pallet_transaction_payment::{Pallet, Storage, Event} = 33, + Treasury: pallet_treasury::{Pallet, Call, Storage, Config, Event} = 34, + Sudo: pallet_sudo::{Pallet, Call, Storage, Config, Event} = 35, + Vesting: orml_vesting::{Pallet, Storage, Call, Event, Config} = 37, + + XTokens: orml_xtokens = 38, + Tokens: orml_tokens = 39, + // Contracts: pallet_contracts::{Pallet, Call, Storage, Event} = 38, + + // XCM helpers. 
+ XcmpQueue: cumulus_pallet_xcmp_queue::{Pallet, Call, Storage, Event} = 50, + PolkadotXcm: pallet_xcm::{Pallet, Call, Event, Origin} = 51, + CumulusXcm: cumulus_pallet_xcm::{Pallet, Call, Event, Origin} = 52, + DmpQueue: cumulus_pallet_dmp_queue::{Pallet, Call, Storage, Event} = 53, + + // Unique Pallets + Inflation: pallet_inflation::{Pallet, Call, Storage} = 60, + Unique: pallet_unique::{Pallet, Call, Storage, Event} = 61, + + // #[runtimes(opal)] + // Scheduler: pallet_unique_scheduler::{Pallet, Call, Storage, Event} = 62, + + Configuration: pallet_configuration::{Pallet, Call, Storage} = 63, + + Charging: pallet_charge_transaction::{Pallet, Call, Storage } = 64, + // ContractHelpers: pallet_contract_helpers::{Pallet, Call, Storage} = 65, + Common: pallet_common::{Pallet, Storage, Event} = 66, + Fungible: pallet_fungible::{Pallet, Storage} = 67, + + #[runtimes(opal, quartz)] + Refungible: pallet_refungible::{Pallet, Storage} = 68, + + Nonfungible: pallet_nonfungible::{Pallet, Storage} = 69, + Structure: pallet_structure::{Pallet, Call, Storage, Event} = 70, + + #[runtimes(opal)] + RmrkCore: pallet_proxy_rmrk_core::{Pallet, Call, Storage, Event} = 71, + + #[runtimes(opal)] + RmrkEquip: pallet_proxy_rmrk_equip::{Pallet, Call, Storage, Event} = 72, + + #[runtimes(opal)] + AppPromotion: pallet_app_promotion::{Pallet, Call, Storage, Event} = 73, + + #[runtimes(opal)] + ForeignAssets: pallet_foreign_assets::{Pallet, Call, Storage, Event} = 80, + + // Frontier + EVM: pallet_evm::{Pallet, Config, Call, Storage, Event} = 100, + Ethereum: pallet_ethereum::{Pallet, Config, Call, Storage, Event, Origin} = 101, + + EvmCoderSubstrate: pallet_evm_coder_substrate::{Pallet, Storage} = 150, + EvmContractHelpers: pallet_evm_contract_helpers::{Pallet, Storage, Event} = 151, + EvmTransactionPayment: pallet_evm_transaction_payment::{Pallet} = 152, + EvmMigration: pallet_evm_migration::{Pallet, Call, Storage} = 153, + + Maintenance: pallet_maintenance::{Pallet, Call, Storage, Event} = 154, + } + } + } +} diff --git a/runtime/common/construct_runtime/util.rs b/runtime/common/construct_runtime/util.rs new file mode 100644 index 0000000000..3f0cae4410 --- /dev/null +++ b/runtime/common/construct_runtime/util.rs @@ -0,0 +1,222 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +#[macro_export] +macro_rules! construct_runtime_impl { + ( + select_runtime($select_runtime:ident); + + pub enum Runtime where + $($where_ident:ident = $where_ty:ty),* $(,)? + { + $( + $(#[runtimes($($pallet_runtimes:ident),+ $(,)?)])? + $pallet_name:ident: $pallet_mod:ident$(::{$($pallet_parts:ty),*})? = $index:literal + ),* + $(,)? + } + ) => { + $crate::construct_runtime_helper! { + select_runtime($select_runtime), + selected_pallets(), + + where_clause($($where_ident = $where_ty),*), + pallets( + $( + $(#[runtimes($($pallet_runtimes),+)])? 
+ $pallet_name: $pallet_mod$(::{$($pallet_parts),*})? = $index + ),*, + ) + } + } +} + +#[macro_export] +macro_rules! construct_runtime_helper { + ( + select_runtime($select_runtime:ident), + selected_pallets($($selected_pallets:tt)*), + + where_clause($($where_clause:tt)*), + pallets( + #[runtimes($($pallet_runtimes:ident),+)] + $pallet_name:ident: $pallet_mod:ident$(::{$($pallet_parts:ty),*})? = $index:literal, + + $($pallets_tl:tt)* + ) + ) => { + $crate::add_runtime_specific_pallets! { + select_runtime($select_runtime), + runtimes($($pallet_runtimes),+,), + selected_pallets($($selected_pallets)*), + + where_clause($($where_clause)*), + pallets( + $pallet_name: $pallet_mod$(::{$($pallet_parts),*})? = $index, + $($pallets_tl)* + ) + } + }; + + ( + select_runtime($select_runtime:ident), + selected_pallets($($selected_pallets:tt)*), + + where_clause($($where_clause:tt)*), + pallets( + $pallet_name:ident: $pallet_mod:ident$(::{$($pallet_parts:ty),*})? = $index:literal, + + $($pallets_tl:tt)* + ) + ) => { + $crate::construct_runtime_helper! { + select_runtime($select_runtime), + selected_pallets( + $($selected_pallets)* + $pallet_name: $pallet_mod$(::{$($pallet_parts),*})? = $index, + ), + + where_clause($($where_clause)*), + pallets($($pallets_tl)*) + } + }; + + ( + select_runtime($select_runtime:ident), + selected_pallets($($selected_pallets:tt)*), + + where_clause($($where_clause:tt)*), + pallets() + ) => { + frame_support::construct_runtime! { + pub enum Runtime where + $($where_clause)* + { + $($selected_pallets)* + } + } + }; +} + +#[macro_export] +macro_rules! add_runtime_specific_pallets { + ( + select_runtime(opal), + runtimes(opal, $($_runtime_tl:tt)*), + selected_pallets($($selected_pallets:tt)*), + + where_clause($($where_clause:tt)*), + pallets( + $pallet_name:ident: $pallet_mod:ident$(::{$($pallet_parts:ty),*})? = $index:literal, + $($pallets_tl:tt)* + ) + ) => { + $crate::construct_runtime_helper! { + select_runtime(opal), + selected_pallets( + $($selected_pallets)* + $pallet_name: $pallet_mod$(::{$($pallet_parts),*})? = $index, + ), + + where_clause($($where_clause)*), + pallets($($pallets_tl)*) + } + }; + + ( + select_runtime(quartz), + runtimes(quartz, $($_runtime_tl:tt)*), + selected_pallets($($selected_pallets:tt)*), + + where_clause($($where_clause:tt)*), + pallets( + $pallet_name:ident: $pallet_mod:ident$(::{$($pallet_parts:ty),*})? = $index:literal, + $($pallets_tl:tt)* + ) + ) => { + $crate::construct_runtime_helper! { + select_runtime(quartz), + selected_pallets( + $($selected_pallets)* + $pallet_name: $pallet_mod$(::{$($pallet_parts),*})? = $index, + ), + + where_clause($($where_clause)*), + pallets($($pallets_tl)*) + } + }; + + ( + select_runtime(unique), + runtimes(unique, $($_runtime_tl:tt)*), + selected_pallets($($selected_pallets:tt)*), + + where_clause($($where_clause:tt)*), + pallets( + $pallet_name:ident: $pallet_mod:ident$(::{$($pallet_parts:ty),*})? = $index:literal, + $($pallets_tl:tt)* + ) + ) => { + $crate::construct_runtime_helper! { + select_runtime(unique), + selected_pallets( + $($selected_pallets)* + $pallet_name: $pallet_mod$(::{$($pallet_parts),*})? = $index, + ), + + where_clause($($where_clause)*), + pallets($($pallets_tl)*) + } + }; + + ( + select_runtime($select_runtime:ident), + runtimes($_current_runtime:ident, $($runtime_tl:tt)*), + selected_pallets($($selected_pallets:tt)*), + + where_clause($($where_clause:tt)*), + pallets($($pallets:tt)*) + ) => { + $crate::add_runtime_specific_pallets! 
{ + select_runtime($select_runtime), + runtimes($($runtime_tl)*), + selected_pallets($($selected_pallets)*), + + where_clause($($where_clause)*), + pallets($($pallets)*) + } + }; + + ( + select_runtime($select_runtime:ident), + runtimes(), + selected_pallets($($selected_pallets:tt)*), + + where_clause($($where_clause:tt)*), + pallets( + $_pallet_name:ident: $_pallet_mod:ident$(::{$($_pallet_parts:ty),*})? = $_index:literal, + $($pallets_tl:tt)* + ) + ) => { + $crate::construct_runtime_helper! { + select_runtime($select_runtime), + selected_pallets($($selected_pallets)*), + + where_clause($($where_clause)*), + pallets($($pallets_tl)*) + } + }; +} diff --git a/runtime/common/src/dispatch.rs b/runtime/common/dispatch.rs similarity index 86% rename from runtime/common/src/dispatch.rs rename to runtime/common/dispatch.rs index 8e509b7054..ccd26b7f8c 100644 --- a/runtime/common/src/dispatch.rs +++ b/runtime/common/dispatch.rs @@ -17,6 +17,7 @@ use frame_support::{dispatch::DispatchResult, ensure}; use pallet_evm::{PrecompileHandle, PrecompileResult}; use sp_core::H160; +use sp_runtime::DispatchError; use sp_std::{borrow::ToOwned, vec::Vec}; use pallet_common::{ CollectionById, CollectionHandle, CommonCollectionOperations, erc::CommonEvmHandler, @@ -25,11 +26,17 @@ use pallet_common::{ pub use pallet_common::dispatch::CollectionDispatch; use pallet_fungible::{Pallet as PalletFungible, FungibleHandle}; use pallet_nonfungible::{Pallet as PalletNonfungible, NonfungibleHandle}; -use pallet_refungible::{Pallet as PalletRefungible, RefungibleHandle, erc::RefungibleTokenHandle}; +use pallet_refungible::{ + Pallet as PalletRefungible, RefungibleHandle, erc_token::RefungibleTokenHandle, +}; use up_data_structs::{ CollectionMode, CreateCollectionData, MAX_DECIMAL_POINTS, mapping::TokenAddressMapping, + CollectionId, CollectionFlags, }; +#[cfg(not(feature = "refungible"))] +use pallet_common::unsupported; + pub enum CollectionDispatchT where T: pallet_fungible::Config + pallet_nonfungible::Config + pallet_refungible::Config, @@ -48,21 +55,32 @@ where { fn create( sender: T::CrossAccountId, + payer: T::CrossAccountId, data: CreateCollectionData, - ) -> DispatchResult { - let _id = match data.mode { - CollectionMode::NFT => >::init_collection(sender, data, false)?, + flags: CollectionFlags, + ) -> Result { + let id = match data.mode { + CollectionMode::NFT => { + >::init_collection(sender, payer, data, flags)? + } CollectionMode::Fungible(decimal_points) => { // check params ensure!( decimal_points <= MAX_DECIMAL_POINTS, pallet_unique::Error::::CollectionDecimalPointLimitExceeded ); - >::init_collection(sender, data)? + >::init_collection(sender, payer, data, flags)? } - CollectionMode::ReFungible => >::init_collection(sender, data)?, + + #[cfg(feature = "refungible")] + CollectionMode::ReFungible => { + >::init_collection(sender, payer, data, flags)? 
+ } + + #[cfg(not(feature = "refungible"))] + CollectionMode::ReFungible => return unsupported!(T), }; - Ok(()) + Ok(id) } fn destroy(sender: T::CrossAccountId, collection: CollectionHandle) -> DispatchResult { @@ -112,7 +130,7 @@ where + pallet_fungible::Config + pallet_nonfungible::Config + pallet_refungible::Config, - T::AccountId: From<[u8; 32]>, + T::AccountId: From<[u8; 32]> + AsRef<[u8; 32]>, { fn is_reserved(target: &H160) -> bool { map_eth_to_id(target).is_some() diff --git a/runtime/common/ethereum/mod.rs b/runtime/common/ethereum/mod.rs new file mode 100644 index 0000000000..3aed7d9b72 --- /dev/null +++ b/runtime/common/ethereum/mod.rs @@ -0,0 +1,19 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +pub mod self_contained_call; +pub mod sponsoring; +pub mod transaction_converter; diff --git a/runtime/common/ethereum/self_contained_call.rs b/runtime/common/ethereum/self_contained_call.rs new file mode 100644 index 0000000000..2b26d07af7 --- /dev/null +++ b/runtime/common/ethereum/self_contained_call.rs @@ -0,0 +1,92 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . 
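Returning to `construct_runtime/util.rs` above: the `#[runtimes(...)]` attribute is interpreted by a token-munching macro that keeps a pallet only when the selected runtime appears in the attribute's list. A deliberately tiny model of that filtering idea (the macro below is illustrative and simplified: it only checks the first listed runtime, while the real helpers recurse over all listed runtimes and over the whole pallet list):

```rust
// Sketch only: keep a value when `opal` appears first in its #[runtimes(...)]
// tag, drop it otherwise.
macro_rules! keep_for_opal {
    (#[runtimes(opal $(, $rest:ident)*)] $val:literal) => {
        Some($val)
    };
    (#[runtimes($($other:ident),*)] $val:literal) => {
        None::<u32>
    };
}

fn main() {
    assert_eq!(keep_for_opal!(#[runtimes(opal, quartz)] 73), Some(73));
    assert_eq!(keep_for_opal!(#[runtimes(unique)] 73), None);
}
```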
+ +use sp_core::H160; +use sp_runtime::{ + traits::{Dispatchable, DispatchInfoOf, PostDispatchInfoOf}, + transaction_validity::{TransactionValidityError, TransactionValidity, InvalidTransaction}, +}; +use crate::{RuntimeOrigin, RuntimeCall, Maintenance}; + +impl fp_self_contained::SelfContainedCall for RuntimeCall { + type SignedInfo = H160; + + fn is_self_contained(&self) -> bool { + match self { + RuntimeCall::Ethereum(call) => call.is_self_contained(), + _ => false, + } + } + + fn check_self_contained(&self) -> Option> { + match self { + RuntimeCall::Ethereum(call) => { + if Maintenance::is_enabled() { + Some(Err(TransactionValidityError::Invalid( + InvalidTransaction::Call, + ))) + } else { + call.check_self_contained() + } + } + _ => None, + } + } + + fn validate_self_contained( + &self, + info: &Self::SignedInfo, + dispatch_info: &DispatchInfoOf, + len: usize, + ) -> Option { + match self { + RuntimeCall::Ethereum(call) => { + if Maintenance::is_enabled() { + Some(Err(TransactionValidityError::Invalid( + InvalidTransaction::Call, + ))) + } else { + call.validate_self_contained(info, dispatch_info, len) + } + } + _ => None, + } + } + + fn pre_dispatch_self_contained( + &self, + info: &Self::SignedInfo, + ) -> Option> { + match self { + RuntimeCall::Ethereum(call) => call.pre_dispatch_self_contained(info), + _ => None, + } + } + + fn apply_self_contained( + self, + info: Self::SignedInfo, + ) -> Option>> { + match self { + call @ RuntimeCall::Ethereum(pallet_ethereum::Call::transact { .. }) => { + Some(call.dispatch(RuntimeOrigin::from( + pallet_ethereum::RawOrigin::EthereumTransaction(info), + ))) + } + _ => None, + } + } +} diff --git a/runtime/common/src/eth_sponsoring.rs b/runtime/common/ethereum/sponsoring.rs similarity index 71% rename from runtime/common/src/eth_sponsoring.rs rename to runtime/common/ethereum/sponsoring.rs index 92e3aa8bd9..a78d8606d7 100644 --- a/runtime/common/src/eth_sponsoring.rs +++ b/runtime/common/ethereum/sponsoring.rs @@ -16,36 +16,47 @@ //! 
Implements EVM sponsoring logic via TransactionValidityHack +use core::{convert::TryInto, marker::PhantomData}; use evm_coder::{Call, abi::AbiReader}; use pallet_common::{CollectionHandle, eth::map_eth_to_id}; -use sp_core::H160; -use sp_std::prelude::*; -use up_sponsorship::SponsorshipHandler; -use core::marker::PhantomData; -use core::convert::TryInto; use pallet_evm::account::CrossAccountId; -use up_data_structs::{TokenId, CreateItemData, CreateNftData, CollectionMode}; +use pallet_evm_transaction_payment::CallContext; +use pallet_nonfungible::{ + Config as NonfungibleConfig, + erc::{ + UniqueNFTCall, ERC721UniqueExtensionsCall, ERC721UniqueMintableCall, ERC721Call, + TokenPropertiesCall, + }, +}; +use pallet_fungible::{ + Config as FungibleConfig, + erc::{UniqueFungibleCall, ERC20Call}, +}; +use pallet_refungible::Config as RefungibleConfig; use pallet_unique::Config as UniqueConfig; +use sp_std::prelude::*; +use up_data_structs::{CollectionMode, CreateItemData, CreateNftData, TokenId}; +use up_sponsorship::SponsorshipHandler; -use crate::sponsoring::*; +use crate::{Runtime, runtime_common::sponsoring::*}; -use pallet_nonfungible::erc::{ - UniqueNFTCall, ERC721UniqueExtensionsCall, ERC721MintableCall, ERC721Call, TokenPropertiesCall, -}; -use pallet_fungible::erc::{UniqueFungibleCall, ERC20Call}; -use pallet_fungible::Config as FungibleConfig; -use pallet_nonfungible::Config as NonfungibleConfig; -use pallet_refungible::Config as RefungibleConfig; +pub type EvmSponsorshipHandler = ( + UniqueEthSponsorshipHandler, + pallet_evm_contract_helpers::HelpersContractSponsoring, +); pub struct UniqueEthSponsorshipHandler(PhantomData<*const T>); impl - SponsorshipHandler)> for UniqueEthSponsorshipHandler + SponsorshipHandler for UniqueEthSponsorshipHandler { - fn get_sponsor(who: &T::CrossAccountId, call: &(H160, Vec)) -> Option { - let collection_id = map_eth_to_id(&call.0)?; + fn get_sponsor( + who: &T::CrossAccountId, + call_context: &CallContext, + ) -> Option { + let collection_id = map_eth_to_id(&call_context.contract_address)?; let collection = >::new(collection_id)?; let sponsor = collection.sponsorship.sponsor()?.clone(); - let (method_id, mut reader) = AbiReader::new_call(&call.1).ok()?; + let (method_id, mut reader) = AbiReader::new_call(&call_context.input).ok()?; Some(T::CrossAccountId::from_sub(match &collection.mode { CollectionMode::NFT => { let call = >::parse(method_id, &mut reader).ok()??; @@ -71,18 +82,17 @@ impl let token_id: TokenId = token_id.try_into().ok()?; withdraw_transfer::(&collection, &who, &token_id).map(|()| sponsor) } - UniqueNFTCall::ERC721Mintable( - ERC721MintableCall::Mint { token_id, .. } - | ERC721MintableCall::MintWithTokenUri { token_id, .. }, - ) => { - let _token_id: TokenId = token_id.try_into().ok()?; - withdraw_create_item::( - &collection, - &who, - &CreateItemData::NFT(CreateNftData::default()), - ) - .map(|()| sponsor) - } + UniqueNFTCall::ERC721UniqueMintable( + ERC721UniqueMintableCall::Mint { .. } + | ERC721UniqueMintableCall::MintCheckId { .. } + | ERC721UniqueMintableCall::MintWithTokenUri { .. } + | ERC721UniqueMintableCall::MintWithTokenUriCheckId { .. }, + ) => withdraw_create_item::( + &collection, + &who, + &CreateItemData::NFT(CreateNftData::default()), + ) + .map(|()| sponsor), UniqueNFTCall::ERC721(ERC721Call::TransferFrom { token_id, from, .. 
}) => { let token_id: TokenId = token_id.try_into().ok()?; let from = T::CrossAccountId::from_eth(from); diff --git a/runtime/common/ethereum/transaction_converter.rs b/runtime/common/ethereum/transaction_converter.rs new file mode 100644 index 0000000000..9a7e945f85 --- /dev/null +++ b/runtime/common/ethereum/transaction_converter.rs @@ -0,0 +1,42 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +use codec::{Encode, Decode}; +use crate::{opaque, Runtime, UncheckedExtrinsic}; + +pub struct TransactionConverter; + +impl fp_rpc::ConvertTransaction for TransactionConverter { + fn convert_transaction(&self, transaction: pallet_ethereum::Transaction) -> UncheckedExtrinsic { + UncheckedExtrinsic::new_unsigned( + pallet_ethereum::Call::::transact { transaction }.into(), + ) + } +} + +impl fp_rpc::ConvertTransaction for TransactionConverter { + fn convert_transaction( + &self, + transaction: pallet_ethereum::Transaction, + ) -> opaque::UncheckedExtrinsic { + let extrinsic = UncheckedExtrinsic::new_unsigned( + pallet_ethereum::Call::::transact { transaction }.into(), + ); + let encoded = extrinsic.encode(); + opaque::UncheckedExtrinsic::decode(&mut &encoded[..]) + .expect("Encoded extrinsic is always valid") + } +} diff --git a/runtime/common/instance.rs b/runtime/common/instance.rs new file mode 100644 index 0000000000..dd28de5790 --- /dev/null +++ b/runtime/common/instance.rs @@ -0,0 +1,16 @@ +use crate::{ + runtime_common::{ + config::ethereum::CrossAccountId, ethereum::transaction_converter::TransactionConverter, + }, + Runtime, +}; +use up_common::types::opaque::RuntimeInstance; + +impl RuntimeInstance for Runtime { + type CrossAccountId = CrossAccountId; + type TransactionConverter = TransactionConverter; + + fn get_transaction_converter() -> TransactionConverter { + TransactionConverter + } +} diff --git a/runtime/common/maintenance.rs b/runtime/common/maintenance.rs new file mode 100644 index 0000000000..970e483c68 --- /dev/null +++ b/runtime/common/maintenance.rs @@ -0,0 +1,122 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . 
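`EvmSponsorshipHandler` above is a tuple of handlers and, in the usual FRAME-style tuple pattern, they are consulted in order with the first returned sponsor winning. The sketch below models that ordering with plain standalone types; it is an illustration of the pattern, not the `up_sponsorship` trait or its tuple implementation:

```rust
// Sketch only: a tuple of handlers where the first Some(sponsor) wins, which is
// the behaviour the ordering of EvmSponsorshipHandler above relies on.
trait SponsorLookup {
    fn get_sponsor(contract: &str) -> Option<&'static str>;
}

struct CollectionSponsor;
impl SponsorLookup for CollectionSponsor {
    fn get_sponsor(contract: &str) -> Option<&'static str> {
        (contract == "collection").then_some("collection-sponsor")
    }
}

struct ContractHelperSponsor;
impl SponsorLookup for ContractHelperSponsor {
    fn get_sponsor(contract: &str) -> Option<&'static str> {
        (contract == "helper").then_some("helper-sponsor")
    }
}

impl<A: SponsorLookup, B: SponsorLookup> SponsorLookup for (A, B) {
    fn get_sponsor(contract: &str) -> Option<&'static str> {
        // Ask the first handler; only fall through to the second on None.
        A::get_sponsor(contract).or_else(|| B::get_sponsor(contract))
    }
}

fn find_sponsor(contract: &str) -> Option<&'static str> {
    <(CollectionSponsor, ContractHelperSponsor)>::get_sponsor(contract)
}
```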
+ +use scale_info::TypeInfo; +use codec::{Encode, Decode}; +use up_common::types::AccountId; +use crate::{RuntimeCall, Maintenance}; + +use sp_runtime::{ + traits::{DispatchInfoOf, SignedExtension}, + transaction_validity::{ + TransactionValidity, ValidTransaction, InvalidTransaction, TransactionValidityError, + }, +}; + +#[derive(Debug, Encode, Decode, PartialEq, Eq, Clone, TypeInfo)] +pub struct CheckMaintenance; + +impl SignedExtension for CheckMaintenance { + type AccountId = AccountId; + type Call = RuntimeCall; + type AdditionalSigned = (); + type Pre = (); + + const IDENTIFIER: &'static str = "CheckMaintenance"; + + fn additional_signed(&self) -> Result { + Ok(()) + } + + fn pre_dispatch( + self, + who: &Self::AccountId, + call: &Self::Call, + info: &DispatchInfoOf, + len: usize, + ) -> Result { + self.validate(who, call, info, len).map(|_| ()) + } + + fn validate( + &self, + _who: &Self::AccountId, + call: &Self::Call, + _info: &DispatchInfoOf, + _len: usize, + ) -> TransactionValidity { + if Maintenance::is_enabled() { + match call { + RuntimeCall::EvmMigration(_) + | RuntimeCall::EVM(_) + | RuntimeCall::Ethereum(_) + | RuntimeCall::Inflation(_) + | RuntimeCall::Structure(_) + | RuntimeCall::Unique(_) => Err(TransactionValidityError::Invalid(InvalidTransaction::Call)), + + #[cfg(feature = "scheduler")] + RuntimeCall::Scheduler(_) => Err(TransactionValidityError::Invalid(InvalidTransaction::Call)), + + #[cfg(feature = "rmrk")] + RuntimeCall::RmrkCore(_) | RuntimeCall::RmrkEquip(_) => { + Err(TransactionValidityError::Invalid(InvalidTransaction::Call)) + } + + #[cfg(feature = "app-promotion")] + RuntimeCall::AppPromotion(_) => { + Err(TransactionValidityError::Invalid(InvalidTransaction::Call)) + } + + #[cfg(feature = "foreign-assets")] + RuntimeCall::ForeignAssets(_) => { + Err(TransactionValidityError::Invalid(InvalidTransaction::Call)) + } + + #[cfg(feature = "pallet-test-utils")] + RuntimeCall::TestUtils(_) => Err(TransactionValidityError::Invalid(InvalidTransaction::Call)), + + _ => Ok(ValidTransaction::default()), + } + } else { + Ok(ValidTransaction::default()) + } + } + + fn pre_dispatch_unsigned( + call: &Self::Call, + info: &DispatchInfoOf, + len: usize, + ) -> Result<(), TransactionValidityError> { + Self::validate_unsigned(call, info, len).map(|_| ()) + } + + fn validate_unsigned( + call: &Self::Call, + _info: &DispatchInfoOf, + _len: usize, + ) -> TransactionValidity { + if Maintenance::is_enabled() { + match call { + RuntimeCall::EVM(_) | RuntimeCall::Ethereum(_) | RuntimeCall::EvmMigration(_) => { + Err(TransactionValidityError::Invalid(InvalidTransaction::Call)) + } + _ => Ok(ValidTransaction::default()), + } + } else { + Ok(ValidTransaction::default()) + } + } +} diff --git a/runtime/common/mod.rs b/runtime/common/mod.rs new file mode 100644 index 0000000000..29e5106259 --- /dev/null +++ b/runtime/common/mod.rs @@ -0,0 +1,176 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. 
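To make the effect of the `CheckMaintenance` extension above concrete, here is a test-style sketch (a hedged illustration, not part of the patch): it assumes maintenance mode has already been enabled through the Maintenance pallet, and that the runtime's `AccountId`, `RuntimeCall`, and `CheckMaintenance` are in scope.

use frame_support::weights::DispatchInfo;
use sp_runtime::{
    traits::SignedExtension,
    transaction_validity::{InvalidTransaction, TransactionValidityError},
};

// With maintenance mode on, validating a call to any filtered pallet
// (EVM, Ethereum, Unique, ...) must fail with `InvalidTransaction::Call`;
// calls to other pallets keep passing through.
fn assert_call_blocked(who: &AccountId, call: &RuntimeCall) {
    let info = DispatchInfo::default();
    let res = CheckMaintenance.validate(who, call, &info, 0);
    assert_eq!(
        res.unwrap_err(),
        TransactionValidityError::Invalid(InvalidTransaction::Call)
    );
}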
+ +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +pub mod config; +pub mod construct_runtime; +pub mod dispatch; +pub mod ethereum; +pub mod instance; +pub mod maintenance; +pub mod runtime_apis; + +#[cfg(feature = "scheduler")] +pub mod scheduler; + +pub mod sponsoring; +pub mod weights; + +#[cfg(test)] +pub mod tests; + +use sp_core::H160; +use frame_support::traits::{Currency, OnUnbalanced, Imbalance}; +use sp_runtime::{ + generic, + traits::{BlakeTwo256, BlockNumberProvider}, + impl_opaque_keys, +}; +use sp_std::vec::Vec; + +#[cfg(feature = "std")] +use sp_version::NativeVersion; + +use crate::{ + Runtime, RuntimeCall, Balances, Treasury, Aura, Signature, AllPalletsWithSystem, + InherentDataExt, +}; +use up_common::types::{AccountId, BlockNumber}; + +#[macro_export] +macro_rules! unsupported { + () => { + pallet_common::unsupported!($crate::Runtime) + }; +} + +/// The address format for describing accounts. +pub type Address = sp_runtime::MultiAddress; +/// Block header type as expected by this runtime. +pub type Header = generic::Header; +/// Block type as expected by this runtime. +pub type Block = generic::Block; +/// A Block signed with a Justification +pub type SignedBlock = generic::SignedBlock; +/// BlockId type as expected by this runtime. +pub type BlockId = generic::BlockId; + +impl_opaque_keys! { + pub struct SessionKeys { + pub aura: Aura, + } +} + +/// The version information used to identify this runtime when compiled natively. +#[cfg(feature = "std")] +pub fn native_version() -> NativeVersion { + NativeVersion { + runtime_version: crate::VERSION, + can_author_with: Default::default(), + } +} + +pub type ChargeTransactionPayment = pallet_charge_transaction::ChargeTransactionPayment; + +pub type SignedExtra = ( + frame_system::CheckSpecVersion, + frame_system::CheckTxVersion, + frame_system::CheckGenesis, + frame_system::CheckEra, + frame_system::CheckNonce, + frame_system::CheckWeight, + maintenance::CheckMaintenance, + ChargeTransactionPayment, + //pallet_contract_helpers::ContractHelpersExtension, + pallet_ethereum::FakeTransactionFinalizer, +); + +/// Unchecked extrinsic type as expected by this runtime. +pub type UncheckedExtrinsic = + fp_self_contained::UncheckedExtrinsic; + +/// Extrinsic type that has already been checked. +pub type CheckedExtrinsic = + fp_self_contained::CheckedExtrinsic; + +/// Executive: handles dispatch to the various modules. 
+pub type Executive = frame_executive::Executive< + Runtime, + Block, + frame_system::ChainContext, + Runtime, + AllPalletsWithSystem, +>; + +type NegativeImbalance = >::NegativeImbalance; + +pub struct DealWithFees; +impl OnUnbalanced for DealWithFees { + fn on_unbalanceds(mut fees_then_tips: impl Iterator) { + if let Some(fees) = fees_then_tips.next() { + // for fees, 100% to treasury + let mut split = fees.ration(100, 0); + if let Some(tips) = fees_then_tips.next() { + // for tips, if any, 100% to treasury + tips.ration_merge_into(100, 0, &mut split); + } + Treasury::on_unbalanced(split.0); + // Author::on_unbalanced(split.1); + } + } +} + +pub struct RelayChainBlockNumberProvider(sp_std::marker::PhantomData); + +impl BlockNumberProvider + for RelayChainBlockNumberProvider +{ + type BlockNumber = BlockNumber; + + fn current_block_number() -> Self::BlockNumber { + cumulus_pallet_parachain_system::Pallet::::validation_data() + .map(|d| d.relay_parent_number) + .unwrap_or_default() + } +} + +pub(crate) struct CheckInherents; + +impl cumulus_pallet_parachain_system::CheckInherents for CheckInherents { + fn check_inherents( + block: &Block, + relay_state_proof: &cumulus_pallet_parachain_system::RelayChainStateProof, + ) -> sp_inherents::CheckInherentsResult { + let relay_chain_slot = relay_state_proof + .read_slot() + .expect("Could not read the relay chain slot from the proof"); + + let inherent_data = + cumulus_primitives_timestamp::InherentDataProvider::from_relay_chain_slot_and_duration( + relay_chain_slot, + sp_std::time::Duration::from_secs(6), + ) + .create_inherent_data() + .expect("Could not create the timestamp inherent data"); + + inherent_data.check_extrinsics(block) + } +} + +#[derive(codec::Encode, codec::Decode)] +pub enum XCMPMessage { + /// Transfer tokens to the given account from the Parachain account. + TransferToken(XAccountId, XBalance), +} diff --git a/runtime/common/src/runtime_apis.rs b/runtime/common/runtime_apis.rs similarity index 65% rename from runtime/common/src/runtime_apis.rs rename to runtime/common/runtime_apis.rs index 1b5ddfea15..8a33141ba2 100644 --- a/runtime/common/src/runtime_apis.rs +++ b/runtime/common/runtime_apis.rs @@ -14,6 +14,16 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . +#[macro_export] +macro_rules! dispatch_unique_runtime { + ($collection:ident.$method:ident($($name:ident),*)) => {{ + let collection = ::CollectionDispatch::dispatch(>::try_get($collection)?); + let dispatch = collection.as_dyn(); + + Ok::<_, DispatchError>(dispatch.$method($($name),*)) + }}; +} + #[macro_export] macro_rules! impl_common_runtime_apis { ( @@ -23,6 +33,31 @@ macro_rules! impl_common_runtime_apis { $($custom_apis:tt)+ )? ) => { + use sp_std::prelude::*; + use sp_api::impl_runtime_apis; + use sp_core::{crypto::KeyTypeId, OpaqueMetadata, H256, U256, H160}; + use sp_runtime::{ + Permill, + traits::Block as BlockT, + transaction_validity::{TransactionSource, TransactionValidity}, + ApplyExtrinsicResult, DispatchError, + }; + use fp_rpc::TransactionStatus; + use pallet_transaction_payment::{ + FeeDetails, RuntimeDispatchInfo, + }; + use pallet_evm::{ + Runner, account::CrossAccountId as _, + Account as EVMAccount, FeeCalculator, + }; + use runtime_common::{ + sponsoring::{SponsorshipPredict, UniqueSponsorshipPredict}, + dispatch::CollectionDispatch, + config::ethereum::CrossAccountId, + }; + use up_data_structs::*; + + impl_runtime_apis! { $($($custom_apis)+)? @@ -40,6 +75,11 @@ macro_rules! 
impl_common_runtime_apis { fn token_owner(collection: CollectionId, token: TokenId) -> Result, DispatchError> { dispatch_unique_runtime!(collection.token_owner(token)) } + + fn token_owners(collection: CollectionId, token: TokenId) -> Result, DispatchError> { + dispatch_unique_runtime!(collection.token_owners(token)) + } + fn topmost_token_owner(collection: CollectionId, token: TokenId) -> Result, DispatchError> { let budget = up_data_structs::budget::Value::new(10); @@ -89,7 +129,8 @@ macro_rules! impl_common_runtime_apis { ) -> Result, DispatchError> { let token_data = TokenData { properties: Self::token_properties(collection, token_id, keys)?, - owner: Self::token_owner(collection, token_id)? + owner: Self::token_owner(collection, token_id)?, + pieces: Self::total_pieces(collection, token_id)?.unwrap_or(0), }; Ok(token_data) @@ -132,16 +173,201 @@ macro_rules! impl_common_runtime_apis { Ok(>::collection_stats()) } fn next_sponsored(collection: CollectionId, account: CrossAccountId, token: TokenId) -> Result, DispatchError> { - Ok(<$crate::sponsoring::UniqueSponsorshipPredict as - $crate::sponsoring::SponsorshipPredict>::predict( + Ok( as SponsorshipPredict>::predict( collection, account, - token)) + token + )) } fn effective_collection_limits(collection: CollectionId) -> Result, DispatchError> { Ok(>::effective_collection_limits(collection)) } + + fn total_pieces(collection: CollectionId, token_id: TokenId) -> Result, DispatchError> { + dispatch_unique_runtime!(collection.total_pieces(token_id)) + } + } + + impl app_promotion_rpc::AppPromotionApi for Runtime { + #[allow(unused_variables)] + fn total_staked(staker: Option) -> Result { + #[cfg(not(feature = "app-promotion"))] + return unsupported!(); + + #[cfg(feature = "app-promotion")] + return Ok(>::cross_id_total_staked(staker).unwrap_or_default()); + } + + #[allow(unused_variables)] + fn total_staked_per_block(staker: CrossAccountId) -> Result, DispatchError> { + #[cfg(not(feature = "app-promotion"))] + return unsupported!(); + + #[cfg(feature = "app-promotion")] + return Ok(>::cross_id_total_staked_per_block(staker)); + } + + #[allow(unused_variables)] + fn pending_unstake(staker: Option) -> Result { + #[cfg(not(feature = "app-promotion"))] + return unsupported!(); + + #[cfg(feature = "app-promotion")] + return Ok(>::cross_id_pending_unstake(staker)); + } + + #[allow(unused_variables)] + fn pending_unstake_per_block(staker: CrossAccountId) -> Result, DispatchError> { + #[cfg(not(feature = "app-promotion"))] + return unsupported!(); + + #[cfg(feature = "app-promotion")] + return Ok(>::cross_id_pending_unstake_per_block(staker)) + } + } + + impl rmrk_rpc::RmrkApi< + Block, + AccountId, + RmrkCollectionInfo, + RmrkInstanceInfo, + RmrkResourceInfo, + RmrkPropertyInfo, + RmrkBaseInfo, + RmrkPartType, + RmrkTheme + > for Runtime { + fn last_collection_idx() -> Result { + #[cfg(feature = "rmrk")] + return pallet_proxy_rmrk_core::rpc::last_collection_idx::(); + + #[cfg(not(feature = "rmrk"))] + return unsupported!(); + } + + #[allow(unused_variables)] + fn collection_by_id(collection_id: RmrkCollectionId) -> Result>, DispatchError> { + #[cfg(feature = "rmrk")] + return pallet_proxy_rmrk_core::rpc::collection_by_id::(collection_id); + + #[cfg(not(feature = "rmrk"))] + return unsupported!(); + } + + #[allow(unused_variables)] + fn nft_by_id(collection_id: RmrkCollectionId, nft_by_id: RmrkNftId) -> Result>, DispatchError> { + #[cfg(feature = "rmrk")] + return pallet_proxy_rmrk_core::rpc::nft_by_id::(collection_id, nft_by_id); + + 
#[cfg(not(feature = "rmrk"))] + return unsupported!(); + } + + #[allow(unused_variables)] + fn account_tokens(account_id: AccountId, collection_id: RmrkCollectionId) -> Result, DispatchError> { + #[cfg(feature = "rmrk")] + return pallet_proxy_rmrk_core::rpc::account_tokens::(account_id, collection_id); + + #[cfg(not(feature = "rmrk"))] + return unsupported!(); + } + + #[allow(unused_variables)] + fn nft_children(collection_id: RmrkCollectionId, nft_id: RmrkNftId) -> Result, DispatchError> { + #[cfg(feature = "rmrk")] + return pallet_proxy_rmrk_core::rpc::nft_children::(collection_id, nft_id); + + #[cfg(not(feature = "rmrk"))] + return unsupported!(); + } + + #[allow(unused_variables)] + fn collection_properties( + collection_id: RmrkCollectionId, + filter_keys: Option> + ) -> Result, DispatchError> { + #[cfg(feature = "rmrk")] + return pallet_proxy_rmrk_core::rpc::collection_properties::(collection_id, filter_keys); + + #[cfg(not(feature = "rmrk"))] + return unsupported!(); + } + + #[allow(unused_variables)] + fn nft_properties( + collection_id: RmrkCollectionId, + nft_id: RmrkNftId, + filter_keys: Option> + ) -> Result, DispatchError> { + #[cfg(feature = "rmrk")] + return pallet_proxy_rmrk_core::rpc::nft_properties::(collection_id, nft_id, filter_keys); + + #[cfg(not(feature = "rmrk"))] + return unsupported!(); + } + + #[allow(unused_variables)] + fn nft_resources(collection_id: RmrkCollectionId,nft_id: RmrkNftId) -> Result, DispatchError> { + #[cfg(feature = "rmrk")] + return pallet_proxy_rmrk_core::rpc::nft_resources::(collection_id, nft_id); + + #[cfg(not(feature = "rmrk"))] + return unsupported!(); + } + + #[allow(unused_variables)] + fn nft_resource_priority( + collection_id: RmrkCollectionId, + nft_id: RmrkNftId, + resource_id: RmrkResourceId + ) -> Result, DispatchError> { + #[cfg(feature = "rmrk")] + return pallet_proxy_rmrk_core::rpc::nft_resource_priority::(collection_id, nft_id, resource_id); + + #[cfg(not(feature = "rmrk"))] + return unsupported!(); + } + + #[allow(unused_variables)] + fn base(base_id: RmrkBaseId) -> Result>, DispatchError> { + #[cfg(feature = "rmrk")] + return pallet_proxy_rmrk_equip::rpc::base::(base_id); + + #[cfg(not(feature = "rmrk"))] + return unsupported!(); + } + + #[allow(unused_variables)] + fn base_parts(base_id: RmrkBaseId) -> Result, DispatchError> { + #[cfg(feature = "rmrk")] + return pallet_proxy_rmrk_equip::rpc::base_parts::(base_id); + + #[cfg(not(feature = "rmrk"))] + return unsupported!(); + } + + #[allow(unused_variables)] + fn theme_names(base_id: RmrkBaseId) -> Result, DispatchError> { + #[cfg(feature = "rmrk")] + return pallet_proxy_rmrk_equip::rpc::theme_names::(base_id); + + #[cfg(not(feature = "rmrk"))] + return unsupported!(); + } + + #[allow(unused_variables)] + fn theme( + base_id: RmrkBaseId, + theme_name: RmrkThemeName, + filter_keys: Option> + ) -> Result, DispatchError> { + #[cfg(feature = "rmrk")] + return pallet_proxy_rmrk_equip::rpc::theme::(base_id, theme_name, filter_keys); + + #[cfg(not(feature = "rmrk"))] + return unsupported!(); + } } impl sp_api::Core for Runtime { @@ -332,7 +558,7 @@ macro_rules! impl_common_runtime_apis { fn extrinsic_filter(xts: Vec<::Extrinsic>) -> Vec { xts.into_iter().filter_map(|xt| match xt.0.function { - Call::Ethereum(pallet_ethereum::Call::transact { transaction }) => Some(transaction), + RuntimeCall::Ethereum(pallet_ethereum::Call::transact { transaction }) => Some(transaction), _ => None }).collect() } @@ -450,18 +676,25 @@ macro_rules! 
impl_common_runtime_apis { list_benchmark!(list, extra, pallet_unique, Unique); list_benchmark!(list, extra, pallet_structure, Structure); list_benchmark!(list, extra, pallet_inflation, Inflation); + list_benchmark!(list, extra, pallet_app_promotion, AppPromotion); list_benchmark!(list, extra, pallet_fungible, Fungible); - list_benchmark!(list, extra, pallet_refungible, Refungible); list_benchmark!(list, extra, pallet_nonfungible, Nonfungible); - list_benchmark!(list, extra, pallet_unique_scheduler, Scheduler); - #[cfg(not(feature = "unique-runtime"))] + #[cfg(not(any(feature = "unique-runtime", feature = "quartz-runtime")))] + list_benchmark!(list, extra, pallet_refungible, Refungible); + + // #[cfg(not(any(feature = "unique-runtime", feature = "quartz-runtime")))] + // list_benchmark!(list, extra, pallet_unique_scheduler, Scheduler); + + #[cfg(not(any(feature = "unique-runtime", feature = "quartz-runtime")))] list_benchmark!(list, extra, pallet_proxy_rmrk_core, RmrkCore); - #[cfg(not(feature = "unique-runtime"))] + #[cfg(not(any(feature = "unique-runtime", feature = "quartz-runtime")))] list_benchmark!(list, extra, pallet_proxy_rmrk_equip, RmrkEquip); - list_benchmark!(list, extra, pallet_maintenance, Maintenance); + #[cfg(not(any(feature = "unique-runtime", feature = "quartz-runtime")))] + list_benchmark!(list, extra, pallet_foreign_assets, ForeignAssets); + // list_benchmark!(list, extra, pallet_evm_coder_substrate, EvmCoderSubstrate); @@ -503,18 +736,24 @@ macro_rules! impl_common_runtime_apis { add_benchmark!(params, batches, pallet_unique, Unique); add_benchmark!(params, batches, pallet_structure, Structure); add_benchmark!(params, batches, pallet_inflation, Inflation); + add_benchmark!(params, batches, pallet_app_promotion, AppPromotion); add_benchmark!(params, batches, pallet_fungible, Fungible); - add_benchmark!(params, batches, pallet_refungible, Refungible); add_benchmark!(params, batches, pallet_nonfungible, Nonfungible); - add_benchmark!(params, batches, pallet_unique_scheduler, Scheduler); - #[cfg(not(feature = "unique-runtime"))] + #[cfg(not(any(feature = "unique-runtime", feature = "quartz-runtime")))] + add_benchmark!(params, batches, pallet_refungible, Refungible); + + // #[cfg(not(any(feature = "unique-runtime", feature = "quartz-runtime")))] + // add_benchmark!(params, batches, pallet_unique_scheduler, Scheduler); + + #[cfg(not(any(feature = "unique-runtime", feature = "quartz-runtime")))] add_benchmark!(params, batches, pallet_proxy_rmrk_core, RmrkCore); - #[cfg(not(feature = "unique-runtime"))] + #[cfg(not(any(feature = "unique-runtime", feature = "quartz-runtime")))] add_benchmark!(params, batches, pallet_proxy_rmrk_equip, RmrkEquip); - add_benchmark!(params, batches, pallet_maintenance, Maintenance); + #[cfg(not(any(feature = "unique-runtime", feature = "quartz-runtime")))] + add_benchmark!(params, batches, pallet_foreign_assets, ForeignAssets); // add_benchmark!(params, batches, pallet_evm_coder_substrate, EvmCoderSubstrate); @@ -525,14 +764,26 @@ macro_rules! 
impl_common_runtime_apis { #[cfg(feature = "try-runtime")] impl frame_try_runtime::TryRuntime for Runtime { - fn on_runtime_upgrade() -> (Weight, Weight) { + fn on_runtime_upgrade() -> (frame_support::pallet_prelude::Weight, frame_support::pallet_prelude::Weight) { log::info!("try-runtime::on_runtime_upgrade unique-chain."); let weight = Executive::try_runtime_upgrade().unwrap(); - (weight, RuntimeBlockWeights::get().max_block) + (weight, crate::config::substrate::RuntimeBlockWeights::get().max_block) } - fn execute_block_no_check(block: Block) -> Weight { - Executive::execute_block_no_check(block) + fn execute_block( + block: Block, + state_root_check: bool, + select: frame_try_runtime::TryStateSelect + ) -> frame_support::pallet_prelude::Weight { + log::info!( + target: "node-runtime", + "try-runtime: executing block {:?} / root checks: {:?} / try-state-select: {:?}", + block.header.hash(), + state_root_check, + select, + ); + + Executive::try_execute_block(block, state_root_check, select).unwrap() } } } diff --git a/runtime/common/scheduler.rs b/runtime/common/scheduler.rs new file mode 100644 index 0000000000..7c0e3104fe --- /dev/null +++ b/runtime/common/scheduler.rs @@ -0,0 +1,143 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +use frame_support::{ + traits::NamedReservableCurrency, + weights::{GetDispatchInfo, PostDispatchInfo, DispatchInfo}, +}; +use sp_runtime::{ + traits::{Dispatchable, Applyable, Member}, + generic::Era, + transaction_validity::TransactionValidityError, + DispatchErrorWithPostInfo, DispatchError, +}; +use codec::Encode; +use crate::{Runtime, Call, Origin, Balances, ChargeTransactionPayment, maintenance}; +use up_common::types::{AccountId, Balance}; +use fp_self_contained::SelfContainedCall; +use pallet_unique_scheduler::DispatchCall; + +/// The SignedExtension to the basic transaction logic. 
+pub type SignedExtraScheduler = ( + frame_system::CheckSpecVersion, + frame_system::CheckGenesis, + frame_system::CheckEra, + frame_system::CheckNonce, + frame_system::CheckWeight, + maintenance::CheckMaintenance, + ChargeTransactionPayment, +); + +fn get_signed_extras(from: ::AccountId) -> SignedExtraScheduler { + ( + frame_system::CheckSpecVersion::::new(), + frame_system::CheckGenesis::::new(), + frame_system::CheckEra::::from(Era::Immortal), + frame_system::CheckNonce::::from(frame_system::Pallet::::account_nonce( + from, + )), + frame_system::CheckWeight::::new(), + maintenance::CheckMaintenance, + ) +} + +pub struct SchedulerPaymentExecutor; + +impl + DispatchCall for SchedulerPaymentExecutor +where + ::Call: Member + + Dispatchable + + SelfContainedCall + + GetDispatchInfo + + From>, + SelfContainedSignedInfo: Send + Sync + 'static, + Call: From<::Call> + + From<::Call> + + SelfContainedCall, + sp_runtime::AccountId32: From<::AccountId>, +{ + fn dispatch_call( + signer: ::AccountId, + call: ::Call, + ) -> Result< + Result>, + TransactionValidityError, + > { + let dispatch_info = call.get_dispatch_info(); + let extrinsic = fp_self_contained::CheckedExtrinsic::< + AccountId, + Call, + SignedExtraScheduler, + SelfContainedSignedInfo, + > { + signed: fp_self_contained::CheckedSignature::< + AccountId, + SignedExtraScheduler, + SelfContainedSignedInfo, + >::Signed(signer.clone().into(), get_signed_extras(signer.into())), + function: call.into(), + }; + + extrinsic.apply::(&dispatch_info, 0) + } + + fn reserve_balance( + id: [u8; 16], + sponsor: ::AccountId, + call: ::Call, + count: u32, + ) -> Result<(), DispatchError> { + let dispatch_info = call.get_dispatch_info(); + let weight: Balance = ChargeTransactionPayment::traditional_fee(0, &dispatch_info, 0) + .saturating_mul(count.into()); + + >::reserve_named( + &id, + &(sponsor.into()), + weight, + ) + } + + fn pay_for_call( + id: [u8; 16], + sponsor: ::AccountId, + call: ::Call, + ) -> Result { + let dispatch_info = call.get_dispatch_info(); + let weight: Balance = ChargeTransactionPayment::traditional_fee(0, &dispatch_info, 0); + Ok( + >::unreserve_named( + &id, + &(sponsor.into()), + weight, + ), + ) + } + + fn cancel_reserve( + id: [u8; 16], + sponsor: ::AccountId, + ) -> Result { + Ok( + >::unreserve_named( + &id, + &(sponsor.into()), + u128::MAX, + ), + ) + } +} diff --git a/runtime/common/src/sponsoring.rs b/runtime/common/sponsoring.rs similarity index 98% rename from runtime/common/src/sponsoring.rs rename to runtime/common/sponsoring.rs index 2b7375bd47..62452a9d29 100644 --- a/runtime/common/src/sponsoring.rs +++ b/runtime/common/sponsoring.rs @@ -156,17 +156,13 @@ pub fn withdraw_transfer( pub fn withdraw_create_item( collection: &CollectionHandle, who: &T::CrossAccountId, - _properties: &CreateItemData, + properties: &CreateItemData, ) -> Option<()> { - if _properties.data_size() as u32 > collection.limits.sponsored_data_size() { - return None; - } - // sponsor timeout let block_number = >::block_number() as T::BlockNumber; let limit = collection .limits - .sponsor_transfer_timeout(match _properties { + .sponsor_transfer_timeout(match properties { CreateItemData::NFT(_) => NFT_SPONSOR_TRANSFER_TIMEOUT, CreateItemData::Fungible(_) => FUNGIBLE_SPONSOR_TRANSFER_TIMEOUT, CreateItemData::ReFungible(_) => REFUNGIBLE_SPONSOR_TRANSFER_TIMEOUT, diff --git a/runtime/common/tests/mod.rs b/runtime/common/tests/mod.rs new file mode 100644 index 0000000000..887da689af --- /dev/null +++ b/runtime/common/tests/mod.rs @@ -0,0 +1,46 @@ +// 
Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +use sp_runtime::BuildStorage; +use sp_core::{Public, Pair}; +use sp_std::vec; +use up_common::types::AuraId; +use crate::{GenesisConfig, ParachainInfoConfig, AuraConfig}; + +pub mod xcm; + +fn get_from_seed(seed: &str) -> ::Public { + TPublic::Pair::from_string(&format!("//{}", seed), None) + .expect("static values are valid; qed") + .public() +} + +fn new_test_ext(para_id: u32) -> sp_io::TestExternalities { + let cfg = GenesisConfig { + aura: AuraConfig { + authorities: vec![ + get_from_seed::("Alice"), + get_from_seed::("Bob"), + ], + }, + parachain_info: ParachainInfoConfig { + parachain_id: para_id.into(), + }, + ..GenesisConfig::default() + }; + + cfg.build_storage().unwrap().into() +} diff --git a/runtime/common/tests/xcm.rs b/runtime/common/tests/xcm.rs new file mode 100644 index 0000000000..405a7ce843 --- /dev/null +++ b/runtime/common/tests/xcm.rs @@ -0,0 +1,162 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +use xcm_executor::traits::ShouldExecute; +use xcm::latest::prelude::*; +use logtest::Logger; +use crate::RuntimeCall; +use super::new_test_ext; + +fn catch_xcm_barrier_log(logger: &mut Logger, expected_msg: &str) -> Result<(), String> { + for record in logger { + if record.target() == "xcm::barrier" && record.args() == expected_msg { + return Ok(()); + } + } + + Err(format!( + "the expected XCM barrier log `{}` is not found", + expected_msg + )) +} + +/// WARNING: Uses log capturing +/// See https://docs.rs/logtest/latest/logtest/index.html#constraints +pub fn barrier_denies_transact(logger: &mut Logger) { + let location = MultiLocation { + parents: 0, + interior: Junctions::Here, + }; + + // We will never decode this "call", + // so it is irrelevant what we are passing to the `transact` cmd. 
+ let fake_encoded_call = vec![0u8]; + + let transact_inst = Transact { + origin_type: OriginKind::Superuser, + require_weight_at_most: 0, + call: fake_encoded_call.into(), + }; + + let mut xcm_program = Xcm::(vec![transact_inst]); + + let max_weight = 100_000; + let mut weight_credit = 100_000_000; + + let result = B::should_execute(&location, &mut xcm_program, max_weight, &mut weight_credit); + + assert!( + result.is_err(), + "the barrier should disallow the XCM transact cmd" + ); + + catch_xcm_barrier_log(logger, "transact XCM rejected").unwrap(); +} + +fn xcm_execute( + self_para_id: u32, + location: &MultiLocation, + xcm: &mut Xcm, +) -> Result<(), ()> { + new_test_ext(self_para_id).execute_with(|| { + let max_weight = 100_000; + let mut weight_credit = 100_000_000; + + B::should_execute(&location, xcm, max_weight, &mut weight_credit) + }) +} + +fn make_multiassets(location: &MultiLocation) -> MultiAssets { + let id = AssetId::Concrete(location.clone()); + let fun = Fungibility::Fungible(42); + let multiasset = MultiAsset { id, fun }; + + multiasset.into() +} + +fn make_transfer_reserve_asset(location: &MultiLocation) -> Xcm { + let assets = make_multiassets(location); + let inst = TransferReserveAsset { + assets, + dest: location.clone(), + xcm: Xcm(vec![]), + }; + + Xcm::(vec![inst]) +} + +fn make_deposit_reserve_asset(location: &MultiLocation) -> Xcm { + let assets = make_multiassets(location); + let inst = DepositReserveAsset { + assets: assets.into(), + max_assets: 42, + dest: location.clone(), + xcm: Xcm(vec![]), + }; + + Xcm::(vec![inst]) +} + +fn expect_transfer_location_denied( + logger: &mut Logger, + self_para_id: u32, + location: &MultiLocation, + xcm: &mut Xcm, +) -> Result<(), String> { + let result = xcm_execute::(self_para_id, location, xcm); + + if result.is_ok() { + return Err("the barrier should deny the unknown location".into()); + } + + catch_xcm_barrier_log(logger, "Unexpected deposit or transfer location") +} + +/// WARNING: Uses log capturing +/// See https://docs.rs/logtest/latest/logtest/index.html#constraints +pub fn barrier_denies_transfer_from_unknown_location( + logger: &mut Logger, + self_para_id: u32, +) -> Result<(), String> +where + B: ShouldExecute, +{ + const UNKNOWN_PARACHAIN_ID: u32 = 4057; + + let unknown_location = MultiLocation { + parents: 1, + interior: X1(Parachain(UNKNOWN_PARACHAIN_ID)), + }; + + let mut transfer_reserve_asset = make_transfer_reserve_asset(&unknown_location); + let mut deposit_reserve_asset = make_deposit_reserve_asset(&unknown_location); + + expect_transfer_location_denied::( + logger, + self_para_id, + &unknown_location, + &mut transfer_reserve_asset, + )?; + + expect_transfer_location_denied::( + logger, + self_para_id, + &unknown_location, + &mut deposit_reserve_asset, + )?; + + Ok(()) +} diff --git a/runtime/common/src/weights.rs b/runtime/common/weights.rs similarity index 74% rename from runtime/common/src/weights.rs rename to runtime/common/weights.rs index f926c8d5ab..600983d0a4 100644 --- a/runtime/common/src/weights.rs +++ b/runtime/common/weights.rs @@ -20,25 +20,42 @@ use pallet_common::{CommonWeightInfo, dispatch::dispatch_weight, RefungibleExten use pallet_fungible::{Config as FungibleConfig, common::CommonWeights as FungibleWeights}; use pallet_nonfungible::{Config as NonfungibleConfig, common::CommonWeights as NonfungibleWeights}; + +#[cfg(feature = "refungible")] use pallet_refungible::{ Config as RefungibleConfig, weights::WeightInfo, common::CommonWeights as RefungibleWeights, }; use 
up_data_structs::{CreateItemExData, CreateItemData}; macro_rules! max_weight_of { - ($method:ident ( $($args:tt)* )) => { - >::$method($($args)*) - .max(>::$method($($args)*)) - .max(>::$method($($args)*)) - }; + ($method:ident ( $($args:tt)* )) => {{ + let max_weight = >::$method($($args)*) + .max(>::$method($($args)*)); + + #[cfg(feature = "refungible")] + let max_weight = max_weight.max(>::$method($($args)*)); + + max_weight + }}; } -pub struct CommonWeights(PhantomData) -where - T: FungibleConfig + NonfungibleConfig + RefungibleConfig; +#[cfg(not(feature = "refungible"))] +pub trait CommonWeightConfigs: FungibleConfig + NonfungibleConfig {} + +#[cfg(not(feature = "refungible"))] +impl CommonWeightConfigs for T {} + +#[cfg(feature = "refungible")] +pub trait CommonWeightConfigs: FungibleConfig + NonfungibleConfig + RefungibleConfig {} + +#[cfg(feature = "refungible")] +impl CommonWeightConfigs for T {} + +pub struct CommonWeights(PhantomData); + impl CommonWeightInfo for CommonWeights where - T: FungibleConfig + NonfungibleConfig + RefungibleConfig, + T: CommonWeightConfigs, { fn create_item() -> Weight { dispatch_weight::() + max_weight_of!(create_item()) @@ -99,8 +116,13 @@ where fn burn_recursively_breadth_raw(amount: u32) -> Weight { max_weight_of!(burn_recursively_breadth_raw(amount)) } + + fn token_owner() -> Weight { + max_weight_of!(token_owner()) + } } +#[cfg(feature = "refungible")] impl RefungibleExtensionsWeightInfo for CommonWeights where T: FungibleConfig + NonfungibleConfig + RefungibleConfig, @@ -109,3 +131,13 @@ where dispatch_weight::() + <::WeightInfo>::repartition_item() } } + +#[cfg(not(feature = "refungible"))] +impl RefungibleExtensionsWeightInfo for CommonWeights +where + T: FungibleConfig + NonfungibleConfig, +{ + fn repartition() -> Weight { + dispatch_weight::() + } +} diff --git a/runtime/opal/CHANGELOG.md b/runtime/opal/CHANGELOG.md new file mode 100644 index 0000000000..f28cd71f83 --- /dev/null +++ b/runtime/opal/CHANGELOG.md @@ -0,0 +1,25 @@ +# Change Log + +All notable changes to this project will be documented in this file. + + + +## [v0.9.27] 2022-09-08 + +### Added + +- `AppPromotion` pallet to runtime. 
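As a side note on the `max_weight_of!` macro in `runtime/common/weights.rs` above: it takes the per-pallet weight of one method and keeps the maximum, folding the refungible term in only when that pallet is compiled into the runtime. A minimal standalone sketch of the shape it expands to (per-pallet weights are shown as plain values for brevity; `refungible_weight` is a placeholder, not the real `RefungibleWeights` call):

use frame_support::weights::Weight;

// Sketch of what `max_weight_of!($method(..))` produces: a fungible/nonfungible
// maximum, optionally widened by the refungible weight behind a feature gate.
fn max_weight_of_sketch(fungible: Weight, nonfungible: Weight) -> Weight {
    let max_weight = fungible.max(nonfungible);

    // Only folded in when the refungible pallet is part of the runtime.
    #[cfg(feature = "refungible")]
    let max_weight = max_weight.max(refungible_weight());

    max_weight
}

#[cfg(feature = "refungible")]
fn refungible_weight() -> Weight {
    // Placeholder for the corresponding `RefungibleWeights` method call.
    Weight::from_ref_time(0)
}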
+ +## [v0.9.27] 2022-08-16 + +### Bugfixes + +- Add missing config keys 74f532ac28dce15c15e7d576c074a58eba658c08 + +### Other changes + +- build: Upgrade polkadot to v0.9.27 2c498572636f2b34d53b1c51b7283a761a7dc90a + +- build: Upgrade polkadot to v0.9.26 85515e54c4ca1b82a2630034e55dcc804c643bf8 + +- build: Upgrade polkadot to v0.9.25 cdfb9bdc7b205ff1b5134f034ef9973d769e5e6b diff --git a/runtime/opal/Cargo.toml b/runtime/opal/Cargo.toml index c34621fd63..2daa9a8cbf 100644 --- a/runtime/opal/Cargo.toml +++ b/runtime/opal/Cargo.toml @@ -1,6 +1,8 @@ ################################################################################ # Package +cargo-features = ["workspace-inheritance"] + [package] authors = ['Unique Network '] build = 'build.rs' @@ -10,7 +12,7 @@ homepage = 'https://unique.network' license = 'GPLv3' name = 'opal-runtime' repository = 'https://github.com/UniqueNetwork/unique-chain' -version = '0.9.24' +version = "0.9.30" [package.metadata.docs.rs] targets = ['x86_64-unknown-linux-gnu'] @@ -35,8 +37,10 @@ runtime-benchmarks = [ 'pallet-nonfungible/runtime-benchmarks', 'pallet-proxy-rmrk-core/runtime-benchmarks', 'pallet-proxy-rmrk-equip/runtime-benchmarks', + 'pallet-foreign-assets/runtime-benchmarks', 'pallet-unique/runtime-benchmarks', 'pallet-inflation/runtime-benchmarks', + 'pallet-app-promotion/runtime-benchmarks', 'pallet-unique-scheduler/runtime-benchmarks', 'pallet-xcm/runtime-benchmarks', 'sp-runtime/runtime-benchmarks', @@ -46,7 +50,46 @@ runtime-benchmarks = [ try-runtime = [ 'frame-try-runtime', 'frame-executive/try-runtime', + 'frame-support/try-runtime', 'frame-system/try-runtime', + 'cumulus-pallet-parachain-system/try-runtime', + 'parachain-info/try-runtime', + 'pallet-aura/try-runtime', + 'cumulus-pallet-aura-ext/try-runtime', + 'pallet-balances/try-runtime', + 'pallet-randomness-collective-flip/try-runtime', + 'pallet-timestamp/try-runtime', + 'pallet-transaction-payment/try-runtime', + 'pallet-treasury/try-runtime', + 'pallet-sudo/try-runtime', + 'orml-vesting/try-runtime', + 'orml-xtokens/try-runtime', + 'orml-tokens/try-runtime', + 'cumulus-pallet-xcmp-queue/try-runtime', + 'pallet-xcm/try-runtime', + 'cumulus-pallet-xcm/try-runtime', + 'cumulus-pallet-dmp-queue/try-runtime', + 'pallet-inflation/try-runtime', + 'pallet-unique/try-runtime', + 'pallet-unique-scheduler/try-runtime', + 'pallet-configuration/try-runtime', + 'pallet-charge-transaction/try-runtime', + 'pallet-common/try-runtime', + 'pallet-fungible/try-runtime', + 'pallet-refungible/try-runtime', + 'pallet-nonfungible/try-runtime', + 'pallet-structure/try-runtime', + 'pallet-proxy-rmrk-core/try-runtime', + 'pallet-proxy-rmrk-equip/try-runtime', + 'pallet-app-promotion/try-runtime', + 'pallet-foreign-assets/try-runtime', + 'pallet-evm/try-runtime', + 'pallet-ethereum/try-runtime', + 'pallet-evm-coder-substrate/try-runtime', + 'pallet-evm-contract-helpers/try-runtime', + 'pallet-evm-transaction-payment/try-runtime', + 'pallet-evm-migration/try-runtime', + 'pallet-maintenance/try-runtime', ] std = [ 'codec/std', @@ -73,7 +116,6 @@ std = [ 'pallet-transaction-payment/std', 'pallet-transaction-payment-rpc-runtime-api/std', 'pallet-treasury/std', - # 'pallet-vesting/std', 'pallet-evm/std', 'pallet-evm-migration/std', 'pallet-evm-contract-helpers/std', @@ -83,11 +125,14 @@ std = [ 'pallet-base-fee/std', 'fp-rpc/std', 'up-rpc/std', + 'app-promotion-rpc/std', 'fp-evm-mapping/std', 'fp-self-contained/std', 'parachain-info/std', 'serde', 'pallet-inflation/std', + 'pallet-configuration/std', + 
'pallet-app-promotion/std', 'pallet-common/std', 'pallet-structure/std', 'pallet-fungible/std', @@ -114,15 +159,27 @@ std = [ 'xcm/std', 'xcm-builder/std', 'xcm-executor/std', - 'unique-runtime-common/std', + 'up-common/std', 'rmrk-rpc/std', + 'evm-coder/std', + 'up-sponsorship/std', "orml-vesting/std", + "orml-tokens/std", + "orml-xtokens/std", + "orml-traits/std", + "pallet-foreign-assets/std", 'pallet-maintenance/std', ] limit-testing = ['pallet-unique/limit-testing', 'up-data-structs/limit-testing'] -opal-runtime = [] +opal-runtime = ['refungible', 'rmrk', 'app-promotion', 'foreign-assets'] + +refungible = [] +scheduler = [] +rmrk = [] +foreign-assets = [] +app-promotion = [] ################################################################################ # Substrate Dependencies @@ -137,39 +194,39 @@ version = '3.1.2' default-features = false git = "https://github.com/paritytech/substrate" optional = true -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.frame-try-runtime] default-features = false -git = 'https://github.com/paritytech/substrate' +git = "https://github.com/paritytech/substrate" optional = true -branch = 'polkadot-v0.9.24' +branch = "polkadot-v0.9.30" [dependencies.frame-executive] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.frame-support] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.frame-system] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.frame-system-benchmarking] default-features = false git = "https://github.com/paritytech/substrate" optional = true -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.frame-system-rpc-runtime-api] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.hex-literal] optional = true @@ -184,12 +241,12 @@ version = '1.0.130' [dependencies.pallet-aura] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.pallet-balances] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" # Contracts specific packages # [dependencies.pallet-contracts] @@ -213,102 +270,97 @@ branch = "polkadot-v0.9.24" [dependencies.pallet-randomness-collective-flip] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.pallet-sudo] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.pallet-timestamp] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.pallet-transaction-payment] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.pallet-transaction-payment-rpc-runtime-api] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.pallet-treasury] default-features = false git = "https://github.com/paritytech/substrate" -branch = 
"polkadot-v0.9.24" - -# [dependencies.pallet-vesting] -# default-features = false -# git = 'https://github.com/paritytech/substrate' -# branch = 'master' +branch = "polkadot-v0.9.30" [dependencies.sp-arithmetic] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-api] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-block-builder] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-core] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-consensus-aura] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-inherents] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-io] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-offchain] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-runtime] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-session] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-std] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-transaction-pool] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-version] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.smallvec] version = '1.6.1' @@ -319,46 +371,46 @@ version = '1.6.1' [dependencies.parachain-info] default-features = false git = "https://github.com/paritytech/cumulus" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.cumulus-pallet-aura-ext] git = "https://github.com/paritytech/cumulus" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" default-features = false [dependencies.cumulus-pallet-parachain-system] git = "https://github.com/paritytech/cumulus" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" default-features = false [dependencies.cumulus-primitives-core] git = "https://github.com/paritytech/cumulus" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" default-features = false [dependencies.cumulus-pallet-xcm] git = "https://github.com/paritytech/cumulus" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" default-features = false [dependencies.cumulus-pallet-dmp-queue] git = "https://github.com/paritytech/cumulus" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" default-features = false [dependencies.cumulus-pallet-xcmp-queue] git = "https://github.com/paritytech/cumulus" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" default-features = false [dependencies.cumulus-primitives-utility] git = 
"https://github.com/paritytech/cumulus" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" default-features = false [dependencies.cumulus-primitives-timestamp] git = "https://github.com/paritytech/cumulus" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" default-features = false ################################################################################ @@ -366,51 +418,53 @@ default-features = false [dependencies.polkadot-parachain] git = "https://github.com/paritytech/polkadot" -branch = "release-v0.9.24" +branch = "release-v0.9.30" default-features = false [dependencies.xcm] git = "https://github.com/paritytech/polkadot" -branch = "release-v0.9.24" +branch = "release-v0.9.30" default-features = false [dependencies.xcm-builder] git = "https://github.com/paritytech/polkadot" -branch = "release-v0.9.24" +branch = "release-v0.9.30" default-features = false [dependencies.xcm-executor] git = "https://github.com/paritytech/polkadot" -branch = "release-v0.9.24" +branch = "release-v0.9.30" default-features = false [dependencies.pallet-xcm] git = "https://github.com/paritytech/polkadot" -branch = "release-v0.9.24" -default-features = false - -[dependencies.orml-vesting] -git = "https://github.com/uniquenetwork/open-runtime-module-library" -branch = "unique-polkadot-v0.9.24" -version = "0.4.1-dev" +branch = "release-v0.9.30" default-features = false ################################################################################ # local dependencies [dependencies] +orml-vesting.workspace = true +orml-xtokens.workspace = true +orml-tokens.workspace = true +orml-traits.workspace = true + log = { version = "0.4.16", default-features = false } -unique-runtime-common = { path = "../common", default-features = false } +up-common = { path = "../../primitives/common", default-features = false } scale-info = { version = "2.0.1", default-features = false, features = [ "derive", ] } derivative = "2.2.0" pallet-unique = { path = '../../pallets/unique', default-features = false } up-rpc = { path = "../../primitives/rpc", default-features = false } +app-promotion-rpc = { path = "../../primitives/app_promotion_rpc", default-features = false } rmrk-rpc = { path = "../../primitives/rmrk-rpc", default-features = false } -fp-evm-mapping = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } +fp-evm-mapping = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } pallet-inflation = { path = '../../pallets/inflation', default-features = false } +pallet-app-promotion = { path = '../../pallets/app-promotion', default-features = false } up-data-structs = { path = '../../primitives/data-structs', default-features = false } +pallet-configuration = { default-features = false, path = "../../pallets/configuration" } pallet-common = { default-features = false, path = "../../pallets/common" } pallet-structure = { default-features = false, path = "../../pallets/structure" } pallet-fungible = { default-features = false, path = "../../pallets/fungible" } @@ -419,22 +473,35 @@ pallet-nonfungible = { default-features = false, path = "../../pallets/nonfungib pallet-proxy-rmrk-core = { default-features = false, path = "../../pallets/proxy-rmrk-core", package = "pallet-rmrk-core" } pallet-proxy-rmrk-equip = { default-features = false, path = "../../pallets/proxy-rmrk-equip", package = "pallet-rmrk-equip" } pallet-unique-scheduler = { path = '../../pallets/scheduler', default-features = false } 
-# pallet-contract-helpers = { path = '../pallets/contract-helpers', default-features = false, version = '0.1.0' } -pallet-charge-transaction = { git = "https://github.com/uniquenetwork/pallet-sponsoring", branch = "polkadot-v0.9.24", package = "pallet-template-transaction-payment", default-features = false, version = "3.0.0" } +pallet-charge-transaction = { git = "https://github.com/uniquenetwork/pallet-sponsoring", branch = "polkadot-v0.9.30", package = "pallet-template-transaction-payment", default-features = false, version = "3.0.0" } pallet-evm-migration = { path = '../../pallets/evm-migration', default-features = false } pallet-evm-contract-helpers = { path = '../../pallets/evm-contract-helpers', default-features = false } pallet-evm-transaction-payment = { path = '../../pallets/evm-transaction-payment', default-features = false } pallet-evm-coder-substrate = { default-features = false, path = "../../pallets/evm-coder-substrate" } -pallet-evm = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } -pallet-ethereum = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } -pallet-base-fee = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } -fp-rpc = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } -fp-self-contained = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } +pallet-evm = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } +pallet-ethereum = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } +pallet-base-fee = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } +fp-rpc = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } +fp-self-contained = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } +evm-coder = { default-features = false, path = '../../crates/evm-coder' } +up-sponsorship = { default-features = false, git = "https://github.com/uniquenetwork/pallet-sponsoring", branch = 'polkadot-v0.9.30' } +pallet-foreign-assets = { default-features = false, path = "../../pallets/foreign-assets" } pallet-maintenance = { default-features = false, path = "../../pallets/maintenance" } +################################################################################ +# Other Dependencies + +impl-trait-for-tuples = "0.2.2" + +################################################################################ +# Dev Dependencies + +[dev-dependencies.logtest] +version = "2.0.0" + ################################################################################ # Build Dependencies [build-dependencies.substrate-wasm-builder] git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" diff --git a/runtime/opal/src/lib.rs b/runtime/opal/src/lib.rs index 09335581be..65edd5b179 100644 --- a/runtime/opal/src/lib.rs +++ b/runtime/opal/src/lib.rs @@ -25,1477 +25,47 @@ #[cfg(feature = "std")] include!(concat!(env!("OUT_DIR"), "/wasm_binary.rs")); -use sp_api::impl_runtime_apis; -use sp_core::{crypto::KeyTypeId, OpaqueMetadata, 
H256, U256, H160}; -use sp_runtime::DispatchError; -use fp_self_contained::*; -// #[cfg(any(feature = "std", test))] -// pub use sp_runtime::BuildStorage; +use frame_support::parameter_types; -use scale_info::TypeInfo; -use sp_runtime::{ - Permill, Perbill, Percent, create_runtime_str, generic, impl_opaque_keys, - traits::{ - AccountIdLookup, BlakeTwo256, Block as BlockT, AccountIdConversion, Zero, Member, - SignedExtension, - }, - transaction_validity::{ - TransactionSource, TransactionValidity, ValidTransaction, InvalidTransaction, - }, - ApplyExtrinsicResult, RuntimeAppPublic, -}; - -use sp_std::prelude::*; - -#[cfg(feature = "std")] -use sp_version::NativeVersion; use sp_version::RuntimeVersion; -pub use pallet_transaction_payment::{ - Multiplier, TargetedFeeAdjustment, FeeDetails, RuntimeDispatchInfo, -}; -// A few exports that help ease life for downstream crates. -pub use pallet_balances::Call as BalancesCall; -pub use pallet_evm::{ - EnsureAddressTruncated, HashedAddressMapping, Runner, account::CrossAccountId as _, - OnMethodCall, Account as EVMAccount, FeeCalculator, GasWeightMapping, -}; -pub use frame_support::{ - construct_runtime, match_types, - dispatch::DispatchResult, - PalletId, parameter_types, StorageValue, ConsensusEngineId, - traits::{ - tokens::currency::Currency as CurrencyT, OnUnbalanced as OnUnbalancedT, Everything, - Currency, ExistenceRequirement, Get, IsInVec, KeyOwnerProofSystem, LockIdentifier, - OnUnbalanced, Randomness, FindAuthor, ConstU32, Imbalance, PrivilegeCmp, - }, - weights::{ - constants::{BlockExecutionWeight, ExtrinsicBaseWeight, RocksDbWeight, WEIGHT_PER_SECOND}, - DispatchClass, DispatchInfo, GetDispatchInfo, IdentityFee, Pays, PostDispatchInfo, Weight, - WeightToFeePolynomial, WeightToFeeCoefficient, WeightToFeeCoefficients, ConstantMultiplier, - WeightToFee, - }, -}; -use pallet_unique_scheduler::DispatchCall; -use up_data_structs::{ - CollectionId, TokenId, TokenData, Property, PropertyKeyPermission, CollectionLimits, - CollectionStats, RpcCollection, - mapping::{EvmTokenAddressMapping, CrossTokenAddressMapping}, - TokenChild, RmrkCollectionInfo, RmrkInstanceInfo, RmrkResourceInfo, RmrkPropertyInfo, - RmrkBaseInfo, RmrkPartType, RmrkTheme, RmrkThemeName, RmrkCollectionId, RmrkNftId, - RmrkNftChild, RmrkPropertyKey, RmrkResourceId, RmrkBaseId, -}; +use sp_runtime::create_runtime_str; -// use pallet_contracts::weights::WeightInfo; -// #[cfg(any(feature = "std", test))] -use frame_system::{ - self as frame_system, EnsureRoot, EnsureSigned, - limits::{BlockWeights, BlockLength}, -}; -use sp_arithmetic::{ - traits::{BaseArithmetic, Unsigned}, -}; -use smallvec::smallvec; -use codec::{Encode, Decode}; -use fp_rpc::TransactionStatus; -use sp_runtime::{ - traits::{ - Applyable, BlockNumberProvider, Dispatchable, PostDispatchInfoOf, DispatchInfoOf, - Saturating, CheckedConversion, - }, - generic::Era, - transaction_validity::TransactionValidityError, - DispatchErrorWithPostInfo, SaturatedConversion, -}; +use up_common::types::*; -// pub use pallet_timestamp::Call as TimestampCall; -pub use sp_consensus_aura::sr25519::AuthorityId as AuraId; +#[path = "../../common/mod.rs"] +mod runtime_common; -// Polkadot imports -use pallet_xcm::XcmPassthrough; -use polkadot_parachain::primitives::Sibling; -use xcm::v1::{BodyId, Junction::*, MultiLocation, NetworkId, Junctions::*}; -use xcm_builder::{ - AccountId32Aliases, AllowTopLevelPaidExecutionFrom, AllowUnpaidExecutionFrom, CurrencyAdapter, - EnsureXcmOrigin, FixedWeightBounds, LocationInverter, NativeAsset, 
ParentAsSuperuser, - RelayChainAsNative, SiblingParachainAsNative, SiblingParachainConvertsVia, - SignedAccountId32AsNative, SignedToAccountId32, SovereignSignedViaLocation, TakeWeightCredit, - ParentIsPreset, -}; -use xcm_executor::{Config, XcmExecutor, Assets}; -use sp_std::{cmp::Ordering, marker::PhantomData}; +pub mod xcm_barrier; -use xcm::latest::{ - // Xcm, - AssetId::{Concrete}, - Fungibility::Fungible as XcmFungible, - MultiAsset, - Error as XcmError, -}; -use xcm_executor::traits::{MatchesFungible, WeightTrader}; -//use xcm_executor::traits::MatchesFungible; +#[cfg(test)] +mod tests; -use unique_runtime_common::{ - impl_common_runtime_apis, - types::*, - constants::*, - dispatch::{CollectionDispatchT, CollectionDispatch}, - sponsoring::UniqueSponsorshipHandler, - eth_sponsoring::UniqueEthSponsorshipHandler, - weights::CommonWeights, -}; +pub use runtime_common::*; pub const RUNTIME_NAME: &str = "opal"; pub const TOKEN_SYMBOL: &str = "OPL"; -type CrossAccountId = pallet_evm::account::BasicCrossAccountId; - -impl RuntimeInstance for Runtime { - type CrossAccountId = self::CrossAccountId; - type TransactionConverter = self::TransactionConverter; - - fn get_transaction_converter() -> TransactionConverter { - TransactionConverter - } -} - -/// The type for looking up accounts. We don't expect more than 4 billion of them, but you -/// never know... -pub type AccountIndex = u32; - -/// Balance of an account. -pub type Balance = u128; - -/// Index of a transaction in the chain. -pub type Index = u32; - -/// A hash of some data used by the chain. -pub type Hash = sp_core::H256; - -/// Digest item type. -pub type DigestItem = generic::DigestItem; - -/// Opaque types. These are used by the CLI to instantiate machinery that don't need to know -/// the specifics of the runtime. They can then be made to be agnostic over specific formats -/// of data like extrinsics, allowing for them to continue syncing the network through upgrades -/// to even the core data structures. -pub mod opaque { - use sp_std::prelude::*; - use sp_runtime::impl_opaque_keys; - use super::Aura; - - pub use unique_runtime_common::types::*; - - impl_opaque_keys! { - pub struct SessionKeys { - pub aura: Aura, - } - } -} - /// This runtime version. pub const VERSION: RuntimeVersion = RuntimeVersion { spec_name: create_runtime_str!(RUNTIME_NAME), impl_name: create_runtime_str!(RUNTIME_NAME), authoring_version: 1, - spec_version: 924013, + spec_version: 930032, impl_version: 0, apis: RUNTIME_API_VERSIONS, - transaction_version: 1, + transaction_version: 2, state_version: 0, }; -#[derive(codec::Encode, codec::Decode)] -pub enum XCMPMessage { - /// Transfer tokens to the given account from the Parachain account. - TransferToken(XAccountId, XBalance), -} - -/// The version information used to identify this runtime when compiled natively. -#[cfg(feature = "std")] -pub fn native_version() -> NativeVersion { - NativeVersion { - runtime_version: VERSION, - can_author_with: Default::default(), - } -} - -type NegativeImbalance = >::NegativeImbalance; - -pub struct DealWithFees; -impl OnUnbalanced for DealWithFees { - fn on_unbalanceds(mut fees_then_tips: impl Iterator) { - if let Some(fees) = fees_then_tips.next() { - // for fees, 100% to treasury - let mut split = fees.ration(100, 0); - if let Some(tips) = fees_then_tips.next() { - // for tips, if any, 100% to treasury - tips.ration_merge_into(100, 0, &mut split); - } - Treasury::on_unbalanced(split.0); - // Author::on_unbalanced(split.1); - } - } -} - parameter_types! 
{ - pub const BlockHashCount: BlockNumber = 2400; - pub RuntimeBlockLength: BlockLength = - BlockLength::max_with_normal_ratio(5 * 1024 * 1024, NORMAL_DISPATCH_RATIO); - pub const AvailableBlockRatio: Perbill = Perbill::from_percent(75); - pub const MaximumBlockLength: u32 = 5 * 1024 * 1024; - pub RuntimeBlockWeights: BlockWeights = BlockWeights::builder() - .base_block(BlockExecutionWeight::get()) - .for_class(DispatchClass::all(), |weights| { - weights.base_extrinsic = ExtrinsicBaseWeight::get(); - }) - .for_class(DispatchClass::Normal, |weights| { - weights.max_total = Some(NORMAL_DISPATCH_RATIO * MAXIMUM_BLOCK_WEIGHT); - }) - .for_class(DispatchClass::Operational, |weights| { - weights.max_total = Some(MAXIMUM_BLOCK_WEIGHT); - // Operational transactions have some extra reserved space, so that they - // are included even if block reached `MAXIMUM_BLOCK_WEIGHT`. - weights.reserved = Some( - MAXIMUM_BLOCK_WEIGHT - NORMAL_DISPATCH_RATIO * MAXIMUM_BLOCK_WEIGHT - ); - }) - .avg_block_initialization(AVERAGE_ON_INITIALIZE_RATIO) - .build_or_panic(); pub const Version: RuntimeVersion = VERSION; pub const SS58Prefix: u8 = 42; -} - -parameter_types! { pub const ChainId: u64 = 8882; } -pub struct FixedFee; -impl FeeCalculator for FixedFee { - fn min_gas_price() -> (U256, u64) { - (MIN_GAS_PRICE.into(), 0) - } -} - -// Assuming slowest ethereum opcode is SSTORE, with gas price of 20000 as our worst case -// (contract, which only writes a lot of data), -// approximating on top of our real store write weight -parameter_types! { - pub const WritesPerSecond: u64 = WEIGHT_PER_SECOND / ::DbWeight::get().write; - pub const GasPerSecond: u64 = WritesPerSecond::get() * 20000; - pub const WeightPerGas: u64 = WEIGHT_PER_SECOND / GasPerSecond::get(); -} - -/// Limiting EVM execution to 50% of block for substrate users and management tasks -/// EVM transaction consumes more weight than substrate's, so we can't rely on them being -/// scheduled fairly -const EVM_DISPATCH_RATIO: Perbill = Perbill::from_percent(50); -parameter_types! 
{ - pub BlockGasLimit: U256 = U256::from(NORMAL_DISPATCH_RATIO * EVM_DISPATCH_RATIO * MAXIMUM_BLOCK_WEIGHT / WeightPerGas::get()); -} - -pub enum FixedGasWeightMapping {} -impl GasWeightMapping for FixedGasWeightMapping { - fn gas_to_weight(gas: u64) -> Weight { - gas.saturating_mul(WeightPerGas::get()) - } - fn weight_to_gas(weight: Weight) -> u64 { - weight / WeightPerGas::get() - } -} - -impl pallet_evm::account::Config for Runtime { - type CrossAccountId = pallet_evm::account::BasicCrossAccountId; - type EvmAddressMapping = pallet_evm::HashedAddressMapping; - type EvmBackwardsAddressMapping = fp_evm_mapping::MapBackwardsAddressTruncated; -} - -impl pallet_evm::Config for Runtime { - type BlockGasLimit = BlockGasLimit; - type FeeCalculator = FixedFee; - type GasWeightMapping = FixedGasWeightMapping; - type BlockHashMapping = pallet_ethereum::EthereumBlockHashMapping; - type CallOrigin = EnsureAddressTruncated; - type WithdrawOrigin = EnsureAddressTruncated; - type AddressMapping = HashedAddressMapping; - type PrecompilesType = (); - type PrecompilesValue = (); - type Currency = Balances; - type Event = Event; - type OnMethodCall = ( - pallet_evm_migration::OnMethodCall, - pallet_evm_contract_helpers::HelpersOnMethodCall, - CollectionDispatchT, - pallet_unique::eth::CollectionHelpersOnMethodCall, - ); - type OnCreate = pallet_evm_contract_helpers::HelpersOnCreate; - type ChainId = ChainId; - type Runner = pallet_evm::runner::stack::Runner; - type OnChargeTransaction = pallet_evm::EVMCurrencyAdapter; - type TransactionValidityHack = pallet_evm_transaction_payment::TransactionValidityHack; - type FindAuthor = EthereumFindAuthor; -} - -impl pallet_evm_migration::Config for Runtime { - type WeightInfo = pallet_evm_migration::weights::SubstrateWeight; -} - -pub struct EthereumFindAuthor(core::marker::PhantomData); -impl> FindAuthor for EthereumFindAuthor { - fn find_author<'a, I>(digests: I) -> Option - where - I: 'a + IntoIterator, - { - if let Some(author_index) = F::find_author(digests) { - let authority_id = Aura::authorities()[author_index as usize].clone(); - return Some(H160::from_slice(&authority_id.to_raw_vec()[4..24])); - } - None - } -} - -impl pallet_ethereum::Config for Runtime { - type Event = Event; - type StateRoot = pallet_ethereum::IntermediateStateRoot; -} +construct_runtime!(opal); -impl pallet_randomness_collective_flip::Config for Runtime {} - -impl frame_system::Config for Runtime { - /// The data to be stored in an account. - type AccountData = pallet_balances::AccountData; - /// The identifier used to distinguish between accounts. - type AccountId = AccountId; - /// The basic call filter to use in dispatchable. - type BaseCallFilter = Everything; - /// Maximum number of block number to block hash mappings to keep (oldest pruned first). - type BlockHashCount = BlockHashCount; - /// The maximum length of a block (in bytes). - type BlockLength = RuntimeBlockLength; - /// The index type for blocks. - type BlockNumber = BlockNumber; - /// The weight of the overhead invoked on the block import process, independent of the extrinsics included in that block. - type BlockWeights = RuntimeBlockWeights; - /// The aggregated dispatch type that is available for extrinsics. - type Call = Call; - /// The weight of database operations that the runtime can invoke. - type DbWeight = RocksDbWeight; - /// The ubiquitous event type. - type Event = Event; - /// The type for hashing blocks and tries. - type Hash = Hash; - /// The hashing algorithm used. 
- type Hashing = BlakeTwo256; - /// The header type. - type Header = generic::Header; - /// The index type for storing how many extrinsics an account has signed. - type Index = Index; - /// The lookup mechanism to get account ID from whatever is passed in dispatchers. - type Lookup = AccountIdLookup; - /// What to do if an account is fully reaped from the system. - type OnKilledAccount = (); - /// What to do if a new account is created. - type OnNewAccount = (); - type OnSetCode = cumulus_pallet_parachain_system::ParachainSetCode; - /// The ubiquitous origin type. - type Origin = Origin; - /// This type is being generated by `construct_runtime!`. - type PalletInfo = PalletInfo; - /// This is used as an identifier of the chain. 42 is the generic substrate prefix. - type SS58Prefix = SS58Prefix; - /// Weight information for the extrinsics of this pallet. - type SystemWeightInfo = frame_system::weights::SubstrateWeight; - /// Version of the runtime. - type Version = Version; - type MaxConsumers = ConstU32<16>; -} - -parameter_types! { - pub const MinimumPeriod: u64 = SLOT_DURATION / 2; -} - -impl pallet_timestamp::Config for Runtime { - /// A timestamp: milliseconds since the unix epoch. - type Moment = u64; - type OnTimestampSet = (); - type MinimumPeriod = MinimumPeriod; - type WeightInfo = (); -} - -parameter_types! { - // pub const ExistentialDeposit: u128 = 500; - pub const ExistentialDeposit: u128 = 0; - pub const MaxLocks: u32 = 50; - pub const MaxReserves: u32 = 50; -} - -impl pallet_balances::Config for Runtime { - type MaxLocks = MaxLocks; - type MaxReserves = MaxReserves; - type ReserveIdentifier = [u8; 16]; - /// The type for recording an account's balance. - type Balance = Balance; - /// The ubiquitous event type. - type Event = Event; - type DustRemoval = Treasury; - type ExistentialDeposit = ExistentialDeposit; - type AccountStore = System; - type WeightInfo = pallet_balances::weights::SubstrateWeight; -} - -pub const fn deposit(items: u32, bytes: u32) -> Balance { - items as Balance * 15 * CENTIUNIQUE + (bytes as Balance) * 6 * CENTIUNIQUE -} - -/* -parameter_types! { - pub TombstoneDeposit: Balance = deposit( - 1, - sp_std::mem::size_of::> as u32, - ); - pub DepositPerContract: Balance = TombstoneDeposit::get(); - pub const DepositPerStorageByte: Balance = deposit(0, 1); - pub const DepositPerStorageItem: Balance = deposit(1, 0); - pub RentFraction: Perbill = Perbill::from_rational(1u32, 30 * DAYS); - pub const SurchargeReward: Balance = 150 * MILLIUNIQUE; - pub const SignedClaimHandicap: u32 = 2; - pub const MaxDepth: u32 = 32; - pub const MaxValueSize: u32 = 16 * 1024; - pub const MaxCodeSize: u32 = 1024 * 1024 * 25; // 25 Mb - // The lazy deletion runs inside on_initialize. - pub DeletionWeightLimit: Weight = AVERAGE_ON_INITIALIZE_RATIO * - RuntimeBlockWeights::get().max_block; - // The weight needed for decoding the queue should be less or equal than a fifth - // of the overall weight dedicated to the lazy deletion. 
- pub DeletionQueueDepth: u32 = ((DeletionWeightLimit::get() / ( - ::WeightInfo::on_initialize_per_queue_item(1) - - ::WeightInfo::on_initialize_per_queue_item(0) - )) / 5) as u32; - pub Schedule: pallet_contracts::Schedule = Default::default(); -} - -impl pallet_contracts::Config for Runtime { - type Time = Timestamp; - type Randomness = RandomnessCollectiveFlip; - type Currency = Balances; - type Event = Event; - type RentPayment = (); - type SignedClaimHandicap = SignedClaimHandicap; - type TombstoneDeposit = TombstoneDeposit; - type DepositPerContract = DepositPerContract; - type DepositPerStorageByte = DepositPerStorageByte; - type DepositPerStorageItem = DepositPerStorageItem; - type RentFraction = RentFraction; - type SurchargeReward = SurchargeReward; - type WeightPrice = pallet_transaction_payment::Pallet; - type WeightInfo = pallet_contracts::weights::SubstrateWeight; - type ChainExtension = NFTExtension; - type DeletionQueueDepth = DeletionQueueDepth; - type DeletionWeightLimit = DeletionWeightLimit; - type Schedule = Schedule; - type CallStack = [pallet_contracts::Frame; 31]; -} -*/ - -parameter_types! { - /// This value increases the priority of `Operational` transactions by adding - /// a "virtual tip" that's equal to the `OperationalFeeMultiplier * final_fee`. - pub const OperationalFeeMultiplier: u8 = 5; -} - -/// Linear implementor of `WeightToFeePolynomial` -pub struct LinearFee(sp_std::marker::PhantomData); - -impl WeightToFeePolynomial for LinearFee -where - T: BaseArithmetic + From + Copy + Unsigned, -{ - type Balance = T; - - fn polynomial() -> WeightToFeeCoefficients { - smallvec!(WeightToFeeCoefficient { - coeff_integer: WEIGHT_TO_FEE_COEFF.into(), - coeff_frac: Perbill::zero(), - negative: false, - degree: 1, - }) - } -} - -impl pallet_transaction_payment::Config for Runtime { - type OnChargeTransaction = pallet_transaction_payment::CurrencyAdapter; - type LengthToFee = ConstantMultiplier; - type OperationalFeeMultiplier = OperationalFeeMultiplier; - type WeightToFee = LinearFee; - type FeeMultiplierUpdate = (); -} - -parameter_types! 
{ - pub const ProposalBond: Permill = Permill::from_percent(5); - pub const ProposalBondMinimum: Balance = 1 * UNIQUE; - pub const ProposalBondMaximum: Balance = 1000 * UNIQUE; - pub const SpendPeriod: BlockNumber = 5 * MINUTES; - pub const Burn: Permill = Permill::from_percent(0); - pub const TipCountdown: BlockNumber = 1 * DAYS; - pub const TipFindersFee: Percent = Percent::from_percent(20); - pub const TipReportDepositBase: Balance = 1 * UNIQUE; - pub const DataDepositPerByte: Balance = 1 * CENTIUNIQUE; - pub const BountyDepositBase: Balance = 1 * UNIQUE; - pub const BountyDepositPayoutDelay: BlockNumber = 1 * DAYS; - pub const TreasuryModuleId: PalletId = PalletId(*b"py/trsry"); - pub const BountyUpdatePeriod: BlockNumber = 14 * DAYS; - pub const MaximumReasonLength: u32 = 16384; - pub const BountyCuratorDeposit: Permill = Permill::from_percent(50); - pub const BountyValueMinimum: Balance = 5 * UNIQUE; - pub const MaxApprovals: u32 = 100; -} - -impl pallet_treasury::Config for Runtime { - type PalletId = TreasuryModuleId; - type Currency = Balances; - type ApproveOrigin = EnsureRoot; - type RejectOrigin = EnsureRoot; - type Event = Event; - type OnSlash = (); - type ProposalBond = ProposalBond; - type ProposalBondMinimum = ProposalBondMinimum; - type ProposalBondMaximum = ProposalBondMaximum; - type SpendPeriod = SpendPeriod; - type Burn = Burn; - type BurnDestination = (); - type SpendFunds = (); - type WeightInfo = pallet_treasury::weights::SubstrateWeight; - type MaxApprovals = MaxApprovals; -} - -impl pallet_sudo::Config for Runtime { - type Event = Event; - type Call = Call; -} - -pub struct RelayChainBlockNumberProvider(sp_std::marker::PhantomData); - -impl BlockNumberProvider - for RelayChainBlockNumberProvider -{ - type BlockNumber = BlockNumber; - - fn current_block_number() -> Self::BlockNumber { - cumulus_pallet_parachain_system::Pallet::::validation_data() - .map(|d| d.relay_parent_number) - .unwrap_or_default() - } -} - -parameter_types! { - pub const MinVestedTransfer: Balance = 10 * UNIQUE; - pub const MaxVestingSchedules: u32 = 28; -} - -impl orml_vesting::Config for Runtime { - type Event = Event; - type Currency = pallet_balances::Pallet; - type MinVestedTransfer = MinVestedTransfer; - type VestedTransferOrigin = EnsureSigned; - type WeightInfo = (); - type MaxVestingSchedules = MaxVestingSchedules; - type BlockNumberProvider = RelayChainBlockNumberProvider; -} - -parameter_types! { - pub const ReservedDmpWeight: Weight = MAXIMUM_BLOCK_WEIGHT / 4; - pub const ReservedXcmpWeight: Weight = MAXIMUM_BLOCK_WEIGHT / 4; -} - -impl cumulus_pallet_parachain_system::Config for Runtime { - type Event = Event; - type SelfParaId = parachain_info::Pallet; - type OnSystemEvent = (); - // type DownwardMessageHandlers = cumulus_primitives_utility::UnqueuedDmpAsParent< - // MaxDownwardMessageWeight, - // XcmExecutor, - // Call, - // >; - type OutboundXcmpMessageSource = XcmpQueue; - type DmpMessageHandler = DmpQueue; - type ReservedDmpWeight = ReservedDmpWeight; - type ReservedXcmpWeight = ReservedXcmpWeight; - type XcmpMessageHandler = XcmpQueue; -} - -impl parachain_info::Config for Runtime {} - -impl cumulus_pallet_aura_ext::Config for Runtime {} - -parameter_types! 
{ - pub const RelayLocation: MultiLocation = MultiLocation::parent(); - pub const RelayNetwork: NetworkId = NetworkId::Polkadot; - pub RelayOrigin: Origin = cumulus_pallet_xcm::Origin::Relay.into(); - pub Ancestry: MultiLocation = Parachain(ParachainInfo::parachain_id().into()).into(); -} - -/// Type for specifying how a `MultiLocation` can be converted into an `AccountId`. This is used -/// when determining ownership of accounts for asset transacting and when attempting to use XCM -/// `Transact` in order to determine the dispatch Origin. -pub type LocationToAccountId = ( - // The parent (Relay-chain) origin converts to the default `AccountId`. - ParentIsPreset, - // Sibling parachain origins convert to AccountId via the `ParaId::into`. - SiblingParachainConvertsVia, - // Straight up local `AccountId32` origins just alias directly to `AccountId`. - AccountId32Aliases, -); - -pub struct OnlySelfCurrency; -impl> MatchesFungible for OnlySelfCurrency { - fn matches_fungible(a: &MultiAsset) -> Option { - match (&a.id, &a.fun) { - (Concrete(_), XcmFungible(ref amount)) => CheckedConversion::checked_from(*amount), - _ => None, - } - } -} - -/// Means for transacting assets on this chain. -pub type LocalAssetTransactor = CurrencyAdapter< - // Use this currency: - Balances, - // Use this currency when it is a fungible asset matching the given location or name: - OnlySelfCurrency, - // Do a simple punn to convert an AccountId32 MultiLocation into a native chain account ID: - LocationToAccountId, - // Our chain's account ID type (we can't get away without mentioning it explicitly): - AccountId, - // We don't track any teleports. - (), ->; - -/// This is the type we use to convert an (incoming) XCM origin into a local `Origin` instance, -/// ready for dispatching a transaction with Xcm's `Transact`. There is an `OriginKind` which can -/// biases the kind of local `Origin` it will become. -pub type XcmOriginToTransactDispatchOrigin = ( - // Sovereign account converter; this attempts to derive an `AccountId` from the origin location - // using `LocationToAccountId` and then turn that into the usual `Signed` origin. Useful for - // foreign chains who want to have a local sovereign account on this chain which they control. - SovereignSignedViaLocation, - // Native converter for Relay-chain (Parent) location; will converts to a `Relay` origin when - // recognised. - RelayChainAsNative, - // Native converter for sibling Parachains; will convert to a `SiblingPara` origin when - // recognised. - SiblingParachainAsNative, - // Superuser converter for the Relay-chain (Parent) location. This will allow it to issue a - // transaction from the Root origin. - ParentAsSuperuser, - // Native signed account converter; this just converts an `AccountId32` origin into a normal - // `Origin::Signed` origin of the same 32-byte value. - SignedAccountId32AsNative, - // Xcm origins can be represented natively under the Xcm pallet's Xcm origin. - XcmPassthrough, -); - -parameter_types! { - // One XCM operation is 1_000_000 weight - almost certainly a conservative estimate. - pub UnitWeightCost: Weight = 1_000_000; - // 1200 UNIQUEs buy 1 second of weight. - pub const WeightPrice: (MultiLocation, u128) = (MultiLocation::parent(), 1_200 * UNIQUE); - pub const MaxInstructions: u32 = 100; - pub const MaxAuthorities: u32 = 100_000; -} - -match_types! 
{ - pub type ParentOrParentsUnitPlurality: impl Contains = { - MultiLocation { parents: 1, interior: Here } | - MultiLocation { parents: 1, interior: X1(Plurality { id: BodyId::Unit, .. }) } - }; -} - -pub type Barrier = ( - TakeWeightCredit, - AllowTopLevelPaidExecutionFrom, - AllowUnpaidExecutionFrom, - // ^^^ Parent & its unit plurality gets free execution -); - -pub struct UsingOnlySelfCurrencyComponents< - WeightToFee: WeightToFeePolynomial, - AssetId: Get, - AccountId, - Currency: CurrencyT, - OnUnbalanced: OnUnbalancedT, ->( - Weight, - Currency::Balance, - PhantomData<(WeightToFee, AssetId, AccountId, Currency, OnUnbalanced)>, -); -impl< - WeightToFee: WeightToFeePolynomial, - AssetId: Get, - AccountId, - Currency: CurrencyT, - OnUnbalanced: OnUnbalancedT, - > WeightTrader - for UsingOnlySelfCurrencyComponents -{ - fn new() -> Self { - Self(0, Zero::zero(), PhantomData) - } - - fn buy_weight(&mut self, weight: Weight, payment: Assets) -> Result { - let amount = WeightToFee::weight_to_fee(&weight); - let u128_amount: u128 = amount.try_into().map_err(|_| XcmError::Overflow)?; - - // location to this parachain through relay chain - let option1: xcm::v1::AssetId = Concrete(MultiLocation { - parents: 1, - interior: X1(Parachain(ParachainInfo::parachain_id().into())), - }); - // direct location - let option2: xcm::v1::AssetId = Concrete(MultiLocation { - parents: 0, - interior: Here, - }); - - let required = if payment.fungible.contains_key(&option1) { - (option1, u128_amount).into() - } else if payment.fungible.contains_key(&option2) { - (option2, u128_amount).into() - } else { - (Concrete(MultiLocation::default()), u128_amount).into() - }; - - let unused = payment - .checked_sub(required) - .map_err(|_| XcmError::TooExpensive)?; - self.0 = self.0.saturating_add(weight); - self.1 = self.1.saturating_add(amount); - Ok(unused) - } - - fn refund_weight(&mut self, weight: Weight) -> Option { - let weight = weight.min(self.0); - let amount = WeightToFee::weight_to_fee(&weight); - self.0 -= weight; - self.1 = self.1.saturating_sub(amount); - let amount: u128 = amount.saturated_into(); - if amount > 0 { - Some((AssetId::get(), amount).into()) - } else { - None - } - } -} -impl< - WeightToFee: WeightToFeePolynomial, - AssetId: Get, - AccountId, - Currency: CurrencyT, - OnUnbalanced: OnUnbalancedT, - > Drop - for UsingOnlySelfCurrencyComponents -{ - fn drop(&mut self) { - OnUnbalanced::on_unbalanced(Currency::issue(self.1)); - } -} - -pub struct XcmConfig; -impl Config for XcmConfig { - type Call = Call; - type XcmSender = XcmRouter; - // How to withdraw and deposit an asset. - type AssetTransactor = LocalAssetTransactor; - type OriginConverter = XcmOriginToTransactDispatchOrigin; - type IsReserve = NativeAsset; - type IsTeleporter = (); // Teleportation is disabled - type LocationInverter = LocationInverter; - type Barrier = Barrier; - type Weigher = FixedWeightBounds; - type Trader = - UsingOnlySelfCurrencyComponents, RelayLocation, AccountId, Balances, ()>; - type ResponseHandler = (); // Don't handle responses for now. - type SubscriptionService = PolkadotXcm; - - type AssetTrap = PolkadotXcm; - type AssetClaims = PolkadotXcm; -} - -// parameter_types! { -// pub const MaxDownwardMessageWeight: Weight = MAXIMUM_BLOCK_WEIGHT / 10; -// } - -/// No local origins on this chain are allowed to dispatch XCM sends/executions. -pub type LocalOriginToLocation = (SignedToAccountId32,); - -/// The means for routing XCM messages which are not for local execution into the right message -/// queues. 
-pub type XcmRouter = ( - // Two routers - use UMP to communicate with the relay chain: - cumulus_primitives_utility::ParentAsUmp, - // ..and XCMP to communicate with the sibling chains. - XcmpQueue, -); - -impl pallet_evm_coder_substrate::Config for Runtime {} - -impl pallet_xcm::Config for Runtime { - type Event = Event; - type SendXcmOrigin = EnsureXcmOrigin; - type XcmRouter = XcmRouter; - type ExecuteXcmOrigin = EnsureXcmOrigin; - type XcmExecuteFilter = Everything; - type XcmExecutor = XcmExecutor; - type XcmTeleportFilter = Everything; - type XcmReserveTransferFilter = Everything; - type Weigher = FixedWeightBounds; - type LocationInverter = LocationInverter; - type Origin = Origin; - type Call = Call; - const VERSION_DISCOVERY_QUEUE_SIZE: u32 = 100; - type AdvertisedXcmVersion = pallet_xcm::CurrentXcmVersion; -} - -impl cumulus_pallet_xcm::Config for Runtime { - type Event = Event; - type XcmExecutor = XcmExecutor; -} - -impl cumulus_pallet_xcmp_queue::Config for Runtime { - type WeightInfo = (); - type Event = Event; - type XcmExecutor = XcmExecutor; - type ChannelInfo = ParachainSystem; - type VersionWrapper = (); - type ExecuteOverweightOrigin = frame_system::EnsureRoot; - type ControllerOrigin = EnsureRoot; - type ControllerOriginConverter = XcmOriginToTransactDispatchOrigin; -} - -impl cumulus_pallet_dmp_queue::Config for Runtime { - type Event = Event; - type XcmExecutor = XcmExecutor; - type ExecuteOverweightOrigin = frame_system::EnsureRoot; -} - -impl pallet_aura::Config for Runtime { - type AuthorityId = AuraId; - type DisabledValidators = (); - type MaxAuthorities = MaxAuthorities; -} - -parameter_types! { - pub TreasuryAccountId: AccountId = TreasuryModuleId::get().into_account_truncating(); - pub const CollectionCreationPrice: Balance = 2 * UNIQUE; -} - -impl pallet_common::Config for Runtime { - type WeightInfo = pallet_common::weights::SubstrateWeight; - type Event = Event; - type Currency = Balances; - type CollectionCreationPrice = CollectionCreationPrice; - type TreasuryAccountId = TreasuryAccountId; - type CollectionDispatch = CollectionDispatchT; - - type EvmTokenAddressMapping = EvmTokenAddressMapping; - type CrossTokenAddressMapping = CrossTokenAddressMapping; - type ContractAddress = EvmCollectionHelpersAddress; -} - -impl pallet_structure::Config for Runtime { - type Event = Event; - type Call = Call; - type WeightInfo = pallet_structure::weights::SubstrateWeight; -} - -impl pallet_fungible::Config for Runtime { - type WeightInfo = pallet_fungible::weights::SubstrateWeight; -} -impl pallet_refungible::Config for Runtime { - type WeightInfo = pallet_refungible::weights::SubstrateWeight; -} -impl pallet_nonfungible::Config for Runtime { - type WeightInfo = pallet_nonfungible::weights::SubstrateWeight; -} - -impl pallet_proxy_rmrk_core::Config for Runtime { - type WeightInfo = pallet_proxy_rmrk_core::weights::SubstrateWeight; - type Event = Event; -} - -impl pallet_proxy_rmrk_equip::Config for Runtime { - type WeightInfo = pallet_proxy_rmrk_equip::weights::SubstrateWeight; - type Event = Event; -} - -impl pallet_unique::Config for Runtime { - type Event = Event; - type WeightInfo = pallet_unique::weights::SubstrateWeight; - type CommonWeightInfo = CommonWeights; - type RefungibleExtensionsWeightInfo = CommonWeights; -} - -parameter_types! 
{ - pub const InflationBlockInterval: BlockNumber = 100; // every time per how many blocks inflation is applied -} - -/// Used for the pallet inflation -impl pallet_inflation::Config for Runtime { - type Currency = Balances; - type TreasuryAccountId = TreasuryAccountId; - type InflationBlockInterval = InflationBlockInterval; - type BlockNumberProvider = RelayChainBlockNumberProvider; -} - -parameter_types! { - pub MaximumSchedulerWeight: Weight = Perbill::from_percent(50) * - RuntimeBlockWeights::get().max_block; - pub const MaxScheduledPerBlock: u32 = 50; -} - -type ChargeTransactionPayment = pallet_charge_transaction::ChargeTransactionPayment; -use frame_support::traits::NamedReservableCurrency; - -fn get_signed_extras(from: ::AccountId) -> SignedExtraScheduler { - ( - frame_system::CheckSpecVersion::::new(), - frame_system::CheckGenesis::::new(), - frame_system::CheckEra::::from(Era::Immortal), - frame_system::CheckNonce::::from(frame_system::Pallet::::account_nonce( - from, - )), - frame_system::CheckWeight::::new(), - CheckMaintenance, - // sponsoring transaction logic - // pallet_charge_transaction::ChargeTransactionPayment::::new(0), - ) -} - -pub struct SchedulerPaymentExecutor; -impl - DispatchCall for SchedulerPaymentExecutor -where - ::Call: Member - + Dispatchable - + SelfContainedCall - + GetDispatchInfo - + From>, - SelfContainedSignedInfo: Send + Sync + 'static, - Call: From<::Call> - + From<::Call> - + SelfContainedCall, - sp_runtime::AccountId32: From<::AccountId>, -{ - fn dispatch_call( - signer: ::AccountId, - call: ::Call, - ) -> Result< - Result>, - TransactionValidityError, - > { - let dispatch_info = call.get_dispatch_info(); - let extrinsic = fp_self_contained::CheckedExtrinsic::< - AccountId, - Call, - SignedExtraScheduler, - SelfContainedSignedInfo, - > { - signed: - CheckedSignature::::Signed( - signer.clone().into(), - get_signed_extras(signer.into()), - ), - function: call.into(), - }; - - extrinsic.apply::(&dispatch_info, 0) - } - - fn reserve_balance( - id: [u8; 16], - sponsor: ::AccountId, - call: ::Call, - count: u32, - ) -> Result<(), DispatchError> { - let dispatch_info = call.get_dispatch_info(); - let weight: Balance = ChargeTransactionPayment::traditional_fee(0, &dispatch_info, 0) - .saturating_mul(count.into()); - - >::reserve_named( - &id, - &(sponsor.into()), - weight, - ) - } - - fn pay_for_call( - id: [u8; 16], - sponsor: ::AccountId, - call: ::Call, - ) -> Result { - let dispatch_info = call.get_dispatch_info(); - let weight: Balance = ChargeTransactionPayment::traditional_fee(0, &dispatch_info, 0); - Ok( - >::unreserve_named( - &id, - &(sponsor.into()), - weight, - ), - ) - } - - fn cancel_reserve( - id: [u8; 16], - sponsor: ::AccountId, - ) -> Result { - Ok( - >::unreserve_named( - &id, - &(sponsor.into()), - u128::MAX, - ), - ) - } -} - -parameter_types! { - pub const NoPreimagePostponement: Option = Some(10); - pub const Preimage: Option = Some(10); -} - -/// Used the compare the privilege of an origin inside the scheduler. 
-pub struct OriginPrivilegeCmp; - -impl PrivilegeCmp for OriginPrivilegeCmp { - fn cmp_privilege(_left: &OriginCaller, _right: &OriginCaller) -> Option { - Some(Ordering::Equal) - } -} - -impl pallet_unique_scheduler::Config for Runtime { - type Event = Event; - type Origin = Origin; - type Currency = Balances; - type PalletsOrigin = OriginCaller; - type Call = Call; - type MaximumWeight = MaximumSchedulerWeight; - type ScheduleOrigin = EnsureSigned; - type MaxScheduledPerBlock = MaxScheduledPerBlock; - type WeightInfo = (); - type CallExecutor = SchedulerPaymentExecutor; - type OriginPrivilegeCmp = OriginPrivilegeCmp; - type PreimageProvider = (); - type NoPreimagePostponement = NoPreimagePostponement; -} - -type EvmSponsorshipHandler = ( - UniqueEthSponsorshipHandler, - pallet_evm_contract_helpers::HelpersContractSponsoring, -); - -type SponsorshipHandler = ( - UniqueSponsorshipHandler, - //pallet_contract_helpers::ContractSponsorshipHandler, - pallet_evm_transaction_payment::BridgeSponsorshipHandler, -); - -impl pallet_evm_transaction_payment::Config for Runtime { - type EvmSponsorshipHandler = EvmSponsorshipHandler; - type Currency = Balances; -} - -impl pallet_charge_transaction::Config for Runtime { - type SponsorshipHandler = SponsorshipHandler; -} - -// impl pallet_contract_helpers::Config for Runtime { -// type DefaultSponsoringRateLimit = DefaultSponsoringRateLimit; -// } - -parameter_types! { - // 0x842899ECF380553E8a4de75bF534cdf6fBF64049 - pub const HelpersContractAddress: H160 = H160([ - 0x84, 0x28, 0x99, 0xec, 0xf3, 0x80, 0x55, 0x3e, 0x8a, 0x4d, 0xe7, 0x5b, 0xf5, 0x34, 0xcd, 0xf6, 0xfb, 0xf6, 0x40, 0x49, - ]); - - // 0x6c4e9fe1ae37a41e93cee429e8e1881abdcbb54f - pub const EvmCollectionHelpersAddress: H160 = H160([ - 0x6c, 0x4e, 0x9f, 0xe1, 0xae, 0x37, 0xa4, 0x1e, 0x93, 0xce, 0xe4, 0x29, 0xe8, 0xe1, 0x88, 0x1a, 0xbd, 0xcb, 0xb5, 0x4f, - ]); -} - -impl pallet_evm_contract_helpers::Config for Runtime { - type ContractAddress = HelpersContractAddress; - type DefaultSponsoringRateLimit = DefaultSponsoringRateLimit; -} - -impl pallet_maintenance::Config for Runtime { - type Event = Event; - type WeightInfo = pallet_maintenance::weights::SubstrateWeight; -} - -#[derive(Debug, Encode, Decode, PartialEq, Eq, Clone, TypeInfo)] -pub struct CheckMaintenance; - -impl SignedExtension for CheckMaintenance { - type AccountId = AccountId; - type Call = Call; - type AdditionalSigned = (); - type Pre = (); - - const IDENTIFIER: &'static str = "CheckMaintenance"; - - fn additional_signed(&self) -> Result { - Ok(()) - } - - fn pre_dispatch( - self, - who: &Self::AccountId, - call: &Self::Call, - info: &DispatchInfoOf, - len: usize, - ) -> Result { - self.validate(who, call, info, len).map(|_| ()) - } - - fn validate( - &self, - _who: &Self::AccountId, - call: &Self::Call, - _info: &DispatchInfoOf, - _len: usize, - ) -> TransactionValidity { - if Maintenance::is_enabled() { - match call { - Call::EvmMigration(_) - | Call::EVM(_) - | Call::Ethereum(_) - | Call::Inflation(_) - | Call::Maintenance(_) - | Call::Scheduler(_) - | Call::Structure(_) - | Call::Unique(_) => Err(TransactionValidityError::Invalid(InvalidTransaction::Call)), - - #[cfg(any(feature = "opal-runtime", feature = "quartz-runtime"))] - Call::RmrkCore(_) | Call::RmrkEquip(_) => { - Err(TransactionValidityError::Invalid(InvalidTransaction::Call)) - } - - _ => Ok(ValidTransaction::default()), - } - } else { - Ok(ValidTransaction::default()) - } - } - - fn pre_dispatch_unsigned( - call: &Self::Call, - info: &DispatchInfoOf, - len: 
usize, - ) -> Result<(), TransactionValidityError> { - Self::validate_unsigned(call, info, len).map(|_| ()) - } - - fn validate_unsigned( - call: &Self::Call, - _info: &DispatchInfoOf, - _len: usize, - ) -> TransactionValidity { - if Maintenance::is_enabled() { - match call { - Call::EVM(_) | Call::Ethereum(_) | Call::EvmMigration(_) => { - Err(TransactionValidityError::Invalid(InvalidTransaction::Call)) - } - _ => Ok(ValidTransaction::default()), - } - } else { - Ok(ValidTransaction::default()) - } - } -} - -construct_runtime!( - pub enum Runtime where - Block = Block, - NodeBlock = opaque::Block, - UncheckedExtrinsic = UncheckedExtrinsic - { - ParachainSystem: cumulus_pallet_parachain_system::{Pallet, Call, Config, Storage, Inherent, Event, ValidateUnsigned} = 20, - ParachainInfo: parachain_info::{Pallet, Storage, Config} = 21, - - Aura: pallet_aura::{Pallet, Config} = 22, - AuraExt: cumulus_pallet_aura_ext::{Pallet, Config} = 23, - - Balances: pallet_balances::{Pallet, Call, Storage, Config, Event} = 30, - RandomnessCollectiveFlip: pallet_randomness_collective_flip::{Pallet, Storage} = 31, - Timestamp: pallet_timestamp::{Pallet, Call, Storage, Inherent} = 32, - TransactionPayment: pallet_transaction_payment::{Pallet, Storage} = 33, - Treasury: pallet_treasury::{Pallet, Call, Storage, Config, Event} = 34, - Sudo: pallet_sudo::{Pallet, Call, Storage, Config, Event} = 35, - System: frame_system::{Pallet, Call, Storage, Config, Event} = 36, - Vesting: orml_vesting::{Pallet, Storage, Call, Event, Config} = 37, - // Vesting: pallet_vesting::{Pallet, Call, Config, Storage, Event} = 37, - // Contracts: pallet_contracts::{Pallet, Call, Storage, Event} = 38, - - // XCM helpers. - XcmpQueue: cumulus_pallet_xcmp_queue::{Pallet, Call, Storage, Event} = 50, - PolkadotXcm: pallet_xcm::{Pallet, Call, Event, Origin} = 51, - CumulusXcm: cumulus_pallet_xcm::{Pallet, Call, Event, Origin} = 52, - DmpQueue: cumulus_pallet_dmp_queue::{Pallet, Call, Storage, Event} = 53, - - // Unique Pallets - Inflation: pallet_inflation::{Pallet, Call, Storage} = 60, - Unique: pallet_unique::{Pallet, Call, Storage, Event} = 61, - Scheduler: pallet_unique_scheduler::{Pallet, Call, Storage, Event} = 62, - // free = 63 - Charging: pallet_charge_transaction::{Pallet, Call, Storage } = 64, - // ContractHelpers: pallet_contract_helpers::{Pallet, Call, Storage} = 65, - Common: pallet_common::{Pallet, Storage, Event} = 66, - Fungible: pallet_fungible::{Pallet, Storage} = 67, - Refungible: pallet_refungible::{Pallet, Storage} = 68, - Nonfungible: pallet_nonfungible::{Pallet, Storage} = 69, - Structure: pallet_structure::{Pallet, Call, Storage, Event} = 70, - RmrkCore: pallet_proxy_rmrk_core::{Pallet, Call, Storage, Event} = 71, - RmrkEquip: pallet_proxy_rmrk_equip::{Pallet, Call, Storage, Event} = 72, - - // Frontier - EVM: pallet_evm::{Pallet, Config, Call, Storage, Event} = 100, - Ethereum: pallet_ethereum::{Pallet, Config, Call, Storage, Event, Origin} = 101, - - EvmCoderSubstrate: pallet_evm_coder_substrate::{Pallet, Storage} = 150, - EvmContractHelpers: pallet_evm_contract_helpers::{Pallet, Storage} = 151, - EvmTransactionPayment: pallet_evm_transaction_payment::{Pallet} = 152, - EvmMigration: pallet_evm_migration::{Pallet, Call, Storage} = 153, - - Maintenance: pallet_maintenance::{Pallet, Call, Storage, Event} = 154, - } -); - -pub struct TransactionConverter; - -impl fp_rpc::ConvertTransaction for TransactionConverter { - fn convert_transaction(&self, transaction: pallet_ethereum::Transaction) -> UncheckedExtrinsic { - 
UncheckedExtrinsic::new_unsigned( - pallet_ethereum::Call::::transact { transaction }.into(), - ) - } -} - -impl fp_rpc::ConvertTransaction for TransactionConverter { - fn convert_transaction( - &self, - transaction: pallet_ethereum::Transaction, - ) -> opaque::UncheckedExtrinsic { - let extrinsic = UncheckedExtrinsic::new_unsigned( - pallet_ethereum::Call::::transact { transaction }.into(), - ); - let encoded = extrinsic.encode(); - opaque::UncheckedExtrinsic::decode(&mut &encoded[..]) - .expect("Encoded extrinsic is always valid") - } -} - -/// The address format for describing accounts. -pub type Address = sp_runtime::MultiAddress; -/// Block header type as expected by this runtime. -pub type Header = generic::Header; -/// Block type as expected by this runtime. -pub type Block = generic::Block; -/// A Block signed with a Justification -pub type SignedBlock = generic::SignedBlock; -/// BlockId type as expected by this runtime. -pub type BlockId = generic::BlockId; -/// The SignedExtension to the basic transaction logic. -pub type SignedExtra = ( - frame_system::CheckSpecVersion, - // system::CheckTxVersion, - frame_system::CheckGenesis, - frame_system::CheckEra, - frame_system::CheckNonce, - frame_system::CheckWeight, - CheckMaintenance, - ChargeTransactionPayment, - //pallet_contract_helpers::ContractHelpersExtension, - pallet_ethereum::FakeTransactionFinalizer, -); -pub type SignedExtraScheduler = ( - frame_system::CheckSpecVersion, - frame_system::CheckGenesis, - frame_system::CheckEra, - frame_system::CheckNonce, - frame_system::CheckWeight, - CheckMaintenance, -); -/// Unchecked extrinsic type as expected by this runtime. -pub type UncheckedExtrinsic = - fp_self_contained::UncheckedExtrinsic; -/// Extrinsic type that has already been checked. -pub type CheckedExtrinsic = fp_self_contained::CheckedExtrinsic; -/// Executive: handles dispatch to the various modules. -pub type Executive = frame_executive::Executive< - Runtime, - Block, - frame_system::ChainContext, - Runtime, - AllPalletsReversedWithSystemFirst, ->; - -impl_opaque_keys! { - pub struct SessionKeys { - pub aura: Aura, - } -} - -impl fp_self_contained::SelfContainedCall for Call { - type SignedInfo = H160; - - fn is_self_contained(&self) -> bool { - match self { - Call::Ethereum(call) => call.is_self_contained(), - _ => false, - } - } - - fn check_self_contained(&self) -> Option> { - match self { - Call::Ethereum(call) => call.check_self_contained(), - _ => None, - } - } - - fn validate_self_contained( - &self, - info: &Self::SignedInfo, - dispatch_info: &DispatchInfoOf, - len: usize, - ) -> Option { - match self { - Call::Ethereum(call) => call.validate_self_contained(info, dispatch_info, len), - _ => None, - } - } - - fn pre_dispatch_self_contained( - &self, - info: &Self::SignedInfo, - ) -> Option> { - match self { - Call::Ethereum(call) => call.pre_dispatch_self_contained(info), - _ => None, - } - } - - fn apply_self_contained( - self, - info: Self::SignedInfo, - ) -> Option>> { - match self { - call @ Call::Ethereum(pallet_ethereum::Call::transact { .. }) => Some(call.dispatch( - Origin::from(pallet_ethereum::RawOrigin::EthereumTransaction(info)), - )), - _ => None, - } - } -} - -macro_rules! dispatch_unique_runtime { - ($collection:ident.$method:ident($($name:ident),*)) => {{ - let collection = ::CollectionDispatch::dispatch(>::try_get($collection)?); - let dispatch = collection.as_dyn(); - - Ok::<_, DispatchError>(dispatch.$method($($name),*)) - }}; -} - -impl_common_runtime_apis! 
{ - #![custom_apis] - - impl rmrk_rpc::RmrkApi< - Block, - AccountId, - RmrkCollectionInfo, - RmrkInstanceInfo, - RmrkResourceInfo, - RmrkPropertyInfo, - RmrkBaseInfo, - RmrkPartType, - RmrkTheme - > for Runtime { - fn last_collection_idx() -> Result { - pallet_proxy_rmrk_core::rpc::last_collection_idx::() - } - - fn collection_by_id(collection_id: RmrkCollectionId) -> Result>, DispatchError> { - pallet_proxy_rmrk_core::rpc::collection_by_id::(collection_id) - } - - fn nft_by_id(collection_id: RmrkCollectionId, nft_by_id: RmrkNftId) -> Result>, DispatchError> { - pallet_proxy_rmrk_core::rpc::nft_by_id::(collection_id, nft_by_id) - } - - fn account_tokens(account_id: AccountId, collection_id: RmrkCollectionId) -> Result, DispatchError> { - pallet_proxy_rmrk_core::rpc::account_tokens::(account_id, collection_id) - } - - fn nft_children(collection_id: RmrkCollectionId, nft_id: RmrkNftId) -> Result, DispatchError> { - pallet_proxy_rmrk_core::rpc::nft_children::(collection_id, nft_id) - } - - fn collection_properties(collection_id: RmrkCollectionId, filter_keys: Option>) -> Result, DispatchError> { - pallet_proxy_rmrk_core::rpc::collection_properties::(collection_id, filter_keys) - } - - fn nft_properties(collection_id: RmrkCollectionId, nft_id: RmrkNftId, filter_keys: Option>) -> Result, DispatchError> { - pallet_proxy_rmrk_core::rpc::nft_properties::(collection_id, nft_id, filter_keys) - } - - fn nft_resources(collection_id: RmrkCollectionId, nft_id: RmrkNftId) -> Result, DispatchError> { - pallet_proxy_rmrk_core::rpc::nft_resources::(collection_id, nft_id) - } - - fn nft_resource_priority(collection_id: RmrkCollectionId, nft_id: RmrkNftId, resource_id: RmrkResourceId) -> Result, DispatchError> { - pallet_proxy_rmrk_core::rpc::nft_resource_priority::(collection_id, nft_id, resource_id) - } - - fn base(base_id: RmrkBaseId) -> Result>, DispatchError> { - pallet_proxy_rmrk_equip::rpc::base::(base_id) - } - - fn base_parts(base_id: RmrkBaseId) -> Result, DispatchError> { - pallet_proxy_rmrk_equip::rpc::base_parts::(base_id) - } - - fn theme_names(base_id: RmrkBaseId) -> Result, DispatchError> { - pallet_proxy_rmrk_equip::rpc::theme_names::(base_id) - } - - fn theme(base_id: RmrkBaseId, theme_name: RmrkThemeName, filter_keys: Option>) -> Result, DispatchError> { - pallet_proxy_rmrk_equip::rpc::theme::(base_id, theme_name, filter_keys) - } - } -} - -struct CheckInherents; - -impl cumulus_pallet_parachain_system::CheckInherents for CheckInherents { - fn check_inherents( - block: &Block, - relay_state_proof: &cumulus_pallet_parachain_system::RelayChainStateProof, - ) -> sp_inherents::CheckInherentsResult { - let relay_chain_slot = relay_state_proof - .read_slot() - .expect("Could not read the relay chain slot from the proof"); - - let inherent_data = - cumulus_primitives_timestamp::InherentDataProvider::from_relay_chain_slot_and_duration( - relay_chain_slot, - sp_std::time::Duration::from_secs(6), - ) - .create_inherent_data() - .expect("Could not create the timestamp inherent data"); - - inherent_data.check_extrinsics(block) - } -} +impl_common_runtime_apis!(); cumulus_pallet_parachain_system::register_validate_block!( Runtime = Runtime, diff --git a/runtime/opal/src/tests/logcapture.rs b/runtime/opal/src/tests/logcapture.rs new file mode 100644 index 0000000000..da0a310310 --- /dev/null +++ b/runtime/opal/src/tests/logcapture.rs @@ -0,0 +1,25 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. 
+ +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +use logtest::Logger; +use super::xcm::opal_xcm_tests; + +#[test] +fn opal_log_capture_tests() { + let mut logger = Logger::start(); + + opal_xcm_tests(&mut logger); +} diff --git a/runtime/opal/src/tests/mod.rs b/runtime/opal/src/tests/mod.rs new file mode 100644 index 0000000000..587524b3c9 --- /dev/null +++ b/runtime/opal/src/tests/mod.rs @@ -0,0 +1,18 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +mod logcapture; +mod xcm; diff --git a/runtime/opal/src/tests/xcm.rs b/runtime/opal/src/tests/xcm.rs new file mode 100644 index 0000000000..39ce4e3fd3 --- /dev/null +++ b/runtime/opal/src/tests/xcm.rs @@ -0,0 +1,27 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +use logtest::Logger; +use crate::{runtime_common::tests::xcm::*, xcm_barrier::Barrier}; + +const OPAL_PARA_ID: u32 = 2095; // Same as Quartz + +pub fn opal_xcm_tests(logger: &mut Logger) { + barrier_denies_transact::(logger); + + barrier_denies_transfer_from_unknown_location::(logger, OPAL_PARA_ID) + .expect_err("opal runtime allows any location"); +} diff --git a/runtime/opal/src/xcm_barrier.rs b/runtime/opal/src/xcm_barrier.rs new file mode 100644 index 0000000000..99c4d9a3d0 --- /dev/null +++ b/runtime/opal/src/xcm_barrier.rs @@ -0,0 +1,51 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. 
+ +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +use frame_support::traits::Everything; +use xcm::{ + latest::{Xcm, Weight}, + v1::MultiLocation, +}; +use xcm_builder::{AllowTopLevelPaidExecutionFrom, TakeWeightCredit}; +use xcm_executor::traits::ShouldExecute; + +use crate::runtime_common::config::xcm::{DenyThenTry, DenyTransact}; + +/// Execution barrier that just takes `max_weight` from `weight_credit`. +/// +/// Useful to allow XCM execution by local chain users via extrinsics. +/// E.g. `pallet_xcm::reserve_asset_transfer` to transfer a reserve asset +/// out of the local chain to another one. +pub struct AllowAllDebug; +impl ShouldExecute for AllowAllDebug { + fn should_execute( + _origin: &MultiLocation, + _message: &mut Xcm, + _max_weight: Weight, + _weight_credit: &mut Weight, + ) -> Result<(), ()> { + Ok(()) + } +} + +pub type Barrier = DenyThenTry< + DenyTransact, + ( + TakeWeightCredit, + AllowTopLevelPaidExecutionFrom, + AllowAllDebug, + ), +>; diff --git a/runtime/quartz/CHANGELOG.md b/runtime/quartz/CHANGELOG.md new file mode 100644 index 0000000000..f28cd71f83 --- /dev/null +++ b/runtime/quartz/CHANGELOG.md @@ -0,0 +1,25 @@ +# Change Log + +All notable changes to this project will be documented in this file. + + + +## [v0.9.27] 2022-09-08 + +### Added + +- `AppPromotion` pallet to runtime. 
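The new `runtime/opal/src/xcm_barrier.rs` a few hunks above wires `Barrier` as `DenyThenTry<DenyTransact, (TakeWeightCredit, AllowTopLevelPaidExecutionFrom<Everything>, AllowAllDebug)>`, and the new `tests/xcm.rs` asserts that this barrier rejects `Transact`. `DenyThenTry` and `DenyTransact` themselves live in `runtime_common::config::xcm` and are not shown in this diff, so the following is only a sketch of how such a deny-then-try combinator is conventionally written in Polkadot-style runtimes (the name `DenyThenTrySketch` is illustrative; this is not the `runtime_common` implementation):

use sp_std::marker::PhantomData;
use xcm::{
	latest::{Weight, Xcm},
	v1::MultiLocation,
};
use xcm_executor::traits::ShouldExecute;

/// Illustrative combinator: the `Deny` filter runs first and can veto the message;
/// only if it passes does the `Allow` filter make the final decision.
pub struct DenyThenTrySketch<Deny, Allow>(PhantomData<(Deny, Allow)>);

impl<Deny: ShouldExecute, Allow: ShouldExecute> ShouldExecute for DenyThenTrySketch<Deny, Allow> {
	fn should_execute<Call>(
		origin: &MultiLocation,
		message: &mut Xcm<Call>,
		max_weight: Weight,
		weight_credit: &mut Weight,
	) -> Result<(), ()> {
		// A deny filter such as `DenyTransact` is expected to return `Err(())` for
		// messages it forbids (e.g. ones containing `Transact`), aborting execution here.
		Deny::should_execute(origin, message, max_weight, weight_credit)?;
		// Otherwise the allow chain (TakeWeightCredit, paid execution, AllowAllDebug)
		// decides whether the message may run.
		Allow::should_execute(origin, message, max_weight, weight_credit)
	}
}

With this shape, the deny half gives an unconditional veto on `Transact` regardless of payment, which is what `barrier_denies_transact` in the new Opal XCM test exercises.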
+ +## [v0.9.27] 2022-08-16 + +### Bugfixes + +- Add missing config keys 74f532ac28dce15c15e7d576c074a58eba658c08 + +### Other changes + +- build: Upgrade polkadot to v0.9.27 2c498572636f2b34d53b1c51b7283a761a7dc90a + +- build: Upgrade polkadot to v0.9.26 85515e54c4ca1b82a2630034e55dcc804c643bf8 + +- build: Upgrade polkadot to v0.9.25 cdfb9bdc7b205ff1b5134f034ef9973d769e5e6b diff --git a/runtime/quartz/Cargo.toml b/runtime/quartz/Cargo.toml index e4e256cfcc..958890dd0a 100644 --- a/runtime/quartz/Cargo.toml +++ b/runtime/quartz/Cargo.toml @@ -1,6 +1,8 @@ ################################################################################ # Package +cargo-features = ["workspace-inheritance"] + [package] authors = ['Unique Network '] build = 'build.rs' @@ -10,7 +12,7 @@ homepage = 'https://unique.network' license = 'GPLv3' name = 'quartz-runtime' repository = 'https://github.com/UniqueNetwork/unique-chain' -version = '0.9.24' +version = '0.9.30' [package.metadata.docs.rs] targets = ['x86_64-unknown-linux-gnu'] @@ -36,6 +38,7 @@ runtime-benchmarks = [ 'pallet-proxy-rmrk-core/runtime-benchmarks', 'pallet-proxy-rmrk-equip/runtime-benchmarks', 'pallet-unique/runtime-benchmarks', + 'pallet-foreign-assets/runtime-benchmarks', 'pallet-inflation/runtime-benchmarks', 'pallet-unique-scheduler/runtime-benchmarks', 'pallet-xcm/runtime-benchmarks', @@ -46,7 +49,46 @@ runtime-benchmarks = [ try-runtime = [ 'frame-try-runtime', 'frame-executive/try-runtime', + 'frame-support/try-runtime', 'frame-system/try-runtime', + 'cumulus-pallet-parachain-system/try-runtime', + 'parachain-info/try-runtime', + 'pallet-aura/try-runtime', + 'cumulus-pallet-aura-ext/try-runtime', + 'pallet-balances/try-runtime', + 'pallet-randomness-collective-flip/try-runtime', + 'pallet-timestamp/try-runtime', + 'pallet-transaction-payment/try-runtime', + 'pallet-treasury/try-runtime', + 'pallet-sudo/try-runtime', + 'orml-vesting/try-runtime', + 'orml-xtokens/try-runtime', + 'orml-tokens/try-runtime', + 'cumulus-pallet-xcmp-queue/try-runtime', + 'pallet-xcm/try-runtime', + 'cumulus-pallet-xcm/try-runtime', + 'cumulus-pallet-dmp-queue/try-runtime', + 'pallet-inflation/try-runtime', + 'pallet-unique/try-runtime', + 'pallet-unique-scheduler/try-runtime', + 'pallet-configuration/try-runtime', + 'pallet-charge-transaction/try-runtime', + 'pallet-common/try-runtime', + 'pallet-fungible/try-runtime', + 'pallet-refungible/try-runtime', + 'pallet-nonfungible/try-runtime', + 'pallet-structure/try-runtime', + 'pallet-proxy-rmrk-core/try-runtime', + 'pallet-proxy-rmrk-equip/try-runtime', + 'pallet-app-promotion/try-runtime', + 'pallet-foreign-assets/try-runtime', + 'pallet-evm/try-runtime', + 'pallet-ethereum/try-runtime', + 'pallet-evm-coder-substrate/try-runtime', + 'pallet-evm-contract-helpers/try-runtime', + 'pallet-evm-transaction-payment/try-runtime', + 'pallet-evm-migration/try-runtime', + 'pallet-maintenance/try-runtime', ] std = [ 'codec/std', @@ -73,7 +115,6 @@ std = [ 'pallet-transaction-payment/std', 'pallet-transaction-payment-rpc-runtime-api/std', 'pallet-treasury/std', - # 'pallet-vesting/std', 'pallet-evm/std', 'pallet-evm-migration/std', 'pallet-evm-contract-helpers/std', @@ -83,11 +124,13 @@ std = [ 'pallet-base-fee/std', 'fp-rpc/std', 'up-rpc/std', + 'app-promotion-rpc/std', 'fp-evm-mapping/std', 'fp-self-contained/std', 'parachain-info/std', 'serde', 'pallet-inflation/std', + 'pallet-configuration/std', 'pallet-common/std', 'pallet-structure/std', 'pallet-fungible/std', @@ -114,14 +157,25 @@ std = [ 'xcm/std', 'xcm-builder/std', 
'xcm-executor/std', - 'unique-runtime-common/std', + 'up-common/std', + 'rmrk-rpc/std', + 'evm-coder/std', + 'up-sponsorship/std', "orml-vesting/std", - - 'pallet-maintenance/std', + "orml-tokens/std", + "orml-xtokens/std", + "orml-traits/std", + "pallet-foreign-assets/std", + "pallet-maintenance/std", ] limit-testing = ['pallet-unique/limit-testing', 'up-data-structs/limit-testing'] -quartz-runtime = [] +quartz-runtime = ['refungible'] + +refungible = [] +scheduler = [] +rmrk = [] +foreign-assets = [] ################################################################################ # Substrate Dependencies @@ -136,39 +190,39 @@ version = '3.1.2' default-features = false git = "https://github.com/paritytech/substrate" optional = true -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.frame-try-runtime] default-features = false -git = 'https://github.com/paritytech/substrate' +git = "https://github.com/paritytech/substrate" optional = true -branch = 'polkadot-v0.9.24' +branch = "polkadot-v0.9.30" [dependencies.frame-executive] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.frame-support] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.frame-system] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.frame-system-benchmarking] default-features = false git = "https://github.com/paritytech/substrate" optional = true -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.frame-system-rpc-runtime-api] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.hex-literal] optional = true @@ -183,12 +237,12 @@ version = '1.0.130' [dependencies.pallet-aura] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.pallet-balances] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" # Contracts specific packages # [dependencies.pallet-contracts] @@ -212,102 +266,97 @@ branch = "polkadot-v0.9.24" [dependencies.pallet-randomness-collective-flip] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.pallet-sudo] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.pallet-timestamp] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.pallet-transaction-payment] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.pallet-transaction-payment-rpc-runtime-api] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.pallet-treasury] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" - -# [dependencies.pallet-vesting] -# default-features = false -# git = 'https://github.com/paritytech/substrate' -# branch = 'master' +branch = "polkadot-v0.9.30" 
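This manifest now opts into `cargo-features = ["workspace-inheritance"]`, and the ORML dependencies further down are declared as `orml-vesting.workspace = true`, `orml-xtokens.workspace = true`, and so on, so their git source and feature settings are inherited from the workspace root instead of being pinned per crate. A hypothetical root-manifest counterpart is sketched below; the URLs and branch names are placeholders for illustration only, since the actual root `Cargo.toml` is not part of this hunk:

# Hypothetical [workspace.dependencies] table in the repository root Cargo.toml.
# Member crates inherit the source, branch and default-features declared here
# by writing e.g. `orml-vesting.workspace = true` in their own [dependencies].
[workspace.dependencies]
orml-vesting = { git = "https://github.com/example/open-runtime-module-library", branch = "example-polkadot-v0.9.30", default-features = false }
orml-tokens = { git = "https://github.com/example/open-runtime-module-library", branch = "example-polkadot-v0.9.30", default-features = false }
orml-traits = { git = "https://github.com/example/open-runtime-module-library", branch = "example-polkadot-v0.9.30", default-features = false }
orml-xtokens = { git = "https://github.com/example/open-runtime-module-library", branch = "example-polkadot-v0.9.30", default-features = false }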
[dependencies.sp-arithmetic] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-api] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-block-builder] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-core] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-consensus-aura] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-inherents] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-io] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-offchain] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-runtime] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-session] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-std] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-transaction-pool] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-version] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.smallvec] version = '1.6.1' @@ -318,46 +367,46 @@ version = '1.6.1' [dependencies.parachain-info] default-features = false git = "https://github.com/paritytech/cumulus" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.cumulus-pallet-aura-ext] git = "https://github.com/paritytech/cumulus" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" default-features = false [dependencies.cumulus-pallet-parachain-system] git = "https://github.com/paritytech/cumulus" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" default-features = false [dependencies.cumulus-primitives-core] git = "https://github.com/paritytech/cumulus" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" default-features = false [dependencies.cumulus-pallet-xcm] git = "https://github.com/paritytech/cumulus" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" default-features = false [dependencies.cumulus-pallet-dmp-queue] git = "https://github.com/paritytech/cumulus" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" default-features = false [dependencies.cumulus-pallet-xcmp-queue] git = "https://github.com/paritytech/cumulus" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" default-features = false [dependencies.cumulus-primitives-utility] git = "https://github.com/paritytech/cumulus" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" default-features = false [dependencies.cumulus-primitives-timestamp] git = 
"https://github.com/paritytech/cumulus" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" default-features = false ################################################################################ @@ -365,33 +414,27 @@ default-features = false [dependencies.polkadot-parachain] git = "https://github.com/paritytech/polkadot" -branch = "release-v0.9.24" +branch = "release-v0.9.30" default-features = false [dependencies.xcm] git = "https://github.com/paritytech/polkadot" -branch = "release-v0.9.24" +branch = "release-v0.9.30" default-features = false [dependencies.xcm-builder] git = "https://github.com/paritytech/polkadot" -branch = "release-v0.9.24" +branch = "release-v0.9.30" default-features = false [dependencies.xcm-executor] git = "https://github.com/paritytech/polkadot" -branch = "release-v0.9.24" +branch = "release-v0.9.30" default-features = false [dependencies.pallet-xcm] git = "https://github.com/paritytech/polkadot" -branch = "release-v0.9.24" -default-features = false - -[dependencies.orml-vesting] -git = "https://github.com/uniquenetwork/open-runtime-module-library" -branch = "unique-polkadot-v0.9.24" -version = "0.4.1-dev" +branch = "release-v0.9.30" default-features = false ################################################################################ @@ -406,17 +449,25 @@ path = "../../primitives/rmrk-rpc" # local dependencies [dependencies] +orml-vesting.workspace = true +orml-xtokens.workspace = true +orml-tokens.workspace = true +orml-traits.workspace = true + log = { version = "0.4.16", default-features = false } -unique-runtime-common = { path = "../common", default-features = false } +up-common = { path = "../../primitives/common", default-features = false } scale-info = { version = "2.0.1", default-features = false, features = [ "derive", ] } derivative = "2.2.0" pallet-unique = { path = '../../pallets/unique', default-features = false } up-rpc = { path = "../../primitives/rpc", default-features = false } -fp-evm-mapping = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } +app-promotion-rpc = { path = "../../primitives/app_promotion_rpc", default-features = false } +fp-evm-mapping = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } pallet-inflation = { path = '../../pallets/inflation', default-features = false } +pallet-app-promotion = { path = '../../pallets/app-promotion', default-features = false } up-data-structs = { path = '../../primitives/data-structs', default-features = false } +pallet-configuration = { default-features = false, path = "../../pallets/configuration" } pallet-common = { default-features = false, path = "../../pallets/common" } pallet-structure = { default-features = false, path = "../../pallets/structure" } pallet-fungible = { default-features = false, path = "../../pallets/fungible" } @@ -426,21 +477,35 @@ pallet-proxy-rmrk-core = { default-features = false, path = "../../pallets/proxy pallet-proxy-rmrk-equip = { default-features = false, path = "../../pallets/proxy-rmrk-equip", package = "pallet-rmrk-equip" } pallet-unique-scheduler = { path = '../../pallets/scheduler', default-features = false } # pallet-contract-helpers = { path = '../pallets/contract-helpers', default-features = false, version = '0.1.0' } -pallet-charge-transaction = { git = "https://github.com/uniquenetwork/pallet-sponsoring", branch = "polkadot-v0.9.24", package = "pallet-template-transaction-payment", default-features = false, version 
= "3.0.0" } +pallet-charge-transaction = { git = "https://github.com/uniquenetwork/pallet-sponsoring", branch = "polkadot-v0.9.30", package = "pallet-template-transaction-payment", default-features = false, version = "3.0.0" } pallet-evm-migration = { path = '../../pallets/evm-migration', default-features = false } pallet-evm-contract-helpers = { path = '../../pallets/evm-contract-helpers', default-features = false } pallet-evm-transaction-payment = { path = '../../pallets/evm-transaction-payment', default-features = false } pallet-evm-coder-substrate = { default-features = false, path = "../../pallets/evm-coder-substrate" } -pallet-evm = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } -pallet-ethereum = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } -pallet-base-fee = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } -fp-rpc = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } -fp-self-contained = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } +pallet-evm = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } +pallet-ethereum = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } +pallet-base-fee = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } +fp-rpc = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } +fp-self-contained = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } +evm-coder = { default-features = false, path = '../../crates/evm-coder' } +up-sponsorship = { default-features = false, git = "https://github.com/uniquenetwork/pallet-sponsoring", branch = 'polkadot-v0.9.30' } +pallet-foreign-assets = { default-features = false, path = "../../pallets/foreign-assets" } pallet-maintenance = { default-features = false, path = "../../pallets/maintenance" } +################################################################################ +# Other Dependencies + +impl-trait-for-tuples = "0.2.2" + +################################################################################ +# Dev Dependencies + +[dev-dependencies.logtest] +version = "2.0.0" + ################################################################################ # Build Dependencies [build-dependencies.substrate-wasm-builder] git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" diff --git a/runtime/quartz/src/lib.rs b/runtime/quartz/src/lib.rs index fcb18ee4fa..5c4be978fe 100644 --- a/runtime/quartz/src/lib.rs +++ b/runtime/quartz/src/lib.rs @@ -25,1478 +25,47 @@ #[cfg(feature = "std")] include!(concat!(env!("OUT_DIR"), "/wasm_binary.rs")); -use sp_api::impl_runtime_apis; -use sp_core::{crypto::KeyTypeId, OpaqueMetadata, H256, U256, H160}; -use sp_runtime::DispatchError; -use fp_self_contained::*; -// #[cfg(any(feature = "std", test))] -// pub use sp_runtime::BuildStorage; +use frame_support::parameter_types; -use scale_info::TypeInfo; -use sp_runtime::{ - Permill, Perbill, Percent, create_runtime_str, generic, 
impl_opaque_keys, - traits::{ - AccountIdLookup, BlakeTwo256, Block as BlockT, AccountIdConversion, Zero, Member, - SignedExtension, - }, - transaction_validity::{ - TransactionSource, TransactionValidity, ValidTransaction, InvalidTransaction, - }, - ApplyExtrinsicResult, RuntimeAppPublic, -}; - -use sp_std::prelude::*; - -#[cfg(feature = "std")] -use sp_version::NativeVersion; use sp_version::RuntimeVersion; -pub use pallet_transaction_payment::{ - Multiplier, TargetedFeeAdjustment, FeeDetails, RuntimeDispatchInfo, -}; -// A few exports that help ease life for downstream crates. -pub use pallet_balances::Call as BalancesCall; -pub use pallet_evm::{ - EnsureAddressTruncated, HashedAddressMapping, Runner, account::CrossAccountId as _, - OnMethodCall, Account as EVMAccount, FeeCalculator, GasWeightMapping, -}; -pub use frame_support::{ - construct_runtime, match_types, - dispatch::DispatchResult, - PalletId, parameter_types, StorageValue, ConsensusEngineId, - traits::{ - tokens::currency::Currency as CurrencyT, OnUnbalanced as OnUnbalancedT, Everything, - Currency, ExistenceRequirement, Get, IsInVec, KeyOwnerProofSystem, LockIdentifier, - OnUnbalanced, Randomness, FindAuthor, ConstU32, Imbalance, PrivilegeCmp, - }, - weights::{ - constants::{BlockExecutionWeight, ExtrinsicBaseWeight, RocksDbWeight, WEIGHT_PER_SECOND}, - DispatchClass, DispatchInfo, GetDispatchInfo, IdentityFee, Pays, PostDispatchInfo, Weight, - WeightToFeePolynomial, WeightToFeeCoefficient, WeightToFeeCoefficients, ConstantMultiplier, - WeightToFee, - }, -}; -use pallet_unique_scheduler::DispatchCall; -use up_data_structs::{ - CollectionId, TokenId, TokenData, Property, PropertyKeyPermission, CollectionLimits, - CollectionStats, RpcCollection, - mapping::{EvmTokenAddressMapping, CrossTokenAddressMapping}, - TokenChild, RmrkCollectionInfo, RmrkInstanceInfo, RmrkResourceInfo, RmrkPropertyInfo, - RmrkBaseInfo, RmrkPartType, RmrkTheme, RmrkThemeName, RmrkCollectionId, RmrkNftId, - RmrkNftChild, RmrkPropertyKey, RmrkResourceId, RmrkBaseId, -}; +use sp_runtime::create_runtime_str; -// use pallet_contracts::weights::WeightInfo; -// #[cfg(any(feature = "std", test))] -use frame_system::{ - self as frame_system, EnsureRoot, EnsureSigned, - limits::{BlockWeights, BlockLength}, -}; -use sp_arithmetic::{ - traits::{BaseArithmetic, Unsigned}, -}; -use smallvec::smallvec; -use codec::{Encode, Decode}; -use fp_rpc::TransactionStatus; -use sp_runtime::{ - traits::{ - Applyable, BlockNumberProvider, Dispatchable, PostDispatchInfoOf, DispatchInfoOf, - Saturating, CheckedConversion, - }, - generic::Era, - transaction_validity::TransactionValidityError, - DispatchErrorWithPostInfo, SaturatedConversion, -}; +use up_common::types::*; -// pub use pallet_timestamp::Call as TimestampCall; -pub use sp_consensus_aura::sr25519::AuthorityId as AuraId; +#[path = "../../common/mod.rs"] +mod runtime_common; -// Polkadot imports -use pallet_xcm::XcmPassthrough; -use polkadot_parachain::primitives::Sibling; -use xcm::v1::{BodyId, Junction::*, MultiLocation, NetworkId, Junctions::*}; -use xcm_builder::{ - AccountId32Aliases, AllowTopLevelPaidExecutionFrom, AllowUnpaidExecutionFrom, CurrencyAdapter, - EnsureXcmOrigin, FixedWeightBounds, LocationInverter, NativeAsset, ParentAsSuperuser, - RelayChainAsNative, SiblingParachainAsNative, SiblingParachainConvertsVia, - SignedAccountId32AsNative, SignedToAccountId32, SovereignSignedViaLocation, TakeWeightCredit, - ParentIsPreset, -}; -use xcm_executor::{Config, XcmExecutor, Assets}; -use sp_std::{cmp::Ordering, 
marker::PhantomData}; +pub mod xcm_barrier; -use xcm::latest::{ - // Xcm, - AssetId::{Concrete}, - Fungibility::Fungible as XcmFungible, - MultiAsset, - Error as XcmError, -}; -use xcm_executor::traits::{MatchesFungible, WeightTrader}; +#[cfg(test)] +mod tests; -use unique_runtime_common::{ - impl_common_runtime_apis, - types::*, - constants::*, - dispatch::{CollectionDispatchT, CollectionDispatch}, - sponsoring::UniqueSponsorshipHandler, - eth_sponsoring::UniqueEthSponsorshipHandler, - weights::CommonWeights, -}; +pub use runtime_common::*; pub const RUNTIME_NAME: &str = "quartz"; pub const TOKEN_SYMBOL: &str = "QTZ"; -type CrossAccountId = pallet_evm::account::BasicCrossAccountId; - -impl RuntimeInstance for Runtime { - type CrossAccountId = self::CrossAccountId; - - type TransactionConverter = self::TransactionConverter; - - fn get_transaction_converter() -> TransactionConverter { - TransactionConverter - } -} - -/// The type for looking up accounts. We don't expect more than 4 billion of them, but you -/// never know... -pub type AccountIndex = u32; - -/// Balance of an account. -pub type Balance = u128; - -/// Index of a transaction in the chain. -pub type Index = u32; - -/// A hash of some data used by the chain. -pub type Hash = sp_core::H256; - -/// Digest item type. -pub type DigestItem = generic::DigestItem; - -/// Opaque types. These are used by the CLI to instantiate machinery that don't need to know -/// the specifics of the runtime. They can then be made to be agnostic over specific formats -/// of data like extrinsics, allowing for them to continue syncing the network through upgrades -/// to even the core data structures. -pub mod opaque { - use sp_std::prelude::*; - use sp_runtime::impl_opaque_keys; - use super::Aura; - - pub use unique_runtime_common::types::*; - - impl_opaque_keys! { - pub struct SessionKeys { - pub aura: Aura, - } - } -} - /// This runtime version. pub const VERSION: RuntimeVersion = RuntimeVersion { spec_name: create_runtime_str!(RUNTIME_NAME), impl_name: create_runtime_str!(RUNTIME_NAME), authoring_version: 1, - spec_version: 924013, + spec_version: 930032, impl_version: 0, apis: RUNTIME_API_VERSIONS, - transaction_version: 1, + transaction_version: 2, state_version: 0, }; -#[derive(codec::Encode, codec::Decode)] -pub enum XCMPMessage { - /// Transfer tokens to the given account from the Parachain account. - TransferToken(XAccountId, XBalance), -} - -/// The version information used to identify this runtime when compiled natively. -#[cfg(feature = "std")] -pub fn native_version() -> NativeVersion { - NativeVersion { - runtime_version: VERSION, - can_author_with: Default::default(), - } -} - -type NegativeImbalance = >::NegativeImbalance; - -pub struct DealWithFees; -impl OnUnbalanced for DealWithFees { - fn on_unbalanceds(mut fees_then_tips: impl Iterator) { - if let Some(fees) = fees_then_tips.next() { - // for fees, 100% to treasury - let mut split = fees.ration(100, 0); - if let Some(tips) = fees_then_tips.next() { - // for tips, if any, 100% to treasury - tips.ration_merge_into(100, 0, &mut split); - } - Treasury::on_unbalanced(split.0); - // Author::on_unbalanced(split.1); - } - } -} - parameter_types! 
{ - pub const BlockHashCount: BlockNumber = 2400; - pub RuntimeBlockLength: BlockLength = - BlockLength::max_with_normal_ratio(5 * 1024 * 1024, NORMAL_DISPATCH_RATIO); - pub const AvailableBlockRatio: Perbill = Perbill::from_percent(75); - pub const MaximumBlockLength: u32 = 5 * 1024 * 1024; - pub RuntimeBlockWeights: BlockWeights = BlockWeights::builder() - .base_block(BlockExecutionWeight::get()) - .for_class(DispatchClass::all(), |weights| { - weights.base_extrinsic = ExtrinsicBaseWeight::get(); - }) - .for_class(DispatchClass::Normal, |weights| { - weights.max_total = Some(NORMAL_DISPATCH_RATIO * MAXIMUM_BLOCK_WEIGHT); - }) - .for_class(DispatchClass::Operational, |weights| { - weights.max_total = Some(MAXIMUM_BLOCK_WEIGHT); - // Operational transactions have some extra reserved space, so that they - // are included even if block reached `MAXIMUM_BLOCK_WEIGHT`. - weights.reserved = Some( - MAXIMUM_BLOCK_WEIGHT - NORMAL_DISPATCH_RATIO * MAXIMUM_BLOCK_WEIGHT - ); - }) - .avg_block_initialization(AVERAGE_ON_INITIALIZE_RATIO) - .build_or_panic(); pub const Version: RuntimeVersion = VERSION; pub const SS58Prefix: u8 = 255; -} - -parameter_types! { pub const ChainId: u64 = 8881; } -pub struct FixedFee; -impl FeeCalculator for FixedFee { - fn min_gas_price() -> (U256, u64) { - (MIN_GAS_PRICE.into(), 0) - } -} - -// Assuming slowest ethereum opcode is SSTORE, with gas price of 20000 as our worst case -// (contract, which only writes a lot of data), -// approximating on top of our real store write weight -parameter_types! { - pub const WritesPerSecond: u64 = WEIGHT_PER_SECOND / ::DbWeight::get().write; - pub const GasPerSecond: u64 = WritesPerSecond::get() * 20000; - pub const WeightPerGas: u64 = WEIGHT_PER_SECOND / GasPerSecond::get(); -} - -/// Limiting EVM execution to 50% of block for substrate users and management tasks -/// EVM transaction consumes more weight than substrate's, so we can't rely on them being -/// scheduled fairly -const EVM_DISPATCH_RATIO: Perbill = Perbill::from_percent(50); -parameter_types! 
{ - pub BlockGasLimit: U256 = U256::from(NORMAL_DISPATCH_RATIO * EVM_DISPATCH_RATIO * MAXIMUM_BLOCK_WEIGHT / WeightPerGas::get()); -} - -pub enum FixedGasWeightMapping {} -impl GasWeightMapping for FixedGasWeightMapping { - fn gas_to_weight(gas: u64) -> Weight { - gas.saturating_mul(WeightPerGas::get()) - } - fn weight_to_gas(weight: Weight) -> u64 { - weight / WeightPerGas::get() - } -} - -impl pallet_evm::account::Config for Runtime { - type CrossAccountId = pallet_evm::account::BasicCrossAccountId; - type EvmAddressMapping = HashedAddressMapping; - type EvmBackwardsAddressMapping = fp_evm_mapping::MapBackwardsAddressTruncated; -} - -impl pallet_evm::Config for Runtime { - type BlockGasLimit = BlockGasLimit; - type FeeCalculator = FixedFee; - type GasWeightMapping = FixedGasWeightMapping; - type BlockHashMapping = pallet_ethereum::EthereumBlockHashMapping; - type CallOrigin = EnsureAddressTruncated; - type WithdrawOrigin = EnsureAddressTruncated; - type AddressMapping = HashedAddressMapping; - type PrecompilesType = (); - type PrecompilesValue = (); - type Currency = Balances; - type Event = Event; - type OnMethodCall = ( - pallet_evm_migration::OnMethodCall, - pallet_evm_contract_helpers::HelpersOnMethodCall, - CollectionDispatchT, - pallet_unique::eth::CollectionHelpersOnMethodCall, - ); - type OnCreate = pallet_evm_contract_helpers::HelpersOnCreate; - type ChainId = ChainId; - type Runner = pallet_evm::runner::stack::Runner; - type OnChargeTransaction = pallet_evm::EVMCurrencyAdapter; - type TransactionValidityHack = pallet_evm_transaction_payment::TransactionValidityHack; - type FindAuthor = EthereumFindAuthor; -} - -impl pallet_evm_migration::Config for Runtime { - type WeightInfo = pallet_evm_migration::weights::SubstrateWeight; -} - -pub struct EthereumFindAuthor(core::marker::PhantomData); -impl> FindAuthor for EthereumFindAuthor { - fn find_author<'a, I>(digests: I) -> Option - where - I: 'a + IntoIterator, - { - if let Some(author_index) = F::find_author(digests) { - let authority_id = Aura::authorities()[author_index as usize].clone(); - return Some(H160::from_slice(&authority_id.to_raw_vec()[4..24])); - } - None - } -} - -impl pallet_ethereum::Config for Runtime { - type Event = Event; - type StateRoot = pallet_ethereum::IntermediateStateRoot; -} - -impl pallet_randomness_collective_flip::Config for Runtime {} - -impl frame_system::Config for Runtime { - /// The data to be stored in an account. - type AccountData = pallet_balances::AccountData; - /// The identifier used to distinguish between accounts. - type AccountId = AccountId; - /// The basic call filter to use in dispatchable. - type BaseCallFilter = Everything; - /// Maximum number of block number to block hash mappings to keep (oldest pruned first). - type BlockHashCount = BlockHashCount; - /// The maximum length of a block (in bytes). - type BlockLength = RuntimeBlockLength; - /// The index type for blocks. - type BlockNumber = BlockNumber; - /// The weight of the overhead invoked on the block import process, independent of the extrinsics included in that block. - type BlockWeights = RuntimeBlockWeights; - /// The aggregated dispatch type that is available for extrinsics. - type Call = Call; - /// The weight of database operations that the runtime can invoke. - type DbWeight = RocksDbWeight; - /// The ubiquitous event type. - type Event = Event; - /// The type for hashing blocks and tries. - type Hash = Hash; - /// The hashing algorithm used. - type Hashing = BlakeTwo256; - /// The header type. 
- type Header = generic::Header; - /// The index type for storing how many extrinsics an account has signed. - type Index = Index; - /// The lookup mechanism to get account ID from whatever is passed in dispatchers. - type Lookup = AccountIdLookup; - /// What to do if an account is fully reaped from the system. - type OnKilledAccount = (); - /// What to do if a new account is created. - type OnNewAccount = (); - type OnSetCode = cumulus_pallet_parachain_system::ParachainSetCode; - /// The ubiquitous origin type. - type Origin = Origin; - /// This type is being generated by `construct_runtime!`. - type PalletInfo = PalletInfo; - /// This is used as an identifier of the chain. 42 is the generic substrate prefix. - type SS58Prefix = SS58Prefix; - /// Weight information for the extrinsics of this pallet. - type SystemWeightInfo = frame_system::weights::SubstrateWeight; - /// Version of the runtime. - type Version = Version; - type MaxConsumers = ConstU32<16>; -} - -parameter_types! { - pub const MinimumPeriod: u64 = SLOT_DURATION / 2; -} - -impl pallet_timestamp::Config for Runtime { - /// A timestamp: milliseconds since the unix epoch. - type Moment = u64; - type OnTimestampSet = (); - type MinimumPeriod = MinimumPeriod; - type WeightInfo = (); -} - -parameter_types! { - // pub const ExistentialDeposit: u128 = 500; - pub const ExistentialDeposit: u128 = 0; - pub const MaxLocks: u32 = 50; - pub const MaxReserves: u32 = 50; -} - -impl pallet_balances::Config for Runtime { - type MaxLocks = MaxLocks; - type MaxReserves = MaxReserves; - type ReserveIdentifier = [u8; 16]; - /// The type for recording an account's balance. - type Balance = Balance; - /// The ubiquitous event type. - type Event = Event; - type DustRemoval = Treasury; - type ExistentialDeposit = ExistentialDeposit; - type AccountStore = System; - type WeightInfo = pallet_balances::weights::SubstrateWeight; -} - -pub const fn deposit(items: u32, bytes: u32) -> Balance { - items as Balance * 15 * CENTIUNIQUE + (bytes as Balance) * 6 * CENTIUNIQUE -} - -/* -parameter_types! { - pub TombstoneDeposit: Balance = deposit( - 1, - sp_std::mem::size_of::> as u32, - ); - pub DepositPerContract: Balance = TombstoneDeposit::get(); - pub const DepositPerStorageByte: Balance = deposit(0, 1); - pub const DepositPerStorageItem: Balance = deposit(1, 0); - pub RentFraction: Perbill = Perbill::from_rational(1u32, 30 * DAYS); - pub const SurchargeReward: Balance = 150 * MILLIUNIQUE; - pub const SignedClaimHandicap: u32 = 2; - pub const MaxDepth: u32 = 32; - pub const MaxValueSize: u32 = 16 * 1024; - pub const MaxCodeSize: u32 = 1024 * 1024 * 25; // 25 Mb - // The lazy deletion runs inside on_initialize. - pub DeletionWeightLimit: Weight = AVERAGE_ON_INITIALIZE_RATIO * - RuntimeBlockWeights::get().max_block; - // The weight needed for decoding the queue should be less or equal than a fifth - // of the overall weight dedicated to the lazy deletion. 
- pub DeletionQueueDepth: u32 = ((DeletionWeightLimit::get() / ( - ::WeightInfo::on_initialize_per_queue_item(1) - - ::WeightInfo::on_initialize_per_queue_item(0) - )) / 5) as u32; - pub Schedule: pallet_contracts::Schedule = Default::default(); -} - -impl pallet_contracts::Config for Runtime { - type Time = Timestamp; - type Randomness = RandomnessCollectiveFlip; - type Currency = Balances; - type Event = Event; - type RentPayment = (); - type SignedClaimHandicap = SignedClaimHandicap; - type TombstoneDeposit = TombstoneDeposit; - type DepositPerContract = DepositPerContract; - type DepositPerStorageByte = DepositPerStorageByte; - type DepositPerStorageItem = DepositPerStorageItem; - type RentFraction = RentFraction; - type SurchargeReward = SurchargeReward; - type WeightPrice = pallet_transaction_payment::Pallet; - type WeightInfo = pallet_contracts::weights::SubstrateWeight; - type ChainExtension = NFTExtension; - type DeletionQueueDepth = DeletionQueueDepth; - type DeletionWeightLimit = DeletionWeightLimit; - type Schedule = Schedule; - type CallStack = [pallet_contracts::Frame; 31]; -} -*/ - -parameter_types! { - /// This value increases the priority of `Operational` transactions by adding - /// a "virtual tip" that's equal to the `OperationalFeeMultiplier * final_fee`. - pub const OperationalFeeMultiplier: u8 = 5; -} - -/// Linear implementor of `WeightToFeePolynomial` -pub struct LinearFee(sp_std::marker::PhantomData); - -impl WeightToFeePolynomial for LinearFee -where - T: BaseArithmetic + From + Copy + Unsigned, -{ - type Balance = T; - - fn polynomial() -> WeightToFeeCoefficients { - smallvec!(WeightToFeeCoefficient { - coeff_integer: WEIGHT_TO_FEE_COEFF.into(), - coeff_frac: Perbill::zero(), - negative: false, - degree: 1, - }) - } -} - -impl pallet_transaction_payment::Config for Runtime { - type OnChargeTransaction = pallet_transaction_payment::CurrencyAdapter; - type LengthToFee = ConstantMultiplier; - type OperationalFeeMultiplier = OperationalFeeMultiplier; - type WeightToFee = LinearFee; - type FeeMultiplierUpdate = (); -} - -parameter_types! 
{ - pub const ProposalBond: Permill = Permill::from_percent(5); - pub const ProposalBondMinimum: Balance = 1 * UNIQUE; - pub const ProposalBondMaximum: Balance = 1000 * UNIQUE; - pub const SpendPeriod: BlockNumber = 5 * MINUTES; - pub const Burn: Permill = Permill::from_percent(0); - pub const TipCountdown: BlockNumber = 1 * DAYS; - pub const TipFindersFee: Percent = Percent::from_percent(20); - pub const TipReportDepositBase: Balance = 1 * UNIQUE; - pub const DataDepositPerByte: Balance = 1 * CENTIUNIQUE; - pub const BountyDepositBase: Balance = 1 * UNIQUE; - pub const BountyDepositPayoutDelay: BlockNumber = 1 * DAYS; - pub const TreasuryModuleId: PalletId = PalletId(*b"py/trsry"); - pub const BountyUpdatePeriod: BlockNumber = 14 * DAYS; - pub const MaximumReasonLength: u32 = 16384; - pub const BountyCuratorDeposit: Permill = Permill::from_percent(50); - pub const BountyValueMinimum: Balance = 5 * UNIQUE; - pub const MaxApprovals: u32 = 100; -} - -impl pallet_treasury::Config for Runtime { - type PalletId = TreasuryModuleId; - type Currency = Balances; - type ApproveOrigin = EnsureRoot; - type RejectOrigin = EnsureRoot; - type Event = Event; - type OnSlash = (); - type ProposalBond = ProposalBond; - type ProposalBondMinimum = ProposalBondMinimum; - type ProposalBondMaximum = ProposalBondMaximum; - type SpendPeriod = SpendPeriod; - type Burn = Burn; - type BurnDestination = (); - type SpendFunds = (); - type WeightInfo = pallet_treasury::weights::SubstrateWeight; - type MaxApprovals = MaxApprovals; -} - -impl pallet_sudo::Config for Runtime { - type Event = Event; - type Call = Call; -} - -pub struct RelayChainBlockNumberProvider(sp_std::marker::PhantomData); - -impl BlockNumberProvider - for RelayChainBlockNumberProvider -{ - type BlockNumber = BlockNumber; - - fn current_block_number() -> Self::BlockNumber { - cumulus_pallet_parachain_system::Pallet::::validation_data() - .map(|d| d.relay_parent_number) - .unwrap_or_default() - } -} - -parameter_types! { - pub const MinVestedTransfer: Balance = 10 * UNIQUE; - pub const MaxVestingSchedules: u32 = 28; -} - -impl orml_vesting::Config for Runtime { - type Event = Event; - type Currency = pallet_balances::Pallet; - type MinVestedTransfer = MinVestedTransfer; - type VestedTransferOrigin = EnsureSigned; - type WeightInfo = (); - type MaxVestingSchedules = MaxVestingSchedules; - type BlockNumberProvider = RelayChainBlockNumberProvider; -} - -parameter_types! { - pub const ReservedDmpWeight: Weight = MAXIMUM_BLOCK_WEIGHT / 4; - pub const ReservedXcmpWeight: Weight = MAXIMUM_BLOCK_WEIGHT / 4; -} - -impl cumulus_pallet_parachain_system::Config for Runtime { - type Event = Event; - type SelfParaId = parachain_info::Pallet; - type OnSystemEvent = (); - // type DownwardMessageHandlers = cumulus_primitives_utility::UnqueuedDmpAsParent< - // MaxDownwardMessageWeight, - // XcmExecutor, - // Call, - // >; - type OutboundXcmpMessageSource = XcmpQueue; - type DmpMessageHandler = DmpQueue; - type ReservedDmpWeight = ReservedDmpWeight; - type ReservedXcmpWeight = ReservedXcmpWeight; - type XcmpMessageHandler = XcmpQueue; -} - -impl parachain_info::Config for Runtime {} - -impl cumulus_pallet_aura_ext::Config for Runtime {} - -parameter_types! 
{ - pub const RelayLocation: MultiLocation = MultiLocation::parent(); - pub const RelayNetwork: NetworkId = NetworkId::Polkadot; - pub RelayOrigin: Origin = cumulus_pallet_xcm::Origin::Relay.into(); - pub Ancestry: MultiLocation = Parachain(ParachainInfo::parachain_id().into()).into(); -} - -/// Type for specifying how a `MultiLocation` can be converted into an `AccountId`. This is used -/// when determining ownership of accounts for asset transacting and when attempting to use XCM -/// `Transact` in order to determine the dispatch Origin. -pub type LocationToAccountId = ( - // The parent (Relay-chain) origin converts to the default `AccountId`. - ParentIsPreset, - // Sibling parachain origins convert to AccountId via the `ParaId::into`. - SiblingParachainConvertsVia, - // Straight up local `AccountId32` origins just alias directly to `AccountId`. - AccountId32Aliases, -); - -pub struct OnlySelfCurrency; -impl> MatchesFungible for OnlySelfCurrency { - fn matches_fungible(a: &MultiAsset) -> Option { - match (&a.id, &a.fun) { - (Concrete(_), XcmFungible(ref amount)) => CheckedConversion::checked_from(*amount), - _ => None, - } - } -} - -/// Means for transacting assets on this chain. -pub type LocalAssetTransactor = CurrencyAdapter< - // Use this currency: - Balances, - // Use this currency when it is a fungible asset matching the given location or name: - OnlySelfCurrency, - // Do a simple punn to convert an AccountId32 MultiLocation into a native chain account ID: - LocationToAccountId, - // Our chain's account ID type (we can't get away without mentioning it explicitly): - AccountId, - // We don't track any teleports. - (), ->; - -/// This is the type we use to convert an (incoming) XCM origin into a local `Origin` instance, -/// ready for dispatching a transaction with Xcm's `Transact`. There is an `OriginKind` which can -/// biases the kind of local `Origin` it will become. -pub type XcmOriginToTransactDispatchOrigin = ( - // Sovereign account converter; this attempts to derive an `AccountId` from the origin location - // using `LocationToAccountId` and then turn that into the usual `Signed` origin. Useful for - // foreign chains who want to have a local sovereign account on this chain which they control. - SovereignSignedViaLocation, - // Native converter for Relay-chain (Parent) location; will converts to a `Relay` origin when - // recognised. - RelayChainAsNative, - // Native converter for sibling Parachains; will convert to a `SiblingPara` origin when - // recognised. - SiblingParachainAsNative, - // Superuser converter for the Relay-chain (Parent) location. This will allow it to issue a - // transaction from the Root origin. - ParentAsSuperuser, - // Native signed account converter; this just converts an `AccountId32` origin into a normal - // `Origin::Signed` origin of the same 32-byte value. - SignedAccountId32AsNative, - // Xcm origins can be represented natively under the Xcm pallet's Xcm origin. - XcmPassthrough, -); - -parameter_types! { - // One XCM operation is 1_000_000 weight - almost certainly a conservative estimate. - pub UnitWeightCost: Weight = 1_000_000; - // 1200 UNIQUEs buy 1 second of weight. - pub const WeightPrice: (MultiLocation, u128) = (MultiLocation::parent(), 1_200 * UNIQUE); - pub const MaxInstructions: u32 = 100; - pub const MaxAuthorities: u32 = 100_000; -} - -match_types! 
{ - pub type ParentOrParentsUnitPlurality: impl Contains = { - MultiLocation { parents: 1, interior: Here } | - MultiLocation { parents: 1, interior: X1(Plurality { id: BodyId::Unit, .. }) } - }; -} - -pub type Barrier = ( - TakeWeightCredit, - AllowTopLevelPaidExecutionFrom, - AllowUnpaidExecutionFrom, - // ^^^ Parent & its unit plurality gets free execution -); - -pub struct UsingOnlySelfCurrencyComponents< - WeightToFee: WeightToFeePolynomial, - AssetId: Get, - AccountId, - Currency: CurrencyT, - OnUnbalanced: OnUnbalancedT, ->( - Weight, - Currency::Balance, - PhantomData<(WeightToFee, AssetId, AccountId, Currency, OnUnbalanced)>, -); -impl< - WeightToFee: WeightToFeePolynomial, - AssetId: Get, - AccountId, - Currency: CurrencyT, - OnUnbalanced: OnUnbalancedT, - > WeightTrader - for UsingOnlySelfCurrencyComponents -{ - fn new() -> Self { - Self(0, Zero::zero(), PhantomData) - } - - fn buy_weight(&mut self, weight: Weight, payment: Assets) -> Result { - let amount = WeightToFee::weight_to_fee(&weight); - let u128_amount: u128 = amount.try_into().map_err(|_| XcmError::Overflow)?; - - // location to this parachain through relay chain - let option1: xcm::v1::AssetId = Concrete(MultiLocation { - parents: 1, - interior: X1(Parachain(ParachainInfo::parachain_id().into())), - }); - // direct location - let option2: xcm::v1::AssetId = Concrete(MultiLocation { - parents: 0, - interior: Here, - }); - - let required = if payment.fungible.contains_key(&option1) { - (option1, u128_amount).into() - } else if payment.fungible.contains_key(&option2) { - (option2, u128_amount).into() - } else { - (Concrete(MultiLocation::default()), u128_amount).into() - }; - - let unused = payment - .checked_sub(required) - .map_err(|_| XcmError::TooExpensive)?; - self.0 = self.0.saturating_add(weight); - self.1 = self.1.saturating_add(amount); - Ok(unused) - } - - fn refund_weight(&mut self, weight: Weight) -> Option { - let weight = weight.min(self.0); - let amount = WeightToFee::weight_to_fee(&weight); - self.0 -= weight; - self.1 = self.1.saturating_sub(amount); - let amount: u128 = amount.saturated_into(); - if amount > 0 { - Some((AssetId::get(), amount).into()) - } else { - None - } - } -} -impl< - WeightToFee: WeightToFeePolynomial, - AssetId: Get, - AccountId, - Currency: CurrencyT, - OnUnbalanced: OnUnbalancedT, - > Drop - for UsingOnlySelfCurrencyComponents -{ - fn drop(&mut self) { - OnUnbalanced::on_unbalanced(Currency::issue(self.1)); - } -} - -pub struct XcmConfig; -impl Config for XcmConfig { - type Call = Call; - type XcmSender = XcmRouter; - // How to withdraw and deposit an asset. - type AssetTransactor = LocalAssetTransactor; - type OriginConverter = XcmOriginToTransactDispatchOrigin; - type IsReserve = NativeAsset; - type IsTeleporter = (); // Teleportation is disabled - type LocationInverter = LocationInverter; - type Barrier = Barrier; - type Weigher = FixedWeightBounds; - type Trader = - UsingOnlySelfCurrencyComponents, RelayLocation, AccountId, Balances, ()>; - type ResponseHandler = (); // Don't handle responses for now. - type SubscriptionService = PolkadotXcm; +construct_runtime!(quartz); - type AssetTrap = PolkadotXcm; - type AssetClaims = PolkadotXcm; -} - -// parameter_types! { -// pub const MaxDownwardMessageWeight: Weight = MAXIMUM_BLOCK_WEIGHT / 10; -// } - -/// No local origins on this chain are allowed to dispatch XCM sends/executions. 
-pub type LocalOriginToLocation = (SignedToAccountId32,); - -/// The means for routing XCM messages which are not for local execution into the right message -/// queues. -pub type XcmRouter = ( - // Two routers - use UMP to communicate with the relay chain: - cumulus_primitives_utility::ParentAsUmp, - // ..and XCMP to communicate with the sibling chains. - XcmpQueue, -); - -impl pallet_evm_coder_substrate::Config for Runtime {} - -impl pallet_xcm::Config for Runtime { - type Event = Event; - type SendXcmOrigin = EnsureXcmOrigin; - type XcmRouter = XcmRouter; - type ExecuteXcmOrigin = EnsureXcmOrigin; - type XcmExecuteFilter = Everything; - type XcmExecutor = XcmExecutor; - type XcmTeleportFilter = Everything; - type XcmReserveTransferFilter = Everything; - type Weigher = FixedWeightBounds; - type LocationInverter = LocationInverter; - type Origin = Origin; - type Call = Call; - const VERSION_DISCOVERY_QUEUE_SIZE: u32 = 100; - type AdvertisedXcmVersion = pallet_xcm::CurrentXcmVersion; -} - -impl cumulus_pallet_xcm::Config for Runtime { - type Event = Event; - type XcmExecutor = XcmExecutor; -} - -impl cumulus_pallet_xcmp_queue::Config for Runtime { - type WeightInfo = (); - type Event = Event; - type XcmExecutor = XcmExecutor; - type ChannelInfo = ParachainSystem; - type VersionWrapper = (); - type ExecuteOverweightOrigin = frame_system::EnsureRoot; - type ControllerOrigin = EnsureRoot; - type ControllerOriginConverter = XcmOriginToTransactDispatchOrigin; -} - -impl cumulus_pallet_dmp_queue::Config for Runtime { - type Event = Event; - type XcmExecutor = XcmExecutor; - type ExecuteOverweightOrigin = frame_system::EnsureRoot; -} - -impl pallet_aura::Config for Runtime { - type AuthorityId = AuraId; - type DisabledValidators = (); - type MaxAuthorities = MaxAuthorities; -} - -parameter_types! { - pub TreasuryAccountId: AccountId = TreasuryModuleId::get().into_account_truncating(); - pub const CollectionCreationPrice: Balance = 2 * UNIQUE; -} - -impl pallet_common::Config for Runtime { - type WeightInfo = pallet_common::weights::SubstrateWeight; - type Event = Event; - type Currency = Balances; - type CollectionCreationPrice = CollectionCreationPrice; - type TreasuryAccountId = TreasuryAccountId; - type CollectionDispatch = CollectionDispatchT; - - type EvmTokenAddressMapping = EvmTokenAddressMapping; - type CrossTokenAddressMapping = CrossTokenAddressMapping; - type ContractAddress = EvmCollectionHelpersAddress; -} - -impl pallet_structure::Config for Runtime { - type Event = Event; - type Call = Call; - type WeightInfo = pallet_structure::weights::SubstrateWeight; -} - -impl pallet_fungible::Config for Runtime { - type WeightInfo = pallet_fungible::weights::SubstrateWeight; -} -impl pallet_refungible::Config for Runtime { - type WeightInfo = pallet_refungible::weights::SubstrateWeight; -} -impl pallet_nonfungible::Config for Runtime { - type WeightInfo = pallet_nonfungible::weights::SubstrateWeight; -} - -impl pallet_proxy_rmrk_core::Config for Runtime { - type WeightInfo = pallet_proxy_rmrk_core::weights::SubstrateWeight; - type Event = Event; -} - -impl pallet_proxy_rmrk_equip::Config for Runtime { - type WeightInfo = pallet_proxy_rmrk_equip::weights::SubstrateWeight; - type Event = Event; -} - -impl pallet_unique::Config for Runtime { - type Event = Event; - type WeightInfo = pallet_unique::weights::SubstrateWeight; - type CommonWeightInfo = CommonWeights; - type RefungibleExtensionsWeightInfo = CommonWeights; -} - -parameter_types! 
{ - pub const InflationBlockInterval: BlockNumber = 100; // every time per how many blocks inflation is applied -} - -/// Used for the pallet inflation -impl pallet_inflation::Config for Runtime { - type Currency = Balances; - type TreasuryAccountId = TreasuryAccountId; - type InflationBlockInterval = InflationBlockInterval; - type BlockNumberProvider = RelayChainBlockNumberProvider; -} - -parameter_types! { - pub MaximumSchedulerWeight: Weight = Perbill::from_percent(50) * - RuntimeBlockWeights::get().max_block; - pub const MaxScheduledPerBlock: u32 = 50; -} - -type ChargeTransactionPayment = pallet_charge_transaction::ChargeTransactionPayment; -use frame_support::traits::NamedReservableCurrency; - -fn get_signed_extras(from: ::AccountId) -> SignedExtraScheduler { - ( - frame_system::CheckSpecVersion::::new(), - frame_system::CheckGenesis::::new(), - frame_system::CheckEra::::from(Era::Immortal), - frame_system::CheckNonce::::from(frame_system::Pallet::::account_nonce( - from, - )), - frame_system::CheckWeight::::new(), - CheckMaintenance, - // sponsoring transaction logic - // pallet_charge_transaction::ChargeTransactionPayment::::new(0), - ) -} - -pub struct SchedulerPaymentExecutor; -impl - DispatchCall for SchedulerPaymentExecutor -where - ::Call: Member - + Dispatchable - + SelfContainedCall - + GetDispatchInfo - + From>, - SelfContainedSignedInfo: Send + Sync + 'static, - Call: From<::Call> - + From<::Call> - + SelfContainedCall, - sp_runtime::AccountId32: From<::AccountId>, -{ - fn dispatch_call( - signer: ::AccountId, - call: ::Call, - ) -> Result< - Result>, - TransactionValidityError, - > { - let dispatch_info = call.get_dispatch_info(); - let extrinsic = fp_self_contained::CheckedExtrinsic::< - AccountId, - Call, - SignedExtraScheduler, - SelfContainedSignedInfo, - > { - signed: - CheckedSignature::::Signed( - signer.clone().into(), - get_signed_extras(signer.into()), - ), - function: call.into(), - }; - - extrinsic.apply::(&dispatch_info, 0) - } - - fn reserve_balance( - id: [u8; 16], - sponsor: ::AccountId, - call: ::Call, - count: u32, - ) -> Result<(), DispatchError> { - let dispatch_info = call.get_dispatch_info(); - let weight: Balance = ChargeTransactionPayment::traditional_fee(0, &dispatch_info, 0) - .saturating_mul(count.into()); - - >::reserve_named( - &id, - &(sponsor.into()), - weight.into(), - ) - } - - fn pay_for_call( - id: [u8; 16], - sponsor: ::AccountId, - call: ::Call, - ) -> Result { - let dispatch_info = call.get_dispatch_info(); - let weight: Balance = ChargeTransactionPayment::traditional_fee(0, &dispatch_info, 0); - Ok( - >::unreserve_named( - &id, - &(sponsor.into()), - weight.into(), - ), - ) - } - - fn cancel_reserve( - id: [u8; 16], - sponsor: ::AccountId, - ) -> Result { - Ok( - >::unreserve_named( - &id, - &(sponsor.into()), - u128::MAX, - ), - ) - } -} - -parameter_types! { - pub const NoPreimagePostponement: Option = Some(10); - pub const Preimage: Option = Some(10); -} - -/// Used the compare the privilege of an origin inside the scheduler. 
-pub struct OriginPrivilegeCmp; - -impl PrivilegeCmp for OriginPrivilegeCmp { - fn cmp_privilege(_left: &OriginCaller, _right: &OriginCaller) -> Option { - Some(Ordering::Equal) - } -} - -impl pallet_unique_scheduler::Config for Runtime { - type Event = Event; - type Origin = Origin; - type Currency = Balances; - type PalletsOrigin = OriginCaller; - type Call = Call; - type MaximumWeight = MaximumSchedulerWeight; - type ScheduleOrigin = EnsureSigned; - type MaxScheduledPerBlock = MaxScheduledPerBlock; - type WeightInfo = (); - type CallExecutor = SchedulerPaymentExecutor; - type OriginPrivilegeCmp = OriginPrivilegeCmp; - type PreimageProvider = (); - type NoPreimagePostponement = NoPreimagePostponement; -} - -type EvmSponsorshipHandler = ( - UniqueEthSponsorshipHandler, - pallet_evm_contract_helpers::HelpersContractSponsoring, -); -type SponsorshipHandler = ( - UniqueSponsorshipHandler, - //pallet_contract_helpers::ContractSponsorshipHandler, - pallet_evm_transaction_payment::BridgeSponsorshipHandler, -); - -impl pallet_evm_transaction_payment::Config for Runtime { - type EvmSponsorshipHandler = EvmSponsorshipHandler; - type Currency = Balances; -} - -impl pallet_charge_transaction::Config for Runtime { - type SponsorshipHandler = SponsorshipHandler; -} - -// impl pallet_contract_helpers::Config for Runtime { -// type DefaultSponsoringRateLimit = DefaultSponsoringRateLimit; -// } - -parameter_types! { - // 0x842899ECF380553E8a4de75bF534cdf6fBF64049 - pub const HelpersContractAddress: H160 = H160([ - 0x84, 0x28, 0x99, 0xec, 0xf3, 0x80, 0x55, 0x3e, 0x8a, 0x4d, 0xe7, 0x5b, 0xf5, 0x34, 0xcd, 0xf6, 0xfb, 0xf6, 0x40, 0x49, - ]); - - // 0x6c4e9fe1ae37a41e93cee429e8e1881abdcbb54f - pub const EvmCollectionHelpersAddress: H160 = H160([ - 0x6c, 0x4e, 0x9f, 0xe1, 0xae, 0x37, 0xa4, 0x1e, 0x93, 0xce, 0xe4, 0x29, 0xe8, 0xe1, 0x88, 0x1a, 0xbd, 0xcb, 0xb5, 0x4f, - ]); -} - -impl pallet_evm_contract_helpers::Config for Runtime { - type ContractAddress = HelpersContractAddress; - type DefaultSponsoringRateLimit = DefaultSponsoringRateLimit; -} - -impl pallet_maintenance::Config for Runtime { - type Event = Event; - type WeightInfo = pallet_maintenance::weights::SubstrateWeight; -} - -#[derive(Debug, Encode, Decode, PartialEq, Eq, Clone, TypeInfo)] -pub struct CheckMaintenance; - -impl SignedExtension for CheckMaintenance { - type AccountId = AccountId; - type Call = Call; - type AdditionalSigned = (); - type Pre = (); - - const IDENTIFIER: &'static str = "CheckMaintenance"; - - fn additional_signed(&self) -> Result { - Ok(()) - } - - fn pre_dispatch( - self, - who: &Self::AccountId, - call: &Self::Call, - info: &DispatchInfoOf, - len: usize, - ) -> Result { - self.validate(who, call, info, len).map(|_| ()) - } - - fn validate( - &self, - _who: &Self::AccountId, - call: &Self::Call, - _info: &DispatchInfoOf, - _len: usize, - ) -> TransactionValidity { - if Maintenance::is_enabled() { - match call { - Call::EvmMigration(_) - | Call::EVM(_) - | Call::Ethereum(_) - | Call::Inflation(_) - | Call::Maintenance(_) - | Call::Scheduler(_) - | Call::Structure(_) - | Call::Unique(_) => Err(TransactionValidityError::Invalid(InvalidTransaction::Call)), - - #[cfg(any(feature = "opal-runtime", feature = "quartz-runtime"))] - Call::RmrkCore(_) | Call::RmrkEquip(_) => { - Err(TransactionValidityError::Invalid(InvalidTransaction::Call)) - } - - _ => Ok(ValidTransaction::default()), - } - } else { - Ok(ValidTransaction::default()) - } - } - - fn pre_dispatch_unsigned( - call: &Self::Call, - info: &DispatchInfoOf, - len: usize, 
- ) -> Result<(), TransactionValidityError> { - Self::validate_unsigned(call, info, len).map(|_| ()) - } - - fn validate_unsigned( - call: &Self::Call, - _info: &DispatchInfoOf, - _len: usize, - ) -> TransactionValidity { - if Maintenance::is_enabled() { - match call { - Call::EVM(_) | Call::Ethereum(_) | Call::EvmMigration(_) => { - Err(TransactionValidityError::Invalid(InvalidTransaction::Call)) - } - _ => Ok(ValidTransaction::default()), - } - } else { - Ok(ValidTransaction::default()) - } - } -} - -construct_runtime!( - pub enum Runtime where - Block = Block, - NodeBlock = opaque::Block, - UncheckedExtrinsic = UncheckedExtrinsic - { - ParachainSystem: cumulus_pallet_parachain_system::{Pallet, Call, Config, Storage, Inherent, Event, ValidateUnsigned} = 20, - ParachainInfo: parachain_info::{Pallet, Storage, Config} = 21, - - Aura: pallet_aura::{Pallet, Config} = 22, - AuraExt: cumulus_pallet_aura_ext::{Pallet, Config} = 23, - - Balances: pallet_balances::{Pallet, Call, Storage, Config, Event} = 30, - RandomnessCollectiveFlip: pallet_randomness_collective_flip::{Pallet, Storage} = 31, - Timestamp: pallet_timestamp::{Pallet, Call, Storage, Inherent} = 32, - TransactionPayment: pallet_transaction_payment::{Pallet, Storage} = 33, - Treasury: pallet_treasury::{Pallet, Call, Storage, Config, Event} = 34, - Sudo: pallet_sudo::{Pallet, Call, Storage, Config, Event} = 35, - System: frame_system::{Pallet, Call, Storage, Config, Event} = 36, - Vesting: orml_vesting::{Pallet, Storage, Call, Event, Config} = 37, - // Vesting: pallet_vesting::{Pallet, Call, Config, Storage, Event} = 37, - // Contracts: pallet_contracts::{Pallet, Call, Storage, Event} = 38, - - // XCM helpers. - XcmpQueue: cumulus_pallet_xcmp_queue::{Pallet, Call, Storage, Event} = 50, - PolkadotXcm: pallet_xcm::{Pallet, Call, Event, Origin} = 51, - CumulusXcm: cumulus_pallet_xcm::{Pallet, Call, Event, Origin} = 52, - DmpQueue: cumulus_pallet_dmp_queue::{Pallet, Call, Storage, Event} = 53, - - // Unique Pallets - Inflation: pallet_inflation::{Pallet, Call, Storage} = 60, - Unique: pallet_unique::{Pallet, Call, Storage, Event} = 61, - Scheduler: pallet_unique_scheduler::{Pallet, Call, Storage, Event} = 62, - // free = 63 - Charging: pallet_charge_transaction::{Pallet, Call, Storage } = 64, - // ContractHelpers: pallet_contract_helpers::{Pallet, Call, Storage} = 65, - Common: pallet_common::{Pallet, Storage, Event} = 66, - Fungible: pallet_fungible::{Pallet, Storage} = 67, - Refungible: pallet_refungible::{Pallet, Storage} = 68, - Nonfungible: pallet_nonfungible::{Pallet, Storage} = 69, - Structure: pallet_structure::{Pallet, Call, Storage, Event} = 70, - RmrkCore: pallet_proxy_rmrk_core::{Pallet, Call, Storage, Event} = 71, - RmrkEquip: pallet_proxy_rmrk_equip::{Pallet, Call, Storage, Event} = 72, - - // Frontier - EVM: pallet_evm::{Pallet, Config, Call, Storage, Event} = 100, - Ethereum: pallet_ethereum::{Pallet, Config, Call, Storage, Event, Origin} = 101, - - EvmCoderSubstrate: pallet_evm_coder_substrate::{Pallet, Storage} = 150, - EvmContractHelpers: pallet_evm_contract_helpers::{Pallet, Storage} = 151, - EvmTransactionPayment: pallet_evm_transaction_payment::{Pallet} = 152, - EvmMigration: pallet_evm_migration::{Pallet, Call, Storage} = 153, - - Maintenance: pallet_maintenance::{Pallet, Call, Storage, Event} = 154, - } -); - -pub struct TransactionConverter; - -impl fp_rpc::ConvertTransaction for TransactionConverter { - fn convert_transaction(&self, transaction: pallet_ethereum::Transaction) -> UncheckedExtrinsic { - 
UncheckedExtrinsic::new_unsigned( - pallet_ethereum::Call::::transact { transaction }.into(), - ) - } -} - -impl fp_rpc::ConvertTransaction for TransactionConverter { - fn convert_transaction( - &self, - transaction: pallet_ethereum::Transaction, - ) -> opaque::UncheckedExtrinsic { - let extrinsic = UncheckedExtrinsic::new_unsigned( - pallet_ethereum::Call::::transact { transaction }.into(), - ); - let encoded = extrinsic.encode(); - opaque::UncheckedExtrinsic::decode(&mut &encoded[..]) - .expect("Encoded extrinsic is always valid") - } -} - -/// The address format for describing accounts. -pub type Address = sp_runtime::MultiAddress; -/// Block header type as expected by this runtime. -pub type Header = generic::Header; -/// Block type as expected by this runtime. -pub type Block = generic::Block; -/// A Block signed with a Justification -pub type SignedBlock = generic::SignedBlock; -/// BlockId type as expected by this runtime. -pub type BlockId = generic::BlockId; -/// The SignedExtension to the basic transaction logic. -pub type SignedExtra = ( - frame_system::CheckSpecVersion, - // system::CheckTxVersion, - frame_system::CheckGenesis, - frame_system::CheckEra, - frame_system::CheckNonce, - frame_system::CheckWeight, - CheckMaintenance, - ChargeTransactionPayment, - //pallet_contract_helpers::ContractHelpersExtension, - pallet_ethereum::FakeTransactionFinalizer, -); - -pub type SignedExtraScheduler = ( - frame_system::CheckSpecVersion, - frame_system::CheckGenesis, - frame_system::CheckEra, - frame_system::CheckNonce, - frame_system::CheckWeight, - CheckMaintenance, - // pallet_charge_transaction::ChargeTransactionPayment, -); -/// Unchecked extrinsic type as expected by this runtime. -pub type UncheckedExtrinsic = - fp_self_contained::UncheckedExtrinsic; -/// Extrinsic type that has already been checked. -pub type CheckedExtrinsic = fp_self_contained::CheckedExtrinsic; -/// Executive: handles dispatch to the various modules. -pub type Executive = frame_executive::Executive< - Runtime, - Block, - frame_system::ChainContext, - Runtime, - AllPalletsReversedWithSystemFirst, ->; - -impl_opaque_keys! { - pub struct SessionKeys { - pub aura: Aura, - } -} - -impl fp_self_contained::SelfContainedCall for Call { - type SignedInfo = H160; - - fn is_self_contained(&self) -> bool { - match self { - Call::Ethereum(call) => call.is_self_contained(), - _ => false, - } - } - - fn check_self_contained(&self) -> Option> { - match self { - Call::Ethereum(call) => call.check_self_contained(), - _ => None, - } - } - - fn validate_self_contained( - &self, - info: &Self::SignedInfo, - dispatch_info: &DispatchInfoOf, - len: usize, - ) -> Option { - match self { - Call::Ethereum(call) => call.validate_self_contained(info, dispatch_info, len), - _ => None, - } - } - - fn pre_dispatch_self_contained( - &self, - info: &Self::SignedInfo, - ) -> Option> { - match self { - Call::Ethereum(call) => call.pre_dispatch_self_contained(info), - _ => None, - } - } - - fn apply_self_contained( - self, - info: Self::SignedInfo, - ) -> Option>> { - match self { - call @ Call::Ethereum(pallet_ethereum::Call::transact { .. }) => Some(call.dispatch( - Origin::from(pallet_ethereum::RawOrigin::EthereumTransaction(info)), - )), - _ => None, - } - } -} - -macro_rules! 
dispatch_unique_runtime { - ($collection:ident.$method:ident($($name:ident),*)) => {{ - let collection = ::CollectionDispatch::dispatch(>::try_get($collection)?); - let dispatch = collection.as_dyn(); - - Ok::<_, DispatchError>(dispatch.$method($($name),*)) - }}; -} - -impl_common_runtime_apis! { - #![custom_apis] - - impl rmrk_rpc::RmrkApi< - Block, - AccountId, - RmrkCollectionInfo, - RmrkInstanceInfo, - RmrkResourceInfo, - RmrkPropertyInfo, - RmrkBaseInfo, - RmrkPartType, - RmrkTheme - > for Runtime { - fn last_collection_idx() -> Result { - pallet_proxy_rmrk_core::rpc::last_collection_idx::() - } - - fn collection_by_id(collection_id: RmrkCollectionId) -> Result>, DispatchError> { - pallet_proxy_rmrk_core::rpc::collection_by_id::(collection_id) - } - - fn nft_by_id(collection_id: RmrkCollectionId, nft_by_id: RmrkNftId) -> Result>, DispatchError> { - pallet_proxy_rmrk_core::rpc::nft_by_id::(collection_id, nft_by_id) - } - - fn account_tokens(account_id: AccountId, collection_id: RmrkCollectionId) -> Result, DispatchError> { - pallet_proxy_rmrk_core::rpc::account_tokens::(account_id, collection_id) - } - - fn nft_children(collection_id: RmrkCollectionId, nft_id: RmrkNftId) -> Result, DispatchError> { - pallet_proxy_rmrk_core::rpc::nft_children::(collection_id, nft_id) - } - - fn collection_properties(collection_id: RmrkCollectionId, filter_keys: Option>) -> Result, DispatchError> { - pallet_proxy_rmrk_core::rpc::collection_properties::(collection_id, filter_keys) - } - - fn nft_properties(collection_id: RmrkCollectionId, nft_id: RmrkNftId, filter_keys: Option>) -> Result, DispatchError> { - pallet_proxy_rmrk_core::rpc::nft_properties::(collection_id, nft_id, filter_keys) - } - - fn nft_resources(collection_id: RmrkCollectionId, nft_id: RmrkNftId) -> Result, DispatchError> { - pallet_proxy_rmrk_core::rpc::nft_resources::(collection_id, nft_id) - } - - fn nft_resource_priority(collection_id: RmrkCollectionId, nft_id: RmrkNftId, resource_id: RmrkResourceId) -> Result, DispatchError> { - pallet_proxy_rmrk_core::rpc::nft_resource_priority::(collection_id, nft_id, resource_id) - } - - fn base(base_id: RmrkBaseId) -> Result>, DispatchError> { - pallet_proxy_rmrk_equip::rpc::base::(base_id) - } - - fn base_parts(base_id: RmrkBaseId) -> Result, DispatchError> { - pallet_proxy_rmrk_equip::rpc::base_parts::(base_id) - } - - fn theme_names(base_id: RmrkBaseId) -> Result, DispatchError> { - pallet_proxy_rmrk_equip::rpc::theme_names::(base_id) - } - - fn theme(base_id: RmrkBaseId, theme_name: RmrkThemeName, filter_keys: Option>) -> Result, DispatchError> { - pallet_proxy_rmrk_equip::rpc::theme::(base_id, theme_name, filter_keys) - } - } -} - -struct CheckInherents; - -impl cumulus_pallet_parachain_system::CheckInherents for CheckInherents { - fn check_inherents( - block: &Block, - relay_state_proof: &cumulus_pallet_parachain_system::RelayChainStateProof, - ) -> sp_inherents::CheckInherentsResult { - let relay_chain_slot = relay_state_proof - .read_slot() - .expect("Could not read the relay chain slot from the proof"); - - let inherent_data = - cumulus_primitives_timestamp::InherentDataProvider::from_relay_chain_slot_and_duration( - relay_chain_slot, - sp_std::time::Duration::from_secs(6), - ) - .create_inherent_data() - .expect("Could not create the timestamp inherent data"); - - inherent_data.check_extrinsics(block) - } -} +impl_common_runtime_apis!(); cumulus_pallet_parachain_system::register_validate_block!( Runtime = Runtime, diff --git a/runtime/quartz/src/tests/logcapture.rs 
b/runtime/quartz/src/tests/logcapture.rs new file mode 100644 index 0000000000..97cecd289b --- /dev/null +++ b/runtime/quartz/src/tests/logcapture.rs @@ -0,0 +1,25 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +use logtest::Logger; +use super::xcm::quartz_xcm_tests; + +#[test] +fn quartz_log_capture_tests() { + let mut logger = Logger::start(); + + quartz_xcm_tests(&mut logger); +} diff --git a/runtime/quartz/src/tests/mod.rs b/runtime/quartz/src/tests/mod.rs new file mode 100644 index 0000000000..587524b3c9 --- /dev/null +++ b/runtime/quartz/src/tests/mod.rs @@ -0,0 +1,18 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +mod logcapture; +mod xcm; diff --git a/runtime/quartz/src/tests/xcm.rs b/runtime/quartz/src/tests/xcm.rs new file mode 100644 index 0000000000..1244b03796 --- /dev/null +++ b/runtime/quartz/src/tests/xcm.rs @@ -0,0 +1,27 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . 
+ +use logtest::Logger; +use crate::{runtime_common::tests::xcm::*, xcm_barrier::Barrier}; + +const QUARTZ_PARA_ID: u32 = 2095; + +pub fn quartz_xcm_tests(logger: &mut Logger) { + barrier_denies_transact::(logger); + + barrier_denies_transfer_from_unknown_location::(logger, QUARTZ_PARA_ID) + .expect("quartz runtime denies an unknown location"); +} diff --git a/runtime/quartz/src/xcm_barrier.rs b/runtime/quartz/src/xcm_barrier.rs new file mode 100644 index 0000000000..3ecfc254a4 --- /dev/null +++ b/runtime/quartz/src/xcm_barrier.rs @@ -0,0 +1,83 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +use frame_support::{ + match_types, parameter_types, + traits::{Get, Everything}, +}; +use sp_std::{vec, vec::Vec}; +use xcm::v1::{Junction::*, Junctions::*, MultiLocation}; +use xcm_builder::{ + AllowKnownQueryResponses, AllowSubscriptionsFrom, TakeWeightCredit, + AllowTopLevelPaidExecutionFrom, +}; + +use crate::{ + ParachainInfo, PolkadotXcm, + runtime_common::config::xcm::{DenyThenTry, DenyTransact, DenyExchangeWithUnknownLocation}, +}; + +match_types! { + pub type ParentOrSiblings: impl Contains = { + MultiLocation { parents: 1, interior: Here } | + MultiLocation { parents: 1, interior: X1(_) } + }; +} + +parameter_types! { + pub QuartzAllowedLocations: Vec = vec![ + // Self location + MultiLocation { + parents: 0, + interior: Here, + }, + // Parent location + MultiLocation { + parents: 1, + interior: Here, + }, + // Karura/Acala location + MultiLocation { + parents: 1, + interior: X1(Parachain(2000)), + }, + // Moonriver location + MultiLocation { + parents: 1, + interior: X1(Parachain(2023)), + }, + // Self parachain address + MultiLocation { + parents: 1, + interior: X1(Parachain(ParachainInfo::get().into())), + }, + ]; +} + +pub type Barrier = DenyThenTry< + ( + DenyTransact, + DenyExchangeWithUnknownLocation, + ), + ( + TakeWeightCredit, + AllowTopLevelPaidExecutionFrom, + // Expected responses are OK. + AllowKnownQueryResponses, + // Subscriptions for version tracking are OK. 
+ AllowSubscriptionsFrom, + ), +>; diff --git a/runtime/tests/CHANGELOG.md b/runtime/tests/CHANGELOG.md new file mode 100644 index 0000000000..08f2af5bc9 --- /dev/null +++ b/runtime/tests/CHANGELOG.md @@ -0,0 +1,18 @@ + +## [v0.1.1] 2022-08-16 + +### Bugfixes + +- After conflicts b66d0b01b670bbbdf05431c6636a0300907f0ec8 + +- Add missing config keys 74f532ac28dce15c15e7d576c074a58eba658c08 + +### Other changes + +- build: Upgrade polkadot to v0.9.27 2c498572636f2b34d53b1c51b7283a761a7dc90a + +- build: Upgrade test runtime bdd7247a3eaa6f6d458f4320a8bbee98770da2b3 + +- build: Upgrade polkadot to v0.9.26 85515e54c4ca1b82a2630034e55dcc804c643bf8 + +- build: Upgrade polkadot to v0.9.25 cdfb9bdc7b205ff1b5134f034ef9973d769e5e6b \ No newline at end of file diff --git a/runtime/tests/Cargo.toml b/runtime/tests/Cargo.toml index 6a70aabe1f..3497f9b392 100644 --- a/runtime/tests/Cargo.toml +++ b/runtime/tests/Cargo.toml @@ -1,28 +1,32 @@ [package] name = "tests" -version = "0.1.0" +version = "0.1.1" edition = "2021" +[features] +default = ['refungible'] + +refungible = [] + [dependencies] -unique-runtime-common = { path = '../common' } up-data-structs = { default-features = false, path = '../../primitives/data-structs' } -sp-core = { git = 'https://github.com/paritytech/substrate', branch = 'polkadot-v0.9.24' } -sp-std = { git = 'https://github.com/paritytech/substrate', branch = 'polkadot-v0.9.24' } -sp-io = { git = 'https://github.com/paritytech/substrate', branch = 'polkadot-v0.9.24' } -sp-runtime = { git = 'https://github.com/paritytech/substrate', branch = 'polkadot-v0.9.24' } +sp-core = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-std = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-io = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +sp-runtime = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } -fp-evm-mapping = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } +fp-evm-mapping = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } -frame-support = { git = 'https://github.com/paritytech/substrate', branch = 'polkadot-v0.9.24' } -frame-system = { git = 'https://github.com/paritytech/substrate', branch = 'polkadot-v0.9.24' } +frame-support = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +frame-system = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } -pallet-balances = { git = 'https://github.com/paritytech/substrate', branch = 'polkadot-v0.9.24' } -pallet-transaction-payment = { git = 'https://github.com/paritytech/substrate', branch = 'polkadot-v0.9.24' } -pallet-timestamp = { git = 'https://github.com/paritytech/substrate', branch = 'polkadot-v0.9.24' } +pallet-balances = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +pallet-transaction-payment = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } +pallet-timestamp = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.30" } -pallet-evm = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } -pallet-ethereum = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } +pallet-evm = { default-features = false, 
git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } +pallet-ethereum = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } pallet-common = { path = '../../pallets/common' } pallet-structure = { path = '../../pallets/structure' } @@ -37,3 +41,6 @@ parity-scale-codec = { version = "3.1.2", default-features = false, features = [ "derive", ] } scale-info = "*" + +evm-coder = { default-features = false, path = '../../crates/evm-coder' } +up-sponsorship = { default-features = false, git = "https://github.com/uniquenetwork/pallet-sponsoring", branch = "polkadot-v0.9.30" } diff --git a/runtime/tests/src/lib.rs b/runtime/tests/src/lib.rs index 2a993e88c0..5c673517bf 100644 --- a/runtime/tests/src/lib.rs +++ b/runtime/tests/src/lib.rs @@ -35,9 +35,18 @@ use fp_evm_mapping::EvmBackwardsAddressMapping; use parity_scale_codec::{Encode, Decode, MaxEncodedLen}; use scale_info::TypeInfo; -use unique_runtime_common::{dispatch::CollectionDispatchT, weights::CommonWeights}; use up_data_structs::mapping::{CrossTokenAddressMapping, EvmTokenAddressMapping}; +#[path = "../../common/dispatch.rs"] +mod dispatch; + +use dispatch::CollectionDispatchT; + +#[path = "../../common/weights.rs"] +mod weights; + +use weights::CommonWeights; + type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic; type Block = frame_system::mocking::MockBlock; @@ -51,13 +60,16 @@ frame_support::construct_runtime!( NodeBlock = Block, UncheckedExtrinsic = UncheckedExtrinsic, { - System: frame_system::{Pallet, Call, Config, Storage, Event}, - Unique: pallet_unique::{Pallet, Call, Storage}, - Balances: pallet_balances::{Pallet, Call, Storage}, + System: frame_system, + Unique: pallet_unique::{Pallet, Call, Storage, Event}, + Balances: pallet_balances::{Pallet, Call, Storage, Event}, Common: pallet_common::{Pallet, Storage, Event}, Fungible: pallet_fungible::{Pallet, Storage}, Refungible: pallet_refungible::{Pallet, Storage}, Nonfungible: pallet_nonfungible::{Pallet, Storage}, + Structure: pallet_structure::{Pallet, Storage, Event}, + TransactionPayment: pallet_transaction_payment::{Pallet, Storage, Event}, + Ethereum: pallet_ethereum::{Pallet, Config, Call, Storage, Event, Origin}, EVM: pallet_evm::{Pallet, Config, Call, Storage, Event}, } ); @@ -68,12 +80,13 @@ parameter_types! { } impl system::Config for Test { + type RuntimeEvent = RuntimeEvent; type BaseCallFilter = Everything; type BlockWeights = (); type BlockLength = (); type DbWeight = (); - type Origin = Origin; - type Call = Call; + type RuntimeOrigin = RuntimeOrigin; + type RuntimeCall = RuntimeCall; type Index = u64; type BlockNumber = u64; type Hash = H256; @@ -81,7 +94,6 @@ impl system::Config for Test { type AccountId = u64; type Lookup = IdentityLookup; type Header = Header; - type Event = (); type BlockHashCount = BlockHashCount; type Version = (); type PalletInfo = PalletInfo; @@ -100,10 +112,10 @@ parameter_types! { } //frame_system::Module; impl pallet_balances::Config for Test { + type RuntimeEvent = RuntimeEvent; type AccountStore = System; type Balance = u64; type DustRemoval = (); - type Event = (); type ExistentialDeposit = ExistentialDeposit; type WeightInfo = (); type MaxLocks = MaxLocks; @@ -116,6 +128,7 @@ parameter_types! 
{ } impl pallet_transaction_payment::Config for Test { + type RuntimeEvent = RuntimeEvent; type OnChargeTransaction = CurrencyAdapter, ()>; type LengthToFee = IdentityFee; type WeightToFee = IdentityFee; @@ -154,7 +167,7 @@ impl EvmBackwardsAddressMapping for TestEvmBackwardsAddressMapping { } #[derive(Encode, Decode, Clone, PartialEq, Eq, PartialOrd, Ord, Debug, TypeInfo, MaxEncodedLen)] -pub struct TestCrossAccountId(u64, sp_core::H160); +pub struct TestCrossAccountId(u64, sp_core::H160, bool); impl CrossAccountId for TestCrossAccountId { fn as_sub(&self) -> &u64 { &self.0 @@ -165,17 +178,20 @@ impl CrossAccountId for TestCrossAccountId { fn from_sub(sub: u64) -> Self { let mut eth = [0; 20]; eth[12..20].copy_from_slice(&sub.to_be_bytes()); - Self(sub, sp_core::H160(eth)) + Self(sub, sp_core::H160(eth), true) } fn from_eth(eth: sp_core::H160) -> Self { let mut sub_raw = [0; 8]; sub_raw.copy_from_slice(ð.0[0..8]); let sub = u64::from_be_bytes(sub_raw); - Self(sub, eth) + Self(sub, eth, false) } fn conv_eq(&self, other: &Self) -> bool { self.as_sub() == other.as_sub() } + fn is_canonical_substrate(&self) -> bool { + self.2 + } } impl Default for TestCrossAccountId { @@ -188,8 +204,13 @@ parameter_types! { pub BlockGasLimit: U256 = 0u32.into(); } +impl pallet_ethereum::Config for Test { + type RuntimeEvent = RuntimeEvent; + type StateRoot = pallet_ethereum::IntermediateStateRoot; +} + impl pallet_evm::Config for Test { - type Event = (); + type RuntimeEvent = RuntimeEvent; type FeeCalculator = (); type GasWeightMapping = (); type CallOrigin = EnsureAddressNever; @@ -212,7 +233,7 @@ impl pallet_evm_coder_substrate::Config for Test {} impl pallet_common::Config for Test { type WeightInfo = (); - type Event = (); + type RuntimeEvent = RuntimeEvent; type Currency = Balances; type CollectionCreationPrice = CollectionCreationPrice; type TreasuryAccountId = TreasuryAccountId; @@ -231,8 +252,8 @@ impl pallet_evm::account::Config for Test { impl pallet_structure::Config for Test { type WeightInfo = (); - type Event = (); - type Call = Call; + type RuntimeEvent = RuntimeEvent; + type RuntimeCall = RuntimeCall; } impl pallet_fungible::Config for Test { type WeightInfo = (); @@ -252,7 +273,7 @@ parameter_types! { } impl pallet_unique::Config for Test { - type Event = (); + type RuntimeEvent = RuntimeEvent; type WeightInfo = (); type CommonWeightInfo = CommonWeights; type RefungibleExtensionsWeightInfo = CommonWeights; diff --git a/runtime/tests/src/tests.rs b/runtime/tests/src/tests.rs index 2ff49b9025..c064a7d2f6 100644 --- a/runtime/tests/src/tests.rs +++ b/runtime/tests/src/tests.rs @@ -15,7 +15,7 @@ // along with Unique Network. If not, see . 
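Note on the mock-runtime hunks above: besides the mechanical polkadot-v0.9.30 renames (`Origin`/`Call`/`Event` on the `Config` impls become `RuntimeOrigin`/`RuntimeCall`/`RuntimeEvent`, which is also what the bulk of the `tests.rs` changes below do), `TestCrossAccountId` gains a third field recording whether the value was built from the Substrate side or derived from an EVM address, exposed via `is_canonical_substrate`. A self-contained illustration of that mapping follows; a plain byte array stands in for `sp_core::H160` so the sketch compiles on its own, and it is not the pallet trait impl itself.

```rust
// Standalone illustration of the mock cross-account mapping after this change.
// A plain [u8; 20] stands in for sp_core::H160; the third field records whether
// the value was constructed from the Substrate side (true) or derived from an
// EVM address (false), mirroring is_canonical_substrate() in the diff.
#[derive(Debug, Clone, PartialEq, Eq)]
struct TestCrossAccountId(u64, [u8; 20], bool);

impl TestCrossAccountId {
    fn from_sub(sub: u64) -> Self {
        let mut eth = [0u8; 20];
        // Embed the Substrate account id in the last 8 bytes of the eth address.
        eth[12..20].copy_from_slice(&sub.to_be_bytes());
        Self(sub, eth, true)
    }

    fn from_eth(eth: [u8; 20]) -> Self {
        // Derive a Substrate id from the first 8 bytes of the eth address.
        let mut sub_raw = [0u8; 8];
        sub_raw.copy_from_slice(&eth[0..8]);
        Self(u64::from_be_bytes(sub_raw), eth, false)
    }

    fn is_canonical_substrate(&self) -> bool {
        self.2
    }
}

fn main() {
    assert!(TestCrossAccountId::from_sub(7).is_canonical_substrate());
    assert!(!TestCrossAccountId::from_eth([0x11; 20]).is_canonical_substrate());
}
```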
// Tests to be written here -use crate::{Test, TestCrossAccountId, CollectionCreationPrice, Origin, Unique, new_test_ext}; +use crate::{Test, TestCrossAccountId, CollectionCreationPrice, RuntimeOrigin, Unique, new_test_ext}; use up_data_structs::{ COLLECTION_NUMBER_LIMIT, CollectionId, CreateItemData, CreateFungibleData, CreateNftData, CreateReFungibleData, MAX_DECIMAL_POINTS, COLLECTION_ADMINS_LIMIT, TokenId, @@ -32,13 +32,13 @@ use pallet_unique::Error as UniqueError; fn add_balance(user: u64, value: u64) { const DONOR_USER: u64 = 999; assert_ok!(>::set_balance( - Origin::root(), + RuntimeOrigin::root(), DONOR_USER, value, 0 )); assert_ok!(>::force_transfer( - Origin::root(), + RuntimeOrigin::root(), DONOR_USER, user, value @@ -62,8 +62,13 @@ fn default_fungible_data() -> CreateFungibleData { fn default_re_fungible_data() -> CreateReFungibleData { CreateReFungibleData { - const_data: vec![1, 2, 3].try_into().unwrap(), pieces: 1023, + properties: vec![Property { + key: b"test-prop".to_vec().try_into().unwrap(), + value: b"test-nft-prop".to_vec().try_into().unwrap(), + }] + .try_into() + .unwrap(), } } @@ -105,7 +110,7 @@ fn create_test_collection_for_owner( ..Default::default() }; - let origin1 = Origin::signed(owner); + let origin1 = RuntimeOrigin::signed(owner); assert_ok!(Unique::create_collection_ex(origin1, data)); let saved_col_name: Vec = "Test1\0".encode_utf16().collect::>(); @@ -174,7 +179,7 @@ fn create_test_collection(mode: &CollectionMode, id: CollectionId) -> Collection } fn create_test_item(collection_id: CollectionId, data: &CreateItemData) { - let origin1 = Origin::signed(1); + let origin1 = RuntimeOrigin::signed(1); assert_ok!(Unique::create_item( origin1, collection_id, @@ -194,7 +199,7 @@ fn account(sub: u64) -> TestCrossAccountId { fn check_not_sufficient_founds() { new_test_ext().execute_with(|| { let acc: u64 = 1; - >::set_balance(Origin::root(), acc, 0, 0).unwrap(); + >::set_balance(RuntimeOrigin::root(), acc, 0, 0).unwrap(); let name: Vec = "Test1\0".encode_utf16().collect::>(); let description: Vec = "TestDescription1\0".encode_utf16().collect::>(); @@ -209,7 +214,7 @@ fn check_not_sufficient_founds() { ..Default::default() }; - let result = Unique::create_collection_ex(Origin::signed(acc), data); + let result = Unique::create_collection_ex(RuntimeOrigin::signed(acc), data); assert_err!(result, >::NotSufficientFounds); }); } @@ -229,7 +234,7 @@ fn create_fungible_collection_fails_with_large_decimal_numbers() { ..Default::default() }; - let origin1 = Origin::signed(1); + let origin1 = RuntimeOrigin::signed(1); assert_noop!( Unique::create_collection_ex(origin1, data), UniqueError::::CollectionDecimalPointLimitExceeded @@ -259,7 +264,7 @@ fn create_nft_multiple_items() { new_test_ext().execute_with(|| { create_test_collection(&CollectionMode::NFT, CollectionId(1)); - let origin1 = Origin::signed(1); + let origin1 = RuntimeOrigin::signed(1); let items_data = vec![default_nft_data(), default_nft_data(), default_nft_data()]; @@ -292,7 +297,6 @@ fn create_refungible_item() { let item = >::get((collection_id, TokenId(1))); let balance = >::get((collection_id, TokenId(1), account(1))); - assert_eq!(item.const_data, data.const_data.into_inner()); assert_eq!(balance, 1023); }); } @@ -302,7 +306,7 @@ fn create_multiple_refungible_items() { new_test_ext().execute_with(|| { create_test_collection(&CollectionMode::ReFungible, CollectionId(1)); - let origin1 = Origin::signed(1); + let origin1 = RuntimeOrigin::signed(1); let items_data = vec![ default_re_fungible_data(), @@ 
-327,7 +331,6 @@ fn create_multiple_refungible_items() { )); let balance = >::get((CollectionId(1), TokenId(1), account(1))); - assert_eq!(item.const_data.to_vec(), data.const_data.into_inner()); assert_eq!(balance, 1023); } }); @@ -355,7 +358,7 @@ fn create_fungible_item() { // create_test_collection(&CollectionMode::Fungible(3), CollectionId(1)); -// let origin1 = Origin::signed(1); +// let origin1 = RuntimeOrigin::signed(1); // let items_data = vec![default_fungible_data(), default_fungible_data(), default_fungible_data()]; @@ -379,8 +382,8 @@ fn transfer_fungible_item() { new_test_ext().execute_with(|| { let collection_id = create_test_collection(&CollectionMode::Fungible(3), CollectionId(1)); - let origin1 = Origin::signed(1); - let origin2 = Origin::signed(2); + let origin1 = RuntimeOrigin::signed(1); + let origin2 = RuntimeOrigin::signed(2); let data = default_fungible_data(); create_test_item(collection_id, &data.into()); @@ -440,7 +443,6 @@ fn transfer_refungible_item() { let data = default_re_fungible_data(); create_test_item(collection_id, &data.clone().into()); let item = >::get((collection_id, TokenId(1))); - assert_eq!(item.const_data, data.const_data.into_inner()); assert_eq!( >::get((collection_id, account(1))), 1 @@ -455,8 +457,8 @@ fn transfer_refungible_item() { ); // Account 1 transfers all 1023 pieces of RFT 1 to account 2 - let origin1 = Origin::signed(1); - let origin2 = Origin::signed(2); + let origin1 = RuntimeOrigin::signed(1); + let origin2 = RuntimeOrigin::signed(2); assert_ok!(Unique::transfer( origin1, account(2), @@ -569,7 +571,7 @@ fn transfer_nft_item() { true ); - let origin1 = Origin::signed(1); + let origin1 = RuntimeOrigin::signed(1); // default scenario assert_ok!(Unique::transfer( origin1, @@ -613,7 +615,7 @@ fn transfer_nft_item_wrong_value() { true ); - let origin1 = Origin::signed(1); + let origin1 = RuntimeOrigin::signed(1); assert_noop!( Unique::transfer(origin1, account(2), CollectionId(1), TokenId(1), 2) @@ -639,7 +641,7 @@ fn transfer_nft_item_zero_value() { true ); - let origin1 = Origin::signed(1); + let origin1 = RuntimeOrigin::signed(1); // Transferring 0 amount works on NFT... 
assert_ok!(Unique::transfer( @@ -669,8 +671,8 @@ fn nft_approve_and_transfer_from() { let data = default_nft_data(); create_test_item(collection_id, &data.into()); - let origin1 = Origin::signed(1); - let origin2 = Origin::signed(2); + let origin1 = RuntimeOrigin::signed(1); + let origin2 = RuntimeOrigin::signed(2); assert_eq!( >::get((collection_id, account(1))), @@ -727,8 +729,8 @@ fn nft_approve_and_transfer_from_allow_list() { new_test_ext().execute_with(|| { let collection_id = create_test_collection(&CollectionMode::NFT, CollectionId(1)); - let origin1 = Origin::signed(1); - let origin2 = Origin::signed(2); + let origin1 = RuntimeOrigin::signed(1); + let origin2 = RuntimeOrigin::signed(2); // Create NFT 1 for account 1 let data = default_nft_data(); @@ -801,8 +803,8 @@ fn refungible_approve_and_transfer_from() { new_test_ext().execute_with(|| { let collection_id = create_test_collection(&CollectionMode::ReFungible, CollectionId(1)); - let origin1 = Origin::signed(1); - let origin2 = Origin::signed(2); + let origin1 = RuntimeOrigin::signed(1); + let origin2 = RuntimeOrigin::signed(2); // Create RFT 1 in 1023 pieces for account 1 let data = default_re_fungible_data(); @@ -918,8 +920,8 @@ fn fungible_approve_and_transfer_from() { let data = default_fungible_data(); create_test_item(collection_id, &data.into()); - let origin1 = Origin::signed(1); - let origin2 = Origin::signed(2); + let origin1 = RuntimeOrigin::signed(1); + let origin2 = RuntimeOrigin::signed(2); assert_ok!(Unique::set_collection_permissions( origin1.clone(), @@ -1008,7 +1010,7 @@ fn change_collection_owner() { new_test_ext().execute_with(|| { let collection_id = create_test_collection(&CollectionMode::NFT, CollectionId(1)); - let origin1 = Origin::signed(1); + let origin1 = RuntimeOrigin::signed(1); assert_ok!(Unique::change_collection_owner(origin1, collection_id, 2)); assert_eq!( >::get(collection_id) @@ -1024,7 +1026,7 @@ fn destroy_collection() { new_test_ext().execute_with(|| { let collection_id = create_test_collection(&CollectionMode::NFT, CollectionId(1)); - let origin1 = Origin::signed(1); + let origin1 = RuntimeOrigin::signed(1); assert_ok!(Unique::destroy_collection(origin1, collection_id)); }); } @@ -1034,7 +1036,7 @@ fn burn_nft_item() { new_test_ext().execute_with(|| { let collection_id = create_test_collection(&CollectionMode::NFT, CollectionId(1)); - let origin1 = Origin::signed(1); + let origin1 = RuntimeOrigin::signed(1); let data = default_nft_data(); create_test_item(collection_id, &data.into()); @@ -1064,7 +1066,7 @@ fn burn_same_nft_item_twice() { new_test_ext().execute_with(|| { let collection_id = create_test_collection(&CollectionMode::NFT, CollectionId(1)); - let origin1 = Origin::signed(1); + let origin1 = RuntimeOrigin::signed(1); let data = default_nft_data(); create_test_item(collection_id, &data.into()); @@ -1101,7 +1103,7 @@ fn burn_fungible_item() { new_test_ext().execute_with(|| { let collection_id = create_test_collection(&CollectionMode::Fungible(3), CollectionId(1)); - let origin1 = Origin::signed(1); + let origin1 = RuntimeOrigin::signed(1); assert_ok!(Unique::add_collection_admin( origin1.clone(), collection_id, @@ -1141,7 +1143,7 @@ fn burn_fungible_item_with_token_id() { new_test_ext().execute_with(|| { let collection_id = create_test_collection(&CollectionMode::Fungible(3), CollectionId(1)); - let origin1 = Origin::signed(1); + let origin1 = RuntimeOrigin::signed(1); assert_ok!(Unique::add_collection_admin( origin1.clone(), collection_id, @@ -1168,7 +1170,7 @@ fn 
burn_fungible_item_with_token_id() { fn burn_refungible_item() { new_test_ext().execute_with(|| { let collection_id = create_test_collection(&CollectionMode::ReFungible, CollectionId(1)); - let origin1 = Origin::signed(1); + let origin1 = RuntimeOrigin::signed(1); assert_ok!(Unique::set_collection_permissions( origin1.clone(), @@ -1228,7 +1230,7 @@ fn add_collection_admin() { new_test_ext().execute_with(|| { let collection1_id = create_test_collection_for_owner(&CollectionMode::NFT, 1, CollectionId(1)); - let origin1 = Origin::signed(1); + let origin1 = RuntimeOrigin::signed(1); // Add collection admins assert_ok!(Unique::add_collection_admin( @@ -1263,7 +1265,7 @@ fn remove_collection_admin() { new_test_ext().execute_with(|| { let collection1_id = create_test_collection_for_owner(&CollectionMode::NFT, 1, CollectionId(1)); - let origin1 = Origin::signed(1); + let origin1 = RuntimeOrigin::signed(1); // Add collection admins 2 and 3 assert_ok!(Unique::add_collection_admin( @@ -1374,7 +1376,7 @@ fn approve() { let data = default_nft_data(); create_test_item(collection_id, &data.into()); - let origin1 = Origin::signed(1); + let origin1 = RuntimeOrigin::signed(1); // approve assert_ok!(Unique::approve( @@ -1395,8 +1397,8 @@ fn approve() { fn transfer_from() { new_test_ext().execute_with(|| { let collection_id = create_test_collection(&CollectionMode::NFT, CollectionId(1)); - let origin1 = Origin::signed(1); - let origin2 = Origin::signed(2); + let origin1 = RuntimeOrigin::signed(1); + let origin2 = RuntimeOrigin::signed(2); let data = default_nft_data(); create_test_item(collection_id, &data.into()); @@ -1470,7 +1472,7 @@ fn owner_can_add_address_to_allow_list() { new_test_ext().execute_with(|| { let collection_id = create_test_collection(&CollectionMode::NFT, CollectionId(1)); - let origin1 = Origin::signed(1); + let origin1 = RuntimeOrigin::signed(1); assert_ok!(Unique::add_to_allow_list( origin1, collection_id, @@ -1487,8 +1489,8 @@ fn owner_can_add_address_to_allow_list() { fn admin_can_add_address_to_allow_list() { new_test_ext().execute_with(|| { let collection_id = create_test_collection(&CollectionMode::NFT, CollectionId(1)); - let origin1 = Origin::signed(1); - let origin2 = Origin::signed(2); + let origin1 = RuntimeOrigin::signed(1); + let origin2 = RuntimeOrigin::signed(2); assert_ok!(Unique::add_collection_admin( origin1, @@ -1512,7 +1514,7 @@ fn nonprivileged_user_cannot_add_address_to_allow_list() { new_test_ext().execute_with(|| { let collection_id = create_test_collection(&CollectionMode::NFT, CollectionId(1)); - let origin2 = Origin::signed(2); + let origin2 = RuntimeOrigin::signed(2); assert_noop!( Unique::add_to_allow_list(origin2, collection_id, account(3)), CommonError::::NoPermission @@ -1523,7 +1525,7 @@ fn nonprivileged_user_cannot_add_address_to_allow_list() { #[test] fn nobody_can_add_address_to_allow_list_of_nonexisting_collection() { new_test_ext().execute_with(|| { - let origin1 = Origin::signed(1); + let origin1 = RuntimeOrigin::signed(1); assert_noop!( Unique::add_to_allow_list(origin1, CollectionId(1), account(2)), @@ -1537,7 +1539,7 @@ fn nobody_can_add_address_to_allow_list_of_deleted_collection() { new_test_ext().execute_with(|| { let collection_id = create_test_collection(&CollectionMode::NFT, CollectionId(1)); - let origin1 = Origin::signed(1); + let origin1 = RuntimeOrigin::signed(1); assert_ok!(Unique::destroy_collection(origin1.clone(), collection_id)); assert_noop!( Unique::add_to_allow_list(origin1, collection_id, account(2)), @@ -1551,7 +1553,7 @@ fn 
nobody_can_add_address_to_allow_list_of_deleted_collection() { fn address_is_already_added_to_allow_list() { new_test_ext().execute_with(|| { let collection_id = create_test_collection(&CollectionMode::NFT, CollectionId(1)); - let origin1 = Origin::signed(1); + let origin1 = RuntimeOrigin::signed(1); assert_ok!(Unique::add_to_allow_list( origin1.clone(), @@ -1575,7 +1577,7 @@ fn owner_can_remove_address_from_allow_list() { new_test_ext().execute_with(|| { let collection_id = create_test_collection(&CollectionMode::NFT, CollectionId(1)); - let origin1 = Origin::signed(1); + let origin1 = RuntimeOrigin::signed(1); assert_ok!(Unique::add_to_allow_list( origin1.clone(), collection_id, @@ -1597,8 +1599,8 @@ fn owner_can_remove_address_from_allow_list() { fn admin_can_remove_address_from_allow_list() { new_test_ext().execute_with(|| { let collection_id = create_test_collection(&CollectionMode::NFT, CollectionId(1)); - let origin1 = Origin::signed(1); - let origin2 = Origin::signed(2); + let origin1 = RuntimeOrigin::signed(1); + let origin2 = RuntimeOrigin::signed(2); // Owner adds admin assert_ok!(Unique::add_collection_admin( @@ -1631,8 +1633,8 @@ fn admin_can_remove_address_from_allow_list() { fn nonprivileged_user_cannot_remove_address_from_allow_list() { new_test_ext().execute_with(|| { let collection_id = create_test_collection(&CollectionMode::NFT, CollectionId(1)); - let origin1 = Origin::signed(1); - let origin2 = Origin::signed(2); + let origin1 = RuntimeOrigin::signed(1); + let origin2 = RuntimeOrigin::signed(2); assert_ok!(Unique::add_to_allow_list( origin1, @@ -1653,7 +1655,7 @@ fn nonprivileged_user_cannot_remove_address_from_allow_list() { #[test] fn nobody_can_remove_address_from_allow_list_of_nonexisting_collection() { new_test_ext().execute_with(|| { - let origin1 = Origin::signed(1); + let origin1 = RuntimeOrigin::signed(1); assert_noop!( Unique::remove_from_allow_list(origin1, CollectionId(1), account(2)), @@ -1666,8 +1668,8 @@ fn nobody_can_remove_address_from_allow_list_of_nonexisting_collection() { fn nobody_can_remove_address_from_allow_list_of_deleted_collection() { new_test_ext().execute_with(|| { let collection_id = create_test_collection(&CollectionMode::NFT, CollectionId(1)); - let origin1 = Origin::signed(1); - let origin2 = Origin::signed(2); + let origin1 = RuntimeOrigin::signed(1); + let origin2 = RuntimeOrigin::signed(2); // Add account 2 to allow list assert_ok!(Unique::add_to_allow_list( @@ -1704,7 +1706,7 @@ fn nobody_can_remove_address_from_allow_list_of_deleted_collection() { fn address_is_already_removed_from_allow_list() { new_test_ext().execute_with(|| { let collection_id = create_test_collection(&CollectionMode::NFT, CollectionId(1)); - let origin1 = Origin::signed(1); + let origin1 = RuntimeOrigin::signed(1); assert_ok!(Unique::add_to_allow_list( origin1.clone(), @@ -1738,7 +1740,7 @@ fn allow_list_test_1() { new_test_ext().execute_with(|| { let collection_id = create_test_collection(&CollectionMode::NFT, CollectionId(1)); - let origin1 = Origin::signed(1); + let origin1 = RuntimeOrigin::signed(1); let data = default_nft_data(); create_test_item(collection_id, &data.into()); @@ -1770,7 +1772,7 @@ fn allow_list_test_1() { fn allow_list_test_2() { new_test_ext().execute_with(|| { let collection_id = create_test_collection(&CollectionMode::NFT, CollectionId(1)); - let origin1 = Origin::signed(1); + let origin1 = RuntimeOrigin::signed(1); let data = default_nft_data(); create_test_item(collection_id, &data.into()); @@ -1835,7 +1837,7 @@ fn 
allow_list_test_3() { new_test_ext().execute_with(|| { let collection_id = create_test_collection(&CollectionMode::NFT, CollectionId(1)); - let origin1 = Origin::signed(1); + let origin1 = RuntimeOrigin::signed(1); let data = default_nft_data(); create_test_item(collection_id, &data.into()); @@ -1868,7 +1870,7 @@ fn allow_list_test_4() { new_test_ext().execute_with(|| { let collection_id = create_test_collection(&CollectionMode::NFT, CollectionId(1)); - let origin1 = Origin::signed(1); + let origin1 = RuntimeOrigin::signed(1); let data = default_nft_data(); create_test_item(collection_id, &data.into()); @@ -1933,7 +1935,7 @@ fn allow_list_test_5() { new_test_ext().execute_with(|| { let collection_id = create_test_collection(&CollectionMode::NFT, CollectionId(1)); - let origin1 = Origin::signed(1); + let origin1 = RuntimeOrigin::signed(1); let data = default_nft_data(); create_test_item(collection_id, &data.into()); @@ -1960,7 +1962,7 @@ fn allow_list_test_6() { new_test_ext().execute_with(|| { let collection_id = create_test_collection(&CollectionMode::NFT, CollectionId(1)); - let origin1 = Origin::signed(1); + let origin1 = RuntimeOrigin::signed(1); let data = default_nft_data(); create_test_item(collection_id, &data.into()); @@ -1994,7 +1996,7 @@ fn allow_list_test_7() { let data = default_nft_data(); create_test_item(collection_id, &data.into()); - let origin1 = Origin::signed(1); + let origin1 = RuntimeOrigin::signed(1); assert_ok!(Unique::set_collection_permissions( origin1.clone(), @@ -2035,7 +2037,7 @@ fn allow_list_test_8() { let data = default_nft_data(); create_test_item(collection_id, &data.into()); - let origin1 = Origin::signed(1); + let origin1 = RuntimeOrigin::signed(1); // Toggle Allow List mode and add accounts 1 and 2 assert_ok!(Unique::set_collection_permissions( @@ -2088,7 +2090,7 @@ fn allow_list_test_8() { fn allow_list_test_9() { new_test_ext().execute_with(|| { let collection_id = create_test_collection(&CollectionMode::NFT, CollectionId(1)); - let origin1 = Origin::signed(1); + let origin1 = RuntimeOrigin::signed(1); assert_ok!(Unique::set_collection_permissions( origin1.clone(), @@ -2111,8 +2113,8 @@ fn allow_list_test_10() { new_test_ext().execute_with(|| { let collection_id = create_test_collection(&CollectionMode::NFT, CollectionId(1)); - let origin1 = Origin::signed(1); - let origin2 = Origin::signed(2); + let origin1 = RuntimeOrigin::signed(1); + let origin2 = RuntimeOrigin::signed(2); assert_ok!(Unique::set_collection_permissions( origin1.clone(), @@ -2145,8 +2147,8 @@ fn allow_list_test_11() { new_test_ext().execute_with(|| { let collection_id = create_test_collection(&CollectionMode::NFT, CollectionId(1)); - let origin1 = Origin::signed(1); - let origin2 = Origin::signed(2); + let origin1 = RuntimeOrigin::signed(1); + let origin2 = RuntimeOrigin::signed(2); assert_ok!(Unique::set_collection_permissions( origin1.clone(), @@ -2182,8 +2184,8 @@ fn allow_list_test_12() { new_test_ext().execute_with(|| { let collection_id = create_test_collection(&CollectionMode::NFT, CollectionId(1)); - let origin1 = Origin::signed(1); - let origin2 = Origin::signed(2); + let origin1 = RuntimeOrigin::signed(1); + let origin2 = RuntimeOrigin::signed(2); assert_ok!(Unique::set_collection_permissions( origin1.clone(), @@ -2214,7 +2216,7 @@ fn allow_list_test_13() { new_test_ext().execute_with(|| { let collection_id = create_test_collection(&CollectionMode::NFT, CollectionId(1)); - let origin1 = Origin::signed(1); + let origin1 = RuntimeOrigin::signed(1); 
assert_ok!(Unique::set_collection_permissions( origin1.clone(), @@ -2237,8 +2239,8 @@ fn allow_list_test_14() { new_test_ext().execute_with(|| { let collection_id = create_test_collection(&CollectionMode::NFT, CollectionId(1)); - let origin1 = Origin::signed(1); - let origin2 = Origin::signed(2); + let origin1 = RuntimeOrigin::signed(1); + let origin2 = RuntimeOrigin::signed(2); assert_ok!(Unique::set_collection_permissions( origin1.clone(), @@ -2271,8 +2273,8 @@ fn allow_list_test_15() { new_test_ext().execute_with(|| { let collection_id = create_test_collection(&CollectionMode::NFT, CollectionId(1)); - let origin1 = Origin::signed(1); - let origin2 = Origin::signed(2); + let origin1 = RuntimeOrigin::signed(1); + let origin2 = RuntimeOrigin::signed(2); assert_ok!(Unique::set_collection_permissions( origin1.clone(), @@ -2303,8 +2305,8 @@ fn allow_list_test_16() { new_test_ext().execute_with(|| { let collection_id = create_test_collection(&CollectionMode::NFT, CollectionId(1)); - let origin1 = Origin::signed(1); - let origin2 = Origin::signed(2); + let origin1 = RuntimeOrigin::signed(1); + let origin2 = RuntimeOrigin::signed(2); assert_ok!(Unique::set_collection_permissions( origin1.clone(), @@ -2351,7 +2353,7 @@ fn create_max_collections() { #[test] fn total_number_collections_bound_neg() { new_test_ext().execute_with(|| { - let origin1 = Origin::signed(1); + let origin1 = RuntimeOrigin::signed(1); for i in 1..=COLLECTION_NUMBER_LIMIT { create_test_collection(&CollectionMode::NFT, CollectionId(i)); @@ -2395,7 +2397,7 @@ fn owned_tokens_bound_neg() { new_test_ext().execute_with(|| { let collection_id = create_test_collection(&CollectionMode::NFT, CollectionId(1)); - let origin1 = Origin::signed(1); + let origin1 = RuntimeOrigin::signed(1); for _ in 1..=MAX_TOKEN_OWNERSHIP { let data = default_nft_data(); @@ -2417,7 +2419,7 @@ fn collection_admins_bound() { new_test_ext().execute_with(|| { let collection_id = create_test_collection(&CollectionMode::NFT, CollectionId(1)); - let origin1 = Origin::signed(1); + let origin1 = RuntimeOrigin::signed(1); assert_ok!(Unique::add_collection_admin( origin1.clone(), @@ -2438,7 +2440,7 @@ fn collection_admins_bound_neg() { new_test_ext().execute_with(|| { let collection_id = create_test_collection(&CollectionMode::NFT, CollectionId(1)); - let origin1 = Origin::signed(1); + let origin1 = RuntimeOrigin::signed(1); for i in 0..COLLECTION_ADMINS_LIMIT { assert_ok!(Unique::add_collection_admin( @@ -2462,7 +2464,7 @@ fn collection_admins_bound_neg() { #[test] fn collection_transfer_flag_works() { new_test_ext().execute_with(|| { - let origin1 = Origin::signed(1); + let origin1 = RuntimeOrigin::signed(1); let collection_id = create_test_collection(&CollectionMode::NFT, CollectionId(1)); assert_ok!(Unique::set_transfers_enabled_flag( @@ -2482,7 +2484,7 @@ fn collection_transfer_flag_works() { true ); - let origin1 = Origin::signed(1); + let origin1 = RuntimeOrigin::signed(1); // default scenario assert_ok!(Unique::transfer( @@ -2514,7 +2516,7 @@ fn collection_transfer_flag_works() { #[test] fn collection_transfer_flag_works_neg() { new_test_ext().execute_with(|| { - let origin1 = Origin::signed(1); + let origin1 = RuntimeOrigin::signed(1); let collection_id = create_test_collection(&CollectionMode::NFT, CollectionId(1)); assert_ok!(Unique::set_transfers_enabled_flag( @@ -2534,7 +2536,7 @@ fn collection_transfer_flag_works_neg() { true ); - let origin1 = Origin::signed(1); + let origin1 = RuntimeOrigin::signed(1); // default scenario assert_noop!( @@ -2567,8 
+2569,8 @@ fn collection_sponsoring() { // default_limits(); let user1 = 1_u64; let user2 = 777_u64; - let origin1 = Origin::signed(user1); - let origin2 = Origin::signed(user2); + let origin1 = RuntimeOrigin::signed(user1); + let origin2 = RuntimeOrigin::signed(user2); let account2 = account(user2); let collection_id = diff --git a/runtime/unique/CHANGELOG.md b/runtime/unique/CHANGELOG.md new file mode 100644 index 0000000000..f28cd71f83 --- /dev/null +++ b/runtime/unique/CHANGELOG.md @@ -0,0 +1,25 @@ +# Change Log + +All notable changes to this project will be documented in this file. + + + +## [v0.9.27] 2022-09-08 + +### Added + +- `AppPromotion` pallet to runtime. + +## [v0.9.27] 2022-08-16 + +### Bugfixes + +- Add missing config keys 74f532ac28dce15c15e7d576c074a58eba658c08 + +### Other changes + +- build: Upgrade polkadot to v0.9.27 2c498572636f2b34d53b1c51b7283a761a7dc90a + +- build: Upgrade polkadot to v0.9.26 85515e54c4ca1b82a2630034e55dcc804c643bf8 + +- build: Upgrade polkadot to v0.9.25 cdfb9bdc7b205ff1b5134f034ef9973d769e5e6b diff --git a/runtime/unique/Cargo.toml b/runtime/unique/Cargo.toml index bef2cb771b..2c63a3ac42 100644 --- a/runtime/unique/Cargo.toml +++ b/runtime/unique/Cargo.toml @@ -1,6 +1,8 @@ ################################################################################ # Package +cargo-features = ["workspace-inheritance"] + [package] authors = ['Unique Network '] build = 'build.rs' @@ -10,7 +12,7 @@ homepage = 'https://unique.network' license = 'GPLv3' name = 'unique-runtime' repository = 'https://github.com/UniqueNetwork/unique-chain' -version = '0.9.24' +version = '0.9.30' [package.metadata.docs.rs] targets = ['x86_64-unknown-linux-gnu'] @@ -36,6 +38,7 @@ runtime-benchmarks = [ 'pallet-proxy-rmrk-core/runtime-benchmarks', 'pallet-proxy-rmrk-equip/runtime-benchmarks', 'pallet-unique/runtime-benchmarks', + 'pallet-foreign-assets/runtime-benchmarks', 'pallet-inflation/runtime-benchmarks', 'pallet-unique-scheduler/runtime-benchmarks', 'pallet-xcm/runtime-benchmarks', @@ -47,7 +50,46 @@ runtime-benchmarks = [ try-runtime = [ 'frame-try-runtime', 'frame-executive/try-runtime', + 'frame-support/try-runtime', 'frame-system/try-runtime', + 'cumulus-pallet-parachain-system/try-runtime', + 'parachain-info/try-runtime', + 'pallet-aura/try-runtime', + 'cumulus-pallet-aura-ext/try-runtime', + 'pallet-balances/try-runtime', + 'pallet-randomness-collective-flip/try-runtime', + 'pallet-timestamp/try-runtime', + 'pallet-transaction-payment/try-runtime', + 'pallet-treasury/try-runtime', + 'pallet-sudo/try-runtime', + 'orml-vesting/try-runtime', + 'orml-xtokens/try-runtime', + 'orml-tokens/try-runtime', + 'cumulus-pallet-xcmp-queue/try-runtime', + 'pallet-xcm/try-runtime', + 'cumulus-pallet-xcm/try-runtime', + 'cumulus-pallet-dmp-queue/try-runtime', + 'pallet-inflation/try-runtime', + 'pallet-unique/try-runtime', + 'pallet-unique-scheduler/try-runtime', + 'pallet-configuration/try-runtime', + 'pallet-charge-transaction/try-runtime', + 'pallet-common/try-runtime', + 'pallet-fungible/try-runtime', + 'pallet-refungible/try-runtime', + 'pallet-nonfungible/try-runtime', + 'pallet-structure/try-runtime', + 'pallet-proxy-rmrk-core/try-runtime', + 'pallet-proxy-rmrk-equip/try-runtime', + 'pallet-app-promotion/try-runtime', + 'pallet-foreign-assets/try-runtime', + 'pallet-evm/try-runtime', + 'pallet-ethereum/try-runtime', + 'pallet-evm-coder-substrate/try-runtime', + 'pallet-evm-contract-helpers/try-runtime', + 'pallet-evm-transaction-payment/try-runtime', + 
'pallet-evm-migration/try-runtime', + 'pallet-maintenance/try-runtime', ] std = [ 'codec/std', @@ -74,7 +116,6 @@ std = [ 'pallet-transaction-payment/std', 'pallet-transaction-payment-rpc-runtime-api/std', 'pallet-treasury/std', - # 'pallet-vesting/std', 'pallet-evm/std', 'pallet-evm-migration/std', 'pallet-evm-contract-helpers/std', @@ -84,11 +125,13 @@ std = [ 'pallet-base-fee/std', 'fp-rpc/std', 'up-rpc/std', + 'app-promotion-rpc/std', 'fp-evm-mapping/std', 'fp-self-contained/std', 'parachain-info/std', 'serde', 'pallet-inflation/std', + 'pallet-configuration/std', 'pallet-common/std', 'pallet-structure/std', 'pallet-fungible/std', @@ -115,15 +158,26 @@ std = [ 'xcm/std', 'xcm-builder/std', 'xcm-executor/std', - 'unique-runtime-common/std', + 'up-common/std', + 'rmrk-rpc/std', + 'evm-coder/std', + 'up-sponsorship/std', "orml-vesting/std", - - 'pallet-maintenance/std', + "orml-tokens/std", + "orml-xtokens/std", + "orml-traits/std", + "pallet-foreign-assets/std", + "pallet-maintenance/std", ] limit-testing = ['pallet-unique/limit-testing', 'up-data-structs/limit-testing'] unique-runtime = [] +refungible = [] +scheduler = [] +rmrk = [] +foreign-assets = [] + ################################################################################ # Substrate Dependencies @@ -137,39 +191,39 @@ version = '3.1.2' default-features = false git = "https://github.com/paritytech/substrate" optional = true -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.frame-try-runtime] default-features = false -git = 'https://github.com/paritytech/substrate' +git = "https://github.com/paritytech/substrate" optional = true -branch = 'polkadot-v0.9.24' +branch = "polkadot-v0.9.30" [dependencies.frame-executive] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.frame-support] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.frame-system] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.frame-system-benchmarking] default-features = false git = "https://github.com/paritytech/substrate" optional = true -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.frame-system-rpc-runtime-api] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.hex-literal] optional = true @@ -184,12 +238,12 @@ version = '1.0.130' [dependencies.pallet-aura] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.pallet-balances] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" # Contracts specific packages # [dependencies.pallet-contracts] @@ -213,102 +267,97 @@ branch = "polkadot-v0.9.24" [dependencies.pallet-randomness-collective-flip] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.pallet-sudo] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.pallet-timestamp] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = 
"polkadot-v0.9.30" [dependencies.pallet-transaction-payment] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.pallet-transaction-payment-rpc-runtime-api] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.pallet-treasury] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" - -# [dependencies.pallet-vesting] -# default-features = false -# git = 'https://github.com/paritytech/substrate' -# branch = 'master' +branch = "polkadot-v0.9.30" [dependencies.sp-arithmetic] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-api] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-block-builder] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-core] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-consensus-aura] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-inherents] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-io] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-offchain] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-runtime] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-session] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-std] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-transaction-pool] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.sp-version] default-features = false git = "https://github.com/paritytech/substrate" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.smallvec] version = '1.6.1' @@ -319,46 +368,46 @@ version = '1.6.1' [dependencies.parachain-info] default-features = false git = "https://github.com/paritytech/cumulus" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" [dependencies.cumulus-pallet-aura-ext] git = "https://github.com/paritytech/cumulus" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" default-features = false [dependencies.cumulus-pallet-parachain-system] git = "https://github.com/paritytech/cumulus" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" default-features = false [dependencies.cumulus-primitives-core] git = "https://github.com/paritytech/cumulus" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" default-features = false [dependencies.cumulus-pallet-xcm] git = 
"https://github.com/paritytech/cumulus" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" default-features = false [dependencies.cumulus-pallet-dmp-queue] git = "https://github.com/paritytech/cumulus" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" default-features = false [dependencies.cumulus-pallet-xcmp-queue] git = "https://github.com/paritytech/cumulus" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" default-features = false [dependencies.cumulus-primitives-utility] git = "https://github.com/paritytech/cumulus" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" default-features = false [dependencies.cumulus-primitives-timestamp] git = "https://github.com/paritytech/cumulus" -branch = "polkadot-v0.9.24" +branch = "polkadot-v0.9.30" default-features = false ################################################################################ @@ -366,50 +415,52 @@ default-features = false [dependencies.polkadot-parachain] git = "https://github.com/paritytech/polkadot" -branch = "release-v0.9.24" +branch = "release-v0.9.30" default-features = false [dependencies.xcm] git = "https://github.com/paritytech/polkadot" -branch = "release-v0.9.24" +branch = "release-v0.9.30" default-features = false [dependencies.xcm-builder] git = "https://github.com/paritytech/polkadot" -branch = "release-v0.9.24" +branch = "release-v0.9.30" default-features = false [dependencies.xcm-executor] git = "https://github.com/paritytech/polkadot" -branch = "release-v0.9.24" +branch = "release-v0.9.30" default-features = false [dependencies.pallet-xcm] git = "https://github.com/paritytech/polkadot" -branch = "release-v0.9.24" -default-features = false - -[dependencies.orml-vesting] -git = "https://github.com/uniquenetwork/open-runtime-module-library" -branch = "unique-polkadot-v0.9.24" -version = "0.4.1-dev" +branch = "release-v0.9.30" default-features = false ################################################################################ # local dependencies [dependencies] +orml-vesting.workspace = true +orml-xtokens.workspace = true +orml-tokens.workspace = true +orml-traits.workspace = true + log = { version = "0.4.16", default-features = false } -unique-runtime-common = { path = "../common", default-features = false } +up-common = { path = "../../primitives/common", default-features = false } scale-info = { version = "2.0.1", default-features = false, features = [ "derive", ] } derivative = "2.2.0" pallet-unique = { path = '../../pallets/unique', default-features = false } up-rpc = { path = "../../primitives/rpc", default-features = false } +app-promotion-rpc = { path = "../../primitives/app_promotion_rpc", default-features = false } rmrk-rpc = { path = "../../primitives/rmrk-rpc", default-features = false } pallet-inflation = { path = '../../pallets/inflation', default-features = false } +pallet-app-promotion = { path = '../../pallets/app-promotion', default-features = false } up-data-structs = { path = '../../primitives/data-structs', default-features = false } +pallet-configuration = { default-features = false, path = "../../pallets/configuration" } pallet-common = { default-features = false, path = "../../pallets/common" } pallet-structure = { default-features = false, path = "../../pallets/structure" } pallet-fungible = { default-features = false, path = "../../pallets/fungible" } @@ -419,22 +470,36 @@ pallet-proxy-rmrk-core = { default-features = false, path = "../../pallets/proxy pallet-proxy-rmrk-equip = { default-features = false, path = "../../pallets/proxy-rmrk-equip", 
package = "pallet-rmrk-equip" } pallet-unique-scheduler = { path = '../../pallets/scheduler', default-features = false } # pallet-contract-helpers = { path = '../pallets/contract-helpers', default-features = false, version = '0.1.0' } -pallet-charge-transaction = { git = "https://github.com/uniquenetwork/pallet-sponsoring", branch = "polkadot-v0.9.24", package = "pallet-template-transaction-payment", default-features = false, version = "3.0.0" } +pallet-charge-transaction = { git = "https://github.com/uniquenetwork/pallet-sponsoring", branch = "polkadot-v0.9.30", package = "pallet-template-transaction-payment", default-features = false, version = "3.0.0" } pallet-evm-migration = { path = '../../pallets/evm-migration', default-features = false } pallet-evm-contract-helpers = { path = '../../pallets/evm-contract-helpers', default-features = false } pallet-evm-transaction-payment = { path = '../../pallets/evm-transaction-payment', default-features = false } pallet-evm-coder-substrate = { default-features = false, path = "../../pallets/evm-coder-substrate" } -pallet-evm = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } -pallet-ethereum = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } -pallet-base-fee = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } -fp-rpc = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } -fp-self-contained = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } -fp-evm-mapping = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.24" } +pallet-evm = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } +pallet-ethereum = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } +pallet-base-fee = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } +fp-rpc = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } +fp-self-contained = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } +fp-evm-mapping = { default-features = false, git = "https://github.com/uniquenetwork/frontier", branch = "unique-polkadot-v0.9.30" } +evm-coder = { default-features = false, path = '../../crates/evm-coder' } +up-sponsorship = { default-features = false, git = "https://github.com/uniquenetwork/pallet-sponsoring", branch = 'polkadot-v0.9.30' } +pallet-foreign-assets = { default-features = false, path = "../../pallets/foreign-assets" } pallet-maintenance = { default-features = false, path = "../../pallets/maintenance" } +################################################################################ +# Other Dependencies + +impl-trait-for-tuples = "0.2.2" + +################################################################################ +# Dev Dependencies + +[dev-dependencies.logtest] +version = "2.0.0" + ################################################################################ # Build Dependencies [build-dependencies.substrate-wasm-builder] git = "https://github.com/paritytech/substrate" -branch = 
"polkadot-v0.9.24" +branch = "polkadot-v0.9.30" diff --git a/runtime/unique/src/lib.rs b/runtime/unique/src/lib.rs index 31cc92c73f..161af85d65 100644 --- a/runtime/unique/src/lib.rs +++ b/runtime/unique/src/lib.rs @@ -25,1464 +25,47 @@ #[cfg(feature = "std")] include!(concat!(env!("OUT_DIR"), "/wasm_binary.rs")); -use sp_api::impl_runtime_apis; -use sp_core::{crypto::KeyTypeId, OpaqueMetadata, H256, U256, H160}; -use sp_runtime::DispatchError; -use fp_self_contained::*; -// #[cfg(any(feature = "std", test))] -// pub use sp_runtime::BuildStorage; +use frame_support::parameter_types; -use scale_info::TypeInfo; -use sp_runtime::{ - Permill, Perbill, Percent, create_runtime_str, generic, impl_opaque_keys, - traits::{ - AccountIdLookup, BlakeTwo256, Block as BlockT, AccountIdConversion, Zero, Member, - SignedExtension, - }, - transaction_validity::{ - TransactionSource, TransactionValidity, ValidTransaction, InvalidTransaction, - }, - ApplyExtrinsicResult, RuntimeAppPublic, -}; - -use sp_std::prelude::*; - -#[cfg(feature = "std")] -use sp_version::NativeVersion; use sp_version::RuntimeVersion; -pub use pallet_transaction_payment::{ - Multiplier, TargetedFeeAdjustment, FeeDetails, RuntimeDispatchInfo, -}; -// A few exports that help ease life for downstream crates. -pub use pallet_balances::Call as BalancesCall; -pub use pallet_evm::{ - EnsureAddressTruncated, HashedAddressMapping, Runner, account::CrossAccountId as _, - OnMethodCall, Account as EVMAccount, FeeCalculator, GasWeightMapping, -}; -pub use frame_support::{ - construct_runtime, match_types, - dispatch::DispatchResult, - PalletId, parameter_types, StorageValue, ConsensusEngineId, - traits::{ - tokens::currency::Currency as CurrencyT, OnUnbalanced as OnUnbalancedT, Everything, - Currency, ExistenceRequirement, Get, IsInVec, KeyOwnerProofSystem, LockIdentifier, - OnUnbalanced, Randomness, FindAuthor, ConstU32, Imbalance, PrivilegeCmp, - }, - weights::{ - constants::{BlockExecutionWeight, ExtrinsicBaseWeight, RocksDbWeight, WEIGHT_PER_SECOND}, - DispatchClass, DispatchInfo, GetDispatchInfo, IdentityFee, Pays, PostDispatchInfo, Weight, - WeightToFeePolynomial, WeightToFeeCoefficient, WeightToFeeCoefficients, ConstantMultiplier, - WeightToFee, - }, -}; -use pallet_unique_scheduler::DispatchCall; -use up_data_structs::{ - CollectionId, TokenId, TokenData, Property, PropertyKeyPermission, CollectionLimits, - CollectionStats, RpcCollection, - mapping::{EvmTokenAddressMapping, CrossTokenAddressMapping}, - TokenChild, RmrkCollectionInfo, RmrkInstanceInfo, RmrkResourceInfo, RmrkPropertyInfo, - RmrkBaseInfo, RmrkPartType, RmrkTheme, RmrkThemeName, RmrkCollectionId, RmrkNftId, - RmrkNftChild, RmrkPropertyKey, RmrkResourceId, RmrkBaseId, -}; +use sp_runtime::create_runtime_str; -// use pallet_contracts::weights::WeightInfo; -// #[cfg(any(feature = "std", test))] -use frame_system::{ - self as frame_system, EnsureRoot, EnsureSigned, - limits::{BlockWeights, BlockLength}, -}; -use sp_arithmetic::{ - traits::{BaseArithmetic, Unsigned}, -}; -use smallvec::smallvec; -use codec::{Encode, Decode}; -use fp_rpc::TransactionStatus; -use sp_runtime::{ - traits::{ - Applyable, BlockNumberProvider, Dispatchable, PostDispatchInfoOf, DispatchInfoOf, - Saturating, CheckedConversion, - }, - generic::Era, - transaction_validity::TransactionValidityError, - DispatchErrorWithPostInfo, SaturatedConversion, -}; +use up_common::types::*; -// pub use pallet_timestamp::Call as TimestampCall; -pub use sp_consensus_aura::sr25519::AuthorityId as AuraId; +#[path = 
"../../common/mod.rs"] +mod runtime_common; -// Polkadot imports -use pallet_xcm::XcmPassthrough; -use polkadot_parachain::primitives::Sibling; -use xcm::v1::{BodyId, Junction::*, MultiLocation, NetworkId, Junctions::*}; -use xcm_builder::{ - AccountId32Aliases, AllowTopLevelPaidExecutionFrom, AllowUnpaidExecutionFrom, CurrencyAdapter, - EnsureXcmOrigin, FixedWeightBounds, LocationInverter, NativeAsset, ParentAsSuperuser, - RelayChainAsNative, SiblingParachainAsNative, SiblingParachainConvertsVia, - SignedAccountId32AsNative, SignedToAccountId32, SovereignSignedViaLocation, TakeWeightCredit, - ParentIsPreset, -}; -use xcm_executor::{Config, XcmExecutor, Assets}; -use sp_std::{cmp::Ordering, marker::PhantomData}; +pub mod xcm_barrier; -use xcm::latest::{ - // Xcm, - AssetId::{Concrete}, - Fungibility::Fungible as XcmFungible, - MultiAsset, - Error as XcmError, -}; -use xcm_executor::traits::{MatchesFungible, WeightTrader}; +#[cfg(test)] +mod tests; -use unique_runtime_common::{ - impl_common_runtime_apis, - types::*, - constants::*, - dispatch::{CollectionDispatchT, CollectionDispatch}, - sponsoring::UniqueSponsorshipHandler, - eth_sponsoring::UniqueEthSponsorshipHandler, - weights::CommonWeights, -}; +pub use runtime_common::*; pub const RUNTIME_NAME: &str = "unique"; pub const TOKEN_SYMBOL: &str = "UNQ"; -type CrossAccountId = pallet_evm::account::BasicCrossAccountId; - -impl RuntimeInstance for Runtime { - type CrossAccountId = self::CrossAccountId; - type TransactionConverter = self::TransactionConverter; - - fn get_transaction_converter() -> TransactionConverter { - TransactionConverter - } -} - -/// The type for looking up accounts. We don't expect more than 4 billion of them, but you -/// never know... -pub type AccountIndex = u32; - -/// Balance of an account. -pub type Balance = u128; - -/// Index of a transaction in the chain. -pub type Index = u32; - -/// A hash of some data used by the chain. -pub type Hash = sp_core::H256; - -/// Digest item type. -pub type DigestItem = generic::DigestItem; - -/// Opaque types. These are used by the CLI to instantiate machinery that don't need to know -/// the specifics of the runtime. They can then be made to be agnostic over specific formats -/// of data like extrinsics, allowing for them to continue syncing the network through upgrades -/// to even the core data structures. -pub mod opaque { - use sp_std::prelude::*; - use sp_runtime::impl_opaque_keys; - use super::Aura; - - pub use unique_runtime_common::types::*; - - impl_opaque_keys! { - pub struct SessionKeys { - pub aura: Aura, - } - } -} - /// This runtime version. pub const VERSION: RuntimeVersion = RuntimeVersion { spec_name: create_runtime_str!(RUNTIME_NAME), impl_name: create_runtime_str!(RUNTIME_NAME), authoring_version: 1, - spec_version: 924013, + spec_version: 930032, impl_version: 0, apis: RUNTIME_API_VERSIONS, - transaction_version: 1, + transaction_version: 2, state_version: 0, }; -#[derive(codec::Encode, codec::Decode)] -pub enum XCMPMessage { - /// Transfer tokens to the given account from the Parachain account. - TransferToken(XAccountId, XBalance), -} - -/// The version information used to identify this runtime when compiled natively. 
-#[cfg(feature = "std")] -pub fn native_version() -> NativeVersion { - NativeVersion { - runtime_version: VERSION, - can_author_with: Default::default(), - } -} - -type NegativeImbalance = >::NegativeImbalance; - -pub struct DealWithFees; -impl OnUnbalanced for DealWithFees { - fn on_unbalanceds(mut fees_then_tips: impl Iterator) { - if let Some(fees) = fees_then_tips.next() { - // for fees, 100% to treasury - let mut split = fees.ration(100, 0); - if let Some(tips) = fees_then_tips.next() { - // for tips, if any, 100% to treasury - tips.ration_merge_into(100, 0, &mut split); - } - Treasury::on_unbalanced(split.0); - // Author::on_unbalanced(split.1); - } - } -} - parameter_types! { - pub const BlockHashCount: BlockNumber = 2400; - pub RuntimeBlockLength: BlockLength = - BlockLength::max_with_normal_ratio(5 * 1024 * 1024, NORMAL_DISPATCH_RATIO); - pub const AvailableBlockRatio: Perbill = Perbill::from_percent(75); - pub const MaximumBlockLength: u32 = 5 * 1024 * 1024; - pub RuntimeBlockWeights: BlockWeights = BlockWeights::builder() - .base_block(BlockExecutionWeight::get()) - .for_class(DispatchClass::all(), |weights| { - weights.base_extrinsic = ExtrinsicBaseWeight::get(); - }) - .for_class(DispatchClass::Normal, |weights| { - weights.max_total = Some(NORMAL_DISPATCH_RATIO * MAXIMUM_BLOCK_WEIGHT); - }) - .for_class(DispatchClass::Operational, |weights| { - weights.max_total = Some(MAXIMUM_BLOCK_WEIGHT); - // Operational transactions have some extra reserved space, so that they - // are included even if block reached `MAXIMUM_BLOCK_WEIGHT`. - weights.reserved = Some( - MAXIMUM_BLOCK_WEIGHT - NORMAL_DISPATCH_RATIO * MAXIMUM_BLOCK_WEIGHT - ); - }) - .avg_block_initialization(AVERAGE_ON_INITIALIZE_RATIO) - .build_or_panic(); pub const Version: RuntimeVersion = VERSION; pub const SS58Prefix: u16 = 7391; -} - -parameter_types! { pub const ChainId: u64 = 8880; } -pub struct FixedFee; -impl FeeCalculator for FixedFee { - fn min_gas_price() -> (U256, u64) { - (MIN_GAS_PRICE.into(), 0) - } -} - -// Assuming slowest ethereum opcode is SSTORE, with gas price of 20000 as our worst case -// (contract, which only writes a lot of data), -// approximating on top of our real store write weight -parameter_types! { - pub const WritesPerSecond: u64 = WEIGHT_PER_SECOND / ::DbWeight::get().write; - pub const GasPerSecond: u64 = WritesPerSecond::get() * 20000; - pub const WeightPerGas: u64 = WEIGHT_PER_SECOND / GasPerSecond::get(); -} - -/// Limiting EVM execution to 50% of block for substrate users and management tasks -/// EVM transaction consumes more weight than substrate's, so we can't rely on them being -/// scheduled fairly -const EVM_DISPATCH_RATIO: Perbill = Perbill::from_percent(50); -parameter_types! 
{ - pub BlockGasLimit: U256 = U256::from(NORMAL_DISPATCH_RATIO * EVM_DISPATCH_RATIO * MAXIMUM_BLOCK_WEIGHT / WeightPerGas::get()); -} - -pub enum FixedGasWeightMapping {} -impl GasWeightMapping for FixedGasWeightMapping { - fn gas_to_weight(gas: u64) -> Weight { - gas.saturating_mul(WeightPerGas::get()) - } - fn weight_to_gas(weight: Weight) -> u64 { - weight / WeightPerGas::get() - } -} - -impl pallet_evm::account::Config for Runtime { - type CrossAccountId = pallet_evm::account::BasicCrossAccountId; - type EvmAddressMapping = pallet_evm::HashedAddressMapping; - type EvmBackwardsAddressMapping = fp_evm_mapping::MapBackwardsAddressTruncated; -} - -impl pallet_evm::Config for Runtime { - type BlockGasLimit = BlockGasLimit; - type FeeCalculator = FixedFee; - type GasWeightMapping = FixedGasWeightMapping; - type BlockHashMapping = pallet_ethereum::EthereumBlockHashMapping; - type CallOrigin = EnsureAddressTruncated; - type WithdrawOrigin = EnsureAddressTruncated; - type AddressMapping = HashedAddressMapping; - type PrecompilesType = (); - type PrecompilesValue = (); - type Currency = Balances; - type Event = Event; - type OnMethodCall = ( - pallet_evm_migration::OnMethodCall, - pallet_evm_contract_helpers::HelpersOnMethodCall, - CollectionDispatchT, - pallet_unique::eth::CollectionHelpersOnMethodCall, - ); - type OnCreate = pallet_evm_contract_helpers::HelpersOnCreate; - type ChainId = ChainId; - type Runner = pallet_evm::runner::stack::Runner; - type OnChargeTransaction = pallet_evm::EVMCurrencyAdapter; - type TransactionValidityHack = pallet_evm_transaction_payment::TransactionValidityHack; - type FindAuthor = EthereumFindAuthor; -} - -impl pallet_evm_migration::Config for Runtime { - type WeightInfo = pallet_evm_migration::weights::SubstrateWeight; -} - -pub struct EthereumFindAuthor(core::marker::PhantomData); -impl> FindAuthor for EthereumFindAuthor { - fn find_author<'a, I>(digests: I) -> Option - where - I: 'a + IntoIterator, - { - if let Some(author_index) = F::find_author(digests) { - let authority_id = Aura::authorities()[author_index as usize].clone(); - return Some(H160::from_slice(&authority_id.to_raw_vec()[4..24])); - } - None - } -} - -impl pallet_ethereum::Config for Runtime { - type Event = Event; - type StateRoot = pallet_ethereum::IntermediateStateRoot; -} - -impl pallet_randomness_collective_flip::Config for Runtime {} - -impl frame_system::Config for Runtime { - /// The data to be stored in an account. - type AccountData = pallet_balances::AccountData; - /// The identifier used to distinguish between accounts. - type AccountId = AccountId; - /// The basic call filter to use in dispatchable. - type BaseCallFilter = Everything; - /// Maximum number of block number to block hash mappings to keep (oldest pruned first). - type BlockHashCount = BlockHashCount; - /// The maximum length of a block (in bytes). - type BlockLength = RuntimeBlockLength; - /// The index type for blocks. - type BlockNumber = BlockNumber; - /// The weight of the overhead invoked on the block import process, independent of the extrinsics included in that block. - type BlockWeights = RuntimeBlockWeights; - /// The aggregated dispatch type that is available for extrinsics. - type Call = Call; - /// The weight of database operations that the runtime can invoke. - type DbWeight = RocksDbWeight; - /// The ubiquitous event type. - type Event = Event; - /// The type for hashing blocks and tries. - type Hash = Hash; - /// The hashing algorithm used. - type Hashing = BlakeTwo256; - /// The header type. 
- type Header = generic::Header; - /// The index type for storing how many extrinsics an account has signed. - type Index = Index; - /// The lookup mechanism to get account ID from whatever is passed in dispatchers. - type Lookup = AccountIdLookup; - /// What to do if an account is fully reaped from the system. - type OnKilledAccount = (); - /// What to do if a new account is created. - type OnNewAccount = (); - type OnSetCode = cumulus_pallet_parachain_system::ParachainSetCode; - /// The ubiquitous origin type. - type Origin = Origin; - /// This type is being generated by `construct_runtime!`. - type PalletInfo = PalletInfo; - /// This is used as an identifier of the chain. 42 is the generic substrate prefix. - type SS58Prefix = SS58Prefix; - /// Weight information for the extrinsics of this pallet. - type SystemWeightInfo = frame_system::weights::SubstrateWeight; - /// Version of the runtime. - type Version = Version; - type MaxConsumers = ConstU32<16>; -} - -parameter_types! { - pub const MinimumPeriod: u64 = SLOT_DURATION / 2; -} - -impl pallet_timestamp::Config for Runtime { - /// A timestamp: milliseconds since the unix epoch. - type Moment = u64; - type OnTimestampSet = (); - type MinimumPeriod = MinimumPeriod; - type WeightInfo = (); -} - -parameter_types! { - // pub const ExistentialDeposit: u128 = 500; - pub const ExistentialDeposit: u128 = 0; - pub const MaxLocks: u32 = 50; - pub const MaxReserves: u32 = 50; -} - -impl pallet_balances::Config for Runtime { - type MaxLocks = MaxLocks; - type MaxReserves = MaxReserves; - type ReserveIdentifier = [u8; 16]; - /// The type for recording an account's balance. - type Balance = Balance; - /// The ubiquitous event type. - type Event = Event; - type DustRemoval = Treasury; - type ExistentialDeposit = ExistentialDeposit; - type AccountStore = System; - type WeightInfo = pallet_balances::weights::SubstrateWeight; -} - -pub const fn deposit(items: u32, bytes: u32) -> Balance { - items as Balance * 15 * CENTIUNIQUE + (bytes as Balance) * 6 * CENTIUNIQUE -} - -/* -parameter_types! { - pub TombstoneDeposit: Balance = deposit( - 1, - sp_std::mem::size_of::> as u32, - ); - pub DepositPerContract: Balance = TombstoneDeposit::get(); - pub const DepositPerStorageByte: Balance = deposit(0, 1); - pub const DepositPerStorageItem: Balance = deposit(1, 0); - pub RentFraction: Perbill = Perbill::from_rational(1u32, 30 * DAYS); - pub const SurchargeReward: Balance = 150 * MILLIUNIQUE; - pub const SignedClaimHandicap: u32 = 2; - pub const MaxDepth: u32 = 32; - pub const MaxValueSize: u32 = 16 * 1024; - pub const MaxCodeSize: u32 = 1024 * 1024 * 25; // 25 Mb - // The lazy deletion runs inside on_initialize. - pub DeletionWeightLimit: Weight = AVERAGE_ON_INITIALIZE_RATIO * - RuntimeBlockWeights::get().max_block; - // The weight needed for decoding the queue should be less or equal than a fifth - // of the overall weight dedicated to the lazy deletion. 
- pub DeletionQueueDepth: u32 = ((DeletionWeightLimit::get() / ( - ::WeightInfo::on_initialize_per_queue_item(1) - - ::WeightInfo::on_initialize_per_queue_item(0) - )) / 5) as u32; - pub Schedule: pallet_contracts::Schedule = Default::default(); -} - -impl pallet_contracts::Config for Runtime { - type Time = Timestamp; - type Randomness = RandomnessCollectiveFlip; - type Currency = Balances; - type Event = Event; - type RentPayment = (); - type SignedClaimHandicap = SignedClaimHandicap; - type TombstoneDeposit = TombstoneDeposit; - type DepositPerContract = DepositPerContract; - type DepositPerStorageByte = DepositPerStorageByte; - type DepositPerStorageItem = DepositPerStorageItem; - type RentFraction = RentFraction; - type SurchargeReward = SurchargeReward; - type WeightPrice = pallet_transaction_payment::Pallet; - type WeightInfo = pallet_contracts::weights::SubstrateWeight; - type ChainExtension = NFTExtension; - type DeletionQueueDepth = DeletionQueueDepth; - type DeletionWeightLimit = DeletionWeightLimit; - type Schedule = Schedule; - type CallStack = [pallet_contracts::Frame; 31]; -} -*/ - -parameter_types! { - /// This value increases the priority of `Operational` transactions by adding - /// a "virtual tip" that's equal to the `OperationalFeeMultiplier * final_fee`. - pub const OperationalFeeMultiplier: u8 = 5; -} - -/// Linear implementor of `WeightToFeePolynomial` -pub struct LinearFee(sp_std::marker::PhantomData); - -impl WeightToFeePolynomial for LinearFee -where - T: BaseArithmetic + From + Copy + Unsigned, -{ - type Balance = T; - - fn polynomial() -> WeightToFeeCoefficients { - smallvec!(WeightToFeeCoefficient { - coeff_integer: WEIGHT_TO_FEE_COEFF.into(), - coeff_frac: Perbill::zero(), - negative: false, - degree: 1, - }) - } -} - -impl pallet_transaction_payment::Config for Runtime { - type OnChargeTransaction = pallet_transaction_payment::CurrencyAdapter; - type LengthToFee = ConstantMultiplier; - type OperationalFeeMultiplier = OperationalFeeMultiplier; - type WeightToFee = LinearFee; - type FeeMultiplierUpdate = (); -} - -parameter_types! 
{ - pub const ProposalBond: Permill = Permill::from_percent(5); - pub const ProposalBondMinimum: Balance = 1 * UNIQUE; - pub const ProposalBondMaximum: Balance = 1000 * UNIQUE; - pub const SpendPeriod: BlockNumber = 5 * MINUTES; - pub const Burn: Permill = Permill::from_percent(0); - pub const TipCountdown: BlockNumber = 1 * DAYS; - pub const TipFindersFee: Percent = Percent::from_percent(20); - pub const TipReportDepositBase: Balance = 1 * UNIQUE; - pub const DataDepositPerByte: Balance = 1 * CENTIUNIQUE; - pub const BountyDepositBase: Balance = 1 * UNIQUE; - pub const BountyDepositPayoutDelay: BlockNumber = 1 * DAYS; - pub const TreasuryModuleId: PalletId = PalletId(*b"py/trsry"); - pub const BountyUpdatePeriod: BlockNumber = 14 * DAYS; - pub const MaximumReasonLength: u32 = 16384; - pub const BountyCuratorDeposit: Permill = Permill::from_percent(50); - pub const BountyValueMinimum: Balance = 5 * UNIQUE; - pub const MaxApprovals: u32 = 100; -} - -impl pallet_treasury::Config for Runtime { - type PalletId = TreasuryModuleId; - type Currency = Balances; - type ApproveOrigin = EnsureRoot; - type RejectOrigin = EnsureRoot; - type Event = Event; - type OnSlash = (); - type ProposalBond = ProposalBond; - type ProposalBondMinimum = ProposalBondMinimum; - type ProposalBondMaximum = ProposalBondMaximum; - type SpendPeriod = SpendPeriod; - type Burn = Burn; - type BurnDestination = (); - type SpendFunds = (); - type WeightInfo = pallet_treasury::weights::SubstrateWeight; - type MaxApprovals = MaxApprovals; -} - -impl pallet_sudo::Config for Runtime { - type Event = Event; - type Call = Call; -} - -pub struct RelayChainBlockNumberProvider(sp_std::marker::PhantomData); - -impl BlockNumberProvider - for RelayChainBlockNumberProvider -{ - type BlockNumber = BlockNumber; - - fn current_block_number() -> Self::BlockNumber { - cumulus_pallet_parachain_system::Pallet::::validation_data() - .map(|d| d.relay_parent_number) - .unwrap_or_default() - } -} - -parameter_types! { - pub const MinVestedTransfer: Balance = 10 * UNIQUE; - pub const MaxVestingSchedules: u32 = 28; -} - -impl orml_vesting::Config for Runtime { - type Event = Event; - type Currency = pallet_balances::Pallet; - type MinVestedTransfer = MinVestedTransfer; - type VestedTransferOrigin = EnsureSigned; - type WeightInfo = (); - type MaxVestingSchedules = MaxVestingSchedules; - type BlockNumberProvider = RelayChainBlockNumberProvider; -} - -parameter_types! { - pub const ReservedDmpWeight: Weight = MAXIMUM_BLOCK_WEIGHT / 4; - pub const ReservedXcmpWeight: Weight = MAXIMUM_BLOCK_WEIGHT / 4; -} - -impl cumulus_pallet_parachain_system::Config for Runtime { - type Event = Event; - type SelfParaId = parachain_info::Pallet; - type OnSystemEvent = (); - // type DownwardMessageHandlers = cumulus_primitives_utility::UnqueuedDmpAsParent< - // MaxDownwardMessageWeight, - // XcmExecutor, - // Call, - // >; - type OutboundXcmpMessageSource = XcmpQueue; - type DmpMessageHandler = DmpQueue; - type ReservedDmpWeight = ReservedDmpWeight; - type ReservedXcmpWeight = ReservedXcmpWeight; - type XcmpMessageHandler = XcmpQueue; -} - -impl parachain_info::Config for Runtime {} - -impl cumulus_pallet_aura_ext::Config for Runtime {} - -parameter_types! 
{ - pub const RelayLocation: MultiLocation = MultiLocation::parent(); - pub const RelayNetwork: NetworkId = NetworkId::Polkadot; - pub RelayOrigin: Origin = cumulus_pallet_xcm::Origin::Relay.into(); - pub Ancestry: MultiLocation = Parachain(ParachainInfo::parachain_id().into()).into(); -} - -/// Type for specifying how a `MultiLocation` can be converted into an `AccountId`. This is used -/// when determining ownership of accounts for asset transacting and when attempting to use XCM -/// `Transact` in order to determine the dispatch Origin. -pub type LocationToAccountId = ( - // The parent (Relay-chain) origin converts to the default `AccountId`. - ParentIsPreset, - // Sibling parachain origins convert to AccountId via the `ParaId::into`. - SiblingParachainConvertsVia, - // Straight up local `AccountId32` origins just alias directly to `AccountId`. - AccountId32Aliases, -); - -pub struct OnlySelfCurrency; -impl> MatchesFungible for OnlySelfCurrency { - fn matches_fungible(a: &MultiAsset) -> Option { - match (&a.id, &a.fun) { - (Concrete(_), XcmFungible(ref amount)) => CheckedConversion::checked_from(*amount), - _ => None, - } - } -} - -/// Means for transacting assets on this chain. -pub type LocalAssetTransactor = CurrencyAdapter< - // Use this currency: - Balances, - // Use this currency when it is a fungible asset matching the given location or name: - OnlySelfCurrency, - // Do a simple punn to convert an AccountId32 MultiLocation into a native chain account ID: - LocationToAccountId, - // Our chain's account ID type (we can't get away without mentioning it explicitly): - AccountId, - // We don't track any teleports. - (), ->; - -/// This is the type we use to convert an (incoming) XCM origin into a local `Origin` instance, -/// ready for dispatching a transaction with Xcm's `Transact`. There is an `OriginKind` which can -/// biases the kind of local `Origin` it will become. -pub type XcmOriginToTransactDispatchOrigin = ( - // Sovereign account converter; this attempts to derive an `AccountId` from the origin location - // using `LocationToAccountId` and then turn that into the usual `Signed` origin. Useful for - // foreign chains who want to have a local sovereign account on this chain which they control. - SovereignSignedViaLocation, - // Native converter for Relay-chain (Parent) location; will converts to a `Relay` origin when - // recognised. - RelayChainAsNative, - // Native converter for sibling Parachains; will convert to a `SiblingPara` origin when - // recognised. - SiblingParachainAsNative, - // Superuser converter for the Relay-chain (Parent) location. This will allow it to issue a - // transaction from the Root origin. - ParentAsSuperuser, - // Native signed account converter; this just converts an `AccountId32` origin into a normal - // `Origin::Signed` origin of the same 32-byte value. - SignedAccountId32AsNative, - // Xcm origins can be represented natively under the Xcm pallet's Xcm origin. - XcmPassthrough, -); - -parameter_types! { - // One XCM operation is 1_000_000 weight - almost certainly a conservative estimate. - pub UnitWeightCost: Weight = 1_000_000; - // 1200 UNIQUEs buy 1 second of weight. - pub const WeightPrice: (MultiLocation, u128) = (MultiLocation::parent(), 1_200 * UNIQUE); - pub const MaxInstructions: u32 = 100; - pub const MaxAuthorities: u32 = 100_000; -} - -match_types! 
{ - pub type ParentOrParentsUnitPlurality: impl Contains = { - MultiLocation { parents: 1, interior: Here } | - MultiLocation { parents: 1, interior: X1(Plurality { id: BodyId::Unit, .. }) } - }; -} - -pub type Barrier = ( - TakeWeightCredit, - AllowTopLevelPaidExecutionFrom, - AllowUnpaidExecutionFrom, - // ^^^ Parent & its unit plurality gets free execution -); - -pub struct UsingOnlySelfCurrencyComponents< - WeightToFee: WeightToFeePolynomial, - AssetId: Get, - AccountId, - Currency: CurrencyT, - OnUnbalanced: OnUnbalancedT, ->( - Weight, - Currency::Balance, - PhantomData<(WeightToFee, AssetId, AccountId, Currency, OnUnbalanced)>, -); -impl< - WeightToFee: WeightToFeePolynomial, - AssetId: Get, - AccountId, - Currency: CurrencyT, - OnUnbalanced: OnUnbalancedT, - > WeightTrader - for UsingOnlySelfCurrencyComponents -{ - fn new() -> Self { - Self(0, Zero::zero(), PhantomData) - } - - fn buy_weight(&mut self, weight: Weight, payment: Assets) -> Result { - let amount = WeightToFee::weight_to_fee(&weight); - let u128_amount: u128 = amount.try_into().map_err(|_| XcmError::Overflow)?; - - // location to this parachain through relay chain - let option1: xcm::v1::AssetId = Concrete(MultiLocation { - parents: 1, - interior: X1(Parachain(ParachainInfo::parachain_id().into())), - }); - // direct location - let option2: xcm::v1::AssetId = Concrete(MultiLocation { - parents: 0, - interior: Here, - }); - - let required = if payment.fungible.contains_key(&option1) { - (option1, u128_amount).into() - } else if payment.fungible.contains_key(&option2) { - (option2, u128_amount).into() - } else { - (Concrete(MultiLocation::default()), u128_amount).into() - }; - - let unused = payment - .checked_sub(required) - .map_err(|_| XcmError::TooExpensive)?; - self.0 = self.0.saturating_add(weight); - self.1 = self.1.saturating_add(amount); - Ok(unused) - } - - fn refund_weight(&mut self, weight: Weight) -> Option { - let weight = weight.min(self.0); - let amount = WeightToFee::weight_to_fee(&weight); - self.0 -= weight; - self.1 = self.1.saturating_sub(amount); - let amount: u128 = amount.saturated_into(); - if amount > 0 { - Some((AssetId::get(), amount).into()) - } else { - None - } - } -} -impl< - WeightToFee: WeightToFeePolynomial, - AssetId: Get, - AccountId, - Currency: CurrencyT, - OnUnbalanced: OnUnbalancedT, - > Drop - for UsingOnlySelfCurrencyComponents -{ - fn drop(&mut self) { - OnUnbalanced::on_unbalanced(Currency::issue(self.1)); - } -} - -pub struct XcmConfig; -impl Config for XcmConfig { - type Call = Call; - type XcmSender = XcmRouter; - // How to withdraw and deposit an asset. - type AssetTransactor = LocalAssetTransactor; - type OriginConverter = XcmOriginToTransactDispatchOrigin; - type IsReserve = NativeAsset; - type IsTeleporter = (); // Teleportation is disabled - type LocationInverter = LocationInverter; - type Barrier = Barrier; - type Weigher = FixedWeightBounds; - type Trader = - UsingOnlySelfCurrencyComponents, RelayLocation, AccountId, Balances, ()>; - type ResponseHandler = (); // Don't handle responses for now. - type SubscriptionService = PolkadotXcm; - - type AssetTrap = PolkadotXcm; - type AssetClaims = PolkadotXcm; -} - -// parameter_types! { -// pub const MaxDownwardMessageWeight: Weight = MAXIMUM_BLOCK_WEIGHT / 10; -// } - -/// No local origins on this chain are allowed to dispatch XCM sends/executions. -pub type LocalOriginToLocation = (SignedToAccountId32,); - -/// The means for routing XCM messages which are not for local execution into the right message -/// queues. 
-pub type XcmRouter = ( - // Two routers - use UMP to communicate with the relay chain: - cumulus_primitives_utility::ParentAsUmp, - // ..and XCMP to communicate with the sibling chains. - XcmpQueue, -); - -impl pallet_evm_coder_substrate::Config for Runtime {} - -impl pallet_xcm::Config for Runtime { - type Event = Event; - type SendXcmOrigin = EnsureXcmOrigin; - type XcmRouter = XcmRouter; - type ExecuteXcmOrigin = EnsureXcmOrigin; - type XcmExecuteFilter = Everything; - type XcmExecutor = XcmExecutor; - type XcmTeleportFilter = Everything; - type XcmReserveTransferFilter = Everything; - type Weigher = FixedWeightBounds; - type LocationInverter = LocationInverter; - type Origin = Origin; - type Call = Call; - const VERSION_DISCOVERY_QUEUE_SIZE: u32 = 100; - type AdvertisedXcmVersion = pallet_xcm::CurrentXcmVersion; -} - -impl cumulus_pallet_xcm::Config for Runtime { - type Event = Event; - type XcmExecutor = XcmExecutor; -} - -impl cumulus_pallet_xcmp_queue::Config for Runtime { - type WeightInfo = (); - type Event = Event; - type XcmExecutor = XcmExecutor; - type ChannelInfo = ParachainSystem; - type VersionWrapper = (); - type ExecuteOverweightOrigin = frame_system::EnsureRoot; - type ControllerOrigin = EnsureRoot; - type ControllerOriginConverter = XcmOriginToTransactDispatchOrigin; -} - -impl cumulus_pallet_dmp_queue::Config for Runtime { - type Event = Event; - type XcmExecutor = XcmExecutor; - type ExecuteOverweightOrigin = frame_system::EnsureRoot; -} - -impl pallet_aura::Config for Runtime { - type AuthorityId = AuraId; - type DisabledValidators = (); - type MaxAuthorities = MaxAuthorities; -} - -parameter_types! { - pub TreasuryAccountId: AccountId = TreasuryModuleId::get().into_account_truncating(); - pub const CollectionCreationPrice: Balance = 2 * UNIQUE; -} - -impl pallet_common::Config for Runtime { - type WeightInfo = pallet_common::weights::SubstrateWeight; - type Event = Event; - type Currency = Balances; - type CollectionCreationPrice = CollectionCreationPrice; - type TreasuryAccountId = TreasuryAccountId; - type CollectionDispatch = CollectionDispatchT; - - type EvmTokenAddressMapping = EvmTokenAddressMapping; - type CrossTokenAddressMapping = CrossTokenAddressMapping; - type ContractAddress = EvmCollectionHelpersAddress; -} - -impl pallet_structure::Config for Runtime { - type Event = Event; - type Call = Call; - type WeightInfo = pallet_structure::weights::SubstrateWeight; -} - -impl pallet_fungible::Config for Runtime { - type WeightInfo = pallet_fungible::weights::SubstrateWeight; -} -impl pallet_refungible::Config for Runtime { - type WeightInfo = pallet_refungible::weights::SubstrateWeight; -} -impl pallet_nonfungible::Config for Runtime { - type WeightInfo = pallet_nonfungible::weights::SubstrateWeight; -} - -impl pallet_unique::Config for Runtime { - type Event = Event; - type WeightInfo = pallet_unique::weights::SubstrateWeight; - type CommonWeightInfo = CommonWeights; - type RefungibleExtensionsWeightInfo = CommonWeights; -} - -parameter_types! { - pub const InflationBlockInterval: BlockNumber = 100; // every time per how many blocks inflation is applied -} - -/// Used for the pallet inflation -impl pallet_inflation::Config for Runtime { - type Currency = Balances; - type TreasuryAccountId = TreasuryAccountId; - type InflationBlockInterval = InflationBlockInterval; - type BlockNumberProvider = RelayChainBlockNumberProvider; -} - -parameter_types! 
{ - pub MaximumSchedulerWeight: Weight = Perbill::from_percent(50) * - RuntimeBlockWeights::get().max_block; - pub const MaxScheduledPerBlock: u32 = 50; -} - -type ChargeTransactionPayment = pallet_charge_transaction::ChargeTransactionPayment; -use frame_support::traits::NamedReservableCurrency; - -fn get_signed_extras(from: ::AccountId) -> SignedExtraScheduler { - ( - frame_system::CheckSpecVersion::::new(), - frame_system::CheckGenesis::::new(), - frame_system::CheckEra::::from(Era::Immortal), - frame_system::CheckNonce::::from(frame_system::Pallet::::account_nonce( - from, - )), - frame_system::CheckWeight::::new(), - CheckMaintenance, - // sponsoring transaction logic - // pallet_charge_transaction::ChargeTransactionPayment::::new(0), - ) -} - -pub struct SchedulerPaymentExecutor; -impl - DispatchCall for SchedulerPaymentExecutor -where - ::Call: Member - + Dispatchable - + SelfContainedCall - + GetDispatchInfo - + From>, - SelfContainedSignedInfo: Send + Sync + 'static, - Call: From<::Call> - + From<::Call> - + SelfContainedCall, - sp_runtime::AccountId32: From<::AccountId>, -{ - fn dispatch_call( - signer: ::AccountId, - call: ::Call, - ) -> Result< - Result>, - TransactionValidityError, - > { - let dispatch_info = call.get_dispatch_info(); - let extrinsic = fp_self_contained::CheckedExtrinsic::< - AccountId, - Call, - SignedExtraScheduler, - SelfContainedSignedInfo, - > { - signed: - CheckedSignature::::Signed( - signer.clone().into(), - get_signed_extras(signer.into()), - ), - function: call.into(), - }; - - extrinsic.apply::(&dispatch_info, 0) - } - - fn reserve_balance( - id: [u8; 16], - sponsor: ::AccountId, - call: ::Call, - count: u32, - ) -> Result<(), DispatchError> { - let dispatch_info = call.get_dispatch_info(); - let weight: Balance = ChargeTransactionPayment::traditional_fee(0, &dispatch_info, 0) - .saturating_mul(count.into()); - - >::reserve_named( - &id, - &(sponsor.into()), - weight, - ) - } - - fn pay_for_call( - id: [u8; 16], - sponsor: ::AccountId, - call: ::Call, - ) -> Result { - let dispatch_info = call.get_dispatch_info(); - let weight: Balance = ChargeTransactionPayment::traditional_fee(0, &dispatch_info, 0); - Ok( - >::unreserve_named( - &id, - &(sponsor.into()), - weight, - ), - ) - } - - fn cancel_reserve( - id: [u8; 16], - sponsor: ::AccountId, - ) -> Result { - Ok( - >::unreserve_named( - &id, - &(sponsor.into()), - u128::MAX, - ), - ) - } -} - -parameter_types! { - pub const NoPreimagePostponement: Option = Some(10); - pub const Preimage: Option = Some(10); -} - -/// Used the compare the privilege of an origin inside the scheduler. 
-pub struct OriginPrivilegeCmp; - -impl PrivilegeCmp for OriginPrivilegeCmp { - fn cmp_privilege(_left: &OriginCaller, _right: &OriginCaller) -> Option { - Some(Ordering::Equal) - } -} - -impl pallet_unique_scheduler::Config for Runtime { - type Event = Event; - type Origin = Origin; - type Currency = Balances; - type PalletsOrigin = OriginCaller; - type Call = Call; - type MaximumWeight = MaximumSchedulerWeight; - type ScheduleOrigin = EnsureSigned; - type MaxScheduledPerBlock = MaxScheduledPerBlock; - type WeightInfo = (); - type CallExecutor = SchedulerPaymentExecutor; - type OriginPrivilegeCmp = OriginPrivilegeCmp; - type PreimageProvider = (); - type NoPreimagePostponement = NoPreimagePostponement; -} - -type EvmSponsorshipHandler = ( - UniqueEthSponsorshipHandler, - pallet_evm_contract_helpers::HelpersContractSponsoring, -); -type SponsorshipHandler = ( - UniqueSponsorshipHandler, - //pallet_contract_helpers::ContractSponsorshipHandler, - pallet_evm_transaction_payment::BridgeSponsorshipHandler, -); - -impl pallet_evm_transaction_payment::Config for Runtime { - type EvmSponsorshipHandler = EvmSponsorshipHandler; - type Currency = Balances; -} - -impl pallet_charge_transaction::Config for Runtime { - type SponsorshipHandler = SponsorshipHandler; -} - -// impl pallet_contract_helpers::Config for Runtime { -// type DefaultSponsoringRateLimit = DefaultSponsoringRateLimit; -// } +construct_runtime!(unique); -parameter_types! { - // 0x842899ECF380553E8a4de75bF534cdf6fBF64049 - pub const HelpersContractAddress: H160 = H160([ - 0x84, 0x28, 0x99, 0xec, 0xf3, 0x80, 0x55, 0x3e, 0x8a, 0x4d, 0xe7, 0x5b, 0xf5, 0x34, 0xcd, 0xf6, 0xfb, 0xf6, 0x40, 0x49, - ]); - - // 0x6c4e9fe1ae37a41e93cee429e8e1881abdcbb54f - pub const EvmCollectionHelpersAddress: H160 = H160([ - 0x6c, 0x4e, 0x9f, 0xe1, 0xae, 0x37, 0xa4, 0x1e, 0x93, 0xce, 0xe4, 0x29, 0xe8, 0xe1, 0x88, 0x1a, 0xbd, 0xcb, 0xb5, 0x4f, - ]); -} - -impl pallet_evm_contract_helpers::Config for Runtime { - type ContractAddress = HelpersContractAddress; - type DefaultSponsoringRateLimit = DefaultSponsoringRateLimit; -} - -impl pallet_maintenance::Config for Runtime { - type Event = Event; - type WeightInfo = pallet_maintenance::weights::SubstrateWeight; -} - -#[derive(Debug, Encode, Decode, PartialEq, Eq, Clone, TypeInfo)] -pub struct CheckMaintenance; - -impl SignedExtension for CheckMaintenance { - type AccountId = AccountId; - type Call = Call; - type AdditionalSigned = (); - type Pre = (); - - const IDENTIFIER: &'static str = "CheckMaintenance"; - - fn additional_signed(&self) -> Result { - Ok(()) - } - - fn pre_dispatch( - self, - who: &Self::AccountId, - call: &Self::Call, - info: &DispatchInfoOf, - len: usize, - ) -> Result { - self.validate(who, call, info, len).map(|_| ()) - } - - fn validate( - &self, - _who: &Self::AccountId, - call: &Self::Call, - _info: &DispatchInfoOf, - _len: usize, - ) -> TransactionValidity { - if Maintenance::is_enabled() { - match call { - Call::EvmMigration(_) - | Call::EVM(_) - | Call::Ethereum(_) - | Call::Inflation(_) - | Call::Maintenance(_) - | Call::Scheduler(_) - | Call::Structure(_) - | Call::Unique(_) => Err(TransactionValidityError::Invalid(InvalidTransaction::Call)), - - #[cfg(any(feature = "opal-runtime", feature = "quartz-runtime"))] - Call::RmrkCore(_) | Call::RmrkEquip(_) => { - Err(TransactionValidityError::Invalid(InvalidTransaction::Call)) - } - - _ => Ok(ValidTransaction::default()), - } - } else { - Ok(ValidTransaction::default()) - } - } - - fn pre_dispatch_unsigned( - call: &Self::Call, - info: 
&DispatchInfoOf, - len: usize, - ) -> Result<(), TransactionValidityError> { - Self::validate_unsigned(call, info, len).map(|_| ()) - } - - fn validate_unsigned( - call: &Self::Call, - _info: &DispatchInfoOf, - _len: usize, - ) -> TransactionValidity { - if Maintenance::is_enabled() { - match call { - Call::EVM(_) | Call::Ethereum(_) | Call::EvmMigration(_) => { - Err(TransactionValidityError::Invalid(InvalidTransaction::Call)) - } - _ => Ok(ValidTransaction::default()), - } - } else { - Ok(ValidTransaction::default()) - } - } -} - -construct_runtime!( - pub enum Runtime where - Block = Block, - NodeBlock = opaque::Block, - UncheckedExtrinsic = UncheckedExtrinsic - { - ParachainSystem: cumulus_pallet_parachain_system::{Pallet, Call, Config, Storage, Inherent, Event, ValidateUnsigned} = 20, - ParachainInfo: parachain_info::{Pallet, Storage, Config} = 21, - - Aura: pallet_aura::{Pallet, Config} = 22, - AuraExt: cumulus_pallet_aura_ext::{Pallet, Config} = 23, - - Balances: pallet_balances::{Pallet, Call, Storage, Config, Event} = 30, - RandomnessCollectiveFlip: pallet_randomness_collective_flip::{Pallet, Storage} = 31, - Timestamp: pallet_timestamp::{Pallet, Call, Storage, Inherent} = 32, - TransactionPayment: pallet_transaction_payment::{Pallet, Storage} = 33, - Treasury: pallet_treasury::{Pallet, Call, Storage, Config, Event} = 34, - Sudo: pallet_sudo::{Pallet, Call, Storage, Config, Event} = 35, - System: frame_system::{Pallet, Call, Storage, Config, Event} = 36, - Vesting: orml_vesting::{Pallet, Storage, Call, Event, Config} = 37, - // Vesting: pallet_vesting::{Pallet, Call, Config, Storage, Event} = 37, - // Contracts: pallet_contracts::{Pallet, Call, Storage, Event} = 38, - - // XCM helpers. - XcmpQueue: cumulus_pallet_xcmp_queue::{Pallet, Call, Storage, Event} = 50, - PolkadotXcm: pallet_xcm::{Pallet, Call, Event, Origin} = 51, - CumulusXcm: cumulus_pallet_xcm::{Pallet, Call, Event, Origin} = 52, - DmpQueue: cumulus_pallet_dmp_queue::{Pallet, Call, Storage, Event} = 53, - - // Unique Pallets - Inflation: pallet_inflation::{Pallet, Call, Storage} = 60, - Unique: pallet_unique::{Pallet, Call, Storage, Event} = 61, - Scheduler: pallet_unique_scheduler::{Pallet, Call, Storage, Event} = 62, - // free = 63 - Charging: pallet_charge_transaction::{Pallet, Call, Storage } = 64, - // ContractHelpers: pallet_contract_helpers::{Pallet, Call, Storage} = 65, - Common: pallet_common::{Pallet, Storage, Event} = 66, - Fungible: pallet_fungible::{Pallet, Storage} = 67, - Refungible: pallet_refungible::{Pallet, Storage} = 68, - Nonfungible: pallet_nonfungible::{Pallet, Storage} = 69, - Structure: pallet_structure::{Pallet, Call, Storage, Event} = 70, - - // Frontier - EVM: pallet_evm::{Pallet, Config, Call, Storage, Event} = 100, - Ethereum: pallet_ethereum::{Pallet, Config, Call, Storage, Event, Origin} = 101, - - EvmCoderSubstrate: pallet_evm_coder_substrate::{Pallet, Storage} = 150, - EvmContractHelpers: pallet_evm_contract_helpers::{Pallet, Storage} = 151, - EvmTransactionPayment: pallet_evm_transaction_payment::{Pallet} = 152, - EvmMigration: pallet_evm_migration::{Pallet, Call, Storage} = 153, - - Maintenance: pallet_maintenance::{Pallet, Call, Storage, Event} = 154, - } -); - -pub struct TransactionConverter; - -impl fp_rpc::ConvertTransaction for TransactionConverter { - fn convert_transaction(&self, transaction: pallet_ethereum::Transaction) -> UncheckedExtrinsic { - UncheckedExtrinsic::new_unsigned( - pallet_ethereum::Call::::transact { transaction }.into(), - ) - } -} - -impl 
fp_rpc::ConvertTransaction for TransactionConverter { - fn convert_transaction( - &self, - transaction: pallet_ethereum::Transaction, - ) -> opaque::UncheckedExtrinsic { - let extrinsic = UncheckedExtrinsic::new_unsigned( - pallet_ethereum::Call::::transact { transaction }.into(), - ); - let encoded = extrinsic.encode(); - opaque::UncheckedExtrinsic::decode(&mut &encoded[..]) - .expect("Encoded extrinsic is always valid") - } -} - -/// The address format for describing accounts. -pub type Address = sp_runtime::MultiAddress; -/// Block header type as expected by this runtime. -pub type Header = generic::Header; -/// Block type as expected by this runtime. -pub type Block = generic::Block; -/// A Block signed with a Justification -pub type SignedBlock = generic::SignedBlock; -/// BlockId type as expected by this runtime. -pub type BlockId = generic::BlockId; -/// The SignedExtension to the basic transaction logic. -pub type SignedExtra = ( - frame_system::CheckSpecVersion, - // system::CheckTxVersion, - frame_system::CheckGenesis, - frame_system::CheckEra, - frame_system::CheckNonce, - frame_system::CheckWeight, - CheckMaintenance, - pallet_charge_transaction::ChargeTransactionPayment, - //pallet_contract_helpers::ContractHelpersExtension, - pallet_ethereum::FakeTransactionFinalizer, -); -pub type SignedExtraScheduler = ( - frame_system::CheckSpecVersion, - frame_system::CheckGenesis, - frame_system::CheckEra, - frame_system::CheckNonce, - frame_system::CheckWeight, - CheckMaintenance, - // pallet_charge_transaction::ChargeTransactionPayment, -); -/// Unchecked extrinsic type as expected by this runtime. -pub type UncheckedExtrinsic = - fp_self_contained::UncheckedExtrinsic; -/// Extrinsic type that has already been checked. -pub type CheckedExtrinsic = fp_self_contained::CheckedExtrinsic; -/// Executive: handles dispatch to the various modules. -pub type Executive = frame_executive::Executive< - Runtime, - Block, - frame_system::ChainContext, - Runtime, - AllPalletsReversedWithSystemFirst, ->; - -impl_opaque_keys! { - pub struct SessionKeys { - pub aura: Aura, - } -} - -impl fp_self_contained::SelfContainedCall for Call { - type SignedInfo = H160; - - fn is_self_contained(&self) -> bool { - match self { - Call::Ethereum(call) => call.is_self_contained(), - _ => false, - } - } - - fn check_self_contained(&self) -> Option> { - match self { - Call::Ethereum(call) => call.check_self_contained(), - _ => None, - } - } - - fn validate_self_contained( - &self, - info: &Self::SignedInfo, - dispatch_info: &DispatchInfoOf, - len: usize, - ) -> Option { - match self { - Call::Ethereum(call) => call.validate_self_contained(info, dispatch_info, len), - _ => None, - } - } - - fn pre_dispatch_self_contained( - &self, - info: &Self::SignedInfo, - ) -> Option> { - match self { - Call::Ethereum(call) => call.pre_dispatch_self_contained(info), - _ => None, - } - } - - fn apply_self_contained( - self, - info: Self::SignedInfo, - ) -> Option>> { - match self { - call @ Call::Ethereum(pallet_ethereum::Call::transact { .. }) => Some(call.dispatch( - Origin::from(pallet_ethereum::RawOrigin::EthereumTransaction(info)), - )), - _ => None, - } - } -} - -macro_rules! dispatch_unique_runtime { - ($collection:ident.$method:ident($($name:ident),*)) => {{ - let collection = ::CollectionDispatch::dispatch(>::try_get($collection)?); - let dispatch = collection.as_dyn(); - - Ok::<_, DispatchError>(dispatch.$method($($name),*)) - }}; -} - -impl_common_runtime_apis! 
{ - #![custom_apis] - - impl rmrk_rpc::RmrkApi< - Block, - AccountId, - RmrkCollectionInfo, - RmrkInstanceInfo, - RmrkResourceInfo, - RmrkPropertyInfo, - RmrkBaseInfo, - RmrkPartType, - RmrkTheme - > for Runtime { - fn last_collection_idx() -> Result { - Ok(Default::default()) - } - - fn collection_by_id(_collection_id: RmrkCollectionId) -> Result>, DispatchError> { - Ok(Default::default()) - } - - fn nft_by_id(_collection_id: RmrkCollectionId, _nft_by_id: RmrkNftId) -> Result>, DispatchError> { - Ok(Default::default()) - } - - fn account_tokens(_account_id: AccountId, _collection_id: RmrkCollectionId) -> Result, DispatchError> { - Ok(Default::default()) - } - - fn nft_children(_collection_id: RmrkCollectionId, _nft_id: RmrkNftId) -> Result, DispatchError> { - Ok(Default::default()) - } - - fn collection_properties(_collection_id: RmrkCollectionId, _filter_keys: Option>) -> Result, DispatchError> { - Ok(Default::default()) - } - - fn nft_properties(_collection_id: RmrkCollectionId, _nft_id: RmrkNftId, _filter_keys: Option>) -> Result, DispatchError> { - Ok(Default::default()) - } - - fn nft_resources(_collection_id: RmrkCollectionId, _nft_id: RmrkNftId) -> Result, DispatchError> { - Ok(Default::default()) - } - - fn nft_resource_priority(_collection_id: RmrkCollectionId, _nft_id: RmrkNftId, _resource_id: RmrkResourceId) -> Result, DispatchError> { - Ok(Default::default()) - } - - fn base(_base_id: RmrkBaseId) -> Result>, DispatchError> { - Ok(Default::default()) - } - - fn base_parts(_base_id: RmrkBaseId) -> Result, DispatchError> { - Ok(Default::default()) - } - - fn theme_names(_base_id: RmrkBaseId) -> Result, DispatchError> { - Ok(Default::default()) - } - - fn theme(_base_id: RmrkBaseId, _theme_name: RmrkThemeName, _filter_keys: Option>) -> Result, DispatchError> { - Ok(Default::default()) - } - } -} - -struct CheckInherents; - -impl cumulus_pallet_parachain_system::CheckInherents for CheckInherents { - fn check_inherents( - block: &Block, - relay_state_proof: &cumulus_pallet_parachain_system::RelayChainStateProof, - ) -> sp_inherents::CheckInherentsResult { - let relay_chain_slot = relay_state_proof - .read_slot() - .expect("Could not read the relay chain slot from the proof"); - - let inherent_data = - cumulus_primitives_timestamp::InherentDataProvider::from_relay_chain_slot_and_duration( - relay_chain_slot, - sp_std::time::Duration::from_secs(6), - ) - .create_inherent_data() - .expect("Could not create the timestamp inherent data"); - - inherent_data.check_extrinsics(block) - } -} +impl_common_runtime_apis!(); cumulus_pallet_parachain_system::register_validate_block!( Runtime = Runtime, diff --git a/runtime/unique/src/tests/logcapture.rs b/runtime/unique/src/tests/logcapture.rs new file mode 100644 index 0000000000..ab7308513c --- /dev/null +++ b/runtime/unique/src/tests/logcapture.rs @@ -0,0 +1,25 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. 
+ +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +use logtest::Logger; +use super::xcm::unique_xcm_tests; + +#[test] +fn unique_log_capture_tests() { + let mut logger = Logger::start(); + + unique_xcm_tests(&mut logger); +} diff --git a/runtime/unique/src/tests/mod.rs b/runtime/unique/src/tests/mod.rs new file mode 100644 index 0000000000..587524b3c9 --- /dev/null +++ b/runtime/unique/src/tests/mod.rs @@ -0,0 +1,18 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +mod logcapture; +mod xcm; diff --git a/runtime/unique/src/tests/xcm.rs b/runtime/unique/src/tests/xcm.rs new file mode 100644 index 0000000000..0a6cff931e --- /dev/null +++ b/runtime/unique/src/tests/xcm.rs @@ -0,0 +1,27 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +use logtest::Logger; +use crate::{runtime_common::tests::xcm::*, xcm_barrier::Barrier}; + +const UNIQUE_PARA_ID: u32 = 2037; + +pub fn unique_xcm_tests(logger: &mut Logger) { + barrier_denies_transact::(logger); + + barrier_denies_transfer_from_unknown_location::(logger, UNIQUE_PARA_ID) + .expect("unique runtime denies an unknown location"); +} diff --git a/runtime/unique/src/xcm_barrier.rs b/runtime/unique/src/xcm_barrier.rs new file mode 100644 index 0000000000..8fa2d25534 --- /dev/null +++ b/runtime/unique/src/xcm_barrier.rs @@ -0,0 +1,83 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . 
+ +use frame_support::{ + match_types, parameter_types, + traits::{Get, Everything}, +}; +use sp_std::{vec, vec::Vec}; +use xcm::v1::{Junction::*, Junctions::*, MultiLocation}; +use xcm_builder::{ + AllowKnownQueryResponses, AllowSubscriptionsFrom, TakeWeightCredit, + AllowTopLevelPaidExecutionFrom, +}; + +use crate::{ + ParachainInfo, PolkadotXcm, + runtime_common::config::xcm::{DenyThenTry, DenyTransact, DenyExchangeWithUnknownLocation}, +}; + +match_types! { + pub type ParentOrSiblings: impl Contains = { + MultiLocation { parents: 1, interior: Here } | + MultiLocation { parents: 1, interior: X1(_) } + }; +} + +parameter_types! { + pub UniqueAllowedLocations: Vec = vec![ + // Self location + MultiLocation { + parents: 0, + interior: Here, + }, + // Parent location + MultiLocation { + parents: 1, + interior: Here, + }, + // Karura/Acala location + MultiLocation { + parents: 1, + interior: X1(Parachain(2000)), + }, + // Moonbeam location + MultiLocation { + parents: 1, + interior: X1(Parachain(2004)), + }, + // Self parachain address + MultiLocation { + parents: 1, + interior: X1(Parachain(ParachainInfo::get().into())), + }, + ]; +} + +pub type Barrier = DenyThenTry< + ( + DenyTransact, + DenyExchangeWithUnknownLocation, + ), + ( + TakeWeightCredit, + AllowTopLevelPaidExecutionFrom, + // Expected responses are OK. + AllowKnownQueryResponses, + // Subscriptions for version tracking are OK. + AllowSubscriptionsFrom, + ), +>; diff --git a/tests/.eslintrc.json b/tests/.eslintrc.json index b7177976ae..eb6fabc601 100644 --- a/tests/.eslintrc.json +++ b/tests/.eslintrc.json @@ -13,7 +13,8 @@ "sourceType": "module" }, "plugins": [ - "@typescript-eslint" + "@typescript-eslint", + "mocha" ], "rules": { "indent": [ @@ -42,6 +43,10 @@ "avoidEscape": true } ], + "require-await": 2, + "mocha/no-async-describe": "error", + "mocha/no-nested-tests": "error", + "mocha/no-synchronous-tests": "error", "semi": [ "error", "always" @@ -55,7 +60,13 @@ "@typescript-eslint/no-empty-function": "off", "@typescript-eslint/no-non-null-assertion": "off", "@typescript-eslint/no-explicit-any": "off", - "@typescript-eslint/no-unused-vars": "warn", + "@typescript-eslint/no-unused-vars": [ + "warn", + { + "varsIgnorePattern": "_.+", + "argsIgnorePattern": "_.+" + } + ], "no-async-promise-executor": "warn", "@typescript-eslint/no-empty-interface": "off", "prefer-const": [ diff --git a/tests/CHANGELOG.md b/tests/CHANGELOG.md new file mode 100644 index 0000000000..650629cdec --- /dev/null +++ b/tests/CHANGELOG.md @@ -0,0 +1,16 @@ +# Change Log + +All notable changes to this project will be documented in this file. + +## 2022-08-12 + +### Added + +- Added a check to the `RFT` integration tests for working with the maximum allowable number of pieces (MAX_REFUNGIBLE_PIECES). + +## 2022-07-14 + +### Added + +- Integration tests of the RPC method `token_owners`. +- Integration tests of the Fungible pallet. diff --git a/tests/README.md b/tests/README.md index 3ecf35c785..2adf063f4e 100644 --- a/tests/README.md +++ b/tests/README.md @@ -5,7 +5,7 @@ 1. Checkout polkadot in sibling folder with this project ```bash git clone https://github.com/paritytech/polkadot.git && cd polkadot -git checkout release-v0.9.24 +git checkout release-v0.9.27 ``` 2.
Build with nightly-2022-05-11 diff --git a/tests/package.json b/tests/package.json index a8bb72be05..df0c555348 100644 --- a/tests/package.json +++ b/tests/package.json @@ -4,49 +4,57 @@ "description": "Unique Chain Tests", "main": "", "devDependencies": { - "@polkadot/ts": "0.4.22", - "@polkadot/typegen": "8.7.2-15", - "@types/chai": "^4.3.1", + "@polkadot/typegen": "9.5.2", + "@types/chai": "^4.3.3", "@types/chai-as-promised": "^7.1.5", - "@types/mocha": "^9.1.1", - "@types/node": "^17.0.35", - "@typescript-eslint/eslint-plugin": "^5.26.0", - "@typescript-eslint/parser": "^5.26.0", + "@types/chai-like": "^1.1.1", + "@types/mocha": "^10.0.0", + "@types/node": "^18.11.2", + "@typescript-eslint/eslint-plugin": "^5.40.1", + "@typescript-eslint/parser": "^5.40.1", "chai": "^4.3.6", - "eslint": "^8.16.0", - "mocha": "^10.0.0", - "ts-node": "^10.8.0", - "typescript": "^4.7.2" + "eslint": "^8.25.0", + "eslint-plugin-mocha": "^10.1.0", + "mocha": "^10.1.0", + "ts-node": "^10.9.1", + "typescript": "^4.8.4" }, "mocha": { "timeout": 9999999, - "require": "ts-node/register" + "require": [ + "ts-node/register" + ] }, "scripts": { "lint": "eslint --ext .ts,.js src/", "fix": "eslint --ext .ts,.js src/ --fix", - "test": "mocha --timeout 9999999 -r ts-node/register './src/**/*.test.ts'", - "testEth": "mocha --timeout 9999999 -r ts-node/register './**/eth/**/*.test.ts'", - "testEthMarketplace": "mocha --timeout 9999999 -r ts-node/register './**/eth/marketplace/**/*.test.ts'", - "testEthNesting": "mocha --timeout 9999999 -r ts-node/register './**/eth/nesting/**/*.test.ts'", - "load": "mocha --timeout 9999999 -r ts-node/register './**/*.load.ts'", - "loadTransfer": "ts-node src/transfer.nload.ts", - "testCollision": "mocha --timeout 9999999 -r ts-node/register ./src/collision-tests/*.test.ts", - "testEvent": "mocha --timeout 9999999 -r ts-node/register ./src/check-event/*.test.ts", + "setup": "ts-node ./src/util/globalSetup.ts", + "test": "yarn setup && mocha --timeout 9999999 -r ts-node/register './src/**/*.*test.ts'", + "testParallelFull": "yarn testParallel && yarn testSequential", + "testParallel": "yarn setup && mocha --parallel --timeout 9999999 -r ts-node/register './src/**/*.test.ts'", + "testSequential": "yarn setup && mocha --timeout 9999999 -r ts-node/register './src/**/*.seqtest.ts'", + "testStructure": "yarn setup && mocha --timeout 9999999 -r ts-node/register ./**/nesting/*.*test.ts", + "testEth": "yarn setup && mocha --timeout 9999999 -r ts-node/register './**/eth/**/*.*test.ts'", + "testEthNesting": "yarn setup && mocha --timeout 9999999 -r ts-node/register './**/eth/nesting/**/*.*test.ts'", + "testEthFractionalizer": "yarn setup && mocha --timeout 9999999 -r ts-node/register './**/eth/fractionalizer/**/*.*test.ts'", + "testEthMarketplace": "yarn setup && mocha --timeout 9999999 -r ts-node/register './**/eth/marketplace/**/*.*test.ts'", + "testEvent": "yarn setup && mocha --timeout 9999999 -r ts-node/register ./src/check-event/*.*test.ts", + "testRmrk": "yarn setup && mocha --timeout 9999999 -r ts-node/register ./**/rmrk/*.*test.ts", + "testEthPayable": "mocha --timeout 9999999 -r ts-node/register './**/eth/payable.test.ts'", + "testEthTokenProperties": "mocha --timeout 9999999 -r ts-node/register ./**/eth/tokenProperties.test.ts", + "testEvmCoder": "mocha --timeout 9999999 -r ts-node/register './**/eth/evmCoder.test.ts'", "testNesting": "mocha --timeout 9999999 -r ts-node/register ./**/nest.test.ts", "testUnnesting": "mocha --timeout 9999999 -r ts-node/register ./**/unnest.test.ts", - 
"testStructure": "mocha --timeout 9999999 -r ts-node/register ./**/nesting/**.test.ts", - "testProperties": "mocha --timeout 9999999 -r ts-node/register ./**/properties.test.ts", + "testProperties": "mocha --timeout 9999999 -r ts-node/register ./**/properties.test.ts ./**/getPropertiesRpc.test.ts", "testMigration": "mocha --timeout 9999999 -r ts-node/register ./**/nesting/migration-check.test.ts", - "testRmrk": "mocha --timeout 9999999 -r ts-node/register ./**/rmrk/**.test.ts", "testAddCollectionAdmin": "mocha --timeout 9999999 -r ts-node/register ./**/addCollectionAdmin.test.ts", - "testSetSchemaVersion": "mocha --timeout 9999999 -r ts-node/register ./**/setSchemaVersion.test.ts", "testSetCollectionLimits": "mocha --timeout 9999999 -r ts-node/register ./**/setCollectionLimits.test.ts", + "testChangeCollectionOwner": "mocha --timeout 9999999 -r ts-node/register ./**/change-collection-owner.test.ts", "testSetCollectionSponsor": "mocha --timeout 9999999 -r ts-node/register ./**/setCollectionSponsor.test.ts", - "testConfirmSponsorship": "mocha --timeout 9999999 -r ts-node/register ./**/confirmSponsorship.test.ts", + "testConfirmSponsorship": "mocha --timeout 9999999 --parallel -r ts-node/register ./**/confirmSponsorship.test.ts", "testRemoveCollectionAdmin": "mocha --timeout 9999999 -r ts-node/register ./**/removeCollectionAdmin.test.ts", "testRemoveCollectionSponsor": "mocha --timeout 9999999 -r ts-node/register ./**/removeCollectionSponsor.test.ts", - "testRemoveFromAllowList": "mocha --timeout 9999999 -r ts-node/register ./**/removeFromAllowList.test.ts", + "testAllowLists": "mocha --timeout 9999999 -r ts-node/register ./**/allowLists.test.ts", "testConnection": "mocha --timeout 9999999 -r ts-node/register ./**/connection.test.ts", "testContracts": "mocha --timeout 9999999 -r ts-node/register ./**/contracts.test.ts", "testCreateItem": "mocha --timeout 9999999 -r ts-node/register ./**/createItem.test.ts", @@ -61,26 +69,40 @@ "testTransfer": "mocha --timeout 9999999 -r ts-node/register ./**/transfer.test.ts", "testBurnItem": "mocha --timeout 9999999 -r ts-node/register ./**/burnItem.test.ts", "testAdminTransferAndBurn": "mocha --timeout 9999999 -r ts-node/register ./**/adminTransferAndBurn.test.ts", - "testSetMintPermission": "mocha --timeout 9999999 -r ts-node/register ./**/setMintPermission.test.ts", - "testCreditFeesToTreasury": "mocha --timeout 9999999 -r ts-node/register ./**/creditFeesToTreasury.test.ts", - "testContractSponsoring": "mocha --timeout 9999999 -r ts-node/register ./**/contractSponsoring.test.ts", + "testSetPermissions": "mocha --timeout 9999999 -r ts-node/register ./**/setPermissions.test.ts", + "testCreditFeesToTreasury": "mocha --timeout 9999999 -r ts-node/register ./**/creditFeesToTreasury.seqtest.ts", + "testContractSponsoring": "mocha --timeout 9999999 -r ts-node/register ./**/eth/contractSponsoring.test.ts", "testEnableContractSponsoring": "mocha --timeout 9999999 -r ts-node/register ./**/enableContractSponsoring.test.ts", "testRemoveFromContractAllowList": "mocha --timeout 9999999 -r ts-node/register ./**/removeFromContractAllowList.test.ts", "testSetContractSponsoringRateLimit": "mocha --timeout 9999999 -r ts-node/register ./**/setContractSponsoringRateLimit.test.ts", "testSetOffchainSchema": "mocha --timeout 9999999 -r ts-node/register ./**/setOffchainSchema.test.ts", + "testNextSponsoring": "mocha --timeout 9999999 -r ts-node/register ./**/nextSponsoring.test.ts", "testOverflow": "mocha --timeout 9999999 -r ts-node/register ./**/overflow.test.ts", - 
"testSetVariableMetadataSponsoringRateLimit": "mocha --timeout 9999999 -r ts-node/register ./**/setVariableMetadataSponsoringRateLimit.test.ts", - "testInflation": "mocha --timeout 9999999 -r ts-node/register ./**/inflation.test.ts", - "testScheduler": "mocha --timeout 9999999 -r ts-node/register ./**/scheduler.test.ts", + "testMaintenance": "mocha --timeout 9999999 -r ts-node/register ./**/maintenanceMode.seqtest.ts", + "testInflation": "mocha --timeout 9999999 -r ts-node/register ./**/inflation.seqtest.ts", + "testScheduler": "mocha --timeout 9999999 -r ts-node/register ./**/scheduler.seqtest.ts", "testSchedulingEVM": "mocha --timeout 9999999 -r ts-node/register ./**/eth/scheduling.test.ts", - "testXcmTransfer": "mocha --timeout 9999999 -r ts-node/register ./**/xcmTransfer.test.ts", - "testMaintenance": "mocha --timeout 9999999 -r ts-node/register ./**/maintenanceMode.test.ts", "testPalletPresence": "mocha --timeout 9999999 -r ts-node/register ./**/pallet-presence.test.ts", "testBlockProduction": "mocha --timeout 9999999 -r ts-node/register ./**/block-production.test.ts", "testEnableDisableTransfers": "mocha --timeout 9999999 -r ts-node/register ./**/enableDisableTransfer.test.ts", "testLimits": "mocha --timeout 9999999 -r ts-node/register ./**/limits.test.ts", - "testEthCreateCollection": "mocha --timeout 9999999 -r ts-node/register ./**/eth/createCollection.test.ts", + "testEthCreateNFTCollection": "mocha --timeout 9999999 -r ts-node/register ./**/eth/createNFTCollection.test.ts", + "testEthCreateRFTCollection": "mocha --timeout 9999999 -r ts-node/register ./**/eth/createRFTCollection.test.ts", + "testEthNFT": "mocha --timeout 9999999 -r ts-node/register ./**/eth/nonFungible.test.ts", "testRFT": "mocha --timeout 9999999 -r ts-node/register ./**/refungible.test.ts", + "testEthRFT": "mocha --timeout 9999999 -r ts-node/register ./**/eth/reFungible.test.ts ./**/eth/reFungibleToken.test.ts", + "testFT": "mocha --timeout 9999999 -r ts-node/register ./**/fungible.test.ts", + "testEthFT": "mocha --timeout 9999999 -r ts-node/register ./**/eth/fungible.test.ts", + "testRPC": "mocha --timeout 9999999 -r ts-node/register ./**/rpc.test.ts", + "testPromotion": "yarn setup && mocha --timeout 9999999 -r ts-node/register ./**/app-promotion.*test.ts", + "testXcmUnique": "RUN_XCM_TESTS=1 mocha --timeout 9999999 -r ts-node/register ./**/xcm/xcmUnique.test.ts", + "testXcmQuartz": "RUN_XCM_TESTS=1 mocha --timeout 9999999 -r ts-node/register ./**/xcm/xcmQuartz.test.ts", + "testXcmOpal": "RUN_XCM_TESTS=1 mocha --timeout 9999999 -r ts-node/register ./**/xcm/xcmOpal.test.ts", + "testXcmTransferAcala": "mocha --timeout 9999999 -r ts-node/register ./**/xcm/xcmTransferAcala.test.ts acalaId=2000 uniqueId=5000", + "testXcmTransferStatemine": "mocha --timeout 9999999 -r ts-node/register ./**/xcm/xcmTransferStatemine.test.ts statemineId=1000 uniqueId=5000", + "testXcmTransferMoonbeam": "mocha --timeout 9999999 -r ts-node/register ./**/xcm/xcmTransferMoonbeam.test.ts", + "load": "mocha --timeout 9999999 -r ts-node/register './**/*.load.ts'", + "loadTransfer": "ts-node src/transfer.nload.ts", "polkadot-types-fetch-metadata": "curl -H 'Content-Type: application/json' -d '{\"id\":\"1\", \"jsonrpc\":\"2.0\", \"method\": \"state_getMetadata\", \"params\":[]}' http://localhost:9933 > src/interfaces/metadata.json", "polkadot-types-from-defs": "ts-node ./node_modules/.bin/polkadot-types-from-defs --endpoint src/interfaces/metadata.json --input src/interfaces/ --package .", "polkadot-types-from-chain": "ts-node 
./node_modules/.bin/polkadot-types-from-chain --endpoint src/interfaces/metadata.json --output src/interfaces/ --package .", @@ -90,27 +112,12 @@ "license": "SEE LICENSE IN ../LICENSE", "homepage": "", "dependencies": { - "@polkadot/api": "8.7.2-15", - "@polkadot/api-contract": "8.7.2-15", - "@polkadot/util-crypto": "9.4.1", - "bignumber.js": "^9.0.2", + "@polkadot/api": "9.5.2", + "@polkadot/util-crypto": "10.1.11", "chai-as-promised": "^7.1.1", + "chai-like": "^1.1.1", "find-process": "^1.4.7", - "solc": "0.8.14-fixed", - "web3": "^1.7.3" - }, - "standard": { - "globals": [ - "it", - "assert", - "beforeEach", - "afterEach", - "describe", - "contract", - "artifacts" - ] - }, - "resolutions": { - "simple-get": "^4.0.1" + "solc": "0.8.17", + "web3": "^1.8.0" } } diff --git a/tests/src/addToContractAllowList.test.ts b/tests/src/.outdated/addToContractAllowList.test.ts similarity index 95% rename from tests/src/addToContractAllowList.test.ts rename to tests/src/.outdated/addToContractAllowList.test.ts index 307b7b2f08..e5cd09da81 100644 --- a/tests/src/addToContractAllowList.test.ts +++ b/tests/src/.outdated/addToContractAllowList.test.ts @@ -16,17 +16,14 @@ import chai from 'chai'; import chaiAsPromised from 'chai-as-promised'; -import usingApi, {submitTransactionAsync, submitTransactionExpectFailAsync} from './substrate/substrate-api'; -import { - deployFlipper, -} from './util/contracthelpers'; -import { - getGenericResult, -} from './util/helpers'; +import usingApi, {submitTransactionAsync, submitTransactionExpectFailAsync} from '../substrate/substrate-api'; +import {deployFlipper} from '../deprecated-helpers/contracthelpers'; +import {getGenericResult} from '../deprecated-helpers/helpers'; chai.use(chaiAsPromised); const expect = chai.expect; +// todo:playgrounds skipped ~ postponed describe.skip('Integration Test addToContractAllowList', () => { it('Add an address to a contract allow list', async () => { diff --git a/tests/src/balance-transfer-contract/calls.wasm b/tests/src/.outdated/balance-transfer-contract/calls.wasm similarity index 100% rename from tests/src/balance-transfer-contract/calls.wasm rename to tests/src/.outdated/balance-transfer-contract/calls.wasm diff --git a/tests/src/balance-transfer-contract/metadata.json b/tests/src/.outdated/balance-transfer-contract/metadata.json similarity index 100% rename from tests/src/balance-transfer-contract/metadata.json rename to tests/src/.outdated/balance-transfer-contract/metadata.json diff --git a/tests/src/collision-tests/admVsOwnerChanges.test.ts b/tests/src/.outdated/collision-tests/admVsOwnerChanges.test.ts similarity index 100% rename from tests/src/collision-tests/admVsOwnerChanges.test.ts rename to tests/src/.outdated/collision-tests/admVsOwnerChanges.test.ts diff --git a/tests/src/collision-tests/admVsOwnerData.test.ts b/tests/src/.outdated/collision-tests/admVsOwnerData.test.ts similarity index 100% rename from tests/src/collision-tests/admVsOwnerData.test.ts rename to tests/src/.outdated/collision-tests/admVsOwnerData.test.ts diff --git a/tests/src/collision-tests/admVsOwnerTake.test.ts b/tests/src/.outdated/collision-tests/admVsOwnerTake.test.ts similarity index 100% rename from tests/src/collision-tests/admVsOwnerTake.test.ts rename to tests/src/.outdated/collision-tests/admVsOwnerTake.test.ts diff --git a/tests/src/collision-tests/adminDestroyCollection.test.ts b/tests/src/.outdated/collision-tests/adminDestroyCollection.test.ts similarity index 100% rename from tests/src/collision-tests/adminDestroyCollection.test.ts 
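The reworked scripts above split specs by file name: files ending in .test.ts are picked up by testParallel (mocha --parallel), files ending in .seqtest.ts run only in the sequential pass (testSequential), and the umbrella test script matches both through the *.*test.ts glob after running the new setup step. A minimal sketch of a spec that opts out of parallel execution purely through its name; the file name and body below are illustrative and not part of this change:

// tests/src/example.seqtest.ts : hypothetical file name. The .seqtest.ts suffix
// keeps it out of the testParallel glob (./src/**/*.test.ts), while the
// test and testSequential globs (*.*test.ts / *.seqtest.ts) still match it.
import {expect} from 'chai';

describe('Example: sequential-only spec', () => {
  it('touches chain-wide state, so it must not run alongside parallel specs', async () => {
    // a real spec of this kind would mutate global chain state here
    // (inflation, maintenance mode, scheduler, ...)
    expect(1 + 1).to.equal(2);
  });
});
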
rename to tests/src/.outdated/collision-tests/adminDestroyCollection.test.ts diff --git a/tests/src/collision-tests/adminLimitsOff.test.ts b/tests/src/.outdated/collision-tests/adminLimitsOff.test.ts similarity index 100% rename from tests/src/collision-tests/adminLimitsOff.test.ts rename to tests/src/.outdated/collision-tests/adminLimitsOff.test.ts diff --git a/tests/src/collision-tests/adminRightsOff.test.ts b/tests/src/.outdated/collision-tests/adminRightsOff.test.ts similarity index 100% rename from tests/src/collision-tests/adminRightsOff.test.ts rename to tests/src/.outdated/collision-tests/adminRightsOff.test.ts diff --git a/tests/src/collision-tests/setSponsorNewOwner.test.ts b/tests/src/.outdated/collision-tests/setSponsorNewOwner.test.ts similarity index 100% rename from tests/src/collision-tests/setSponsorNewOwner.test.ts rename to tests/src/.outdated/collision-tests/setSponsorNewOwner.test.ts diff --git a/tests/src/collision-tests/sponsorPayments.test.ts b/tests/src/.outdated/collision-tests/sponsorPayments.test.ts similarity index 100% rename from tests/src/collision-tests/sponsorPayments.test.ts rename to tests/src/.outdated/collision-tests/sponsorPayments.test.ts diff --git a/tests/src/collision-tests/tokenLimitsOff.test.ts b/tests/src/.outdated/collision-tests/tokenLimitsOff.test.ts similarity index 100% rename from tests/src/collision-tests/tokenLimitsOff.test.ts rename to tests/src/.outdated/collision-tests/tokenLimitsOff.test.ts diff --git a/tests/src/collision-tests/turnsOffMinting.test.ts b/tests/src/.outdated/collision-tests/turnsOffMinting.test.ts similarity index 100% rename from tests/src/collision-tests/turnsOffMinting.test.ts rename to tests/src/.outdated/collision-tests/turnsOffMinting.test.ts diff --git a/tests/src/contracts.test.ts b/tests/src/.outdated/contracts.test.ts similarity index 88% rename from tests/src/contracts.test.ts rename to tests/src/.outdated/contracts.test.ts index d3f398d327..4b77e8738a 100644 --- a/tests/src/contracts.test.ts +++ b/tests/src/.outdated/contracts.test.ts @@ -16,14 +16,10 @@ import chai from 'chai'; import chaiAsPromised from 'chai-as-promised'; -import usingApi, {submitTransactionAsync} from './substrate/substrate-api'; +import usingApi, {submitTransactionAsync} from '../substrate/substrate-api'; import fs from 'fs'; import {Abi, ContractPromise as Contract} from '@polkadot/api-contract'; -import { - deployFlipper, - getFlipValue, - deployTransferContract, -} from './util/contracthelpers'; +import {deployFlipper, getFlipValue, deployTransferContract} from '../deprecated-helpers/contracthelpers'; import { addToAllowListExpectSuccess, @@ -37,7 +33,7 @@ import { isAllowlisted, transferFromExpectSuccess, getTokenOwner, -} from './util/helpers'; +} from '../deprecated-helpers/helpers'; chai.use(chaiAsPromised); @@ -47,6 +43,7 @@ const value = 0; const gasLimit = 9000n * 1000000n; const marketContractAddress = '5CYN9j3YvRkqxewoxeSvRbhAym4465C57uMmX5j4yz99L5H6'; +// todo:playgrounds skipped ~ postponed describe.skip('Contracts', () => { it('Can deploy smart contract Flipper, instantiate it and call it\'s get and flip messages.', async () => { await usingApi(async (api, privateKeyWrapper) => { @@ -54,7 +51,7 @@ describe.skip('Contracts', () => { const initialGetResponse = await getFlipValue(contract, deployer); const bob = privateKeyWrapper('//Bob'); - const flip = contract.tx.flip(value, gasLimit); + const flip = contract.tx.flip({value, gasLimit}); await submitTransactionAsync(bob, flip); const afterFlipGetResponse = await 
getFlipValue(contract, deployer); @@ -89,7 +86,7 @@ describe.skip('Chain extensions', () => { expect(await getTokenOwner(api, collectionId, tokenId)).to.be.deep.equal(normalizeAccountId(alice.address)); // Transfer - const transferTx = contract.tx.transfer(value, gasLimit, bob.address, collectionId, tokenId, 1); + const transferTx = contract.tx.transfer({value, gasLimit}, bob.address, collectionId, tokenId, 1); const events = await submitTransactionAsync(alice, transferTx); const result = getGenericResult(events); expect(result.success).to.be.true; @@ -110,7 +107,7 @@ describe.skip('Chain extensions', () => { await addToAllowListExpectSuccess(alice, collectionId, contract.address); await addToAllowListExpectSuccess(alice, collectionId, bob.address); - const transferTx = contract.tx.createItem(value, gasLimit, bob.address, collectionId, {Nft: {const_data: '0x010203'}}); + const transferTx = contract.tx.createItem({value, gasLimit}, bob.address, collectionId, {Nft: {const_data: '0x010203'}}); const events = await submitTransactionAsync(alice, transferTx); const result = getGenericResult(events); expect(result.success).to.be.true; @@ -137,7 +134,7 @@ describe.skip('Chain extensions', () => { await addToAllowListExpectSuccess(alice, collectionId, contract.address); await addToAllowListExpectSuccess(alice, collectionId, bob.address); - const transferTx = contract.tx.createMultipleItems(value, gasLimit, bob.address, collectionId, [ + const transferTx = contract.tx.createMultipleItems({value, gasLimit}, bob.address, collectionId, [ {NFT: {/*const_data: '0x010203'*/}}, {NFT: {/*const_data: '0x010204'*/}}, {NFT: {/*const_data: '0x010205'*/}}, @@ -176,7 +173,7 @@ describe.skip('Chain extensions', () => { const [contract] = await deployTransferContract(api, privateKeyWrapper); const tokenId = await createItemExpectSuccess(alice, collectionId, 'NFT', contract.address.toString()); - const transferTx = contract.tx.approve(value, gasLimit, bob.address, collectionId, tokenId, 1); + const transferTx = contract.tx.approve({value, gasLimit}, bob.address, collectionId, tokenId, 1); const events = await submitTransactionAsync(alice, transferTx); const result = getGenericResult(events); expect(result.success).to.be.true; @@ -196,7 +193,7 @@ describe.skip('Chain extensions', () => { const tokenId = await createItemExpectSuccess(alice, collectionId, 'NFT', bob.address); await approveExpectSuccess(collectionId, tokenId, bob, contract.address.toString(), 1); - const transferTx = contract.tx.transferFrom(value, gasLimit, bob.address, charlie.address, collectionId, tokenId, 1); + const transferTx = contract.tx.transferFrom({value, gasLimit}, bob.address, charlie.address, collectionId, tokenId, 1); const events = await submitTransactionAsync(alice, transferTx); const result = getGenericResult(events); expect(result.success).to.be.true; @@ -219,7 +216,7 @@ describe.skip('Chain extensions', () => { expect(await isAllowlisted(api, collectionId, bob.address)).to.be.false; { - const transferTx = contract.tx.toggleAllowList(value, gasLimit, collectionId, bob.address, true); + const transferTx = contract.tx.toggleAllowList({value, gasLimit}, collectionId, bob.address, true); const events = await submitTransactionAsync(alice, transferTx); const result = getGenericResult(events); expect(result.success).to.be.true; @@ -227,7 +224,7 @@ describe.skip('Chain extensions', () => { expect(await isAllowlisted(api, collectionId, bob.address)).to.be.true; } { - const transferTx = contract.tx.toggleAllowList(value, gasLimit, 
collectionId, bob.address, false); + const transferTx = contract.tx.toggleAllowList({value, gasLimit}, collectionId, bob.address, false); const events = await submitTransactionAsync(alice, transferTx); const result = getGenericResult(events); expect(result.success).to.be.true; diff --git a/tests/src/enableContractSponsoring.test.ts b/tests/src/.outdated/enableContractSponsoring.test.ts similarity index 95% rename from tests/src/enableContractSponsoring.test.ts rename to tests/src/.outdated/enableContractSponsoring.test.ts index 46c3386dde..d8df57e80e 100644 --- a/tests/src/enableContractSponsoring.test.ts +++ b/tests/src/.outdated/enableContractSponsoring.test.ts @@ -17,18 +17,19 @@ import {IKeyringPair} from '@polkadot/types/types'; import chai from 'chai'; import chaiAsPromised from 'chai-as-promised'; -import usingApi from './substrate/substrate-api'; -import {deployFlipper, getFlipValue, toggleFlipValueExpectSuccess} from './util/contracthelpers'; +import usingApi from '../substrate/substrate-api'; +import {deployFlipper, getFlipValue, toggleFlipValueExpectSuccess} from '../deprecated-helpers/contracthelpers'; import { enableContractSponsoringExpectFailure, enableContractSponsoringExpectSuccess, findUnusedAddress, setContractSponsoringRateLimitExpectSuccess, -} from './util/helpers'; +} from '../deprecated-helpers/helpers'; chai.use(chaiAsPromised); const expect = chai.expect; +// todo:playgrounds skipped ~ postponed describe.skip('Integration Test enableContractSponsoring', () => { it('ensure tx fee is paid from endowment', async () => { await usingApi(async (api, privateKeyWrapper) => { diff --git a/tests/src/eth/scheduling.test.ts b/tests/src/.outdated/eth/scheduling.test.ts similarity index 84% rename from tests/src/eth/scheduling.test.ts rename to tests/src/.outdated/eth/scheduling.test.ts index 1fc0ff0f84..91ba16b6de 100644 --- a/tests/src/eth/scheduling.test.ts +++ b/tests/src/.outdated/eth/scheduling.test.ts @@ -15,10 +15,15 @@ // along with Unique Network. If not, see . 
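The contract hunks above (contracts.test.ts, and toggleContractAllowList.test.ts further down) appear to track a @polkadot/api-contract change: message calls now take a single options object ({value, gasLimit}) ahead of the message arguments instead of positional value and gas parameters. A minimal sketch of the updated calling convention, assuming the flipper metadata and a deployed contract address are available; the endpoint, metadata path and wrapper function are placeholders, not part of this change:

import {ApiPromise, WsProvider} from '@polkadot/api';
import {Abi, ContractPromise} from '@polkadot/api-contract';
import fs from 'fs';

// Sketch only: endpoint, metadata path and contract address are placeholders.
async function flipOnce(contractAddress: string): Promise<void> {
  const api = await ApiPromise.create({provider: new WsProvider('ws://127.0.0.1:9944')});
  const abi = new Abi(JSON.parse(fs.readFileSync('src/.outdated/flipper/metadata.json', 'utf8')));
  const contract = new ContractPromise(api, abi, contractAddress);

  const value = 0;                    // balance transferred with the call
  const gasLimit = 3000n * 1000000n;  // same constant the tests use

  // old form: contract.tx.flip(value, gasLimit)
  // new form: options object first, then the message arguments (flip takes none)
  const flip = contract.tx.flip({value, gasLimit});

  // the tests sign and send this via submitTransactionAsync(signer, flip);
  // here we only build the extrinsic and disconnect
  void flip;
  await api.disconnect();
}
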
import {expect} from 'chai'; -import {createEthAccountWithBalance, deployFlipper, GAS_ARGS, itWeb3, subToEth, transferBalanceToEth} from './util/helpers'; -import {scheduleExpectSuccess, waitNewBlocks} from '../util/helpers'; +import {createEthAccountWithBalance, deployFlipper, GAS_ARGS, itWeb3, subToEth, transferBalanceToEth} from '../../deprecated-helpers/eth/helpers'; +import {scheduleExpectSuccess, waitNewBlocks, requirePallets, Pallets} from '../../deprecated-helpers/helpers'; + +// TODO mrshiposha update this test in #581 +describe.skip('Scheduing EVM smart contracts', () => { + before(async function() { + await requirePallets(this, [Pallets.Scheduler]); + }); -describe('Scheduing EVM smart contracts', () => { itWeb3('Successfully schedules and periodically executes an EVM contract', async ({api, web3, privateKeyWrapper}) => { const deployer = await createEthAccountWithBalance(api, web3, privateKeyWrapper); const flipper = await deployFlipper(web3, deployer); @@ -51,4 +56,4 @@ describe('Scheduing EVM smart contracts', () => { expect(await flipper.methods.getValue().call()).to.be.equal(initialValue); } }); -}); \ No newline at end of file +}); diff --git a/tests/src/flipper/flipper.wasm b/tests/src/.outdated/flipper/flipper.wasm similarity index 100% rename from tests/src/flipper/flipper.wasm rename to tests/src/.outdated/flipper/flipper.wasm diff --git a/tests/src/flipper/metadata.json b/tests/src/.outdated/flipper/metadata.json similarity index 100% rename from tests/src/flipper/metadata.json rename to tests/src/.outdated/flipper/metadata.json diff --git a/tests/src/load_test_sc/loadtester.wasm b/tests/src/.outdated/load_test_sc/loadtester.wasm similarity index 100% rename from tests/src/load_test_sc/loadtester.wasm rename to tests/src/.outdated/load_test_sc/loadtester.wasm diff --git a/tests/src/load_test_sc/metadata.json b/tests/src/.outdated/load_test_sc/metadata.json similarity index 100% rename from tests/src/load_test_sc/metadata.json rename to tests/src/.outdated/load_test_sc/metadata.json diff --git a/tests/src/overflow.test.ts b/tests/src/.outdated/overflow.test.ts similarity index 95% rename from tests/src/overflow.test.ts rename to tests/src/.outdated/overflow.test.ts index 7147d6b114..a413b3bc14 100644 --- a/tests/src/overflow.test.ts +++ b/tests/src/.outdated/overflow.test.ts @@ -17,12 +17,13 @@ import {IKeyringPair} from '@polkadot/types/types'; import chai from 'chai'; import chaiAsPromised from 'chai-as-promised'; -import usingApi from './substrate/substrate-api'; -import {approveExpectSuccess, createCollectionExpectSuccess, createFungibleItemExpectSuccess, getAllowance, getBalance, transferExpectFailure, transferExpectSuccess, transferFromExpectFail, transferFromExpectSuccess, U128_MAX} from './util/helpers'; +import usingApi from '../substrate/substrate-api'; +import {approveExpectSuccess, createCollectionExpectSuccess, createFungibleItemExpectSuccess, getAllowance, getBalance, transferExpectFailure, transferExpectSuccess, transferFromExpectFail, transferFromExpectSuccess, U128_MAX} from '../deprecated-helpers/helpers'; chai.use(chaiAsPromised); const expect = chai.expect; +// todo:playgrounds skipped ~ postponed describe.skip('Integration Test fungible overflows', () => { let alice: IKeyringPair; let bob: IKeyringPair; diff --git a/tests/src/removeFromContractAllowList.test.ts b/tests/src/.outdated/removeFromContractAllowList.test.ts similarity index 95% rename from tests/src/removeFromContractAllowList.test.ts rename to 
tests/src/.outdated/removeFromContractAllowList.test.ts index adf7afe8e8..d23e668a83 100644 --- a/tests/src/removeFromContractAllowList.test.ts +++ b/tests/src/.outdated/removeFromContractAllowList.test.ts @@ -14,12 +14,13 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . -import usingApi from './substrate/substrate-api'; -import {deployFlipper, toggleFlipValueExpectFailure, toggleFlipValueExpectSuccess} from './util/contracthelpers'; -import {addToContractAllowListExpectSuccess, isAllowlistedInContract, removeFromContractAllowListExpectFailure, removeFromContractAllowListExpectSuccess, toggleContractAllowlistExpectSuccess} from './util/helpers'; +import usingApi from '../substrate/substrate-api'; +import {deployFlipper, toggleFlipValueExpectFailure, toggleFlipValueExpectSuccess} from '../deprecated-helpers/contracthelpers'; +import {addToContractAllowListExpectSuccess, isAllowlistedInContract, removeFromContractAllowListExpectFailure, removeFromContractAllowListExpectSuccess, toggleContractAllowlistExpectSuccess} from '../deprecated-helpers/helpers'; import {IKeyringPair} from '@polkadot/types/types'; import {expect} from 'chai'; +// todo:playgrounds skipped again describe.skip('Integration Test removeFromContractAllowList', () => { let bob: IKeyringPair; diff --git a/tests/src/scheduler.test.ts b/tests/src/.outdated/scheduler.test.ts similarity index 98% rename from tests/src/scheduler.test.ts rename to tests/src/.outdated/scheduler.test.ts index 429c906101..08a257cb48 100644 --- a/tests/src/scheduler.test.ts +++ b/tests/src/.outdated/scheduler.test.ts @@ -19,7 +19,7 @@ import chaiAsPromised from 'chai-as-promised'; import { default as usingApi, submitTransactionAsync, -} from './substrate/substrate-api'; +} from '../substrate/substrate-api'; import { createItemExpectSuccess, createCollectionExpectSuccess, @@ -39,11 +39,12 @@ import { getFreeBalance, confirmSponsorshipByKeyExpectSuccess, scheduleExpectFailure, -} from './util/helpers'; +} from '../deprecated-helpers/helpers'; import {IKeyringPair} from '@polkadot/types/types'; chai.use(chaiAsPromised); +// todo:playgrounds skipped ~ postponed describe.skip('Scheduling token and balance transfers', () => { let alice: IKeyringPair; let bob: IKeyringPair; diff --git a/tests/src/setChainLimits.test.ts b/tests/src/.outdated/setChainLimits.test.ts similarity index 94% rename from tests/src/setChainLimits.test.ts rename to tests/src/.outdated/setChainLimits.test.ts index 314b3a031b..54531d2296 100644 --- a/tests/src/setChainLimits.test.ts +++ b/tests/src/.outdated/setChainLimits.test.ts @@ -15,14 +15,15 @@ // along with Unique Network. If not, see . 
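The relocated eth/scheduling.test.ts above adds a guard that the old version lacked: before running, the suite calls requirePallets(this, [Pallets.Scheduler]). A minimal sketch of that pattern, assuming requirePallets marks the current Mocha suite as skipped when a listed pallet is missing from the connected runtime; the import path mirrors the moved test, and the suite body is illustrative:

import {requirePallets, Pallets} from '../../deprecated-helpers/helpers';

describe('Example: suite gated on a runtime pallet', () => {
  // a plain function (not an arrow) so that `this` is the Mocha suite context
  before(async function() {
    // presumably skips the whole suite when the connected chain
    // does not expose the Scheduler pallet
    await requirePallets(this, [Pallets.Scheduler]);
  });

  it('only runs when pallet-scheduler is compiled into the runtime', async () => {
    // scheduling-specific assertions would go here
  });
});
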
import {IKeyringPair} from '@polkadot/types/types'; -import usingApi from './substrate/substrate-api'; +import usingApi from '../substrate/substrate-api'; import { createCollectionExpectSuccess, addCollectionAdminExpectSuccess, setChainLimitsExpectFailure, IChainLimits, -} from './util/helpers'; +} from '../deprecated-helpers/helpers'; +// todo:playgrounds skipped ~ postponed describe.skip('Negative Integration Test setChainLimits', () => { let alice: IKeyringPair; let bob: IKeyringPair; diff --git a/tests/src/setContractSponsoringRateLimit.test.ts b/tests/src/.outdated/setContractSponsoringRateLimit.test.ts similarity index 92% rename from tests/src/setContractSponsoringRateLimit.test.ts rename to tests/src/.outdated/setContractSponsoringRateLimit.test.ts index 4f0bc7f410..48dcf907dd 100644 --- a/tests/src/setContractSponsoringRateLimit.test.ts +++ b/tests/src/.outdated/setContractSponsoringRateLimit.test.ts @@ -15,16 +15,17 @@ // along with Unique Network. If not, see . import {IKeyringPair} from '@polkadot/types/types'; -import usingApi from './substrate/substrate-api'; -import waitNewBlocks from './substrate/wait-new-blocks'; -import {deployFlipper, toggleFlipValueExpectFailure, toggleFlipValueExpectSuccess} from './util/contracthelpers'; +import usingApi from '../substrate/substrate-api'; +import waitNewBlocks from '../substrate/wait-new-blocks'; +import {deployFlipper, toggleFlipValueExpectFailure, toggleFlipValueExpectSuccess} from '../deprecated-helpers/contracthelpers'; import { enableContractSponsoringExpectSuccess, findUnusedAddress, setContractSponsoringRateLimitExpectFailure, setContractSponsoringRateLimitExpectSuccess, -} from './util/helpers'; +} from '../deprecated-helpers/helpers'; +// todo:playgrounds skipped~postponed test describe.skip('Integration Test setContractSponsoringRateLimit', () => { it('ensure sponsored contract can\'t be called twice without pause for free', async () => { await usingApi(async (api, privateKeyWrapper) => { diff --git a/tests/src/toggleContractAllowList.test.ts b/tests/src/.outdated/toggleContractAllowList.test.ts similarity index 92% rename from tests/src/toggleContractAllowList.test.ts rename to tests/src/.outdated/toggleContractAllowList.test.ts index af500467f4..34fd608bcb 100644 --- a/tests/src/toggleContractAllowList.test.ts +++ b/tests/src/.outdated/toggleContractAllowList.test.ts @@ -16,14 +16,14 @@ import chai from 'chai'; import chaiAsPromised from 'chai-as-promised'; -import usingApi, {submitTransactionAsync, submitTransactionExpectFailAsync} from './substrate/substrate-api'; +import usingApi, {submitTransactionAsync, submitTransactionExpectFailAsync} from '../substrate/substrate-api'; import { deployFlipper, getFlipValue, -} from './util/contracthelpers'; +} from '../deprecated-helpers/contracthelpers'; import { getGenericResult, -} from './util/helpers'; +} from '../deprecated-helpers/helpers'; chai.use(chaiAsPromised); const expect = chai.expect; @@ -31,6 +31,7 @@ const expect = chai.expect; const value = 0; const gasLimit = 3000n * 1000000n; +// todo:playgrounds skipped ~ postpone describe.skip('Integration Test toggleContractAllowList', () => { it('Enable allow list contract mode', async () => { @@ -55,14 +56,14 @@ describe.skip('Integration Test toggleContractAllowList', () => { const [contract, deployer] = await deployFlipper(api, privateKeyWrapper); let flipValueBefore = await getFlipValue(contract, deployer); - const flip = contract.tx.flip(value, gasLimit); + const flip = contract.tx.flip({value, gasLimit}); await 
submitTransactionAsync(bob, flip); const flipValueAfter = await getFlipValue(contract,deployer); expect(flipValueAfter).to.be.eq(!flipValueBefore, 'Anyone can call new contract.'); const deployerCanFlip = async () => { const flipValueBefore = await getFlipValue(contract, deployer); - const deployerFlip = contract.tx.flip(value, gasLimit); + const deployerFlip = contract.tx.flip({value, gasLimit}); await submitTransactionAsync(deployer, deployerFlip); const aliceFlip1Response = await getFlipValue(contract, deployer); expect(aliceFlip1Response).to.be.eq(!flipValueBefore, 'Deployer always can flip.'); @@ -72,7 +73,7 @@ describe.skip('Integration Test toggleContractAllowList', () => { flipValueBefore = await getFlipValue(contract, deployer); const enableAllowListTx = api.tx.unique.toggleContractAllowList(contract.address, true); await submitTransactionAsync(deployer, enableAllowListTx); - const flipWithEnabledAllowList = contract.tx.flip(value, gasLimit); + const flipWithEnabledAllowList = contract.tx.flip({value, gasLimit}); await expect(submitTransactionExpectFailAsync(bob, flipWithEnabledAllowList)).to.be.rejected; const flipValueAfterEnableAllowList = await getFlipValue(contract, deployer); expect(flipValueAfterEnableAllowList).to.be.eq(flipValueBefore, 'Enabling allowlist doesn\'t make it possible to call contract for everyone.'); @@ -82,7 +83,7 @@ describe.skip('Integration Test toggleContractAllowList', () => { flipValueBefore = await getFlipValue(contract, deployer); const addBobToAllowListTx = api.tx.unique.addToContractAllowList(contract.address, bob.address); await submitTransactionAsync(deployer, addBobToAllowListTx); - const flipWithAllowlistedBob = contract.tx.flip(value, gasLimit); + const flipWithAllowlistedBob = contract.tx.flip({value, gasLimit}); await submitTransactionAsync(bob, flipWithAllowlistedBob); const flipAfterAllowListed = await getFlipValue(contract,deployer); expect(flipAfterAllowListed).to.be.eq(!flipValueBefore, 'Bob was allowlisted, now he can flip.'); @@ -92,7 +93,7 @@ describe.skip('Integration Test toggleContractAllowList', () => { flipValueBefore = await getFlipValue(contract, deployer); const removeBobFromAllowListTx = api.tx.unique.removeFromContractAllowList(contract.address, bob.address); await submitTransactionAsync(deployer, removeBobFromAllowListTx); - const bobRemoved = contract.tx.flip(value, gasLimit); + const bobRemoved = contract.tx.flip({value, gasLimit}); await expect(submitTransactionExpectFailAsync(bob, bobRemoved)).to.be.rejected; const afterBobRemoved = await getFlipValue(contract, deployer); expect(afterBobRemoved).to.be.eq(flipValueBefore, 'Bob can\'t call contract, now when he is removeed from allow list.'); @@ -102,7 +103,7 @@ describe.skip('Integration Test toggleContractAllowList', () => { flipValueBefore = await getFlipValue(contract, deployer); const disableAllowListTx = api.tx.unique.toggleContractAllowList(contract.address, false); await submitTransactionAsync(deployer, disableAllowListTx); - const allowListDisabledFlip = contract.tx.flip(value, gasLimit); + const allowListDisabledFlip = contract.tx.flip({value, gasLimit}); await submitTransactionAsync(bob, allowListDisabledFlip); const afterAllowListDisabled = await getFlipValue(contract,deployer); expect(afterAllowListDisabled).to.be.eq(!flipValueBefore, 'Anyone can call contract with disabled allowlist.'); diff --git a/tests/src/transfer_contract/metadata.json b/tests/src/.outdated/transfer_contract/metadata.json similarity index 100% rename from 
tests/src/transfer_contract/metadata.json rename to tests/src/.outdated/transfer_contract/metadata.json diff --git a/tests/src/transfer_contract/nft_transfer.wasm b/tests/src/.outdated/transfer_contract/nft_transfer.wasm similarity index 100% rename from tests/src/transfer_contract/nft_transfer.wasm rename to tests/src/.outdated/transfer_contract/nft_transfer.wasm diff --git a/tests/src/xcmTransfer.test.ts b/tests/src/.outdated/xcmTransfer.test.ts similarity index 86% rename from tests/src/xcmTransfer.test.ts rename to tests/src/.outdated/xcmTransfer.test.ts index a1c6c336c8..37e6ddfc5b 100644 --- a/tests/src/xcmTransfer.test.ts +++ b/tests/src/.outdated/xcmTransfer.test.ts @@ -20,10 +20,10 @@ import chaiAsPromised from 'chai-as-promised'; import {WsProvider} from '@polkadot/api'; import {ApiOptions} from '@polkadot/api/types'; import {IKeyringPair} from '@polkadot/types/types'; -import usingApi, {submitTransactionAsync} from './substrate/substrate-api'; -import {getGenericResult} from './util/helpers'; -import waitNewBlocks from './substrate/wait-new-blocks'; -import getBalance from './substrate/get-balance'; +import usingApi, {submitTransactionAsync} from '../substrate/substrate-api'; +import {getGenericResult} from '../deprecated-helpers/helpers'; +import waitNewBlocks from '../substrate/wait-new-blocks'; +import getBalance from '../substrate/get-balance'; chai.use(chaiAsPromised); const expect = chai.expect; @@ -31,10 +31,12 @@ const expect = chai.expect; const UNIQUE_CHAIN = 1000; const KARURA_CHAIN = 2000; const KARURA_PORT = '9946'; +const TRANSFER_AMOUNT = 2000000000000000000000000n; +// todo:playgrounds refit when XCM drops describe.skip('Integration test: Exchanging QTZ with Karura', () => { let alice: IKeyringPair; - + before(async () => { await usingApi(async (api, privateKeyWrapper) => { alice = privateKeyWrapper('//Alice'); @@ -74,9 +76,9 @@ describe.skip('Integration test: Exchanging QTZ with Karura', () => { it('Should connect and send QTZ to Karura', async () => { let balanceOnKaruraBefore: bigint; - + await usingApi(async (api) => { - const {free} = (await api.query.tokens.accounts(alice.addressRaw, {ForeignAsset: 0})).toJSON() as any; + const {free} = (await api.query.tokens.accounts(alice.addressRaw, {ForeignAssetId: 0})).toJSON() as any; balanceOnKaruraBefore = free; }, {provider: new WsProvider('ws://127.0.0.1:' + KARURA_PORT)}); @@ -113,7 +115,7 @@ describe.skip('Integration test: Exchanging QTZ with Karura', () => { }, }, fun: { - Fungible: 5000000000, + Fungible: TRANSFER_AMOUNT, }, }, ], @@ -134,41 +136,39 @@ describe.skip('Integration test: Exchanging QTZ with Karura', () => { await usingApi(async (api) => { // todo do something about instant sealing, where there might not be any new blocks await waitNewBlocks(api, 3); - const {free} = (await api.query.tokens.accounts(alice.addressRaw, {ForeignAsset: 0})).toJSON() as any; + const {free} = (await api.query.tokens.accounts(alice.addressRaw, {ForeignAssetId: 0})).toJSON() as any; expect(free > balanceOnKaruraBefore).to.be.true; }, {provider: new WsProvider('ws://127.0.0.1:' + KARURA_PORT)}); }); it('Should connect to Karura and send QTZ back', async () => { let balanceBefore: bigint; - + await usingApi(async (api) => { [balanceBefore] = await getBalance(api, [alice.address]); }); await usingApi(async (api) => { const destination = { - V0: { - X3: [ - 'Parent', - { - Parachain: UNIQUE_CHAIN, - }, - { - AccountId32: { + V1: { + parents: 1, + interior: { + X2: [ + {Parachain: UNIQUE_CHAIN}, + {AccountId32: { network: 
'Any', id: alice.addressRaw, - }, - }, - ], + }}, + ], + }, }, }; const id = { - ForeignAsset: 0, + ForeignAssetId: 0, }; - const amount = 5000000000; + const amount = TRANSFER_AMOUNT; const destWeight = 50000000; const tx = api.tx.xTokens.transfer(id, amount, destination, destWeight); diff --git a/tests/src/addCollectionAdmin.test.ts b/tests/src/addCollectionAdmin.test.ts index 100c572451..7d22084f1c 100644 --- a/tests/src/addCollectionAdmin.test.ts +++ b/tests/src/addCollectionAdmin.test.ts @@ -14,136 +14,112 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . -import {ApiPromise} from '@polkadot/api'; -import chai from 'chai'; -import chaiAsPromised from 'chai-as-promised'; -import {default as usingApi, submitTransactionAsync, submitTransactionExpectFailAsync} from './substrate/substrate-api'; -import {addCollectionAdminExpectSuccess, createCollectionExpectSuccess, destroyCollectionExpectSuccess, getAdminList, normalizeAccountId, queryCollectionExpectSuccess} from './util/helpers'; - -chai.use(chaiAsPromised); -const expect = chai.expect; +import {IKeyringPair} from '@polkadot/types/types'; +import {itSub, usingPlaygrounds, expect} from './util'; describe('Integration Test addCollectionAdmin(collection_id, new_admin_id):', () => { - it('Add collection admin.', async () => { - await usingApi(async (api, privateKeyWrapper) => { - const collectionId = await createCollectionExpectSuccess(); - const alice = privateKeyWrapper('//Alice'); - const bob = privateKeyWrapper('//Bob'); + let donor: IKeyringPair; + + before(async () => { + await usingPlaygrounds(async (_, privateKey) => { + donor = await privateKey({filename: __filename}); + }); + }); - const collection = await queryCollectionExpectSuccess(api, collectionId); - expect(collection.owner.toString()).to.be.equal(alice.address); + itSub('Add collection admin.', async ({helper}) => { + const [alice, bob] = await helper.arrange.createAccounts([10n, 10n], donor); + const {collectionId} = await helper.nft.mintCollection(alice, {name: 'Collection Name', description: 'Collection Description', tokenPrefix: 'COL'}); - const changeAdminTx = api.tx.unique.addCollectionAdmin(collectionId, normalizeAccountId(bob.address)); - await submitTransactionAsync(alice, changeAdminTx); + const collection = await helper.collection.getData(collectionId); + expect(collection!.normalizedOwner!).to.be.equal(helper.address.normalizeSubstrate(alice.address)); - const adminListAfterAddAdmin = await getAdminList(api, collectionId); - expect(adminListAfterAddAdmin).to.be.deep.contains(normalizeAccountId(bob.address)); - }); + await helper.nft.addAdmin(alice, collectionId, {Substrate: bob.address}); + + const adminListAfterAddAdmin = await helper.collection.getAdmins(collectionId); + expect(adminListAfterAddAdmin).to.be.deep.contains({Substrate: bob.address}); }); }); describe('Negative Integration Test addCollectionAdmin(collection_id, new_admin_id):', () => { - it("Not owner can't add collection admin.", async () => { - await usingApi(async (api, privateKeyWrapper) => { - const collectionId = await createCollectionExpectSuccess(); - const alice = privateKeyWrapper('//Alice'); - const bob = privateKeyWrapper('//Bob'); - const charlie = privateKeyWrapper('//CHARLIE'); - - const collection = await queryCollectionExpectSuccess(api, collectionId); - expect(collection.owner.toString()).to.be.equal(alice.address); - - const adminListAfterAddAdmin = await getAdminList(api, collectionId); - 
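The xcmTransfer.test.ts hunk above rewrites the return-transfer destination from a V0 junction list to a V1 MultiLocation and switches the Karura-side asset key from ForeignAsset to ForeignAssetId. A compact sketch of the resulting xTokens.transfer call, using only values that appear in the test; the wrapper function itself is illustrative:

import type {ApiPromise} from '@polkadot/api';
import type {IKeyringPair} from '@polkadot/types/types';

const UNIQUE_CHAIN = 1000;
const TRANSFER_AMOUNT = 2000000000000000000000000n;

// Builds the Karura -> Unique transfer with the V1 destination format used above.
function buildReturnTransfer(api: ApiPromise, alice: IKeyringPair) {
  const destination = {
    V1: {
      parents: 1,                     // up to the relay chain...
      interior: {
        X2: [
          {Parachain: UNIQUE_CHAIN},  // ...then down into the Unique parachain
          {AccountId32: {network: 'Any', id: alice.addressRaw}},
        ],
      },
    },
  };
  const destWeight = 50000000;
  // ForeignAssetId replaces the old ForeignAsset key for the Karura-side asset id
  return api.tx.xTokens.transfer({ForeignAssetId: 0}, TRANSFER_AMOUNT, destination, destWeight);
}
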
expect(adminListAfterAddAdmin).to.be.not.deep.contains(normalizeAccountId(bob.address)); - - const changeAdminTxCharlie = api.tx.unique.addCollectionAdmin(collectionId, normalizeAccountId(charlie.address)); - await expect(submitTransactionAsync(bob, changeAdminTxCharlie)).to.be.rejected; - - const adminListAfterAddNewAdmin = await getAdminList(api, collectionId); - expect(adminListAfterAddNewAdmin).to.be.not.deep.contains(normalizeAccountId(bob.address)); - expect(adminListAfterAddNewAdmin).to.be.not.deep.contains(normalizeAccountId(charlie.address)); + let donor: IKeyringPair; + + before(async () => { + await usingPlaygrounds(async (_, privateKey) => { + donor = await privateKey({filename: __filename}); }); }); - it("Admin can't add collection admin.", async () => { - await usingApi(async (api, privateKeyWrapper) => { - const collectionId = await createCollectionExpectSuccess(); - const alice = privateKeyWrapper('//Alice'); - const bob = privateKeyWrapper('//Bob'); - const charlie = privateKeyWrapper('//CHARLIE'); + itSub("Not owner can't add collection admin.", async ({helper}) => { + const [alice, bob, charlie] = await helper.arrange.createAccounts([10n, 10n, 10n], donor); + const {collectionId} = await helper.nft.mintCollection(alice, {name: 'Collection Name', description: 'Collection Description', tokenPrefix: 'COL'}); - const collection = await queryCollectionExpectSuccess(api, collectionId); - expect(collection.owner.toString()).to.be.equal(alice.address); + const collection = await helper.collection.getData(collectionId); + expect(collection?.normalizedOwner).to.be.equal(helper.address.normalizeSubstrate(alice.address)); - const changeAdminTx = api.tx.unique.addCollectionAdmin(collectionId, normalizeAccountId(bob.address)); - await submitTransactionAsync(alice, changeAdminTx); + const changeAdminTxBob = async () => helper.collection.addAdmin(bob, collectionId, {Substrate: bob.address}); + const changeAdminTxCharlie = async () => helper.collection.addAdmin(bob, collectionId, {Substrate: charlie.address}); + await expect(changeAdminTxCharlie()).to.be.rejectedWith(/common\.NoPermission/); + await expect(changeAdminTxBob()).to.be.rejectedWith(/common\.NoPermission/); - const adminListAfterAddAdmin = await getAdminList(api, collectionId); - expect(adminListAfterAddAdmin).to.be.deep.contains(normalizeAccountId(bob.address)); + const adminListAfterAddAdmin = await helper.collection.getAdmins(collectionId); + expect(adminListAfterAddAdmin).to.be.not.deep.contains({Substrate: charlie.address}); + expect(adminListAfterAddAdmin).to.be.not.deep.contains({Substrate: bob.address}); + }); - const changeAdminTxCharlie = api.tx.unique.addCollectionAdmin(collectionId, normalizeAccountId(charlie.address)); - await expect(submitTransactionAsync(bob, changeAdminTxCharlie)).to.be.rejected; - - const adminListAfterAddNewAdmin = await getAdminList(api, collectionId); - expect(adminListAfterAddNewAdmin).to.be.deep.contains(normalizeAccountId(bob.address)); - expect(adminListAfterAddNewAdmin).to.be.not.deep.contains(normalizeAccountId(charlie.address)); - }); + itSub("Admin can't add collection admin.", async ({helper}) => { + const [alice, bob, charlie] = await helper.arrange.createAccounts([10n, 10n, 10n], donor); + const collection = await helper.nft.mintCollection(alice, {name: 'Collection Name', description: 'Collection Description', tokenPrefix: 'COL'}); + + await collection.addAdmin(alice, {Substrate: bob.address}); + + const adminListAfterAddAdmin = await collection.getAdmins(); + 
expect(adminListAfterAddAdmin).to.be.deep.contains({Substrate: bob.address}); + + const changeAdminTxCharlie = async () => collection.addAdmin(bob, {Substrate: charlie.address}); + await expect(changeAdminTxCharlie()).to.be.rejectedWith(/common\.NoPermission/); + + const adminListAfterAddNewAdmin = await collection.getAdmins(); + expect(adminListAfterAddNewAdmin).to.be.deep.contains({Substrate: bob.address}); + expect(adminListAfterAddNewAdmin).to.be.not.deep.contains({Substrate: charlie.address}); }); - it("Can't add collection admin of not existing collection.", async () => { - await usingApi(async (api, privateKeyWrapper) => { - // tslint:disable-next-line: no-bitwise - const collectionId = (1 << 32) - 1; - const alice = privateKeyWrapper('//Alice'); - const bob = privateKeyWrapper('//Bob'); + itSub("Can't add collection admin of not existing collection.", async ({helper}) => { + const [alice, bob] = await helper.arrange.createAccounts([10n, 10n, 10n], donor); + const collectionId = (1 << 32) - 1; - const changeOwnerTx = api.tx.unique.addCollectionAdmin(collectionId, normalizeAccountId(bob.address)); - await expect(submitTransactionExpectFailAsync(alice, changeOwnerTx)).to.be.rejected; + const addAdminTx = async () => helper.collection.addAdmin(alice, collectionId, {Substrate: bob.address}); + await expect(addAdminTx()).to.be.rejectedWith(/common\.CollectionNotFound/); - // Verifying that nothing bad happened (network is live, new collections can be created, etc.) - await createCollectionExpectSuccess(); - }); + // Verifying that nothing bad happened (network is live, new collections can be created, etc.) + await helper.nft.mintCollection(alice, {name: 'Collection Name', description: 'Collection Description', tokenPrefix: 'COL'}); }); - it("Can't add an admin to a destroyed collection.", async () => { - await usingApi(async (api, privateKeyWrapper) => { - const collectionId = await createCollectionExpectSuccess(); - const alice = privateKeyWrapper('//Alice'); - const bob = privateKeyWrapper('//Bob'); - await destroyCollectionExpectSuccess(collectionId); - const changeOwnerTx = api.tx.unique.addCollectionAdmin(collectionId, normalizeAccountId(bob.address)); - await expect(submitTransactionExpectFailAsync(alice, changeOwnerTx)).to.be.rejected; - - // Verifying that nothing bad happened (network is live, new collections can be created, etc.) - await createCollectionExpectSuccess(); - }); + itSub("Can't add an admin to a destroyed collection.", async ({helper}) => { + const [alice, bob] = await helper.arrange.createAccounts([10n, 10n, 10n], donor); + const collection = await helper.nft.mintCollection(alice, {name: 'Collection Name', description: 'Collection Description', tokenPrefix: 'COL'}); + + await collection.burn(alice); + const addAdminTx = async () => collection.addAdmin(alice, {Substrate: bob.address}); + await expect(addAdminTx()).to.be.rejectedWith(/common\.CollectionNotFound/); + + // Verifying that nothing bad happened (network is live, new collections can be created, etc.) 
+ await helper.nft.mintCollection(alice, {name: 'Collection Name', description: 'Collection Description', tokenPrefix: 'COL'}); }); - it('Add an admin to a collection that has reached the maximum number of admins limit', async () => { - await usingApi(async (api: ApiPromise, privateKeyWrapper) => { - const alice = privateKeyWrapper('//Alice'); - const accounts = [ - privateKeyWrapper('//AdminTest/1').address, - privateKeyWrapper('//AdminTest/2').address, - privateKeyWrapper('//AdminTest/3').address, - privateKeyWrapper('//AdminTest/4').address, - privateKeyWrapper('//AdminTest/5').address, - privateKeyWrapper('//AdminTest/6').address, - privateKeyWrapper('//AdminTest/7').address, - ]; - const collectionId = await createCollectionExpectSuccess(); - - const chainAdminLimit = (api.consts.common.collectionAdminsLimit as any).toNumber(); - expect(chainAdminLimit).to.be.equal(5); - - for (let i = 0; i < chainAdminLimit; i++) { - await addCollectionAdminExpectSuccess(alice, collectionId, accounts[i]); - const adminListAfterAddAdmin = await getAdminList(api, collectionId); - expect(adminListAfterAddAdmin).to.be.deep.contains(normalizeAccountId(accounts[i])); - } - - const tx = api.tx.unique.addCollectionAdmin(collectionId, normalizeAccountId(accounts[chainAdminLimit])); - await expect(submitTransactionExpectFailAsync(alice, tx)).to.be.rejected; - }); + itSub('Add an admin to a collection that has reached the maximum number of admins limit', async ({helper}) => { + const [alice, ...accounts] = await helper.arrange.createAccounts([10n, 0n, 0n, 0n, 0n, 0n, 0n, 0n], donor); + const collection = await helper.nft.mintCollection(alice, {name: 'Collection Name', description: 'Collection Description', tokenPrefix: 'COL'}); + + const chainAdminLimit = (helper.getApi().consts.common.collectionAdminsLimit as any).toNumber(); + expect(chainAdminLimit).to.be.equal(5); + + for (let i = 0; i < chainAdminLimit; i++) { + await collection.addAdmin(alice, {Substrate: accounts[i].address}); + const adminListAfterAddAdmin = await collection.getAdmins(); + expect(adminListAfterAddAdmin).to.be.deep.contains({Substrate: accounts[i].address}); + } + + const addExtraAdminTx = async () => collection.addAdmin(alice, {Substrate: accounts[chainAdminLimit].address}); + await expect(addExtraAdminTx()).to.be.rejectedWith(/common\.CollectionAdminCountExceeded/); }); }); diff --git a/tests/src/addToAllowList.test.ts b/tests/src/addToAllowList.test.ts deleted file mode 100644 index 98cc22eb1a..0000000000 --- a/tests/src/addToAllowList.test.ts +++ /dev/null @@ -1,135 +0,0 @@ -// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. -// This file is part of Unique Network. - -// Unique Network is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Unique Network is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Unique Network. If not, see . 
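The rewritten addCollectionAdmin.test.ts above is representative of the new playgrounds style used throughout this change: itSub and usingPlaygrounds from ./util replace the raw usingApi/submitTransactionAsync plumbing, a per-file donor key funds freshly generated accounts, and expected failures are asserted against concrete pallet errors. A minimal sketch of the pattern, built only from helpers that appear in the hunks; the suite and collection names are illustrative:

import {IKeyringPair} from '@polkadot/types/types';
import {itSub, usingPlaygrounds, expect} from './util';

describe('Example: playgrounds-style suite', () => {
  let donor: IKeyringPair;

  before(async () => {
    await usingPlaygrounds(async (_, privateKey) => {
      // each file derives its own donor account from its file name
      donor = await privateKey({filename: __filename});
    });
  });

  itSub('mints a collection and manages its admin list', async ({helper}) => {
    const [alice, bob] = await helper.arrange.createAccounts([10n, 10n], donor);
    const {collectionId} = await helper.nft.mintCollection(alice, {name: 'Example', description: 'Example', tokenPrefix: 'EX'});

    await helper.nft.addAdmin(alice, collectionId, {Substrate: bob.address});
    expect(await helper.collection.getAdmins(collectionId)).to.be.deep.contains({Substrate: bob.address});

    // failures are asserted against the concrete pallet error, not a bare rejection
    const addByNonOwner = async () => helper.collection.addAdmin(bob, collectionId, {Substrate: bob.address});
    await expect(addByNonOwner()).to.be.rejectedWith(/common\.NoPermission/);
  });
});
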
- -import {IKeyringPair} from '@polkadot/types/types'; -import chai from 'chai'; -import chaiAsPromised from 'chai-as-promised'; -import usingApi, {submitTransactionExpectFailAsync} from './substrate/substrate-api'; -import { - addToAllowListExpectSuccess, - createCollectionExpectSuccess, - createItemExpectSuccess, - destroyCollectionExpectSuccess, - enablePublicMintingExpectSuccess, - enableAllowListExpectSuccess, - normalizeAccountId, - addCollectionAdminExpectSuccess, - addToAllowListExpectFail, - getCreatedCollectionCount, -} from './util/helpers'; - -chai.use(chaiAsPromised); -const expect = chai.expect; - -let alice: IKeyringPair; -let bob: IKeyringPair; -let charlie: IKeyringPair; - -describe('Integration Test ext. addToAllowList()', () => { - - before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - }); - }); - - it('Execute the extrinsic with parameters: Collection ID and address to add to the allow list', async () => { - const collectionId = await createCollectionExpectSuccess(); - await addToAllowListExpectSuccess(alice, collectionId, bob.address); - }); - - it('Allowlisted minting: list restrictions', async () => { - const collectionId = await createCollectionExpectSuccess(); - await addToAllowListExpectSuccess(alice, collectionId, bob.address); - await enableAllowListExpectSuccess(alice, collectionId); - await enablePublicMintingExpectSuccess(alice, collectionId); - await createItemExpectSuccess(bob, collectionId, 'NFT', bob.address); - }); -}); - -describe('Negative Integration Test ext. addToAllowList()', () => { - - it('Allow list an address in the collection that does not exist', async () => { - await usingApi(async (api, privateKeyWrapper) => { - // tslint:disable-next-line: no-bitwise - const collectionId = await getCreatedCollectionCount(api) + 1; - const bob = privateKeyWrapper('//Bob'); - - const tx = api.tx.unique.addToAllowList(collectionId, normalizeAccountId(bob.address)); - await expect(submitTransactionExpectFailAsync(alice, tx)).to.be.rejected; - }); - }); - - it('Allow list an address in the collection that was destroyed', async () => { - await usingApi(async (api, privateKeyWrapper) => { - const alice = privateKeyWrapper('//Alice'); - const bob = privateKeyWrapper('//Bob'); - // tslint:disable-next-line: no-bitwise - const collectionId = await createCollectionExpectSuccess(); - await destroyCollectionExpectSuccess(collectionId); - const tx = api.tx.unique.addToAllowList(collectionId, normalizeAccountId(bob.address)); - await expect(submitTransactionExpectFailAsync(alice, tx)).to.be.rejected; - }); - }); - - it('Allow list an address in the collection that does not have allow list access enabled', async () => { - await usingApi(async (api, privateKeyWrapper) => { - const alice = privateKeyWrapper('//Alice'); - const ferdie = privateKeyWrapper('//Ferdie'); - const collectionId = await createCollectionExpectSuccess(); - await enableAllowListExpectSuccess(alice, collectionId); - await enablePublicMintingExpectSuccess(alice, collectionId); - const tx = api.tx.unique.createItem(collectionId, normalizeAccountId(ferdie.address), 'NFT'); - await expect(submitTransactionExpectFailAsync(ferdie, tx)).to.be.rejected; - }); - }); - -}); - -describe('Integration Test ext. 
addToAllowList() with collection admin permissions:', () => { - - before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - charlie = privateKeyWrapper('//Charlie'); - }); - }); - - it('Negative. Add to the allow list by regular user', async () => { - const collectionId = await createCollectionExpectSuccess(); - await addToAllowListExpectFail(bob, collectionId, charlie.address); - }); - - it('Execute the extrinsic with parameters: Collection ID and address to add to the allow list', async () => { - const collectionId = await createCollectionExpectSuccess(); - await addCollectionAdminExpectSuccess(alice, collectionId, bob.address); - await addToAllowListExpectSuccess(bob, collectionId, charlie.address); - }); - - it('Allowlisted minting: list restrictions', async () => { - const collectionId = await createCollectionExpectSuccess(); - await addCollectionAdminExpectSuccess(alice, collectionId, bob.address); - await addToAllowListExpectSuccess(bob, collectionId, charlie.address); - - // allowed only for collection owner - await enableAllowListExpectSuccess(alice, collectionId); - await enablePublicMintingExpectSuccess(alice, collectionId); - - await createItemExpectSuccess(charlie, collectionId, 'NFT', charlie.address); - }); -}); diff --git a/tests/src/adminTransferAndBurn.test.ts b/tests/src/adminTransferAndBurn.test.ts index 73fdfda8cc..05c6f89b1e 100644 --- a/tests/src/adminTransferAndBurn.test.ts +++ b/tests/src/adminTransferAndBurn.test.ts @@ -15,21 +15,7 @@ // along with Unique Network. If not, see . import {IKeyringPair} from '@polkadot/types/types'; -import chai from 'chai'; -import chaiAsPromised from 'chai-as-promised'; -import {default as usingApi} from './substrate/substrate-api'; -import { - createCollectionExpectSuccess, - createItemExpectSuccess, - transferExpectFailure, - transferFromExpectSuccess, - burnItemExpectFailure, - burnFromExpectSuccess, - setCollectionLimitsExpectSuccess, -} from './util/helpers'; - -chai.use(chaiAsPromised); -const expect = chai.expect; +import {usingPlaygrounds, expect, itSub} from './util'; describe('Integration Test: ownerCanTransfer allows admins to use only transferFrom/burnFrom:', () => { let alice: IKeyringPair; @@ -37,36 +23,41 @@ describe('Integration Test: ownerCanTransfer allows admins to use only transferF let charlie: IKeyringPair; before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - charlie = privateKeyWrapper('//Charlie'); + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob, charlie] = await helper.arrange.createAccounts([10n, 10n, 10n], donor); }); }); - it('admin transfers other user\'s token', async () => { - await usingApi(async () => { - const collectionId = await createCollectionExpectSuccess(); - await setCollectionLimitsExpectSuccess(alice, collectionId, {ownerCanTransfer: true}); - - const tokenId = await createItemExpectSuccess(alice, collectionId, 'NFT', {Substrate: bob.address}); + itSub('admin transfers other user\'s token', async ({helper}) => { + const {collectionId} = await helper.nft.mintCollection(alice, {name: 'name', description: 'descr', tokenPrefix: 'COL'}); + await helper.collection.setLimits(alice, collectionId, {ownerCanTransfer: true}); + const limits = await helper.collection.getEffectiveLimits(collectionId); + 
expect(limits.ownerCanTransfer).to.be.true; - await transferExpectFailure(collectionId, tokenId, alice, charlie); + const {tokenId} = await helper.nft.mintToken(alice, {collectionId: collectionId, owner: bob.address}); + const transferResult = async () => helper.nft.transferToken(alice, collectionId, tokenId, {Substrate: charlie.address}); + await expect(transferResult()).to.be.rejected; - await transferFromExpectSuccess(collectionId, tokenId, alice, bob, charlie, 1); - }); + await helper.nft.transferTokenFrom(alice, collectionId, tokenId, {Substrate: bob.address}, {Substrate: charlie.address}); + const newTokenOwner = await helper.nft.getTokenOwner(collectionId, tokenId); + expect(newTokenOwner.Substrate).to.be.equal(charlie.address); }); - it('admin burns other user\'s token', async () => { - await usingApi(async () => { - const collectionId = await createCollectionExpectSuccess(); - await setCollectionLimitsExpectSuccess(alice, collectionId, {ownerCanTransfer: true}); + itSub('admin burns other user\'s token', async ({helper}) => { + const {collectionId} = await helper.nft.mintCollection(alice, {name: 'name', description: 'descr', tokenPrefix: 'COL'}); - const tokenId = await createItemExpectSuccess(alice, collectionId, 'NFT', {Substrate: bob.address}); + await helper.collection.setLimits(alice, collectionId, {ownerCanTransfer: true}); + const limits = await helper.collection.getEffectiveLimits(collectionId); + expect(limits.ownerCanTransfer).to.be.true; - await burnItemExpectFailure(alice, collectionId, tokenId); + const {tokenId} = await helper.nft.mintToken(alice, {collectionId: collectionId, owner: bob.address}); + const burnTxFailed = async () => helper.nft.burnToken(alice, collectionId, tokenId); - await burnFromExpectSuccess(alice, bob, collectionId, tokenId); - }); + await expect(burnTxFailed()).to.be.rejected; + + await helper.nft.burnToken(bob, collectionId, tokenId); + const token = await helper.nft.getToken(collectionId, tokenId); + expect(token).to.be.null; }); }); diff --git a/tests/src/allowLists.test.ts b/tests/src/allowLists.test.ts index 893822581f..6666cbac75 100644 --- a/tests/src/allowLists.test.ts +++ b/tests/src/allowLists.test.ts @@ -15,300 +15,346 @@ // along with Unique Network. If not, see . import {IKeyringPair} from '@polkadot/types/types'; -import chai from 'chai'; -import chaiAsPromised from 'chai-as-promised'; -import usingApi, {submitTransactionExpectFailAsync} from './substrate/substrate-api'; -import { - addToAllowListExpectSuccess, - createCollectionExpectSuccess, - createItemExpectSuccess, - destroyCollectionExpectSuccess, - enableAllowListExpectSuccess, - normalizeAccountId, - addCollectionAdminExpectSuccess, - addToAllowListExpectFail, - removeFromAllowListExpectSuccess, - removeFromAllowListExpectFailure, - addToAllowListAgainExpectSuccess, - transferExpectFailure, - approveExpectSuccess, - approveExpectFail, - transferExpectSuccess, - transferFromExpectSuccess, - setMintPermissionExpectSuccess, - createItemExpectFailure, -} from './util/helpers'; - -chai.use(chaiAsPromised); -const expect = chai.expect; - -let alice: IKeyringPair; -let bob: IKeyringPair; -let charlie: IKeyringPair; +import {usingPlaygrounds, expect, itSub} from './util'; +import {ICollectionPermissions} from './util/playgrounds/types'; describe('Integration Test ext. 
Allow list tests', () => { + let alice: IKeyringPair; + let bob: IKeyringPair; + let charlie: IKeyringPair; before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - charlie = privateKeyWrapper('//Charlie'); + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob, charlie] = await helper.arrange.createAccounts([30n, 10n, 10n], donor); }); }); - it('Owner can add address to allow list', async () => { - const collectionId = await createCollectionExpectSuccess(); - await addToAllowListExpectSuccess(alice, collectionId, bob.address); - }); + describe('Positive', async () => { + itSub('Owner can add address to allow list', async ({helper}) => { + const {collectionId} = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + // allow list does not need to be enabled to add someone in advance + await helper.nft.addToAllowList(alice, collectionId, {Substrate: bob.address}); + const allowList = await helper.nft.getAllowList(collectionId); + expect(allowList).to.deep.contain({Substrate: bob.address}); + }); - it('Admin can add address to allow list', async () => { - const collectionId = await createCollectionExpectSuccess(); - await addCollectionAdminExpectSuccess(alice, collectionId, bob.address); - await addToAllowListExpectSuccess(bob, collectionId, charlie.address); - }); + itSub('Admin can add address to allow list', async ({helper}) => { + const {collectionId} = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + await helper.nft.addAdmin(alice, collectionId, {Substrate: bob.address}); - it('Non-privileged user cannot add address to allow list', async () => { - const collectionId = await createCollectionExpectSuccess(); - await addToAllowListExpectFail(bob, collectionId, charlie.address); - }); + // allow list does not need to be enabled to add someone in advance + await helper.nft.addToAllowList(bob, collectionId, {Substrate: charlie.address}); + const allowList = await helper.nft.getAllowList(collectionId); + expect(allowList).to.deep.contain({Substrate: charlie.address}); + }); - it('Nobody can add address to allow list of non-existing collection', async () => { - const collectionId = (1<<32) - 1; - await addToAllowListExpectFail(alice, collectionId, bob.address); + itSub('If address is already added to allow list, nothing happens', async ({helper}) => { + const {collectionId} = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + await helper.nft.addToAllowList(alice, collectionId, {Substrate: bob.address}); + await helper.nft.addToAllowList(alice, collectionId, {Substrate: bob.address}); + const allowList = await helper.nft.getAllowList(collectionId); + expect(allowList).to.deep.contain({Substrate: bob.address}); + }); }); - it('Nobody can add address to allow list of destroyed collection', async () => { - const collectionId = await createCollectionExpectSuccess(); - await destroyCollectionExpectSuccess(collectionId, '//Alice'); - await addToAllowListExpectFail(alice, collectionId, bob.address); - }); + describe('Negative', async () => { + itSub('Nobody can add address to allow list of non-existing collection', async ({helper}) => { + const collectionId = (1<<32) - 1; + await expect(helper.nft.addToAllowList(bob, collectionId, {Substrate: charlie.address})) + 
.to.be.rejectedWith(/common\.CollectionNotFound/); + }); - it('If address is already added to allow list, nothing happens', async () => { - const collectionId = await createCollectionExpectSuccess(); - await addToAllowListExpectSuccess(alice, collectionId, bob.address); - await addToAllowListAgainExpectSuccess(alice, collectionId, bob.address); - }); + itSub('Nobody can add address to allow list of destroyed collection', async ({helper}) => { + const {collectionId} = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + await helper.collection.burn(alice, collectionId); + await expect(helper.nft.addToAllowList(alice, collectionId, {Substrate: bob.address})) + .to.be.rejectedWith(/common\.CollectionNotFound/); + }); - it('Owner can remove address from allow list', async () => { - const collectionId = await createCollectionExpectSuccess(); - await addToAllowListExpectSuccess(alice, collectionId, bob.address); - await removeFromAllowListExpectSuccess(alice, collectionId, normalizeAccountId(bob)); + itSub('Non-privileged user cannot add address to allow list', async ({helper}) => { + const {collectionId} = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + await expect(helper.nft.addToAllowList(bob, collectionId, {Substrate: charlie.address})) + .to.be.rejectedWith(/common\.NoPermission/); + }); }); +}); - it('Admin can remove address from allow list', async () => { - const collectionId = await createCollectionExpectSuccess(); - await addCollectionAdminExpectSuccess(alice, collectionId, bob.address); - await addToAllowListExpectSuccess(alice, collectionId, charlie.address); - await removeFromAllowListExpectSuccess(bob, collectionId, normalizeAccountId(charlie)); - }); +describe('Integration Test ext. 
Remove from Allow List', () => { + let alice: IKeyringPair; + let bob: IKeyringPair; + let charlie: IKeyringPair; - it('Non-privileged user cannot remove address from allow list', async () => { - const collectionId = await createCollectionExpectSuccess(); - await addToAllowListExpectSuccess(alice, collectionId, charlie.address); - await removeFromAllowListExpectFailure(bob, collectionId, normalizeAccountId(charlie)); + before(async () => { + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob, charlie] = await helper.arrange.createAccounts([30n, 10n, 10n], donor); + }); }); - it('Nobody can remove address from allow list of non-existing collection', async () => { - const collectionId = (1<<32) - 1; - await removeFromAllowListExpectFailure(alice, collectionId, normalizeAccountId(charlie)); - }); + describe('Positive', async () => { + itSub('Owner can remove address from allow list', async ({helper}) => { + const {collectionId} = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + await helper.nft.addToAllowList(alice, collectionId, {Substrate: bob.address}); - it('Nobody can remove address from allow list of deleted collection', async () => { - const collectionId = await createCollectionExpectSuccess(); - await addToAllowListExpectSuccess(alice, collectionId, charlie.address); - await destroyCollectionExpectSuccess(collectionId, '//Alice'); - await removeFromAllowListExpectFailure(alice, collectionId, normalizeAccountId(charlie)); - }); + await helper.collection.removeFromAllowList(alice, collectionId, {Substrate: bob.address}); - it('If address is already removed from allow list, nothing happens', async () => { - const collectionId = await createCollectionExpectSuccess(); - await addToAllowListExpectSuccess(alice, collectionId, charlie.address); - await removeFromAllowListExpectSuccess(alice, collectionId, normalizeAccountId(charlie)); - await removeFromAllowListExpectSuccess(alice, collectionId, normalizeAccountId(charlie)); - }); + const allowList = await helper.nft.getAllowList(collectionId); - it('If Public Access mode is set to AllowList, tokens can’t be transferred from a non-allowlisted address with transfer or transferFrom. Test1', async () => { - const collectionId = await createCollectionExpectSuccess(); - const itemId = await createItemExpectSuccess(alice, collectionId, 'NFT', alice.address); - await enableAllowListExpectSuccess(alice, collectionId); - await addToAllowListExpectSuccess(alice, collectionId, charlie.address); - - await transferExpectFailure( - collectionId, - itemId, - alice, - charlie, - 1, - ); - }); + expect(allowList).to.not.deep.contain({Substrate: bob.address}); + }); - it('If Public Access mode is set to AllowList, tokens can’t be transferred from a non-allowlisted address with transfer or transferFrom. 
Test2', async () => { - const collectionId = await createCollectionExpectSuccess(); - const itemId = await createItemExpectSuccess(alice, collectionId, 'NFT', alice.address); - await enableAllowListExpectSuccess(alice, collectionId); - await addToAllowListExpectSuccess(alice, collectionId, alice.address); - await addToAllowListExpectSuccess(alice, collectionId, charlie.address); - await approveExpectSuccess(collectionId, itemId, alice, charlie.address); - await removeFromAllowListExpectSuccess(alice, collectionId, normalizeAccountId(alice)); - - await transferExpectFailure( - collectionId, - itemId, - alice, - charlie, - 1, - ); - }); + itSub('Admin can remove address from allow list', async ({helper}) => { + const {collectionId} = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + await helper.nft.addAdmin(alice, collectionId, {Substrate: charlie.address}); + await helper.nft.addToAllowList(alice, collectionId, {Substrate: bob.address}); + await helper.collection.removeFromAllowList(charlie, collectionId, {Substrate: bob.address}); - it('If Public Access mode is set to AllowList, tokens can’t be transferred to a non-allowlisted address with transfer or transferFrom. Test1', async () => { - const collectionId = await createCollectionExpectSuccess(); - const itemId = await createItemExpectSuccess(alice, collectionId, 'NFT', alice.address); - await enableAllowListExpectSuccess(alice, collectionId); - await addToAllowListExpectSuccess(alice, collectionId, alice.address); - - await transferExpectFailure( - collectionId, - itemId, - alice, - charlie, - 1, - ); - }); + const allowList = await helper.nft.getAllowList(collectionId); + expect(allowList).to.not.deep.contain({Substrate: bob.address}); + }); - it('If Public Access mode is set to AllowList, tokens can’t be transferred to a non-allowlisted address with transfer or transferFrom. 
Test2', async () => { - const collectionId = await createCollectionExpectSuccess(); - const itemId = await createItemExpectSuccess(alice, collectionId, 'NFT', alice.address); - await enableAllowListExpectSuccess(alice, collectionId); - await addToAllowListExpectSuccess(alice, collectionId, alice.address); - await addToAllowListExpectSuccess(alice, collectionId, charlie.address); - await approveExpectSuccess(collectionId, itemId, alice, charlie.address); - await removeFromAllowListExpectSuccess(alice, collectionId, normalizeAccountId(alice)); - - await transferExpectFailure( - collectionId, - itemId, - alice, - charlie, - 1, - ); - }); + itSub('If address is already removed from allow list, nothing happens', async ({helper}) => { + const {collectionId} = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + await helper.nft.addToAllowList(alice, collectionId, {Substrate: bob.address}); + await helper.collection.removeFromAllowList(alice, collectionId, {Substrate: bob.address}); + const allowListBefore = await helper.nft.getAllowList(collectionId); + expect(allowListBefore).to.not.deep.contain({Substrate: bob.address}); - it('If Public Access mode is set to AllowList, tokens can’t be destroyed by a non-allowlisted address (even if it owned them before enabling AllowList mode)', async () => { - const collectionId = await createCollectionExpectSuccess(); - const itemId = await createItemExpectSuccess(alice, collectionId, 'NFT', alice.address); - await enableAllowListExpectSuccess(alice, collectionId); - - await usingApi(async (api) => { - const tx = api.tx.unique.burnItem(collectionId, itemId, /*normalizeAccountId(Alice.address),*/ 11); - const badTransaction = async function () { - await submitTransactionExpectFailAsync(alice, tx); - }; - await expect(badTransaction()).to.be.rejected; + await helper.collection.removeFromAllowList(alice, collectionId, {Substrate: bob.address}); + + const allowListAfter = await helper.nft.getAllowList(collectionId); + expect(allowListAfter).to.not.deep.contain({Substrate: bob.address}); }); }); - it('If Public Access mode is set to AllowList, token transfers can’t be Approved by a non-allowlisted address (see Approve method)', async () => { - const collectionId = await createCollectionExpectSuccess(); - const itemId = await createItemExpectSuccess(alice, collectionId, 'NFT', alice.address); - await enableAllowListExpectSuccess(alice, collectionId); - await approveExpectFail(collectionId, itemId, alice, bob); - }); + describe('Negative', async () => { + itSub('Non-privileged user cannot remove address from allow list', async ({helper}) => { + const {collectionId} = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + await helper.nft.addToAllowList(alice, collectionId, {Substrate: charlie.address}); + await expect(helper.collection.removeFromAllowList(charlie, collectionId, {Substrate: charlie.address})) + .to.be.rejectedWith(/common\.NoPermission/); - it('If Public Access mode is set to AllowList, tokens can be transferred to a allowlisted address with transfer.', async () => { - const collectionId = await createCollectionExpectSuccess(); - const itemId = await createItemExpectSuccess(alice, collectionId, 'NFT', alice.address); - await enableAllowListExpectSuccess(alice, collectionId); - await addToAllowListExpectSuccess(alice, collectionId, alice.address); - await addToAllowListExpectSuccess(alice, collectionId, charlie.address); - await 
transferExpectSuccess(collectionId, itemId, alice, charlie, 1, 'NFT'); - }); + const allowList = await helper.nft.getAllowList(collectionId); + expect(allowList).to.deep.contain({Substrate: charlie.address}); + }); - it('If Public Access mode is set to AllowList, tokens can be transferred to a alowlisted address with transferFrom.', async () => { - const collectionId = await createCollectionExpectSuccess(); - const itemId = await createItemExpectSuccess(alice, collectionId, 'NFT', alice.address); - await enableAllowListExpectSuccess(alice, collectionId); - await addToAllowListExpectSuccess(alice, collectionId, alice.address); - await addToAllowListExpectSuccess(alice, collectionId, charlie.address); - await approveExpectSuccess(collectionId, itemId, alice, charlie.address); - await transferFromExpectSuccess(collectionId, itemId, alice, alice, charlie, 1, 'NFT'); - }); + itSub('Nobody can remove address from allow list of non-existing collection', async ({helper}) => { + const collectionId = (1<<32) - 1; + await expect(helper.collection.removeFromAllowList(bob, collectionId, {Substrate: charlie.address})) + .to.be.rejectedWith(/common\.CollectionNotFound/); + }); - it('If Public Access mode is set to AllowList, tokens can be transferred from a allowlisted address with transfer', async () => { - const collectionId = await createCollectionExpectSuccess(); - const itemId = await createItemExpectSuccess(alice, collectionId, 'NFT', alice.address); - await enableAllowListExpectSuccess(alice, collectionId); - await addToAllowListExpectSuccess(alice, collectionId, alice.address); - await addToAllowListExpectSuccess(alice, collectionId, charlie.address); - await transferExpectSuccess(collectionId, itemId, alice, charlie, 1, 'NFT'); - }); + itSub('Nobody can remove address from allow list of deleted collection', async ({helper}) => { + const {collectionId} = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + await helper.nft.addToAllowList(alice, collectionId, {Substrate: bob.address}); + await helper.collection.burn(alice, collectionId); - it('If Public Access mode is set to AllowList, tokens can be transferred from a allowlisted address with transferFrom', async () => { - const collectionId = await createCollectionExpectSuccess(); - const itemId = await createItemExpectSuccess(alice, collectionId, 'NFT', alice.address); - await enableAllowListExpectSuccess(alice, collectionId); - await addToAllowListExpectSuccess(alice, collectionId, alice.address); - await addToAllowListExpectSuccess(alice, collectionId, charlie.address); - await approveExpectSuccess(collectionId, itemId, alice, charlie.address); - await transferFromExpectSuccess(collectionId, itemId, alice, alice, charlie, 1, 'NFT'); + await expect(helper.collection.removeFromAllowList(alice, collectionId, {Substrate: bob.address})) + .to.be.rejectedWith(/common\.CollectionNotFound/); + }); }); +}); - it('If Public Access mode is set to AllowList, and Mint Permission is set to false, tokens can be created by owner', async () => { - const collectionId = await createCollectionExpectSuccess(); - await enableAllowListExpectSuccess(alice, collectionId); - await setMintPermissionExpectSuccess(alice, collectionId, false); - await createItemExpectSuccess(alice, collectionId, 'NFT', alice.address); - }); +describe('Integration Test ext. 
Transfer if included in Allow List', () => { + let alice: IKeyringPair; + let bob: IKeyringPair; + let charlie: IKeyringPair; - it('If Public Access mode is set to AllowList, and Mint Permission is set to false, tokens can be created by admin', async () => { - const collectionId = await createCollectionExpectSuccess(); - await enableAllowListExpectSuccess(alice, collectionId); - await setMintPermissionExpectSuccess(alice, collectionId, false); - await addCollectionAdminExpectSuccess(alice, collectionId, bob.address); - await createItemExpectSuccess(bob, collectionId, 'NFT', bob.address); + before(async () => { + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob, charlie] = await helper.arrange.createAccounts([30n, 10n, 10n], donor); + }); }); - it('If Public Access mode is set to AllowList, and Mint Permission is set to false, tokens cannot be created by non-privileged and allow-listed address', async () => { - const collectionId = await createCollectionExpectSuccess(); - await enableAllowListExpectSuccess(alice, collectionId); - await setMintPermissionExpectSuccess(alice, collectionId, false); - await addToAllowListExpectSuccess(alice, collectionId, bob.address); - await createItemExpectFailure(bob, collectionId, 'NFT', bob.address); - }); + describe('Positive', async () => { + itSub('If Public Access mode is set to AllowList, tokens can be transferred to a allowlisted address with transfer.', async ({helper}) => { + const {collectionId} = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + const {tokenId} = await helper.nft.mintToken(alice, {collectionId: collectionId, owner: alice.address}); + await helper.nft.setPermissions(alice, collectionId, {access: 'AllowList'}); + await helper.nft.addToAllowList(alice, collectionId, {Substrate: alice.address}); + await helper.nft.addToAllowList(alice, collectionId, {Substrate: charlie.address}); + await helper.nft.transferToken(alice, collectionId, tokenId, {Substrate: charlie.address}); + const owner = await helper.nft.getTokenOwner(collectionId, tokenId); + expect(owner.Substrate).to.be.equal(charlie.address); + }); - it('If Public Access mode is set to AllowList, and Mint Permission is set to false, tokens cannot be created by non-privileged and non-allow listed address', async () => { - const collectionId = await createCollectionExpectSuccess(); - await enableAllowListExpectSuccess(alice, collectionId); - await setMintPermissionExpectSuccess(alice, collectionId, false); - await createItemExpectFailure(bob, collectionId, 'NFT', bob.address); - }); + itSub('If Public Access mode is set to AllowList, tokens can be transferred to a allowlisted address with transferFrom.', async ({helper}) => { + const {collectionId} = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + const {tokenId} = await helper.nft.mintToken(alice, {collectionId: collectionId, owner: alice.address}); + await helper.nft.setPermissions(alice, collectionId, {access: 'AllowList'}); + await helper.nft.addToAllowList(alice, collectionId, {Substrate: alice.address}); + await helper.nft.addToAllowList(alice, collectionId, {Substrate: charlie.address}); + await helper.nft.approveToken(alice, collectionId, tokenId, {Substrate: charlie.address}); + + await helper.nft.transferTokenFrom(alice, collectionId, tokenId, {Substrate: alice.address}, {Substrate: charlie.address}); + const owner = await 
helper.nft.getTokenOwner(collectionId, tokenId); + expect(owner.Substrate).to.be.equal(charlie.address); + }); - it('If Public Access mode is set to AllowList, and Mint Permission is set to true, tokens can be created by owner', async () => { - const collectionId = await createCollectionExpectSuccess(); - await enableAllowListExpectSuccess(alice, collectionId); - await setMintPermissionExpectSuccess(alice, collectionId, true); - await createItemExpectSuccess(alice, collectionId, 'NFT', alice.address); - }); + itSub('If Public Access mode is set to AllowList, tokens can be transferred from a allowlisted address with transfer', async ({helper}) => { + const {collectionId} = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + const {tokenId} = await helper.nft.mintToken(alice, {collectionId: collectionId, owner: alice.address}); + await helper.nft.setPermissions(alice, collectionId, {access: 'AllowList'}); + await helper.nft.addToAllowList(alice, collectionId, {Substrate: alice.address}); + await helper.nft.addToAllowList(alice, collectionId, {Substrate: charlie.address}); + + await helper.nft.transferToken(alice, collectionId, tokenId, {Substrate: charlie.address}); + const owner = await helper.nft.getTokenOwner(collectionId, tokenId); + expect(owner.Substrate).to.be.equal(charlie.address); + }); - it('If Public Access mode is set to AllowList, and Mint Permission is set to true, tokens can be created by admin', async () => { - const collectionId = await createCollectionExpectSuccess(); - await enableAllowListExpectSuccess(alice, collectionId); - await setMintPermissionExpectSuccess(alice, collectionId, true); - await addCollectionAdminExpectSuccess(alice, collectionId, bob.address); - await createItemExpectSuccess(bob, collectionId, 'NFT', bob.address); + itSub('If Public Access mode is set to AllowList, tokens can be transferred from a allowlisted address with transferFrom', async ({helper}) => { + const {collectionId} = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + const {tokenId} = await helper.nft.mintToken(alice, {collectionId: collectionId, owner: alice.address}); + await helper.nft.setPermissions(alice, collectionId, {access: 'AllowList'}); + await helper.nft.addToAllowList(alice, collectionId, {Substrate: alice.address}); + await helper.nft.addToAllowList(alice, collectionId, {Substrate: charlie.address}); + await helper.nft.approveToken(alice, collectionId, tokenId, {Substrate: charlie.address}); + + await helper.nft.transferTokenFrom(alice, collectionId, tokenId, {Substrate: alice.address}, {Substrate: charlie.address}); + const owner = await helper.nft.getTokenOwner(collectionId, tokenId); + expect(owner.Substrate).to.be.equal(charlie.address); + }); }); - it('If Public Access mode is set to AllowList, and Mint Permission is set to true, tokens cannot be created by non-privileged and non-allow listed address', async () => { - const collectionId = await createCollectionExpectSuccess(); - await enableAllowListExpectSuccess(alice, collectionId); - await setMintPermissionExpectSuccess(alice, collectionId, true); - await createItemExpectFailure(bob, collectionId, 'NFT', bob.address); + describe('Negative', async () => { + itSub('If Public Access mode is set to AllowList, tokens can\'t be transferred from a non-allowlisted address with transfer or transferFrom. 
Test1', async ({helper}) => { + const {collectionId} = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + const {tokenId} = await helper.nft.mintToken(alice, {collectionId: collectionId, owner: alice.address}); + await helper.nft.setPermissions(alice, collectionId, {access: 'AllowList'}); + await helper.nft.addToAllowList(alice, collectionId, {Substrate: charlie.address}); + + await expect(helper.nft.transferToken(alice, collectionId, tokenId, {Substrate: charlie.address})) + .to.be.rejectedWith(/common\.AddressNotInAllowlist/); + }); + + itSub('If Public Access mode is set to AllowList, tokens can\'t be transferred from a non-allowlisted address with transfer or transferFrom. Test2', async ({helper}) => { + const {collectionId} = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + const {tokenId} = await helper.nft.mintToken(alice, {collectionId: collectionId, owner: alice.address}); + await helper.nft.setPermissions(alice, collectionId, {access: 'AllowList'}); + await helper.nft.addToAllowList(alice, collectionId, {Substrate: alice.address}); + await helper.nft.addToAllowList(alice, collectionId, {Substrate: charlie.address}); + await helper.nft.approveToken(alice, collectionId, tokenId, {Substrate: charlie.address}); + await helper.collection.removeFromAllowList(alice, collectionId, {Substrate: alice.address}); + + await expect(helper.nft.transferToken(alice, collectionId, tokenId, {Substrate: charlie.address})) + .to.be.rejectedWith(/common\.AddressNotInAllowlist/); + }); + + itSub('If Public Access mode is set to AllowList, tokens can\'t be transferred to a non-allowlisted address with transfer or transferFrom. Test1', async ({helper}) => { + const {collectionId} = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + const {tokenId} = await helper.nft.mintToken(alice, {collectionId: collectionId, owner: alice.address}); + await helper.nft.setPermissions(alice, collectionId, {access: 'AllowList'}); + await helper.nft.addToAllowList(alice, collectionId, {Substrate: alice.address}); + + await expect(helper.nft.transferToken(alice, collectionId, tokenId, {Substrate: charlie.address})) + .to.be.rejectedWith(/common\.AddressNotInAllowlist/); + }); + + itSub('If Public Access mode is set to AllowList, tokens can\'t be transferred to a non-allowlisted address with transfer or transferFrom. 
Test2', async ({helper}) => { + const {collectionId} = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + const {tokenId} = await helper.nft.mintToken(alice, {collectionId: collectionId, owner: alice.address}); + await helper.nft.setPermissions(alice, collectionId, {access: 'AllowList'}); + await helper.nft.addToAllowList(alice, collectionId, {Substrate: alice.address}); + await helper.nft.addToAllowList(alice, collectionId, {Substrate: charlie.address}); + + await helper.nft.approveToken(alice, collectionId, tokenId, {Substrate: charlie.address}); + await helper.collection.removeFromAllowList(alice, collectionId, {Substrate: alice.address}); + + await expect(helper.nft.transferToken(alice, collectionId, tokenId, {Substrate: charlie.address})) + .to.be.rejectedWith(/common\.AddressNotInAllowlist/); + }); + + itSub('If Public Access mode is set to AllowList, tokens can\'t be destroyed by a non-allowlisted address (even if it owned them before enabling AllowList mode)', async ({helper}) => { + const {collectionId} = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + const {tokenId} = await helper.nft.mintToken(alice, {collectionId: collectionId, owner: alice.address}); + await helper.nft.setPermissions(alice, collectionId, {access: 'AllowList'}); + await expect(helper.nft.burnToken(bob, collectionId, tokenId)) + .to.be.rejectedWith(/common\.NoPermission/); + }); + + itSub('If Public Access mode is set to AllowList, token transfers can\'t be Approved by a non-allowlisted address (see Approve method)', async ({helper}) => { + const {collectionId} = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + const {tokenId} = await helper.nft.mintToken(alice, {collectionId: collectionId, owner: alice.address}); + await helper.nft.setPermissions(alice, collectionId, {access: 'AllowList'}); + await expect(helper.nft.approveToken(alice, collectionId, tokenId, {Substrate: bob.address})) + .to.be.rejectedWith(/common\.AddressNotInAllowlist/); + }); }); +}); - it('If Public Access mode is set to AllowList, and Mint Permission is set to true, tokens can be created by non-privileged and allow listed address', async () => { - const collectionId = await createCollectionExpectSuccess(); - await enableAllowListExpectSuccess(alice, collectionId); - await setMintPermissionExpectSuccess(alice, collectionId, true); - await addToAllowListExpectSuccess(alice, collectionId, bob.address); - await createItemExpectSuccess(bob, collectionId, 'NFT', bob.address); +describe('Integration Test ext. Mint if included in Allow List', () => { + let alice: IKeyringPair; + let bob: IKeyringPair; + + before(async () => { + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob] = await helper.arrange.createAccounts([100n, 100n], donor); + }); }); + + const permissionSet: ICollectionPermissions[] = [ + {access: 'Normal', mintMode: false}, + {access: 'Normal', mintMode: true}, + {access: 'AllowList', mintMode: false}, + {access: 'AllowList', mintMode: true}, + ]; + + const testPermissionSuite = async (permissions: ICollectionPermissions) => { + const allowlistedMintingShouldFail = !permissions.mintMode!; + + const appropriateRejectionMessage = permissions.mintMode! ? 
/common\.AddressNotInAllowlist/ : /common\.PublicMintingNotAllowed/; + + const allowlistedMintingTest = () => itSub( + `With the conditions above, tokens can${allowlistedMintingShouldFail ? '\'t' : ''} be created by allow-listed addresses`, + async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {}); + await collection.setPermissions(alice, permissions); + await collection.addToAllowList(alice, {Substrate: bob.address}); + + if (allowlistedMintingShouldFail) + await expect(collection.mintToken(bob, {Substrate: bob.address})).to.be.rejectedWith(appropriateRejectionMessage); + else + await expect(collection.mintToken(bob, {Substrate: bob.address})).to.not.be.rejected; + }, + ); + + + describe(`Public Access Mode = ${permissions.access}, Mint Mode = ${permissions.mintMode}`, async () => { + describe('Positive', async () => { + itSub('With the conditions above, tokens can be created by owner', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {}); + await collection.setPermissions(alice, permissions); + await expect(collection.mintToken(alice, {Substrate: alice.address})).to.not.be.rejected; + }); + + itSub('With the conditions above, tokens can be created by admin', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {}); + await collection.setPermissions(alice, permissions); + await collection.addAdmin(alice, {Substrate: bob.address}); + await expect(collection.mintToken(bob, {Substrate: bob.address})).to.not.be.rejected; + }); + + if (!allowlistedMintingShouldFail) allowlistedMintingTest(); + }); + + describe('Negative', async () => { + itSub('With the conditions above, tokens can\'t be created by non-privileged non-allow-listed address', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + await collection.setPermissions(alice, permissions); + await expect(collection.mintToken(bob, {Substrate: bob.address})) + .to.be.rejectedWith(appropriateRejectionMessage); + }); + + if (allowlistedMintingShouldFail) allowlistedMintingTest(); + }); + }); + }; + + for (const permissions of permissionSet) { + testPermissionSuite(permissions); + } }); diff --git a/tests/src/app-promotion.seqtest.ts b/tests/src/app-promotion.seqtest.ts new file mode 100644 index 0000000000..7bf1cb9350 --- /dev/null +++ b/tests/src/app-promotion.seqtest.ts @@ -0,0 +1,82 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . 
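+ // Setup note: the AppPromotion admin can only be assigned via sudo, so `before` runs
+ // api.tx.sudo.sudo(api.tx.appPromotion.setAdminAddress({Substrate: palletAdmin.address})) as //Alice,
+ // and `after` re-applies the same call because the reassignment test below moves the admin to other accounts.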
+ +import {IKeyringPair} from '@polkadot/types/types'; +import {itSub, usingPlaygrounds, Pallets, requirePalletsOrSkip} from './util'; +import {expect} from './eth/util'; + +let superuser: IKeyringPair; +let donor: IKeyringPair; +let palletAdmin: IKeyringPair; + +describe('App promotion', () => { + before(async function () { + await usingPlaygrounds(async (helper, privateKey) => { + requirePalletsOrSkip(this, helper, [Pallets.AppPromotion]); + superuser = await privateKey('//Alice'); + donor = await privateKey({filename: __filename}); + palletAdmin = await privateKey('//PromotionAdmin'); + const api = helper.getApi(); + await helper.signTransaction(superuser, api.tx.sudo.sudo(api.tx.appPromotion.setAdminAddress({Substrate: palletAdmin.address}))); + }); + }); + + after(async function () { + await usingPlaygrounds(async (helper) => { + if (helper.fetchMissingPalletNames([Pallets.AppPromotion]).length != 0) return; + const api = helper.getApi(); + await helper.signTransaction(superuser, api.tx.sudo.sudo(api.tx.appPromotion.setAdminAddress({Substrate: palletAdmin.address}))); + }); + }); + + describe('admin adress', () => { + itSub('can be set by sudo only', async ({helper}) => { + const api = helper.getApi(); + const [nonAdmin] = await helper.arrange.createAccounts([10n], donor); + // nonAdmin can not set admin not from himself nor as a sudo + await expect(helper.signTransaction(nonAdmin, api.tx.appPromotion.setAdminAddress({Substrate: nonAdmin.address}))).to.be.rejected; + await expect(helper.signTransaction(nonAdmin, api.tx.sudo.sudo(api.tx.appPromotion.setAdminAddress({Substrate: nonAdmin.address})))).to.be.rejected; + }); + + itSub('can be any valid CrossAccountId', async ({helper}) => { + // We are not going to set an eth address as a sponsor, + // but we do want to check, it doesn't break anything; + const api = helper.getApi(); + const [account] = await helper.arrange.createAccounts([10n], donor); + const ethAccount = helper.address.substrateToEth(account.address); + // Alice sets Ethereum address as a sudo. Then Substrate address back... 
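+ // (helper.address.substrateToEth derives the Ethereum mirror of the Substrate account, so setAdminAddress
+ // is exercised with both CrossAccountId forms: {Ethereum: ...} here and {Substrate: ...} right after)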
+ await expect(helper.signTransaction(superuser, api.tx.sudo.sudo(api.tx.appPromotion.setAdminAddress({Ethereum: ethAccount})))).to.be.fulfilled; + await expect(helper.signTransaction(superuser, api.tx.sudo.sudo(api.tx.appPromotion.setAdminAddress({Substrate: palletAdmin.address})))).to.be.fulfilled; + + // ...It doesn't break anything; + const collection = await helper.nft.mintCollection(account, {name: 'New', description: 'New Collection', tokenPrefix: 'Promotion'}); + await expect(helper.signTransaction(account, api.tx.appPromotion.sponsorCollection(collection.collectionId))).to.be.rejected; + }); + + itSub('can be reassigned', async ({helper}) => { + const api = helper.getApi(); + const [oldAdmin, newAdmin, collectionOwner] = await helper.arrange.createAccounts([10n, 10n, 10n], donor); + const collection = await helper.nft.mintCollection(collectionOwner, {name: 'New', description: 'New Collection', tokenPrefix: 'Promotion'}); + + await expect(helper.signTransaction(superuser, api.tx.sudo.sudo(api.tx.appPromotion.setAdminAddress({Substrate: oldAdmin.address})))).to.be.fulfilled; + await expect(helper.signTransaction(superuser, api.tx.sudo.sudo(api.tx.appPromotion.setAdminAddress({Substrate: newAdmin.address})))).to.be.fulfilled; + await expect(helper.signTransaction(oldAdmin, api.tx.appPromotion.sponsorCollection(collection.collectionId))).to.be.rejected; + + await expect(helper.signTransaction(newAdmin, api.tx.appPromotion.sponsorCollection(collection.collectionId))).to.be.fulfilled; + }); + }); +}); + diff --git a/tests/src/app-promotion.test.ts b/tests/src/app-promotion.test.ts new file mode 100644 index 0000000000..f97849ae52 --- /dev/null +++ b/tests/src/app-promotion.test.ts @@ -0,0 +1,675 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . 
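+ // Reward timing below is keyed to the relay block in which a stake was made: with LOCKING_PERIOD = 20,
+ // a stake made exactly on a period boundary matures 20 blocks later; otherwise it is rounded down to the
+ // previous boundary and matures two periods after it (e.g. staked at relay block 25 -> (25 - 5) + 40 = block 60).
+ // This is what the rewardAvailableInBlock helper defined just below computes.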
+ +import {IKeyringPair} from '@polkadot/types/types'; +import {itSub, usingPlaygrounds, Pallets, requirePalletsOrSkip} from './util'; +import {DevUniqueHelper} from './util/playgrounds/unique.dev'; +import {itEth, expect, SponsoringMode} from './eth/util'; + +let donor: IKeyringPair; +let palletAdmin: IKeyringPair; +let nominal: bigint; +let palletAddress: string; +let accounts: IKeyringPair[]; +const LOCKING_PERIOD = 20n; // 20 blocks of relay +const UNLOCKING_PERIOD = 10n; // 10 blocks of parachain +const rewardAvailableInBlock = (stakedInBlock: bigint) => { + if (stakedInBlock % LOCKING_PERIOD === 0n) return stakedInBlock + 20n; + return (stakedInBlock - stakedInBlock % LOCKING_PERIOD) + (LOCKING_PERIOD * 2n); +}; + +describe('App promotion', () => { + before(async function () { + await usingPlaygrounds(async (helper, privateKey) => { + requirePalletsOrSkip(this, helper, [Pallets.AppPromotion]); + donor = await privateKey({filename: __filename}); + palletAddress = helper.arrange.calculatePalletAddress('appstake'); + palletAdmin = await privateKey('//PromotionAdmin'); + nominal = helper.balance.getOneTokenNominal(); + accounts = await helper.arrange.createCrowd(100, 1000n, donor); // create accounts-pool to speed up tests + }); + }); + + describe('stake extrinsic', () => { + itSub('should "lock" staking balance, add it to "staked" map, and increase "totalStaked" amount', async ({helper}) => { + const [staker, recepient] = [accounts.pop()!, accounts.pop()!]; + const totalStakedBefore = await helper.staking.getTotalStaked(); + + // Minimum stake amount is 100: + await expect(helper.staking.stake(staker, 100n * nominal - 1n)).to.be.rejected; + await helper.staking.stake(staker, 100n * nominal); + + // Staker balance is: miscFrozen: 100, feeFrozen: 100, reserved: 0n... + // ...so he can not transfer 900 + expect (await helper.balance.getSubstrateFull(staker.address)).to.contain({miscFrozen: 100n * nominal, feeFrozen: 100n * nominal, reserved: 0n}); + await expect(helper.balance.transferToSubstrate(staker, recepient.address, 900n * nominal)).to.be.rejectedWith('balances.LiquidityRestrictions'); + + expect(await helper.staking.getTotalStaked({Substrate: staker.address})).to.be.equal(100n * nominal); + expect(await helper.balance.getSubstrate(staker.address) / nominal).to.be.equal(999n); + // it is potentially flaky test. Promotion can credited some tokens. Maybe we need to use closeTo? 
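+ // One possible closeTo shape (a sketch, not wired in): expect(Number(await helper.staking.getTotalStaked() / nominal)).to.be.closeTo(Number((totalStakedBefore + 100n * nominal) / nominal), 1);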
+ expect(await helper.staking.getTotalStaked()).to.be.equal(totalStakedBefore + 100n * nominal); // total tokens amount staked in app-promotion increased + + + await helper.staking.stake(staker, 200n * nominal); + expect(await helper.staking.getTotalStaked({Substrate: staker.address})).to.be.equal(300n * nominal); + const totalStakedPerBlock = await helper.staking.getTotalStakedPerBlock({Substrate: staker.address}); + expect(totalStakedPerBlock[0].amount).to.equal(100n * nominal); + expect(totalStakedPerBlock[1].amount).to.equal(200n * nominal); + }); + + itSub('should allow to create maximum 10 stakes for account', async ({helper}) => { + const [staker] = await helper.arrange.createAccounts([2000n], donor); + for (let i = 0; i < 10; i++) { + await helper.staking.stake(staker, 100n * nominal); + } + + // can have 10 stakes + expect(await helper.staking.getTotalStaked({Substrate: staker.address})).to.be.equal(1000n * nominal); + expect(await helper.staking.getTotalStakedPerBlock({Substrate: staker.address})).to.have.length(10); + + await expect(helper.staking.stake(staker, 100n * nominal)).to.be.rejectedWith('appPromotion.NoPermission'); + + // After unstake can stake again + await helper.staking.unstake(staker); + await helper.staking.stake(staker, 100n * nominal); + expect(await helper.staking.getTotalStaked({Substrate: staker.address})).to.equal(100n * nominal); + }); + + itSub('should reject transaction if stake amount is more than total free balance minus frozen', async ({helper}) => { + const staker = accounts.pop()!; + + // Can't stake full balance because Alice needs to pay some fee + await expect(helper.staking.stake(staker, 1000n * nominal)).to.be.rejected; + await helper.staking.stake(staker, 500n * nominal); + + // Can't stake 500 tkn because Alice has Less than 500 transferable; + await expect(helper.staking.stake(staker, 500n * nominal)).to.be.rejectedWith('balances.LiquidityRestrictions'); + expect(await helper.staking.getTotalStaked({Substrate: staker.address})).to.be.equal(500n * nominal); + }); + + itSub('for different accounts in one block is possible', async ({helper}) => { + const crowd = [accounts.pop()!, accounts.pop()!, accounts.pop()!, accounts.pop()!]; + + const crowdStartsToStake = crowd.map(user => helper.staking.stake(user, 100n * nominal)); + await expect(Promise.all(crowdStartsToStake)).to.be.fulfilled; + + const crowdStakes = await Promise.all(crowd.map(address => helper.staking.getTotalStaked({Substrate: address.address}))); + expect(crowdStakes).to.deep.equal([100n * nominal, 100n * nominal, 100n * nominal, 100n * nominal]); + }); + }); + + describe('unstake extrinsic', () => { + itSub('should change balance state from "frozen" to "reserved", add it to "pendingUnstake" map, and subtract it from totalStaked', async ({helper}) => { + const [staker, recepient] = [accounts.pop()!, accounts.pop()!]; + const totalStakedBefore = await helper.staking.getTotalStaked(); + await helper.staking.stake(staker, 900n * nominal); + await helper.staking.unstake(staker); + + // Right after unstake balance is reserved + // Staker can not transfer + expect(await helper.balance.getSubstrateFull(staker.address)).to.deep.contain({reserved: 900n * nominal, miscFrozen: 0n, feeFrozen: 0n}); + await expect(helper.balance.transferToSubstrate(staker, recepient.address, 100n * nominal)).to.be.rejectedWith('balances.InsufficientBalance'); + expect(await helper.staking.getPendingUnstake({Substrate: staker.address})).to.be.equal(900n * nominal); + expect(await 
helper.staking.getTotalStaked({Substrate: staker.address})).to.be.equal(0n); + expect(await helper.staking.getTotalStaked()).to.be.equal(totalStakedBefore); + }); + + itSub('should unlock balance after unlocking period ends and remove it from "pendingUnstake"', async ({helper}) => { + const staker = accounts.pop()!; + await helper.staking.stake(staker, 100n * nominal); + await helper.staking.unstake(staker); + const [pendingUnstake] = await helper.staking.getPendingUnstakePerBlock({Substrate: staker.address}); + + // Wait for unstaking period. Balance now free ~1000; reserved, frozen, miscFrozeb: 0n + await helper.wait.forParachainBlockNumber(pendingUnstake.block); + expect(await helper.balance.getSubstrateFull(staker.address)).to.deep.contain({reserved: 0n, miscFrozen: 0n, feeFrozen: 0n}); + expect(await helper.balance.getSubstrate(staker.address) / nominal).to.be.equal(999n); + + // staker can transfer: + await helper.balance.transferToSubstrate(staker, donor.address, 998n * nominal); + expect(await helper.balance.getSubstrate(staker.address) / nominal).to.be.equal(1n); + }); + + itSub('should successfully unstake multiple stakes', async ({helper}) => { + const staker = accounts.pop()!; + await helper.staking.stake(staker, 100n * nominal); + await helper.staking.stake(staker, 200n * nominal); + await helper.staking.stake(staker, 300n * nominal); + + // staked: [100, 200, 300]; unstaked: 0 + let totalPendingUnstake = await helper.staking.getPendingUnstake({Substrate: staker.address}); + let pendingUnstake = await helper.staking.getPendingUnstakePerBlock({Substrate: staker.address}); + let stakes = await helper.staking.getTotalStakedPerBlock({Substrate: staker.address}); + expect(totalPendingUnstake).to.be.deep.equal(0n); + expect(pendingUnstake).to.be.deep.equal([]); + expect(stakes[0].amount).to.equal(100n * nominal); + expect(stakes[1].amount).to.equal(200n * nominal); + expect(stakes[2].amount).to.equal(300n * nominal); + + // Can unstake multiple stakes + await helper.staking.unstake(staker); + pendingUnstake = await helper.staking.getPendingUnstakePerBlock({Substrate: staker.address}); + totalPendingUnstake = await helper.staking.getPendingUnstake({Substrate: staker.address}); + stakes = await helper.staking.getTotalStakedPerBlock({Substrate: staker.address}); + expect(totalPendingUnstake).to.be.equal(600n * nominal); + expect(stakes).to.be.deep.equal([]); + expect(pendingUnstake[0].amount).to.equal(600n * nominal); + + expect (await helper.balance.getSubstrateFull(staker.address)).to.deep.contain({reserved: 600n * nominal, feeFrozen: 0n, miscFrozen: 0n}); + await helper.wait.forParachainBlockNumber(pendingUnstake[0].block); + expect (await helper.balance.getSubstrateFull(staker.address)).to.deep.contain({reserved: 0n, feeFrozen: 0n, miscFrozen: 0n}); + expect (await helper.balance.getSubstrate(staker.address) / nominal).to.be.equal(999n); + }); + + itSub('should not have any effects if no active stakes', async ({helper}) => { + const staker = accounts.pop()!; + + // unstake has no effect if no stakes at all + await helper.staking.unstake(staker); + expect(await helper.staking.getPendingUnstake({Substrate: staker.address})).to.be.equal(0n); + expect(await helper.balance.getSubstrate(staker.address) / nominal).to.be.equal(999n); // TODO bigint closeTo helper + + // TODO stake() unstake() waitUnstaked() unstake(); + + // can't unstake if there are only pendingUnstakes + await helper.staking.stake(staker, 100n * nominal); + await helper.staking.unstake(staker); + await 
helper.staking.unstake(staker); + + expect(await helper.staking.getPendingUnstake({Substrate: staker.address})).to.be.equal(100n * nominal); + expect(await helper.staking.getTotalStaked({Substrate: staker.address})).to.be.equal(0n); + }); + + itSub('should keep different unlocking block for each unlocking stake', async ({helper}) => { + const staker = accounts.pop()!; + await helper.staking.stake(staker, 100n * nominal); + await helper.staking.unstake(staker); + await helper.staking.stake(staker, 120n * nominal); + await helper.staking.unstake(staker); + + const unstakingPerBlock = await helper.staking.getPendingUnstakePerBlock({Substrate: staker.address}); + expect(unstakingPerBlock).has.length(2); + expect(unstakingPerBlock[0].amount).to.equal(100n * nominal); + expect(unstakingPerBlock[1].amount).to.equal(120n * nominal); + }); + + itSub('should be possible for different accounts in one block', async ({helper}) => { + const stakers = [accounts.pop()!, accounts.pop()!, accounts.pop()!]; + + await Promise.all(stakers.map(staker => helper.staking.stake(staker, 100n * nominal))); + await Promise.all(stakers.map(staker => helper.staking.unstake(staker))); + + await Promise.all(stakers.map(async (staker) => { + expect(await helper.staking.getPendingUnstake({Substrate: staker.address})).to.be.equal(100n * nominal); + expect(await helper.staking.getTotalStaked({Substrate: staker.address})).to.be.equal(0n); + })); + }); + }); + + describe('collection sponsoring', () => { + itSub('should actually sponsor transactions', async ({helper}) => { + const api = helper.getApi(); + const [collectionOwner, tokenSender, receiver] = [accounts.pop()!, accounts.pop()!, accounts.pop()!]; + const collection = await helper.nft.mintCollection(collectionOwner, {name: 'Name', description: 'Description', tokenPrefix: 'Prefix', limits: {sponsorTransferTimeout: 0}}); + const token = await collection.mintToken(collectionOwner, {Substrate: tokenSender.address}); + await helper.signTransaction(palletAdmin, api.tx.appPromotion.sponsorCollection(collection.collectionId)); + const palletBalanceBefore = await helper.balance.getSubstrate(palletAddress); + + await token.transfer(tokenSender, {Substrate: receiver.address}); + expect (await token.getOwner()).to.be.deep.equal({Substrate: receiver.address}); + const palletBalanceAfter = await helper.balance.getSubstrate(palletAddress); + + // senders balance the same, transaction has sponsored + expect (await helper.balance.getSubstrate(tokenSender.address)).to.be.equal(1000n * nominal); + expect (palletBalanceBefore > palletBalanceAfter).to.be.true; + }); + + itSub('can not be set by non admin', async ({helper}) => { + const api = helper.getApi(); + const [collectionOwner, nonAdmin] = [accounts.pop()!, accounts.pop()!]; + + const collection = await helper.nft.mintCollection(collectionOwner, {name: 'New', description: 'New Collection', tokenPrefix: 'Promotion'}); + + await expect(helper.signTransaction(nonAdmin, api.tx.appPromotion.sponsorCollection(collection.collectionId))).to.be.rejected; + expect((await collection.getData())?.raw.sponsorship).to.equal('Disabled'); + }); + + itSub('should set pallet address as confirmed admin', async ({helper}) => { + const api = helper.getApi(); + const [collectionOwner, oldSponsor] = [accounts.pop()!, accounts.pop()!]; + + // Can set sponsoring for collection without sponsor + const collectionWithoutSponsor = await helper.nft.mintCollection(collectionOwner, {name: 'No-sponsor', description: 'New Collection', tokenPrefix: 'Promotion'}); + await 
expect(helper.signTransaction(palletAdmin, api.tx.appPromotion.sponsorCollection(collectionWithoutSponsor.collectionId))).to.be.fulfilled; + expect((await collectionWithoutSponsor.getData())?.raw.sponsorship).to.be.deep.equal({Confirmed: palletAddress}); + + // Can set sponsoring for collection with unconfirmed sponsor + const collectionWithUnconfirmedSponsor = await helper.nft.mintCollection(collectionOwner, {name: 'Unconfirmed', description: 'New Collection', tokenPrefix: 'Promotion', pendingSponsor: oldSponsor.address}); + expect((await collectionWithUnconfirmedSponsor.getData())?.raw.sponsorship).to.be.deep.equal({Unconfirmed: oldSponsor.address}); + await expect(helper.signTransaction(palletAdmin, api.tx.appPromotion.sponsorCollection(collectionWithUnconfirmedSponsor.collectionId))).to.be.fulfilled; + expect((await collectionWithUnconfirmedSponsor.getData())?.raw.sponsorship).to.be.deep.equal({Confirmed: palletAddress}); + + // Can set sponsoring for collection with confirmed sponsor + const collectionWithConfirmedSponsor = await helper.nft.mintCollection(collectionOwner, {name: 'Confirmed', description: 'New Collection', tokenPrefix: 'Promotion', pendingSponsor: oldSponsor.address}); + await collectionWithConfirmedSponsor.confirmSponsorship(oldSponsor); + await expect(helper.signTransaction(palletAdmin, api.tx.appPromotion.sponsorCollection(collectionWithConfirmedSponsor.collectionId))).to.be.fulfilled; + expect((await collectionWithConfirmedSponsor.getData())?.raw.sponsorship).to.be.deep.equal({Confirmed: palletAddress}); + }); + + itSub('can be overwritten by collection owner', async ({helper}) => { + const api = helper.getApi(); + const [collectionOwner, newSponsor] = [accounts.pop()!, accounts.pop()!]; + const collection = await helper.nft.mintCollection(collectionOwner, {name: 'New', description: 'New Collection', tokenPrefix: 'Promotion'}); + const collectionId = collection.collectionId; + + await expect(helper.signTransaction(palletAdmin, api.tx.appPromotion.sponsorCollection(collectionId))).to.be.fulfilled; + + // Collection limits still can be changed by the owner + expect(await collection.setLimits(collectionOwner, {sponsorTransferTimeout: 0})).to.be.true; + expect((await collection.getData())?.raw.limits.sponsorTransferTimeout).to.be.equal(0); + expect((await collection.getData())?.raw.sponsorship).to.be.deep.equal({Confirmed: palletAddress}); + + // Collection sponsor can be changed too + expect((await collection.setSponsor(collectionOwner, newSponsor.address))).to.be.true; + expect((await collection.getData())?.raw.sponsorship).to.be.deep.equal({Unconfirmed: newSponsor.address}); + }); + + itSub('should not overwrite collection limits set by the owner earlier', async ({helper}) => { + const api = helper.getApi(); + const limits = {ownerCanDestroy: true, ownerCanTransfer: true, sponsorTransferTimeout: 0}; + const collectionWithLimits = await helper.nft.mintCollection(accounts.pop()!, {name: 'New', description: 'New Collection', tokenPrefix: 'Promotion', limits}); + + await expect(helper.signTransaction(palletAdmin, api.tx.appPromotion.sponsorCollection(collectionWithLimits.collectionId))).to.be.fulfilled; + expect((await collectionWithLimits.getData())?.raw.limits).to.be.deep.contain(limits); + }); + + itSub('should reject transaction if collection doesn\'t exist', async ({helper}) => { + const api = helper.getApi(); + const collectionOwner = accounts.pop()!; + + // collection has never existed + await expect(helper.signTransaction(palletAdmin, 
api.tx.appPromotion.sponsorCollection(999999999))).to.be.rejected; + // collection has been burned + const collection = await helper.nft.mintCollection(collectionOwner, {name: 'New', description: 'New Collection', tokenPrefix: 'Promotion'}); + await collection.burn(collectionOwner); + + await expect(helper.signTransaction(palletAdmin, api.tx.appPromotion.sponsorCollection(collection.collectionId))).to.be.rejected; + }); + }); + + describe('stopSponsoringCollection', () => { + itSub('can not be called by non-admin', async ({helper}) => { + const api = helper.getApi(); + const [collectionOwner, nonAdmin] = [accounts.pop()!, accounts.pop()!]; + const collection = await helper.nft.mintCollection(collectionOwner, {name: 'New', description: 'New Collection', tokenPrefix: 'Promotion'}); + + await expect(helper.signTransaction(palletAdmin, api.tx.appPromotion.sponsorCollection(collection.collectionId))).to.be.fulfilled; + + await expect(helper.signTransaction(nonAdmin, api.tx.appPromotion.stopSponsoringCollection(collection.collectionId))).to.be.rejected; + expect((await collection.getData())?.raw.sponsorship).to.be.deep.equal({Confirmed: palletAddress}); + }); + + itSub('should set sponsoring as disabled', async ({helper}) => { + const api = helper.getApi(); + const [collectionOwner, recepient] = [accounts.pop()!, accounts.pop()!]; + const collection = await helper.nft.mintCollection(collectionOwner, {name: 'New', description: 'New Collection', tokenPrefix: 'Promotion', limits: {sponsorTransferTimeout: 0}}); + const token = await collection.mintToken(collectionOwner, {Substrate: collectionOwner.address}); + + await helper.signTransaction(palletAdmin, api.tx.appPromotion.sponsorCollection(collection.collectionId)); + await helper.signTransaction(palletAdmin, api.tx.appPromotion.stopSponsoringCollection(collection.collectionId)); + + expect((await collection.getData())?.raw.sponsorship).to.be.equal('Disabled'); + + // Transactions are not sponsored anymore: + const ownerBalanceBefore = await helper.balance.getSubstrate(collectionOwner.address); + await token.transfer(collectionOwner, {Substrate: recepient.address}); + const ownerBalanceAfter = await helper.balance.getSubstrate(collectionOwner.address); + expect(ownerBalanceAfter < ownerBalanceBefore).to.be.equal(true); + }); + + itSub('should not affect collection which is not sponsored by pallete', async ({helper}) => { + const api = helper.getApi(); + const collectionOwner = accounts.pop()!; + const collection = await helper.nft.mintCollection(collectionOwner, {name: 'New', description: 'New Collection', tokenPrefix: 'Promotion', pendingSponsor: collectionOwner.address}); + await collection.confirmSponsorship(collectionOwner); + + await expect(helper.signTransaction(palletAdmin, api.tx.appPromotion.stopSponsoringCollection(collection.collectionId))).to.be.rejected; + + expect((await collection.getData())?.raw.sponsorship).to.be.deep.equal({Confirmed: collectionOwner.address}); + }); + + itSub('should reject transaction if collection does not exist', async ({helper}) => { + const collectionOwner = accounts.pop()!; + const collection = await helper.nft.mintCollection(collectionOwner, {name: 'New', description: 'New Collection', tokenPrefix: 'Promotion'}); + + await collection.burn(collectionOwner); + await expect(helper.executeExtrinsic(palletAdmin, 'api.tx.appPromotion.stopSponsoringCollection', [collection.collectionId], true)).to.be.rejectedWith('common.CollectionNotFound'); + await expect(helper.executeExtrinsic(palletAdmin, 
'api.tx.appPromotion.stopSponsoringCollection', [999_999_999], true)).to.be.rejectedWith('common.CollectionNotFound'); + }); + }); + + describe('contract sponsoring', () => { + itEth('should set palletes address as a sponsor', async ({helper}) => { + const contractOwner = (await helper.eth.createAccountWithBalance(donor, 1000n)).toLowerCase(); + const flipper = await helper.eth.deployFlipper(contractOwner); // await deployFlipper(web3, contractOwner); + const contractHelper = helper.ethNativeContract.contractHelpers(contractOwner); + + await helper.executeExtrinsic(palletAdmin, 'api.tx.appPromotion.sponsorContract', [flipper.options.address]); + + expect(await contractHelper.methods.hasSponsor(flipper.options.address).call()).to.be.true; + expect((await helper.callRpc('api.query.evmContractHelpers.owner', [flipper.options.address])).toJSON()).to.be.equal(contractOwner); + expect((await helper.callRpc('api.query.evmContractHelpers.sponsoring', [flipper.options.address])).toJSON()).to.deep.equal({ + confirmed: { + substrate: palletAddress, + }, + }); + }); + + itEth('should overwrite sponsoring mode and existed sponsor', async ({helper}) => { + const contractOwner = (await helper.eth.createAccountWithBalance(donor, 1000n)).toLowerCase(); + const flipper = await helper.eth.deployFlipper(contractOwner); // await deployFlipper(web3, contractOwner); + const contractHelper = helper.ethNativeContract.contractHelpers(contractOwner); + + await expect(contractHelper.methods.selfSponsoredEnable(flipper.options.address).send()).to.be.fulfilled; + + // Contract is self sponsored + expect((await helper.callRpc('api.query.evmContractHelpers.sponsoring', [flipper.options.address])).toJSON()).to.be.deep.equal({ + confirmed: { + ethereum: flipper.options.address.toLowerCase(), + }, + }); + + // set promotion sponsoring + await helper.executeExtrinsic(palletAdmin, 'api.tx.appPromotion.sponsorContract', [flipper.options.address], true); + + // new sponsor is pallet address + expect(await contractHelper.methods.hasSponsor(flipper.options.address).call()).to.be.true; + expect((await helper.callRpc('api.query.evmContractHelpers.owner', [flipper.options.address])).toJSON()).to.be.equal(contractOwner); + expect((await helper.callRpc('api.query.evmContractHelpers.sponsoring', [flipper.options.address])).toJSON()).to.deep.equal({ + confirmed: { + substrate: palletAddress, + }, + }); + }); + + itEth('can be overwritten by contract owner', async ({helper}) => { + const contractOwner = (await helper.eth.createAccountWithBalance(donor, 1000n)).toLowerCase(); + const flipper = await helper.eth.deployFlipper(contractOwner); // await deployFlipper(web3, contractOwner); + const contractHelper = helper.ethNativeContract.contractHelpers(contractOwner); + + // contract sponsored by pallet + await helper.executeExtrinsic(palletAdmin, 'api.tx.appPromotion.sponsorContract', [flipper.options.address], true); + + // owner sets self sponsoring + await expect(contractHelper.methods.selfSponsoredEnable(flipper.options.address).send()).to.be.not.rejected; + + expect(await contractHelper.methods.hasSponsor(flipper.options.address).call()).to.be.true; + expect((await helper.callRpc('api.query.evmContractHelpers.owner', [flipper.options.address])).toJSON()).to.be.equal(contractOwner); + expect((await helper.callRpc('api.query.evmContractHelpers.sponsoring', [flipper.options.address])).toJSON()).to.deep.equal({ + confirmed: { + ethereum: flipper.options.address.toLowerCase(), + }, + }); + }); + + itEth('can not be set by non admin', async 
({helper}) => { + const nonAdmin = accounts.pop()!; + const contractOwner = (await helper.eth.createAccountWithBalance(donor, 1000n)).toLowerCase(); + const flipper = await helper.eth.deployFlipper(contractOwner); // await deployFlipper(web3, contractOwner); + const contractHelper = helper.ethNativeContract.contractHelpers(contractOwner); + + await expect(contractHelper.methods.selfSponsoredEnable(flipper.options.address).send()).to.be.fulfilled; + + // nonAdmin calls sponsorContract + await expect(helper.executeExtrinsic(nonAdmin, 'api.tx.appPromotion.sponsorContract', [flipper.options.address], true)).to.be.rejectedWith('appPromotion.NoPermission'); + + // contract still self-sponsored + expect((await helper.callRpc('api.query.evmContractHelpers.sponsoring', [flipper.options.address])).toJSON()).to.deep.equal({ + confirmed: { + ethereum: flipper.options.address.toLowerCase(), + }, + }); + }); + + itEth('should actually sponsor transactions', async ({helper}) => { + // Contract caller + const caller = await helper.eth.createAccountWithBalance(donor, 1000n); + const palletBalanceBefore = await helper.balance.getSubstrate(palletAddress); + + // Deploy flipper + const contractOwner = (await helper.eth.createAccountWithBalance(donor, 1000n)).toLowerCase(); + const flipper = await helper.eth.deployFlipper(contractOwner); // await deployFlipper(web3, contractOwner); + const contractHelper = helper.ethNativeContract.contractHelpers(contractOwner); + + // Owner sets to sponsor every tx + await contractHelper.methods.setSponsoringRateLimit(flipper.options.address, 0).send({from: contractOwner}); + await contractHelper.methods.setSponsoringMode(flipper.options.address, SponsoringMode.Generous).send({from: contractOwner}); + await helper.eth.transferBalanceFromSubstrate(donor, flipper.options.address, 1000n); // transferBalanceToEth(api, alice, flipper.options.address, 1000n); + + // Set promotion to the Flipper + await helper.executeExtrinsic(palletAdmin, 'api.tx.appPromotion.sponsorContract', [flipper.options.address], true); + + // Caller calls Flipper + await flipper.methods.flip().send({from: caller}); + expect(await flipper.methods.getValue().call()).to.be.true; + + // The contracts and caller balances have not changed + const callerBalance = await helper.balance.getEthereum(caller); + const contractBalanceAfter = await helper.balance.getEthereum(flipper.options.address); + expect(callerBalance).to.be.equal(1000n * nominal); + expect(1000n * nominal === contractBalanceAfter).to.be.true; + + // The pallet balance has decreased + const palletBalanceAfter = await helper.balance.getSubstrate(palletAddress); + expect(palletBalanceAfter < palletBalanceBefore).to.be.true; + }); + }); + + describe('stopSponsoringContract', () => { + itEth('should remove pallet address from contract sponsors', async ({helper}) => { + const caller = await helper.eth.createAccountWithBalance(donor, 1000n); + const contractOwner = (await helper.eth.createAccountWithBalance(donor, 1000n)).toLowerCase(); + const flipper = await helper.eth.deployFlipper(contractOwner); + await helper.eth.transferBalanceFromSubstrate(donor, flipper.options.address); + const contractHelper = helper.ethNativeContract.contractHelpers(contractOwner); + + await contractHelper.methods.setSponsoringMode(flipper.options.address, SponsoringMode.Generous).send({from: contractOwner}); + await helper.executeExtrinsic(palletAdmin, 'api.tx.appPromotion.sponsorContract', [flipper.options.address], true); + await helper.executeExtrinsic(palletAdmin, 
'api.tx.appPromotion.stopSponsoringContract', [flipper.options.address], true); + + expect(await contractHelper.methods.hasSponsor(flipper.options.address).call()).to.be.false; + expect((await helper.callRpc('api.query.evmContractHelpers.owner', [flipper.options.address])).toJSON()).to.be.equal(contractOwner); + expect((await helper.callRpc('api.query.evmContractHelpers.sponsoring', [flipper.options.address])).toJSON()).to.deep.equal({ + disabled: null, + }); + + await flipper.methods.flip().send({from: caller}); + expect(await flipper.methods.getValue().call()).to.be.true; + + const callerBalance = await helper.balance.getEthereum(caller); + const contractBalanceAfter = await helper.balance.getEthereum(flipper.options.address); + + // caller paid for the call + expect(1000n * nominal > callerBalance).to.be.true; + expect(contractBalanceAfter).to.be.equal(100n * nominal); + }); + + itEth('can not be called by non-admin', async ({helper}) => { + const nonAdmin = accounts.pop()!; + const contractOwner = (await helper.eth.createAccountWithBalance(donor, 1000n)).toLowerCase(); + const flipper = await helper.eth.deployFlipper(contractOwner); + + await helper.executeExtrinsic(palletAdmin, 'api.tx.appPromotion.sponsorContract', [flipper.options.address]); + await expect(helper.executeExtrinsic(nonAdmin, 'api.tx.appPromotion.stopSponsoringContract', [flipper.options.address])) + .to.be.rejectedWith(/appPromotion\.NoPermission/); + }); + + itEth('should not affect a contract which is not sponsored by the pallet', async ({helper}) => { + const nonAdmin = accounts.pop()!; + const contractOwner = (await helper.eth.createAccountWithBalance(donor, 1000n)).toLowerCase(); + const flipper = await helper.eth.deployFlipper(contractOwner); + const contractHelper = helper.ethNativeContract.contractHelpers(contractOwner); + await expect(contractHelper.methods.selfSponsoredEnable(flipper.options.address).send()).to.be.fulfilled; + + await expect(helper.executeExtrinsic(nonAdmin, 'api.tx.appPromotion.stopSponsoringContract', [flipper.options.address], true)).to.be.rejectedWith('appPromotion.NoPermission'); + }); + }); + + describe('rewards', () => { + itSub('can not be called by non-admin', async ({helper}) => { + const nonAdmin = accounts.pop()!; + await expect(helper.admin.payoutStakers(nonAdmin, 100)).to.be.rejectedWith('appPromotion.NoPermission'); + }); + + itSub('should increase total staked', async ({helper}) => { + const staker = accounts.pop()!; + const totalStakedBefore = await helper.staking.getTotalStaked(); + await helper.staking.stake(staker, 100n * nominal); + + // Wait for rewards and pay + const [stakedInBlock] = await helper.staking.getTotalStakedPerBlock({Substrate: staker.address}); + await helper.wait.forRelayBlockNumber(rewardAvailableInBlock(stakedInBlock.block)); + const totalPayout = (await helper.admin.payoutStakers(palletAdmin, 100)).reduce((prev, payout) => prev + payout.payout, 0n); + + const totalStakedAfter = await helper.staking.getTotalStaked(); + expect(totalStakedAfter).to.equal(totalStakedBefore + (100n * nominal) + totalPayout); + // staker can unstake + await helper.staking.unstake(staker); + expect(await helper.staking.getTotalStaked()).to.be.equal(totalStakedAfter - calculateIncome(100n * nominal, 10n)); + }); + + itSub('should credit 0.05% for staking period', async ({helper}) => { + const staker = accounts.pop()!; + + await waitPromotionPeriodDoesntEnd(helper); + + await helper.staking.stake(staker, 100n * nominal); + await helper.staking.stake(staker, 200n * nominal); + + // 
wait until rewards are available: + const [_stake1, stake2] = await helper.staking.getTotalStakedPerBlock({Substrate: staker.address}); + await helper.wait.forRelayBlockNumber(rewardAvailableInBlock(stake2.block)); + + const payoutToStaker = (await helper.admin.payoutStakers(palletAdmin, 100)).find((payout) => payout.staker === staker.address)?.payout; + expect(payoutToStaker + 300n * nominal).to.equal(calculateIncome(300n * nominal, 10n)); + + const totalStakedPerBlock = await helper.staking.getTotalStakedPerBlock({Substrate: staker.address}); + expect(totalStakedPerBlock[0].amount).to.equal(calculateIncome(100n * nominal, 10n)); + expect(totalStakedPerBlock[1].amount).to.equal(calculateIncome(200n * nominal, 10n)); + }); + + itSub('should be paid for more than one period if payments were missed', async ({helper}) => { + const staker = accounts.pop()!; + + await helper.staking.stake(staker, 100n * nominal); + // wait until two rewards are available: + let [stake] = await helper.staking.getTotalStakedPerBlock({Substrate: staker.address}); + await helper.wait.forRelayBlockNumber(rewardAvailableInBlock(stake.block) + LOCKING_PERIOD); + + await helper.admin.payoutStakers(palletAdmin, 100); + [stake] = await helper.staking.getTotalStakedPerBlock({Substrate: staker.address}); + const frozenBalanceShouldBe = calculateIncome(100n * nominal, 10n, 2); + expect(stake.amount).to.be.equal(frozenBalanceShouldBe); + + const stakerFullBalance = await helper.balance.getSubstrateFull(staker.address); + + expect(stakerFullBalance).to.contain({reserved: 0n, feeFrozen: frozenBalanceShouldBe, miscFrozen: frozenBalanceShouldBe}); + }); + + itSub('should not be credited for unstaked (reserved) balance', async ({helper}) => { + // staker unstakes before rewards have been paid + const staker = accounts.pop()!; + await helper.staking.stake(staker, 100n * nominal); + const [stake] = await helper.staking.getTotalStakedPerBlock({Substrate: staker.address}); + await helper.wait.forRelayBlockNumber(rewardAvailableInBlock(stake.block) + LOCKING_PERIOD); + await helper.staking.unstake(staker); + + // so they did not receive any rewards + const totalBalanceBefore = await helper.balance.getSubstrate(staker.address); + await helper.admin.payoutStakers(palletAdmin, 100); + const totalBalanceAfter = await helper.balance.getSubstrate(staker.address); + + expect(totalBalanceBefore).to.be.equal(totalBalanceAfter); + }); + + itSub('should bring compound interest', async ({helper}) => { + const staker = accounts.pop()!; + + await helper.staking.stake(staker, 100n * nominal); + + let [stake] = await helper.staking.getTotalStakedPerBlock({Substrate: staker.address}); + await helper.wait.forRelayBlockNumber(rewardAvailableInBlock(stake.block)); + + await helper.admin.payoutStakers(palletAdmin, 100); + [stake] = await helper.staking.getTotalStakedPerBlock({Substrate: staker.address}); + expect(stake.amount).to.equal(calculateIncome(100n * nominal, 10n)); + + await helper.wait.forRelayBlockNumber(rewardAvailableInBlock(stake.block) + LOCKING_PERIOD); + await helper.admin.payoutStakers(palletAdmin, 100); + [stake] = await helper.staking.getTotalStakedPerBlock({Substrate: staker.address}); + expect(stake.amount).to.equal(calculateIncome(100n * nominal, 10n, 2)); + }); + + itSub.skip('can be paid 1000 rewards at a time', async ({helper}) => { + // all other stakes should be unstaked + const oneHundredStakers = await helper.arrange.createCrowd(100, 1050n, donor); + + // stakers stake 10 times each + for (let i = 0; i < 10; i++) { + await 
Promise.all(oneHundredStakers.map(staker => helper.staking.stake(staker, 100n * nominal))); + } + await helper.wait.newBlocks(40); + await helper.admin.payoutStakers(palletAdmin, 100); + }); + + itSub.skip('can handle 40,000 rewards', async ({helper}) => { + const crowdStakes = async () => { + // each account in the crowd stakes 2 times + const crowd = await helper.arrange.createCrowd(500, 300n, donor); + await Promise.all(crowd.map(account => helper.staking.stake(account, 100n * nominal))); + await Promise.all(crowd.map(account => helper.staking.stake(account, 100n * nominal))); + // + }; + + for (let i = 0; i < 40; i++) { + await crowdStakes(); + } + + // TODO pay rewards for some period + }); + }); +}); + +// Returns the expected total after crediting staking income to `base`, compounded max(iter, 1) times. +function calculateIncome(base: bigint, calcPeriod: bigint, iter = 0): bigint { + const DAY = 7200n; + const ACCURACY = 1_000_000_000n; + const income = base + base * (ACCURACY * (calcPeriod * 5n) / (10_000n * DAY)) / ACCURACY; + + if (iter > 1) { + return calculateIncome(income, calcPeriod, iter - 1); + } else return income; +} + +// Waits until the block number within the current promotion period is below `waitBlockLessThan`, to avoid boundary cases. +// Pass 0 to wait for the beginning of the next period. +async function waitPromotionPeriodDoesntEnd(helper: DevUniqueHelper, waitBlockLessThan = LOCKING_PERIOD / 3n) { + const relayBlockNumber = (await helper.callRpc('api.query.parachainSystem.validationData', [])).value.relayParentNumber.toNumber(); // await helper.chain.getLatestBlockNumber(); + const currentPeriodBlock = BigInt(relayBlockNumber) % LOCKING_PERIOD; + + if (currentPeriodBlock > waitBlockLessThan) { + await helper.wait.forRelayBlockNumber(BigInt(relayBlockNumber) + LOCKING_PERIOD - currentPeriodBlock); + } +} diff --git a/tests/src/approve.test.ts b/tests/src/approve.test.ts index 8b0732f1c6..0c2cc91702 100644 --- a/tests/src/approve.test.ts +++ b/tests/src/approve.test.ts @@ -15,26 +15,8 @@ // along with Unique Network. If not, see . 
import {IKeyringPair} from '@polkadot/types/types'; -import {ApiPromise} from '@polkadot/api'; -import chai from 'chai'; -import chaiAsPromised from 'chai-as-promised'; -import {default as usingApi} from './substrate/substrate-api'; -import { - approveExpectFail, - approveExpectSuccess, - createCollectionExpectSuccess, - createItemExpectSuccess, - destroyCollectionExpectSuccess, - setCollectionLimitsExpectSuccess, - transferExpectSuccess, - addCollectionAdminExpectSuccess, - adminApproveFromExpectFail, - getCreatedCollectionCount, - transferFromExpectSuccess, - transferFromExpectFail, -} from './util/helpers'; - -chai.use(chaiAsPromised); +import {expect, itSub, Pallets, usingPlaygrounds} from './util'; + describe('Integration Test approve(spender, collection_id, item_id, amount):', () => { let alice: IKeyringPair; @@ -42,53 +24,76 @@ describe('Integration Test approve(spender, collection_id, item_id, amount):', ( let charlie: IKeyringPair; before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - charlie = privateKeyWrapper('//Charlie'); + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob, charlie] = await helper.arrange.createAccounts([100n, 100n, 100n], donor); }); }); - it('Execute the extrinsic and check approvedList', async () => { - const nftCollectionId = await createCollectionExpectSuccess(); - // nft - const newNftTokenId = await createItemExpectSuccess(alice, nftCollectionId, 'NFT'); - await approveExpectSuccess(nftCollectionId, newNftTokenId, alice, bob.address); - // fungible - const fungibleCollectionId = await createCollectionExpectSuccess({mode: {type: 'Fungible', decimalPoints: 0}}); - const newFungibleTokenId = await createItemExpectSuccess(alice, fungibleCollectionId, 'Fungible'); - await approveExpectSuccess(fungibleCollectionId, newFungibleTokenId, alice, bob.address); - // reFungible - const reFungibleCollectionId = - await createCollectionExpectSuccess({mode: {type: 'ReFungible'}}); - const newReFungibleTokenId = await createItemExpectSuccess(alice, reFungibleCollectionId, 'ReFungible'); - await approveExpectSuccess(reFungibleCollectionId, newReFungibleTokenId, alice, bob.address); - }); - - it('Remove approval by using 0 amount', async () => { - const nftCollectionId = await createCollectionExpectSuccess(); - // nft - const newNftTokenId = await createItemExpectSuccess(alice, nftCollectionId, 'NFT'); - await approveExpectSuccess(nftCollectionId, newNftTokenId, alice, bob.address, 1); - await approveExpectSuccess(nftCollectionId, newNftTokenId, alice, bob.address, 0); - // fungible - const fungibleCollectionId = await createCollectionExpectSuccess({mode: {type: 'Fungible', decimalPoints: 0}}); - const newFungibleTokenId = await createItemExpectSuccess(alice, fungibleCollectionId, 'Fungible'); - await approveExpectSuccess(fungibleCollectionId, newFungibleTokenId, alice, bob.address, 1); - await approveExpectSuccess(fungibleCollectionId, newFungibleTokenId, alice, bob.address, 0); - // reFungible - const reFungibleCollectionId = - await createCollectionExpectSuccess({mode: {type: 'ReFungible'}}); - const newReFungibleTokenId = await createItemExpectSuccess(alice, reFungibleCollectionId, 'ReFungible'); - await approveExpectSuccess(reFungibleCollectionId, newReFungibleTokenId, alice, bob.address, 1); - await approveExpectSuccess(reFungibleCollectionId, newReFungibleTokenId, alice, bob.address, 0); - }); 
- - it('can`t be called by collection owner on non-owned item when OwnerCanTransfer == false', async () => { - const collectionId = await createCollectionExpectSuccess(); - const itemId = await createItemExpectSuccess(alice, collectionId, 'NFT', bob.address); - - await adminApproveFromExpectFail(collectionId, itemId, alice, bob.address, charlie.address); + itSub('[nft] Execute the extrinsic and check approvedList', async ({helper}) => { + const {collectionId} = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + const {tokenId} = await helper.nft.mintToken(alice, {collectionId: collectionId, owner: alice.address}); + await helper.nft.approveToken(alice, collectionId, tokenId, {Substrate: bob.address}); + expect(await helper.nft.isTokenApproved(collectionId, tokenId, {Substrate: bob.address})).to.be.true; + }); + + itSub('[fungible] Execute the extrinsic and check approvedList', async ({helper}) => { + const {collectionId} = await helper.ft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}, 0); + await helper.ft.mintTokens(alice, collectionId, 10n, {Substrate: alice.address}); + const tokenId = await helper.ft.getLastTokenId(collectionId); + await helper.ft.approveToken(alice, collectionId, tokenId, {Substrate: bob.address}); + const amount = await helper.ft.getTokenApprovedPieces(collectionId, tokenId, {Substrate: bob.address}, {Substrate: alice.address}); + expect(amount).to.be.equal(BigInt(1)); + }); + + itSub.ifWithPallets('[refungible] Execute the extrinsic and check approvedList', [Pallets.ReFungible], async ({helper}) => { + const {collectionId} = await helper.rft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + const {tokenId} = await helper.rft.mintToken(alice, {collectionId: collectionId, owner: alice.address, pieces: 100n}); + await helper.rft.approveToken(alice, collectionId, tokenId, {Substrate: bob.address}); + const amount = await helper.rft.getTokenApprovedPieces(collectionId, tokenId, {Substrate: bob.address}, {Substrate: alice.address}); + expect(amount).to.be.equal(BigInt(1)); + }); + + itSub('[nft] Remove approval by using 0 amount', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + const collectionId = collection.collectionId; + const {tokenId} = await helper.nft.mintToken(alice, {collectionId: collectionId, owner: alice.address}); + await helper.nft.approveToken(alice, collectionId, tokenId, {Substrate: bob.address}); + expect(await helper.nft.isTokenApproved(collectionId, tokenId, {Substrate: bob.address})).to.be.true; + await helper.signTransaction(alice, helper.constructApiCall('api.tx.unique.approve', [{Substrate: bob.address}, collectionId, tokenId, 0])); + expect(await helper.nft.isTokenApproved(collectionId, tokenId, {Substrate: bob.address})).to.be.false; + }); + + itSub('[fungible] Remove approval by using 0 amount', async ({helper}) => { + const {collectionId} = await helper.ft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}, 0); + await helper.ft.mintTokens(alice, collectionId, 10n, {Substrate: alice.address}); + const tokenId = await helper.ft.getLastTokenId(collectionId); + await helper.ft.approveToken(alice, collectionId, tokenId, {Substrate: bob.address}); + const amountBefore = await helper.ft.getTokenApprovedPieces(collectionId, tokenId, {Substrate: bob.address}, {Substrate: alice.address}); + 
expect(amountBefore).to.be.equal(BigInt(1)); + + await helper.ft.approveToken(alice, collectionId, tokenId, {Substrate: bob.address}, 0n); + const amountAfter = await helper.ft.getTokenApprovedPieces(collectionId, tokenId, {Substrate: bob.address}, {Substrate: alice.address}); + expect(amountAfter).to.be.equal(BigInt(0)); + }); + + itSub.ifWithPallets('[refungible] Remove approval by using 0 amount', [Pallets.ReFungible], async ({helper}) => { + const {collectionId} = await helper.rft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + const {tokenId} = await helper.rft.mintToken(alice, {collectionId: collectionId, owner: alice.address, pieces: 100n}); + await helper.rft.approveToken(alice, collectionId, tokenId, {Substrate: bob.address}); + const amountBefore = await helper.rft.getTokenApprovedPieces(collectionId, tokenId, {Substrate: bob.address}, {Substrate: alice.address}); + expect(amountBefore).to.be.equal(BigInt(1)); + + await helper.rft.approveToken(alice, collectionId, tokenId, {Substrate: bob.address}, 0n); + const amountAfter = await helper.rft.getTokenApprovedPieces(collectionId, tokenId, {Substrate: bob.address}, {Substrate: alice.address}); + expect(amountAfter).to.be.equal(BigInt(0)); + }); + + itSub('can`t be called by collection owner on non-owned item when OwnerCanTransfer == false', async ({helper}) => { + const {collectionId} = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + const {tokenId} = await helper.nft.mintToken(alice, {collectionId: collectionId, owner: bob.address}); + const approveTokenTx = async () => helper.nft.approveToken(alice, collectionId, tokenId, {Substrate: charlie.address}); + await expect(approveTokenTx()).to.be.rejected; }); }); @@ -98,29 +103,34 @@ describe('Normal user can approve other users to transfer:', () => { let charlie: IKeyringPair; before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - charlie = privateKeyWrapper('//Charlie'); + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob, charlie] = await helper.arrange.createAccounts([100n, 100n, 100n], donor); }); - }); + }); - it('NFT', async () => { - const collectionId = await createCollectionExpectSuccess(); - const itemId = await createItemExpectSuccess(alice, collectionId, 'NFT', bob.address); - await approveExpectSuccess(collectionId, itemId, bob, charlie.address); + itSub('NFT', async ({helper}) => { + const {collectionId} = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + const {tokenId} = await helper.nft.mintToken(alice, {collectionId: collectionId, owner: bob.address}); + await helper.nft.approveToken(bob, collectionId, tokenId, {Substrate: charlie.address}); + expect(await helper.nft.isTokenApproved(collectionId, tokenId, {Substrate: charlie.address})).to.be.true; }); - it('Fungible up to an approved amount', async () => { - const collectionId = await createCollectionExpectSuccess({mode:{type: 'Fungible', decimalPoints: 0}}); - const itemId = await createItemExpectSuccess(alice, collectionId, 'Fungible', bob.address); - await approveExpectSuccess(collectionId, itemId, bob, charlie.address); + itSub('Fungible up to an approved amount', async ({helper}) => { + const {collectionId} = await helper.ft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}, 0); + 
await helper.ft.mintTokens(alice, collectionId, 10n, bob.address); + const tokenId = await helper.ft.getLastTokenId(collectionId); + await helper.ft.approveToken(bob, collectionId, tokenId, {Substrate: charlie.address}); + const amount = await helper.ft.getTokenApprovedPieces(collectionId, tokenId, {Substrate: charlie.address}, {Substrate: bob.address}); + expect(amount).to.be.equal(BigInt(1)); }); - it('ReFungible up to an approved amount', async () => { - const collectionId = await createCollectionExpectSuccess({mode:{type: 'ReFungible'}}); - const itemId = await createItemExpectSuccess(alice, collectionId, 'ReFungible', bob.address); - await approveExpectSuccess(collectionId, itemId, bob, charlie.address); + itSub.ifWithPallets('ReFungible up to an approved amount', [Pallets.ReFungible], async ({helper}) => { + const {collectionId} = await helper.rft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + const {tokenId} = await helper.rft.mintToken(alice, {collectionId: collectionId, owner: bob.address, pieces: 100n}); + await helper.rft.approveToken(bob, collectionId, tokenId, {Substrate: charlie.address}, 100n); + const amount = await helper.rft.getTokenApprovedPieces(collectionId, tokenId, {Substrate: charlie.address}, {Substrate: bob.address}); + expect(amount).to.be.equal(BigInt(100n)); }); }); @@ -130,32 +140,40 @@ describe('Approved users can transferFrom up to approved amount:', () => { let charlie: IKeyringPair; before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - charlie = privateKeyWrapper('//Charlie'); + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob, charlie] = await helper.arrange.createAccounts([100n, 100n, 100n], donor); }); - }); + }); - it('NFT', async () => { - const collectionId = await createCollectionExpectSuccess(); - const itemId = await createItemExpectSuccess(alice, collectionId, 'NFT', bob.address); - await approveExpectSuccess(collectionId, itemId, bob, charlie.address); - await transferFromExpectSuccess(collectionId, itemId, charlie, bob, alice, 1, 'NFT'); + itSub('NFT', async ({helper}) => { + const {collectionId} = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + const {tokenId} = await helper.nft.mintToken(alice, {collectionId: collectionId, owner: bob.address}); + await helper.nft.approveToken(bob, collectionId, tokenId, {Substrate: charlie.address}); + await helper.nft.transferTokenFrom(charlie, collectionId, tokenId, {Substrate: bob.address}, {Substrate: alice.address}); + const owner = await helper.nft.getTokenOwner(collectionId, tokenId); + expect(owner.Substrate).to.be.equal(alice.address); }); - it('Fungible up to an approved amount', async () => { - const collectionId = await createCollectionExpectSuccess({mode:{type: 'Fungible', decimalPoints: 0}}); - const itemId = await createItemExpectSuccess(alice, collectionId, 'Fungible', bob.address); - await approveExpectSuccess(collectionId, itemId, bob, charlie.address); - await transferFromExpectSuccess(collectionId, itemId, charlie, bob, alice, 1, 'Fungible'); + itSub('Fungible up to an approved amount', async ({helper}) => { + const {collectionId} = await helper.ft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}, 0); + await helper.ft.mintTokens(alice, collectionId, 10n, bob.address); + const tokenId = await 
helper.ft.getLastTokenId(collectionId); + await helper.ft.approveToken(bob, collectionId, tokenId, {Substrate: charlie.address}); + const before = await helper.ft.getBalance(collectionId, {Substrate: alice.address}); + await helper.ft.transferTokenFrom(charlie, collectionId, tokenId, {Substrate: bob.address}, {Substrate: alice.address}, 1n); + const after = await helper.ft.getBalance(collectionId, {Substrate: alice.address}); + expect(after - before).to.be.equal(BigInt(1)); }); - it('ReFungible up to an approved amount', async () => { - const collectionId = await createCollectionExpectSuccess({mode:{type: 'ReFungible'}}); - const itemId = await createItemExpectSuccess(alice, collectionId, 'ReFungible', bob.address); - await approveExpectSuccess(collectionId, itemId, bob, charlie.address); - await transferFromExpectSuccess(collectionId, itemId, charlie, bob, alice, 1, 'ReFungible'); + itSub.ifWithPallets('ReFungible up to an approved amount', [Pallets.ReFungible], async ({helper}) => { + const {collectionId} = await helper.rft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + const {tokenId} = await helper.rft.mintToken(alice, {collectionId: collectionId, owner: bob.address, pieces: 100n}); + await helper.rft.approveToken(bob, collectionId, tokenId, {Substrate: charlie.address}); + const before = await helper.rft.getTokenBalance(collectionId, tokenId, {Substrate: alice.address}); + await helper.rft.transferTokenFrom(charlie, collectionId, tokenId, {Substrate: bob.address}, {Substrate: alice.address}, 1n); + const after = await helper.rft.getTokenBalance(collectionId, tokenId, {Substrate: alice.address}); + expect(after - before).to.be.equal(BigInt(1)); }); }); @@ -165,59 +183,78 @@ describe('Approved users cannot use transferFrom to repeat transfers if approved let charlie: IKeyringPair; before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - charlie = privateKeyWrapper('//Charlie'); + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob, charlie] = await helper.arrange.createAccounts([100n, 100n, 100n], donor); }); - }); - - it('NFT', async () => { - const collectionId = await createCollectionExpectSuccess(); - const itemId = await createItemExpectSuccess(alice, collectionId, 'NFT', bob.address); - await approveExpectSuccess(collectionId, itemId, bob, charlie.address); - await transferFromExpectSuccess(collectionId, itemId, charlie, bob, alice, 1, 'NFT'); - await transferFromExpectFail(collectionId, itemId, charlie, bob, alice, 1); }); - it('Fungible up to an approved amount', async () => { - const collectionId = await createCollectionExpectSuccess({mode:{type: 'Fungible', decimalPoints: 0}}); - const itemId = await createItemExpectSuccess(alice, collectionId, 'Fungible', bob.address); - await approveExpectSuccess(collectionId, itemId, bob, charlie.address); - await transferFromExpectSuccess(collectionId, itemId, charlie, bob, alice, 1, 'Fungible'); - await transferFromExpectFail(collectionId, itemId, charlie, bob, alice, 1); - }); - - it('ReFungible up to an approved amount', async () => { - const collectionId = await createCollectionExpectSuccess({mode:{type: 'ReFungible'}}); - const itemId = await createItemExpectSuccess(alice, collectionId, 'ReFungible', bob.address); - await approveExpectSuccess(collectionId, itemId, bob, charlie.address); - await transferFromExpectSuccess(collectionId, 
itemId, charlie, bob, alice, 1, 'ReFungible'); - await transferFromExpectFail(collectionId, itemId, charlie, bob, alice, 1); + itSub('NFT', async ({helper}) => { + const {collectionId} = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + const {tokenId} = await helper.nft.mintToken(alice, {collectionId: collectionId, owner: bob.address}); + await helper.nft.approveToken(bob, collectionId, tokenId, {Substrate: charlie.address}); + await helper.nft.transferTokenFrom(charlie, collectionId, tokenId, {Substrate: bob.address}, {Substrate: alice.address}); + const owner = await helper.nft.getTokenOwner(collectionId, tokenId); + expect(owner.Substrate).to.be.equal(alice.address); + const transferTokenFromTx = async () => helper.nft.transferTokenFrom(charlie, collectionId, tokenId, {Substrate: bob.address}, {Substrate: alice.address}); + await expect(transferTokenFromTx()).to.be.rejected; + }); + + itSub('Fungible up to an approved amount', async ({helper}) => { + const {collectionId} = await helper.ft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}, 0); + await helper.ft.mintTokens(alice, collectionId, 10n, bob.address); + const tokenId = await helper.ft.getLastTokenId(collectionId); + await helper.ft.approveToken(bob, collectionId, tokenId, {Substrate: charlie.address}); + const before = await helper.ft.getBalance(collectionId, {Substrate: alice.address}); + await helper.ft.transferTokenFrom(charlie, collectionId, tokenId, {Substrate: bob.address}, {Substrate: alice.address}, 1n); + const after = await helper.ft.getBalance(collectionId, {Substrate: alice.address}); + expect(after - before).to.be.equal(BigInt(1)); + + const transferTokenFromTx = async () => helper.ft.transferTokenFrom(charlie, collectionId, tokenId, {Substrate: bob.address}, {Substrate: alice.address}, 1n); + await expect(transferTokenFromTx()).to.be.rejected; + }); + + itSub.ifWithPallets('ReFungible up to an approved amount', [Pallets.ReFungible], async ({helper}) => { + const {collectionId} = await helper.rft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + const {tokenId} = await helper.rft.mintToken(alice, {collectionId: collectionId, owner: bob.address, pieces: 100n}); + await helper.rft.approveToken(bob, collectionId, tokenId, {Substrate: charlie.address}, 100n); + const before = await helper.rft.getTokenBalance(collectionId, tokenId, {Substrate: alice.address}); + await helper.rft.transferTokenFrom(charlie, collectionId, tokenId, {Substrate: bob.address}, {Substrate: alice.address}, 100n); + const after = await helper.rft.getTokenBalance(collectionId, tokenId, {Substrate: alice.address}); + expect(after - before).to.be.equal(BigInt(100)); + const transferTokenFromTx = async () => helper.rft.transferTokenFrom(charlie, collectionId, tokenId, {Substrate: bob.address}, {Substrate: alice.address}, 100n); + await expect(transferTokenFromTx()).to.be.rejected; }); }); -describe('Approved amount decreases by the transferred amount.:', () => { +describe('Approved amount decreases by the transferred amount:', () => { let alice: IKeyringPair; let bob: IKeyringPair; let charlie: IKeyringPair; let dave: IKeyringPair; before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - charlie = privateKeyWrapper('//Charlie'); - dave = privateKeyWrapper('//Dave'); + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await 
privateKey({filename: __filename}); + [alice, bob, charlie, dave] = await helper.arrange.createAccounts([100n, 100n, 100n, 100n], donor); }); - }); + }); + + itSub('If a user B is approved to transfer 10 Fungible tokens from user A, they can transfer 2 tokens to user C, which will result in decreasing approval from 10 to 8. Then user B can transfer 8 tokens to user D.', async ({helper}) => { + const {collectionId} = await helper.ft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}, 0); + await helper.ft.mintTokens(alice, collectionId, 10n, alice.address); + const tokenId = await helper.ft.getLastTokenId(collectionId); + await helper.ft.approveToken(alice, collectionId, tokenId, {Substrate: bob.address}, 10n); + + const charlieBefore = await helper.ft.getBalance(collectionId, {Substrate: charlie.address}); + await helper.ft.transferTokenFrom(bob, collectionId, tokenId, {Substrate: alice.address}, {Substrate: charlie.address}, 2n); + const charlieAfter = await helper.ft.getBalance(collectionId, {Substrate: charlie.address}); + expect(charlieAfter - charlieBefore).to.be.equal(BigInt(2)); - it('If a user B is approved to transfer 10 Fungible tokens from user A, they can transfer 2 tokens to user C, which will result in decreasing approval from 10 to 8. Then user B can transfer 8 tokens to user D.', async () => { - const collectionId = await createCollectionExpectSuccess({mode:{type: 'Fungible', decimalPoints: 0}}); - const itemId = await createItemExpectSuccess(alice, collectionId, 'Fungible', alice.address); - await approveExpectSuccess(collectionId, itemId, alice, bob.address, 10); - await transferFromExpectSuccess(collectionId, itemId, bob, alice, charlie, 2, 'Fungible'); - await transferFromExpectSuccess(collectionId, itemId, bob, alice, dave, 8, 'Fungible'); + const daveBefore = await helper.ft.getBalance(collectionId, {Substrate: dave.address}); + await helper.ft.transferTokenFrom(bob, collectionId, tokenId, {Substrate: alice.address}, {Substrate: dave.address}, 8n); + const daveAfter = await helper.ft.getBalance(collectionId, {Substrate: dave.address}); + expect(daveAfter - daveBefore).to.be.equal(BigInt(8)); }); }); @@ -227,36 +264,52 @@ describe('User may clear the approvals to approving for 0 amount:', () => { let charlie: IKeyringPair; before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - charlie = privateKeyWrapper('//Charlie'); + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob, charlie] = await helper.arrange.createAccounts([100n, 100n, 100n], donor); }); }); - it('NFT', async () => { - const collectionId = await createCollectionExpectSuccess(); - const itemId = await createItemExpectSuccess(alice, collectionId, 'NFT'); - await approveExpectSuccess(collectionId, itemId, alice, bob.address, 1); - await approveExpectSuccess(collectionId, itemId, alice, bob.address, 0); - await transferFromExpectFail(collectionId, itemId, bob, bob, charlie, 1); + itSub('NFT', async ({helper}) => { + const {collectionId} = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + const {tokenId} = await helper.nft.mintToken(alice, {collectionId: collectionId, owner: alice.address}); + await helper.nft.approveToken(alice, collectionId, tokenId, {Substrate: bob.address}); + expect(await helper.nft.isTokenApproved(collectionId, tokenId, {Substrate: 
bob.address})).to.be.true; + await helper.signTransaction(alice, helper.constructApiCall('api.tx.unique.approve', [{Substrate: bob.address}, collectionId, tokenId, 0])); + expect(await helper.nft.isTokenApproved(collectionId, tokenId, {Substrate: bob.address})).to.be.false; + const transferTokenFromTx = async () => helper.nft.transferTokenFrom(bob, collectionId, tokenId, {Substrate: bob.address}, {Substrate: bob.address}); + await expect(transferTokenFromTx()).to.be.rejected; }); - it('Fungible', async () => { - const fungibleCollectionId = await createCollectionExpectSuccess({mode: {type: 'Fungible', decimalPoints: 0}}); - const newFungibleTokenId = await createItemExpectSuccess(alice, fungibleCollectionId, 'Fungible'); - await approveExpectSuccess(fungibleCollectionId, newFungibleTokenId, alice, bob.address, 1); - await approveExpectSuccess(fungibleCollectionId, newFungibleTokenId, alice, bob.address, 0); - await transferFromExpectFail(fungibleCollectionId, newFungibleTokenId, bob, bob, charlie, 1); + itSub('Fungible', async ({helper}) => { + const {collectionId} = await helper.ft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}, 0); + await helper.ft.mintTokens(alice, collectionId, 10n, alice.address); + const tokenId = await helper.ft.getLastTokenId(collectionId); + await helper.ft.approveToken(alice, collectionId, tokenId, {Substrate: bob.address}); + const amountBefore = await helper.ft.getTokenApprovedPieces(collectionId, tokenId, {Substrate: bob.address}, {Substrate: alice.address}); + expect(amountBefore).to.be.equal(BigInt(1)); + + await helper.ft.approveToken(alice, collectionId, tokenId, {Substrate: bob.address}, 0n); + const amountAfter = await helper.ft.getTokenApprovedPieces(collectionId, tokenId, {Substrate: bob.address}, {Substrate: alice.address}); + expect(amountAfter).to.be.equal(BigInt(0)); + + const transferTokenFromTx = async () => helper.ft.transferTokenFrom(bob, collectionId, tokenId, {Substrate: bob.address}, {Substrate: charlie.address}, 1n); + await expect(transferTokenFromTx()).to.be.rejected; }); - it('ReFungible', async () => { - const reFungibleCollectionId = - await createCollectionExpectSuccess({mode: {type: 'ReFungible'}}); - const newReFungibleTokenId = await createItemExpectSuccess(alice, reFungibleCollectionId, 'ReFungible'); - await approveExpectSuccess(reFungibleCollectionId, newReFungibleTokenId, alice, bob.address, 1); - await approveExpectSuccess(reFungibleCollectionId, newReFungibleTokenId, alice, bob.address, 0); - await transferFromExpectFail(reFungibleCollectionId, newReFungibleTokenId, bob, bob, charlie, 1); + itSub.ifWithPallets('ReFungible', [Pallets.ReFungible], async ({helper}) => { + const {collectionId} = await helper.rft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + const {tokenId} = await helper.rft.mintToken(alice, {collectionId: collectionId, owner: alice.address, pieces: 100n}); + await helper.rft.approveToken(alice, collectionId, tokenId, {Substrate: bob.address}); + const amountBefore = await helper.rft.getTokenApprovedPieces(collectionId, tokenId, {Substrate: bob.address}, {Substrate: alice.address}); + expect(amountBefore).to.be.equal(BigInt(1)); + + await helper.rft.approveToken(alice, collectionId, tokenId, {Substrate: bob.address}, 0n); + const amountAfter = await helper.rft.getTokenApprovedPieces(collectionId, tokenId, {Substrate: bob.address}, {Substrate: alice.address}); + expect(amountAfter).to.be.equal(BigInt(0)); + + const transferTokenFromTx = async () 
=> helper.rft.transferTokenFrom(bob, collectionId, tokenId, {Substrate: bob.address}, {Substrate: charlie.address}, 100n); + await expect(transferTokenFromTx()).to.be.rejected; }); }); @@ -266,29 +319,33 @@ describe('User cannot approve for the amount greater than they own:', () => { let charlie: IKeyringPair; before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - charlie = privateKeyWrapper('//Charlie'); + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob, charlie] = await helper.arrange.createAccounts([100n, 100n, 100n], donor); }); }); - it('1 for NFT', async () => { - const collectionId = await createCollectionExpectSuccess(); - const itemId = await createItemExpectSuccess(alice, collectionId, 'NFT', bob.address); - await approveExpectFail(collectionId, itemId, bob, charlie, 2); + itSub('1 for NFT', async ({helper}) => { + const {collectionId} = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + const {tokenId} = await helper.nft.mintToken(alice, {collectionId: collectionId, owner: bob.address}); + const approveTx = async () => helper.signTransaction(bob, helper.constructApiCall('api.tx.unique.approve', [{Substrate: charlie.address}, collectionId, tokenId, 2])); + await expect(approveTx()).to.be.rejected; + expect(await helper.nft.isTokenApproved(collectionId, tokenId, {Substrate: charlie.address})).to.be.false; }); - it('Fungible', async () => { - const fungibleCollectionId = await createCollectionExpectSuccess({mode: {type: 'Fungible', decimalPoints: 0}}); - const newFungibleTokenId = await createItemExpectSuccess(alice, fungibleCollectionId, 'Fungible'); - await approveExpectFail(fungibleCollectionId, newFungibleTokenId, bob, charlie, 11); + itSub('Fungible', async ({helper}) => { + const {collectionId} = await helper.ft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}, 0); + await helper.ft.mintTokens(alice, collectionId, 10n, alice.address); + const tokenId = await helper.ft.getLastTokenId(collectionId); + const approveTx = async () => helper.ft.approveToken(alice, collectionId, tokenId, {Substrate: bob.address}, 11n); + await expect(approveTx()).to.be.rejected; }); - it('ReFungible', async () => { - const reFungibleCollectionId = await createCollectionExpectSuccess({mode: {type: 'ReFungible'}}); - const newReFungibleTokenId = await createItemExpectSuccess(alice, reFungibleCollectionId, 'ReFungible'); - await approveExpectFail(reFungibleCollectionId, newReFungibleTokenId, bob, charlie, 101); + itSub.ifWithPallets('ReFungible', [Pallets.ReFungible], async ({helper}) => { + const {collectionId} = await helper.rft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + const {tokenId} = await helper.rft.mintToken(alice, {collectionId: collectionId, owner: alice.address, pieces: 100n}); + const approveTx = async () => helper.rft.approveToken(alice, collectionId, tokenId, {Substrate: bob.address}, 101n); + await expect(approveTx()).to.be.rejected; }); }); @@ -299,39 +356,62 @@ describe('Administrator and collection owner do not need approval in order to ex let dave: IKeyringPair; before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - charlie = privateKeyWrapper('//Charlie'); - dave = privateKeyWrapper('//Dave'); + 
await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob, charlie, dave] = await helper.arrange.createAccounts([100n, 100n, 100n, 100n], donor); }); - }); - - it('NFT', async () => { - const collectionId = await createCollectionExpectSuccess(); - await setCollectionLimitsExpectSuccess(alice, collectionId, {ownerCanTransfer: true}); - const itemId = await createItemExpectSuccess(alice, collectionId, 'NFT', charlie.address); - await transferFromExpectSuccess(collectionId, itemId, alice, charlie, dave, 1, 'NFT'); - await addCollectionAdminExpectSuccess(alice, collectionId, bob.address); - await transferFromExpectSuccess(collectionId, itemId, bob, dave, alice, 1, 'NFT'); - }); - - it('Fungible up to an approved amount', async () => { - const collectionId = await createCollectionExpectSuccess({mode:{type: 'Fungible', decimalPoints: 0}}); - await setCollectionLimitsExpectSuccess(alice, collectionId, {ownerCanTransfer: true}); - const itemId = await createItemExpectSuccess(alice, collectionId, 'Fungible', charlie.address); - await transferFromExpectSuccess(collectionId, itemId, alice, charlie, dave, 1, 'Fungible'); - await addCollectionAdminExpectSuccess(alice, collectionId, bob.address); - await transferFromExpectSuccess(collectionId, itemId, bob, dave, alice, 1, 'Fungible'); - }); - - it('ReFungible up to an approved amount', async () => { - const collectionId = await createCollectionExpectSuccess({mode:{type: 'ReFungible'}}); - await setCollectionLimitsExpectSuccess(alice, collectionId, {ownerCanTransfer: true}); - const itemId = await createItemExpectSuccess(alice, collectionId, 'ReFungible', charlie.address); - await transferFromExpectSuccess(collectionId, itemId, alice, charlie, dave, 1, 'ReFungible'); - await addCollectionAdminExpectSuccess(alice, collectionId, bob.address); - await transferFromExpectSuccess(collectionId, itemId, bob, dave, alice, 1, 'ReFungible'); + }); + + itSub('NFT', async ({helper}) => { + const {collectionId} = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + await helper.collection.setLimits(alice, collectionId, {ownerCanTransfer: true}); + const {tokenId} = await helper.nft.mintToken(alice, {collectionId: collectionId, owner: charlie.address}); + + await helper.nft.transferTokenFrom(alice, collectionId, tokenId, {Substrate: charlie.address}, {Substrate: dave.address}); + const owner1 = await helper.nft.getTokenOwner(collectionId, tokenId); + expect(owner1.Substrate).to.be.equal(dave.address); + + await helper.collection.addAdmin(alice, collectionId, {Substrate: bob.address}); + await helper.nft.transferTokenFrom(bob, collectionId, tokenId, {Substrate: dave.address}, {Substrate: alice.address}); + const owner2 = await helper.nft.getTokenOwner(collectionId, tokenId); + expect(owner2.Substrate).to.be.equal(alice.address); + }); + + itSub('Fungible up to an approved amount', async ({helper}) => { + const {collectionId} = await helper.ft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}, 0); + await helper.collection.setLimits(alice, collectionId, {ownerCanTransfer: true}); + await helper.ft.mintTokens(alice, collectionId, 10n, charlie.address); + const tokenId = await helper.ft.getLastTokenId(collectionId); + + const daveBalanceBefore = await helper.ft.getBalance(collectionId, {Substrate: dave.address}); + await helper.ft.transferTokenFrom(alice, collectionId, tokenId, {Substrate: charlie.address}, {Substrate: 
dave.address}, 1n); + const daveBalanceAfter = await helper.ft.getBalance(collectionId, {Substrate: dave.address}); + expect(daveBalanceAfter - daveBalanceBefore).to.be.equal(BigInt(1)); + + await helper.collection.addAdmin(alice ,collectionId, {Substrate: bob.address}); + + const aliceBalanceBefore = await helper.ft.getBalance(collectionId, {Substrate: alice.address}); + await helper.ft.transferTokenFrom(bob, collectionId, tokenId, {Substrate: dave.address}, {Substrate: alice.address}, 1n); + const aliceBalanceAfter = await helper.ft.getBalance(collectionId, {Substrate: alice.address}); + expect(aliceBalanceAfter - aliceBalanceBefore).to.be.equal(BigInt(1)); + }); + + itSub.ifWithPallets('ReFungible up to an approved amount', [Pallets.ReFungible], async ({helper}) => { + const {collectionId} = await helper.rft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + await helper.collection.setLimits(alice, collectionId, {ownerCanTransfer: true}); + const {tokenId} = await helper.rft.mintToken(alice, {collectionId: collectionId, owner: charlie.address, pieces: 100n}); + + const daveBefore = await helper.rft.getTokenBalance(collectionId, tokenId, {Substrate: dave.address}); + await helper.rft.transferTokenFrom(alice, collectionId, tokenId, {Substrate: charlie.address}, {Substrate: dave.address}, 1n); + const daveAfter = await helper.rft.getTokenBalance(collectionId, tokenId, {Substrate: dave.address}); + expect(daveAfter - daveBefore).to.be.equal(BigInt(1)); + + await helper.collection.addAdmin(alice, collectionId, {Substrate: bob.address}); + + const aliceBefore = await helper.rft.getTokenBalance(collectionId, tokenId, {Substrate: alice.address}); + await helper.rft.transferTokenFrom(bob, collectionId, tokenId, {Substrate: dave.address}, {Substrate: alice.address}, 1n); + const aliceAfter = await helper.rft.getTokenBalance(collectionId, tokenId, {Substrate: alice.address}); + expect(aliceAfter - aliceBefore).to.be.equal(BigInt(1)); }); }); @@ -342,48 +422,48 @@ describe('Repeated approvals add up', () => { let dave: IKeyringPair; before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - charlie = privateKeyWrapper('//Charlie'); - dave = privateKeyWrapper('//Dave'); + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob, charlie, dave] = await helper.arrange.createAccounts([100n, 100n, 100n, 100n], donor); }); - }); + }); - it.skip('Owned 10, approval 1: 1, approval 2: 1, resulting approved value: 2. Fungible', async () => { - const collectionId = await createCollectionExpectSuccess({mode:{type: 'Fungible', decimalPoints: 0}}); - await createItemExpectSuccess(alice, collectionId, 'Fungible', alice.address); - await approveExpectSuccess(collectionId, 0, alice, bob.address, 1); - await approveExpectSuccess(collectionId, 0, alice, charlie.address, 1); + itSub.skip('Owned 10, approval 1: 1, approval 2: 1, resulting approved value: 2. 
Fungible', async ({helper}) => { + const collection = await helper.ft.mintCollection(alice, {}); + await collection.mint(alice, 10n); + await collection.approveTokens(alice, {Substrate: bob.address}, 1n); + await collection.approveTokens(alice, {Substrate: charlie.address}, 1n); // const allowances1 = await getAllowance(collectionId, 0, Alice.address, Bob.address); // const allowances2 = await getAllowance(collectionId, 0, Alice.address, Charlie.address); // expect(allowances1 + allowances2).to.be.eq(BigInt(2)); }); - it.skip('Owned 10, approval 1: 1, approval 2: 1, resulting approved value: 2. ReFungible', async () => { - const collectionId = await createCollectionExpectSuccess({mode:{type: 'ReFungible'}}); - const itemId = await createItemExpectSuccess(alice, collectionId, 'ReFungible', alice.address); - await approveExpectSuccess(collectionId, itemId, alice, bob.address, 1); - await approveExpectSuccess(collectionId, itemId, alice, charlie.address, 1); + itSub.skip('Owned 10, approval 1: 1, approval 2: 1, resulting approved value: 2. ReFungible', async ({helper}) => { + const collection = await helper.rft.mintCollection(alice, {}); + const token = await collection.mintToken(alice, 10n); + await token.approve(alice, {Substrate: bob.address}, 1n); + await token.approve(alice, {Substrate: charlie.address}, 1n); // const allowances1 = await getAllowance(collectionId, itemId, Alice.address, Bob.address); // const allowances2 = await getAllowance(collectionId, itemId, Alice.address, Charlie.address); // expect(allowances1 + allowances2).to.be.eq(BigInt(2)); }); // Canceled by changing approve logic - it.skip('Cannot approve for more than total user`s amount (owned: 10, approval 1: 5 - should succeed, approval 2: 6 - should fail). Fungible', async () => { - const collectionId = await createCollectionExpectSuccess({mode:{type: 'Fungible', decimalPoints: 0}}); - await createItemExpectSuccess(alice, collectionId, 'Fungible', dave.address); - await approveExpectSuccess(collectionId, 0, dave, bob.address, 5); - await approveExpectFail(collectionId, 0, dave, charlie, 6); + itSub.skip('Cannot approve for more than total user\'s amount (owned: 10, approval 1: 5 - should succeed, approval 2: 6 - should fail). Fungible', async ({helper}) => { + const collection = await helper.ft.mintCollection(alice, {}); + await collection.mint(alice, 10n, {Substrate: dave.address}); + await collection.approveTokens(dave, {Substrate: bob.address}, 5n); + await expect(collection.approveTokens(dave, {Substrate: charlie.address}, 6n)) + .to.be.rejectedWith('this test would fail (since it is skipped), replace this expecting message with what would have been received'); }); // Canceled by changing approve logic - it.skip('Cannot approve for more than total users amount (owned: 100, approval 1: 50 - should succeed, approval 2: 51 - should fail). ReFungible', async () => { - const collectionId = await createCollectionExpectSuccess({mode:{type: 'ReFungible'}}); - const itemId = await createItemExpectSuccess(alice, collectionId, 'ReFungible', dave.address); - await approveExpectSuccess(collectionId, itemId, dave, bob.address, 50); - await approveExpectFail(collectionId, itemId, dave, charlie, 51); + itSub.skip('Cannot approve for more than total user\'s amount (owned: 100, approval 1: 50 - should succeed, approval 2: 51 - should fail). 
ReFungible', async ({helper}) => { + const collection = await helper.rft.mintCollection(alice, {}); + const token = await collection.mintToken(alice, 100n, {Substrate: dave.address}); + await token.approve(dave, {Substrate: bob.address}, 50n); + await expect(token.approve(dave, {Substrate: charlie.address}, 51n)) + .to.be.rejectedWith('this test would fail (since it is skipped), replace this expecting message with what would have been received'); }); }); @@ -393,19 +473,18 @@ describe('Integration Test approve(spender, collection_id, item_id, amount) with let charlie: IKeyringPair; before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - charlie = privateKeyWrapper('//Charlie'); + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob, charlie] = await helper.arrange.createAccounts([100n, 100n, 100n], donor); }); }); - it('can be called by collection admin on non-owned item', async () => { - const collectionId = await createCollectionExpectSuccess(); - const itemId = await createItemExpectSuccess(alice, collectionId, 'NFT', alice.address); - - await addCollectionAdminExpectSuccess(alice, collectionId, bob.address); - await adminApproveFromExpectFail(collectionId, itemId, bob, alice.address, charlie.address); + itSub('can be called by collection admin on non-owned item', async ({helper}) => { + const {collectionId} = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + const {tokenId} = await helper.nft.mintToken(alice, {collectionId: collectionId, owner: alice.address}); + await helper.collection.addAdmin(alice, collectionId, {Substrate: bob.address}); + const approveTx = async () => helper.nft.approveToken(bob, collectionId, tokenId, {Substrate: charlie.address}); + await expect(approveTx()).to.be.rejected; }); }); @@ -415,90 +494,112 @@ describe('Negative Integration Test approve(spender, collection_id, item_id, amo let charlie: IKeyringPair; before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - charlie = privateKeyWrapper('//Charlie'); + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob, charlie] = await helper.arrange.createAccounts([100n, 100n, 100n], donor); }); }); - it('Approve for a collection that does not exist', async () => { - await usingApi(async (api: ApiPromise) => { - // nft - const nftCollectionCount = await getCreatedCollectionCount(api); - await approveExpectFail(nftCollectionCount + 1, 1, alice, bob); - // fungible - const fungibleCollectionCount = await getCreatedCollectionCount(api); - await approveExpectFail(fungibleCollectionCount + 1, 0, alice, bob); - // reFungible - const reFungibleCollectionCount = await getCreatedCollectionCount(api); - await approveExpectFail(reFungibleCollectionCount + 1, 1, alice, bob); - }); + itSub('[nft] Approve for a collection that does not exist', async ({helper}) => { + const collectionId = 1 << 32 - 1; + const approveTx = async () => helper.nft.approveToken(bob, collectionId, 1, {Substrate: charlie.address}); + await expect(approveTx()).to.be.rejected; }); - it('Approve for a collection that was destroyed', async () => { - // nft - const nftCollectionId = await createCollectionExpectSuccess(); - await destroyCollectionExpectSuccess(nftCollectionId); - 
await approveExpectFail(nftCollectionId, 1, alice, bob); - // fungible - const fungibleCollectionId = await createCollectionExpectSuccess({mode: {type: 'Fungible', decimalPoints: 0}}); - await destroyCollectionExpectSuccess(fungibleCollectionId); - await approveExpectFail(fungibleCollectionId, 0, alice, bob); - // reFungible - const reFungibleCollectionId = - await createCollectionExpectSuccess({mode: {type: 'ReFungible'}}); - await destroyCollectionExpectSuccess(reFungibleCollectionId); - await approveExpectFail(reFungibleCollectionId, 1, alice, bob); - }); - - it('Approve transfer of a token that does not exist', async () => { - // nft - const nftCollectionId = await createCollectionExpectSuccess(); - await approveExpectFail(nftCollectionId, 2, alice, bob); - // reFungible - const reFungibleCollectionId = - await createCollectionExpectSuccess({mode: {type: 'ReFungible'}}); - await approveExpectFail(reFungibleCollectionId, 2, alice, bob); - }); - - it('Approve using the address that does not own the approved token', async () => { - const nftCollectionId = await createCollectionExpectSuccess(); - // nft - const newNftTokenId = await createItemExpectSuccess(alice, nftCollectionId, 'NFT'); - await approveExpectFail(nftCollectionId, newNftTokenId, bob, alice); - // fungible - const fungibleCollectionId = await createCollectionExpectSuccess({mode: {type: 'Fungible', decimalPoints: 0}}); - const newFungibleTokenId = await createItemExpectSuccess(alice, fungibleCollectionId, 'Fungible'); - await approveExpectFail(fungibleCollectionId, newFungibleTokenId, bob, alice); - // reFungible - const reFungibleCollectionId = - await createCollectionExpectSuccess({mode: {type: 'ReFungible'}}); - const newReFungibleTokenId = await createItemExpectSuccess(alice, reFungibleCollectionId, 'ReFungible'); - await approveExpectFail(reFungibleCollectionId, newReFungibleTokenId, bob, alice); - }); - - it('should fail if approved more ReFungibles than owned', async () => { - const nftCollectionId = await createCollectionExpectSuccess({mode: {type: 'ReFungible'}}); - const newNftTokenId = await createItemExpectSuccess(alice, nftCollectionId, 'ReFungible'); - await transferExpectSuccess(nftCollectionId, newNftTokenId, alice, bob, 100, 'ReFungible'); - await approveExpectSuccess(nftCollectionId, newNftTokenId, bob, alice.address, 100); - await approveExpectFail(nftCollectionId, newNftTokenId, bob, alice, 101); - }); - - it('should fail if approved more Fungibles than owned', async () => { - const nftCollectionId = await createCollectionExpectSuccess({mode: {type: 'Fungible', decimalPoints: 0}}); - const newNftTokenId = await createItemExpectSuccess(alice, nftCollectionId, 'Fungible'); - await transferExpectSuccess(nftCollectionId, newNftTokenId, alice, bob, 10, 'Fungible'); - await approveExpectSuccess(nftCollectionId, newNftTokenId, bob, alice.address, 10); - await approveExpectFail(nftCollectionId, newNftTokenId, bob, alice, 11); - }); - - it('fails when called by collection owner on non-owned item when OwnerCanTransfer == false', async () => { - const collectionId = await createCollectionExpectSuccess(); - const itemId = await createItemExpectSuccess(alice, collectionId, 'NFT', bob.address); - await setCollectionLimitsExpectSuccess(alice, collectionId, {ownerCanTransfer: false}); - - await approveExpectFail(collectionId, itemId, alice, charlie); + itSub('[fungible] Approve for a collection that does not exist', async ({helper}) => { + const collectionId = 1 << 32 - 1; + const approveTx = async () => 
helper.ft.approveToken(bob, collectionId, 1, {Substrate: charlie.address}); + await expect(approveTx()).to.be.rejected; + }); + + itSub.ifWithPallets('[refungible] Approve for a collection that does not exist', [Pallets.ReFungible], async ({helper}) => { + const collectionId = 1 << 32 - 1; + const approveTx = async () => helper.rft.approveToken(bob, collectionId, 1, {Substrate: charlie.address}); + await expect(approveTx()).to.be.rejected; + }); + + itSub('[nft] Approve for a collection that was destroyed', async ({helper}) => { + const {collectionId} = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + await helper.nft.burn(alice, collectionId); + const approveTx = async () => helper.nft.approveToken(alice, collectionId, 1, {Substrate: bob.address}); + await expect(approveTx()).to.be.rejected; + }); + + itSub('[fungible] Approve for a collection that was destroyed', async ({helper}) => { + const {collectionId} = await helper.ft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + await helper.ft.burn(alice, collectionId); + const approveTx = async () => helper.ft.approveToken(alice, collectionId, 1, {Substrate: bob.address}); + await expect(approveTx()).to.be.rejected; + }); + + itSub.ifWithPallets('[refungible] Approve for a collection that was destroyed', [Pallets.ReFungible], async ({helper}) => { + const {collectionId} = await helper.rft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + await helper.rft.burn(alice, collectionId); + const approveTx = async () => helper.rft.approveToken(alice, collectionId, 1, {Substrate: bob.address}); + await expect(approveTx()).to.be.rejected; + }); + + itSub('[nft] Approve transfer of a token that does not exist', async ({helper}) => { + const {collectionId} = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + const approveTx = async () => helper.nft.approveToken(alice, collectionId, 2, {Substrate: bob.address}); + await expect(approveTx()).to.be.rejected; + }); + + itSub.ifWithPallets('[refungible] Approve transfer of a token that does not exist', [Pallets.ReFungible], async ({helper}) => { + const {collectionId} = await helper.rft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + const approveTx = async () => helper.rft.approveToken(alice, collectionId, 2, {Substrate: bob.address}); + await expect(approveTx()).to.be.rejected; + }); + + itSub('[nft] Approve using the address that does not own the approved token', async ({helper}) => { + const {collectionId} = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + const {tokenId} = await helper.nft.mintToken(alice, {collectionId: collectionId, owner: alice.address}); + const approveTx = async () => helper.nft.approveToken(bob, collectionId, tokenId, {Substrate: alice.address}); + await expect(approveTx()).to.be.rejected; + }); + + itSub('[fungible] Approve using the address that does not own the approved token', async ({helper}) => { + const {collectionId} = await helper.ft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + await helper.ft.mintTokens(alice, collectionId, 10n, alice.address); + const tokenId = await helper.ft.getLastTokenId(collectionId); + const approveTx = async () => helper.ft.approveToken(bob, collectionId, tokenId, {Substrate: alice.address}); + await expect(approveTx()).to.be.rejected; + }); + + 
itSub.ifWithPallets('[refungible] Approve using the address that does not own the approved token', [Pallets.ReFungible], async ({helper}) => { + const {collectionId} = await helper.rft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + const {tokenId} = await helper.rft.mintToken(alice, {collectionId: collectionId, owner: alice.address, pieces: 100n}); + const approveTx = async () => helper.rft.approveToken(bob, collectionId, tokenId, {Substrate: alice.address}); + await expect(approveTx()).to.be.rejected; + }); + + itSub.ifWithPallets('should fail if approved more ReFungibles than owned', [Pallets.ReFungible], async ({helper}) => { + const {collectionId} = await helper.rft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + const {tokenId} = await helper.rft.mintToken(alice, {collectionId: collectionId, owner: alice.address, pieces: 100n}); + await helper.rft.transferToken(alice, collectionId, tokenId, {Substrate: bob.address}, 100n); + await helper.rft.approveToken(bob, collectionId, tokenId, {Substrate: alice.address}, 100n); + + const approveTx = async () => helper.rft.approveToken(bob, collectionId, tokenId, {Substrate: alice.address}, 101n); + await expect(approveTx()).to.be.rejected; + }); + + itSub('should fail if approved more Fungibles than owned', async ({helper}) => { + const {collectionId} = await helper.ft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + await helper.ft.mintTokens(alice, collectionId, 10n, alice.address); + const tokenId = await helper.ft.getLastTokenId(collectionId); + + await helper.ft.transferToken(alice, collectionId, tokenId, {Substrate: bob.address}, 10n); + await helper.ft.approveToken(bob, collectionId, tokenId, {Substrate: alice.address}, 10n); + const approveTx = async () => helper.ft.approveToken(bob, collectionId, tokenId, {Substrate: alice.address}, 11n); + await expect(approveTx()).to.be.rejected; + }); + + itSub('fails when called by collection owner on non-owned item when OwnerCanTransfer == false', async ({helper}) => { + const {collectionId} = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + const {tokenId} = await helper.nft.mintToken(alice, {collectionId: collectionId, owner: bob.address}); + await helper.collection.setLimits(alice, collectionId, {ownerCanTransfer: false}); + + const approveTx = async () => helper.nft.approveToken(alice, collectionId, tokenId, {Substrate: charlie.address}); + await expect(approveTx()).to.be.rejected; }); }); diff --git a/tests/src/block-production.test.ts b/tests/src/block-production.test.ts index 1f01a2a54a..9ba293334a 100644 --- a/tests/src/block-production.test.ts +++ b/tests/src/block-production.test.ts @@ -14,9 +14,8 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . 
-import usingApi from './substrate/substrate-api'; -import {expect} from 'chai'; import {ApiPromise} from '@polkadot/api'; +import {expect, itSub} from './util'; const BLOCK_TIME_MS = 12000; const TOLERANCE_MS = 3000; @@ -37,10 +36,8 @@ function getBlocks(api: ApiPromise): Promise { } describe('Block Production smoke test', () => { - it('Node produces new blocks', async () => { - await usingApi(async (api) => { - const blocks: number[] | undefined = await getBlocks(api); - expect(blocks[0]).to.be.lessThan(blocks[1]); - }); + itSub('Node produces new blocks', async ({helper}) => { + const blocks: number[] | undefined = await getBlocks(helper.getApi()); + expect(blocks[0]).to.be.lessThan(blocks[1]); }); }); diff --git a/tests/src/burnItem.test.ts b/tests/src/burnItem.test.ts index 7e686d186e..4439cf940a 100644 --- a/tests/src/burnItem.test.ts +++ b/tests/src/burnItem.test.ts @@ -14,281 +14,145 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . -import {default as usingApi, submitTransactionAsync, submitTransactionExpectFailAsync} from './substrate/substrate-api'; import {IKeyringPair} from '@polkadot/types/types'; -import { - createCollectionExpectSuccess, - createItemExpectSuccess, - getGenericResult, - normalizeAccountId, - addCollectionAdminExpectSuccess, - getBalance, - setCollectionLimitsExpectSuccess, - isTokenExists, -} from './util/helpers'; - -import chai from 'chai'; -import chaiAsPromised from 'chai-as-promised'; -chai.use(chaiAsPromised); -const expect = chai.expect; - -let alice: IKeyringPair; -let bob: IKeyringPair; +import {expect, itSub, Pallets, usingPlaygrounds} from './util'; + describe('integration test: ext. burnItem():', () => { + let donor: IKeyringPair; + let alice: IKeyringPair; + let bob: IKeyringPair; + before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); + await usingPlaygrounds(async (helper, privateKey) => { + donor = await privateKey({filename: __filename}); + [alice, bob] = await helper.arrange.createAccounts([100n, 100n], donor); }); }); - it('Burn item in NFT collection', async () => { - const createMode = 'NFT'; - const collectionId = await createCollectionExpectSuccess({mode: {type: createMode}}); - const tokenId = await createItemExpectSuccess(alice, collectionId, createMode); + itSub('Burn item in NFT collection', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice); + const token = await collection.mintToken(alice); - await usingApi(async (api) => { - const tx = api.tx.unique.burnItem(collectionId, tokenId, 1); - const events = await submitTransactionAsync(alice, tx); - const result = getGenericResult(events); - - expect(result.success).to.be.true; - // Get the item - expect(await isTokenExists(api, collectionId, tokenId)).to.be.false; - }); + await token.burn(alice); + expect(await token.doesExist()).to.be.false; }); - it('Burn item in Fungible collection', async () => { - const createMode = 'Fungible'; - const collectionId = await createCollectionExpectSuccess({mode: {type: createMode, decimalPoints: 0}}); - await createItemExpectSuccess(alice, collectionId, createMode); // Helper creates 10 fungible tokens - const tokenId = 0; // ignored + itSub('Burn item in Fungible collection', async ({helper}) => { + const collection = await helper.ft.mintCollection(alice, {}, 10); + await collection.mint(alice, 10n); - await usingApi(async (api) => { - // Destroy 1 of 10 - 
const tx = api.tx.unique.burnItem(collectionId, tokenId, 1); - const events = await submitTransactionAsync(alice, tx); - const result = getGenericResult(events); + await collection.burnTokens(alice, 1n); + expect(await collection.getBalance({Substrate: alice.address})).to.eq(9n); + }); - // Get alice balance - const balance = await getBalance(api, collectionId, alice.address, 0); + itSub.ifWithPallets('Burn item in ReFungible collection', [Pallets.ReFungible], async function({helper}) { + const collection = await helper.rft.mintCollection(alice); + const token = await collection.mintToken(alice, 100n); - // What to expect - expect(result.success).to.be.true; - expect(balance).to.be.equal(9n); - }); + await token.burn(alice, 90n); + expect(await token.getBalance({Substrate: alice.address})).to.eq(10n); + + await token.burn(alice, 10n); + expect(await token.getBalance({Substrate: alice.address})).to.eq(0n); }); - it('Burn item in ReFungible collection', async () => { - const createMode = 'ReFungible'; - const collectionId = await createCollectionExpectSuccess({mode: {type: createMode}}); - const tokenId = await createItemExpectSuccess(alice, collectionId, createMode); + itSub.ifWithPallets('Burn owned portion of item in ReFungible collection', [Pallets.ReFungible], async function({helper}) { + const collection = await helper.rft.mintCollection(alice); + const token = await collection.mintToken(alice, 100n); - await usingApi(async (api) => { - const tx = api.tx.unique.burnItem(collectionId, tokenId, 100); - const events = await submitTransactionAsync(alice, tx); - const result = getGenericResult(events); + await token.transfer(alice, {Substrate: bob.address}, 1n); - // Get alice balance - const balance = await getBalance(api, collectionId, alice.address, tokenId); + expect(await token.getBalance({Substrate: alice.address})).to.eq(99n); + expect(await token.getBalance({Substrate: bob.address})).to.eq(1n); - // What to expect - expect(result.success).to.be.true; - expect(balance).to.be.equal(0n); - }); - }); - - it('Burn owned portion of item in ReFungible collection', async () => { - const createMode = 'ReFungible'; - const collectionId = await createCollectionExpectSuccess({mode: {type: createMode}}); - const tokenId = await createItemExpectSuccess(alice, collectionId, createMode); - - await usingApi(async (api) => { - // Transfer 1/100 of the token to Bob - const transfertx = api.tx.unique.transfer(normalizeAccountId(bob.address), collectionId, tokenId, 1); - const events1 = await submitTransactionAsync(alice, transfertx); - const result1 = getGenericResult(events1); - - // Get balances - const bobBalanceBefore = await getBalance(api, collectionId, bob.address, tokenId); - const aliceBalanceBefore = await getBalance(api, collectionId, alice.address, tokenId); - - // Bob burns his portion - const tx = api.tx.unique.burnItem(collectionId, tokenId, 1); - const events2 = await submitTransactionAsync(bob, tx); - const result2 = getGenericResult(events2); - - // Get balances - const bobBalanceAfter = await getBalance(api, collectionId, bob.address, tokenId); - const aliceBalanceAfter = await getBalance(api, collectionId, alice.address, tokenId); - // console.log(balance); - - // What to expect before burning - expect(result1.success).to.be.true; - expect(aliceBalanceBefore).to.be.equal(99n); - expect(bobBalanceBefore).to.be.equal(1n); - - // What to expect after burning - expect(result2.success).to.be.true; - expect(aliceBalanceAfter).to.be.equal(99n); - expect(bobBalanceAfter).to.be.equal(0n); - 
}); + await token.burn(bob, 1n); + expect(await token.getBalance({Substrate: alice.address})).to.eq(99n); + expect(await token.getBalance({Substrate: bob.address})).to.eq(0n); }); - }); describe('integration test: ext. burnItem() with admin permissions:', () => { + let donor: IKeyringPair; + let alice: IKeyringPair; + let bob: IKeyringPair; + before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); + await usingPlaygrounds(async (helper, privateKey) => { + donor = await privateKey({filename: __filename}); + [alice, bob] = await helper.arrange.createAccounts([100n, 100n], donor); }); }); - it('Burn item in NFT collection', async () => { - const createMode = 'NFT'; - const collectionId = await createCollectionExpectSuccess({mode: {type: createMode}}); - await setCollectionLimitsExpectSuccess(alice, collectionId, {ownerCanTransfer: true}); - const tokenId = await createItemExpectSuccess(alice, collectionId, createMode); - await addCollectionAdminExpectSuccess(alice, collectionId, bob.address); - - await usingApi(async (api) => { - const tx = api.tx.unique.burnFrom(collectionId, {Substrate: alice.address}, tokenId, 1); - const events = await submitTransactionAsync(bob, tx); - const result = getGenericResult(events); - - expect(result.success).to.be.true; - // Get the item - expect(await isTokenExists(api, collectionId, tokenId)).to.be.false; - }); + itSub('Burn item in NFT collection', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice); + await collection.setLimits(alice, {ownerCanTransfer: true}); + await collection.addAdmin(alice, {Substrate: bob.address}); + const token = await collection.mintToken(alice); + + await token.burnFrom(bob, {Substrate: alice.address}); + expect(await token.doesExist()).to.be.false; }); - // TODO: burnFrom - it('Burn item in Fungible collection', async () => { - const createMode = 'Fungible'; - const collectionId = await createCollectionExpectSuccess({mode: {type: createMode, decimalPoints: 0}}); - await setCollectionLimitsExpectSuccess(alice, collectionId, {ownerCanTransfer: true}); - const tokenId = await createItemExpectSuccess(alice, collectionId, createMode); // Helper creates 10 fungible tokens - await addCollectionAdminExpectSuccess(alice, collectionId, bob.address); - - await usingApi(async (api) => { - // Destroy 1 of 10 - const tx = api.tx.unique.burnFrom(collectionId, normalizeAccountId(alice.address), tokenId, 1); - const events = await submitTransactionAsync(bob, tx); - const result = getGenericResult(events); - - // Get alice balance - const balance = await getBalance(api, collectionId, alice.address, 0); - - // What to expect - expect(result.success).to.be.true; - expect(balance).to.be.equal(9n); - }); + itSub('Burn item in Fungible collection', async ({helper}) => { + const collection = await helper.ft.mintCollection(alice, {}, 0); + await collection.setLimits(alice, {ownerCanTransfer: true}); + await collection.addAdmin(alice, {Substrate: bob.address}); + await collection.mint(alice, 10n); + + await collection.burnTokensFrom(bob, {Substrate: alice.address}, 1n); + expect(await collection.getBalance({Substrate: alice.address})).to.eq(9n); }); - // TODO: burnFrom - it('Burn item in ReFungible collection', async () => { - const createMode = 'ReFungible'; - const collectionId = await createCollectionExpectSuccess({mode: {type: createMode}}); - await setCollectionLimitsExpectSuccess(alice, collectionId, {ownerCanTransfer: true}); 
- const tokenId = await createItemExpectSuccess(alice, collectionId, createMode); - await addCollectionAdminExpectSuccess(alice, collectionId, bob.address); - - await usingApi(async (api) => { - const tx = api.tx.unique.burnFrom(collectionId, normalizeAccountId(alice.address), tokenId, 100); - const events = await submitTransactionAsync(bob, tx); - const result = getGenericResult(events); - // Get alice balance - expect(result.success).to.be.true; - // Get the item - expect(await isTokenExists(api, collectionId, tokenId)).to.be.false; - }); + itSub.ifWithPallets('Burn item in ReFungible collection', [Pallets.ReFungible], async function({helper}) { + const collection = await helper.rft.mintCollection(alice); + await collection.setLimits(alice, {ownerCanTransfer: true}); + await collection.addAdmin(alice, {Substrate: bob.address}); + const token = await collection.mintToken(alice, 100n); + + await token.burnFrom(bob, {Substrate: alice.address}, 100n); + expect(await token.doesExist()).to.be.false; }); }); describe('Negative integration test: ext. burnItem():', () => { + let donor: IKeyringPair; + let alice: IKeyringPair; + let bob: IKeyringPair; + before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); + await usingPlaygrounds(async (helper, privateKey) => { + donor = await privateKey({filename: __filename}); + [alice, bob] = await helper.arrange.createAccounts([100n, 100n], donor); }); }); - it('Burn a token that was never created', async () => { - const createMode = 'NFT'; - const collectionId = await createCollectionExpectSuccess({mode: {type: createMode}}); - const tokenId = 10; - - await usingApi(async (api) => { - const tx = api.tx.unique.burnItem(collectionId, tokenId, 1); - const badTransaction = async function () { - await submitTransactionExpectFailAsync(alice, tx); - }; - await expect(badTransaction()).to.be.rejected; - }); - + itSub('Burn a token that was never created', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice); + await expect(collection.burnToken(alice, 10)).to.be.rejectedWith('common.TokenNotFound'); }); - it('Burn a token using the address that does not own it', async () => { - const createMode = 'NFT'; - const collectionId = await createCollectionExpectSuccess({mode: {type: createMode}}); - const tokenId = await createItemExpectSuccess(alice, collectionId, createMode); - - await usingApi(async (api) => { - const tx = api.tx.unique.burnItem(collectionId, tokenId, 1); - const badTransaction = async function () { - await submitTransactionExpectFailAsync(bob, tx); - }; - await expect(badTransaction()).to.be.rejected; - }); + itSub('Burn a token using the address that does not own it', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice); + const token = await collection.mintToken(alice); + await expect(token.burn(bob)).to.be.rejectedWith('common.NoPermission'); }); - it('Transfer a burned a token', async () => { - const createMode = 'NFT'; - const collectionId = await createCollectionExpectSuccess({mode: {type: createMode}}); - const tokenId = await createItemExpectSuccess(alice, collectionId, createMode); - - await usingApi(async (api) => { - - const burntx = api.tx.unique.burnItem(collectionId, tokenId, 1); - const events1 = await submitTransactionAsync(alice, burntx); - const result1 = getGenericResult(events1); - expect(result1.success).to.be.true; - - const tx = 
api.tx.unique.transfer(normalizeAccountId(bob.address), collectionId, tokenId, 1); - const badTransaction = async function () { - await submitTransactionExpectFailAsync(alice, tx); - }; - await expect(badTransaction()).to.be.rejected; - }); + itSub('Transfer a burned token', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice); + const token = await collection.mintToken(alice); + await token.burn(alice); + await expect(token.transfer(alice, {Substrate: bob.address})).to.be.rejectedWith('common.TokenNotFound'); }); - it('Burn more than owned in Fungible collection', async () => { - const createMode = 'Fungible'; - const collectionId = await createCollectionExpectSuccess({mode: {type: createMode, decimalPoints: 0}}); - // Helper creates 10 fungible tokens - await createItemExpectSuccess(alice, collectionId, createMode); - const tokenId = 0; // ignored - - await usingApi(async (api) => { - // Destroy 11 of 10 - const tx = api.tx.unique.burnItem(collectionId, tokenId, 11); - const badTransaction = async function () { - await submitTransactionExpectFailAsync(alice, tx); - }; - await expect(badTransaction()).to.be.rejected; - - // Get alice balance - const balance = await getBalance(api, collectionId, alice.address, 0); - - // What to expect - expect(balance).to.be.equal(10n); - }); + itSub('Burn more than owned in Fungible collection', async ({helper}) => { + const collection = await helper.ft.mintCollection(alice, {}, 0); + await collection.mint(alice, 10n); + await expect(collection.burnTokens(alice, 11n)).to.be.rejectedWith('common.TokenValueTooLow'); + expect(await collection.getBalance({Substrate: alice.address})).to.eq(10n); }); - }); diff --git a/tests/src/calibrate.ts b/tests/src/calibrate.ts new file mode 100644 index 0000000000..390a7b4e8d --- /dev/null +++ b/tests/src/calibrate.ts @@ -0,0 +1,177 @@ +import {IKeyringPair} from '@polkadot/types/types'; + +import {usingEthPlaygrounds, EthUniqueHelper} from './eth/util'; + + +function linearRegression(points: { x: bigint, y: bigint }[]) { + let sumxy = 0n; + let sumx = 0n; + let sumy = 0n; + let sumx2 = 0n; + const n = points.length; + for (let i = 0; i < n; i++) { + const p = points[i]; + sumxy += p.x * p.y; + sumx += p.x; + sumy += p.y; + sumx2 += p.x * p.x; + } + + const nb = BigInt(n); + + const a = (nb * sumxy - sumx * sumy) / (nb * sumx2 - sumx * sumx); + const b = (sumy - a * sumx) / nb; + + return {a, b}; +} + +// JS has no builtin function to calculate sqrt of bigint +// https://stackoverflow.com/a/53684036/6190169 +function sqrt(value: bigint) { + if (value < 0n) { + throw 'square root of negative numbers is not supported'; + } + + if (value < 2n) { + return value; + } + + function newtonIteration(n: bigint, x0: bigint): bigint { + const x1 = ((n / x0) + x0) >> 1n; + if (x0 === x1 || x0 === (x1 - 1n)) { + return x0; + } + return newtonIteration(n, x1); + } + + return newtonIteration(value, 1n); +} + +function _error(points: { x: bigint, y: bigint }[], hypothesis: (a: bigint) => bigint) { + return sqrt(points.map(p => { + const v = hypothesis(p.x); + const vv = p.y; + + return (v - vv) ** 2n; + }).reduce((a, b) => a + b, 0n) / BigInt(points.length)); +} + +async function calibrateWeightToFee(helper: EthUniqueHelper, privateKey: (account: string) => Promise<IKeyringPair>) { + const alice = await privateKey('//Alice'); + const bob = await privateKey('//Bob'); + const dataPoints = []; + + { + const collection = await helper.nft.mintCollection(alice, {name: 'New', description: 'New collection', tokenPrefix: 'NEW'}); +
const token = await collection.mintToken(alice, {Substrate: alice.address}); + const aliceBalanceBefore = await helper.balance.getSubstrate(alice.address); + await token.transfer(alice, {Substrate: bob.address}); + const aliceBalanceAfter = await helper.balance.getSubstrate(alice.address); + + console.log(`Original price: ${Number(aliceBalanceBefore - aliceBalanceAfter) / Number(helper.balance.getOneTokenNominal())} UNQ`); + } + + const api = helper.getApi(); + const defaultCoeff = (api.consts.configuration.defaultWeightToFeeCoefficient as any).toBigInt(); + for (let i = -5; i < 5; i++) { + await helper.signTransaction(alice, api.tx.sudo.sudo(api.tx.configuration.setWeightToFeeCoefficientOverride(defaultCoeff + defaultCoeff / 1000n * BigInt(i)))); + + const coefficient = (await api.query.configuration.weightToFeeCoefficientOverride() as any).toBigInt(); + const collection = await helper.nft.mintCollection(alice, {name: 'New', description: 'New collection', tokenPrefix: 'NEW'}); + const token = await collection.mintToken(alice, {Substrate: alice.address}); + + const aliceBalanceBefore = await helper.balance.getSubstrate(alice.address); + await token.transfer(alice, {Substrate: bob.address}); + const aliceBalanceAfter = await helper.balance.getSubstrate(alice.address); + + const transferPrice = aliceBalanceBefore - aliceBalanceAfter; + + dataPoints.push({x: transferPrice, y: coefficient}); + } + const {a, b} = linearRegression(dataPoints); + + // console.log(`Error: ${error(dataPoints, x => a*x+b)}`); + + const perfectValue = a * helper.balance.getOneTokenNominal() / 10n + b; + await helper.signTransaction(alice, api.tx.sudo.sudo(api.tx.configuration.setWeightToFeeCoefficientOverride(perfectValue.toString()))); + + { + const collection = await helper.nft.mintCollection(alice, {name: 'New', description: 'New collection', tokenPrefix: 'NEW'}); + const token = await collection.mintToken(alice, {Substrate: alice.address}); + const aliceBalanceBefore = await helper.balance.getSubstrate(alice.address); + await token.transfer(alice, {Substrate: bob.address}); + const aliceBalanceAfter = await helper.balance.getSubstrate(alice.address); + + console.log(`Calibrated price: ${Number(aliceBalanceBefore - aliceBalanceAfter) / Number(helper.balance.getOneTokenNominal())} UNQ`); + } +} + +async function calibrateMinGasPrice(helper: EthUniqueHelper, privateKey: (account: string) => Promise<IKeyringPair>) { + const alice = await privateKey('//Alice'); + const caller = await helper.eth.createAccountWithBalance(alice); + const receiver = helper.eth.createAccount(); + const dataPoints = []; + + { + const collection = await helper.nft.mintCollection(alice, {name: 'New', description: 'New collection', tokenPrefix: 'NEW'}); + const token = await collection.mintToken(alice, {Ethereum: caller}); + + const address = helper.ethAddress.fromCollectionId(collection.collectionId); + const contract = helper.ethNativeContract.collection(address, 'nft', caller); + + const cost = await helper.eth.calculateFee({Ethereum: caller}, () => contract.methods.transfer(receiver, token.tokenId).send({from: caller, gas: helper.eth.DEFAULT_GAS})); + + console.log(`Original price: ${Number(cost) / Number(helper.balance.getOneTokenNominal())} UNQ`); + } + + const api = helper.getApi(); + const defaultCoeff = (api.consts.configuration.defaultMinGasPrice as any).toBigInt(); + for (let i = -8; i < 8; i++) { + const gasPrice = defaultCoeff + defaultCoeff / 100000n * BigInt(i); + const gasPriceStr = '0x' + gasPrice.toString(16); + await
helper.signTransaction(alice, api.tx.sudo.sudo(api.tx.configuration.setMinGasPriceOverride(gasPrice))); + + const coefficient = (await api.query.configuration.minGasPriceOverride() as any).toBigInt(); + const collection = await helper.nft.mintCollection(alice, {name: 'New', description: 'New collection', tokenPrefix: 'NEW'}); + const token = await collection.mintToken(alice, {Ethereum: caller}); + + const address = helper.ethAddress.fromCollectionId(collection.collectionId); + const contract = helper.ethNativeContract.collection(address, 'nft', caller); + + const transferPrice = await helper.eth.calculateFee({Ethereum: caller}, () => contract.methods.transfer(receiver, token.tokenId).send({from: caller, gasPrice: gasPriceStr, gas: helper.eth.DEFAULT_GAS})); + + dataPoints.push({x: transferPrice, y: coefficient}); + } + + const {a, b} = linearRegression(dataPoints); + + // console.log(`Error: ${error(dataPoints, x => a*x+b)}`); + + // * 0.15 = * 10000 / 66666 + const perfectValue = a * helper.balance.getOneTokenNominal() * 1000000n / 6666666n + b; + await helper.signTransaction(alice, api.tx.sudo.sudo(api.tx.configuration.setMinGasPriceOverride(perfectValue.toString()))); + + { + const collection = await helper.nft.mintCollection(alice, {name: 'New', description: 'New collection', tokenPrefix: 'NEW'}); + const token = await collection.mintToken(alice, {Ethereum: caller}); + + const address = helper.ethAddress.fromCollectionId(collection.collectionId); + const contract = helper.ethNativeContract.collection(address, 'nft', caller); + + const cost = await helper.eth.calculateFee({Ethereum: caller}, () => contract.methods.transfer(receiver, token.tokenId).send({from: caller, gas: helper.eth.DEFAULT_GAS})); + + console.log(`Calibrated price: ${Number(cost) / Number(helper.balance.getOneTokenNominal())} UNQ`); + } +} + +(async () => { + await usingEthPlaygrounds(async (helper: EthUniqueHelper, privateKey) => { + // Second run slightly reduces error sometimes, as the price line is not actually straight but a curve + + await calibrateWeightToFee(helper, privateKey); + await calibrateWeightToFee(helper, privateKey); + + await calibrateMinGasPrice(helper, privateKey); + await calibrateMinGasPrice(helper, privateKey); + }); +})(); diff --git a/tests/src/calibrateApply.ts b/tests/src/calibrateApply.ts new file mode 100644 index 0000000000..b6566ad7db --- /dev/null +++ b/tests/src/calibrateApply.ts @@ -0,0 +1,38 @@ +import {readFile, writeFile} from 'fs/promises'; +import path from 'path'; +import usingApi from './substrate/substrate-api'; + +const formatNumber = (num: string): string => num.split('').reverse().join('').replace(/([0-9]{3})/g, '$1_').split('').reverse().join('').replace(/^_/, ''); + +(async () => { + let weightToFeeCoefficientOverride: string; + let minGasPriceOverride: string; + await usingApi(async (api, _privateKey) => { + weightToFeeCoefficientOverride = (await api.query.configuration.weightToFeeCoefficientOverride() as any).toBigInt().toString(); + minGasPriceOverride = (await api.query.configuration.minGasPriceOverride() as any).toBigInt().toString(); + }); + const constantsFile = path.resolve(__dirname, '../../primitives/common/src/constants.rs'); + let constants = (await readFile(constantsFile)).toString(); + + let weight2feeFound = false; + constants = constants.replace(/(\/\*<weight2fee>\*\/)[0-9_]+(\/\*<\/weight2fee>\*\/)/, (_f, p, s) => { + weight2feeFound = true; + return p+formatNumber(weightToFeeCoefficientOverride)+s; + }); + if (!weight2feeFound) { + throw new Error('failed to find
weight2fee marker in source code'); + } + + let minGasPriceFound = false; + constants = constants.replace(/(\/\*<mingasprice>\*\/)[0-9_]+(\/\*<\/mingasprice>\*\/)/, (_f, p, s) => { + minGasPriceFound = true; + return p+formatNumber(minGasPriceOverride)+s; + }); + if (!minGasPriceFound) { + throw new Error('failed to find mingasprice marker in source code'); + } + + await writeFile(constantsFile, constants); +})().catch(e => { + console.log(e.stack); +}); diff --git a/tests/src/change-collection-owner.test.ts b/tests/src/change-collection-owner.test.ts index 92f4ad2872..26c2405f3c 100644 --- a/tests/src/change-collection-owner.test.ts +++ b/tests/src/change-collection-owner.test.ts @@ -14,231 +14,156 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . -import chai from 'chai'; -import chaiAsPromised from 'chai-as-promised'; -import {default as usingApi, submitTransactionAsync, submitTransactionExpectFailAsync} from './substrate/substrate-api'; -import {createCollectionExpectSuccess, - addCollectionAdminExpectSuccess, - setCollectionSponsorExpectSuccess, - confirmSponsorshipExpectSuccess, - removeCollectionSponsorExpectSuccess, - enableAllowListExpectSuccess, - setMintPermissionExpectSuccess, - destroyCollectionExpectSuccess, - setCollectionSponsorExpectFailure, - confirmSponsorshipExpectFailure, - removeCollectionSponsorExpectFailure, - enableAllowListExpectFail, - setMintPermissionExpectFailure, - destroyCollectionExpectFailure, - setPublicAccessModeExpectSuccess, - queryCollectionExpectSuccess, -} from './util/helpers'; - -chai.use(chaiAsPromised); -const expect = chai.expect; +import {IKeyringPair} from '@polkadot/types/types'; +import {usingPlaygrounds, expect, itSub} from './util'; describe('Integration Test changeCollectionOwner(collection_id, new_owner):', () => { - it('Changing owner changes owner address', async () => { - await usingApi(async (api, privateKeyWrapper) => { - const collectionId = await createCollectionExpectSuccess(); - const alice = privateKeyWrapper('//Alice'); - const bob = privateKeyWrapper('//Bob'); + let alice: IKeyringPair; + let bob: IKeyringPair; - const collection =await queryCollectionExpectSuccess(api, collectionId); - expect(collection.owner.toString()).to.be.deep.eq(alice.address); - - const changeOwnerTx = api.tx.unique.changeCollectionOwner(collectionId, bob.address); - await submitTransactionAsync(alice, changeOwnerTx); - - const collectionAfterOwnerChange = await queryCollectionExpectSuccess(api, collectionId); - expect(collectionAfterOwnerChange.owner.toString()).to.be.deep.eq(bob.address); + before(async () => { + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob] = await helper.arrange.createAccounts([100n, 100n], donor); }); }); -}); - -describe('Integration Test changeCollectionOwner(collection_id, new_owner) special checks for exOwner:', () => { - it('Changing the owner of the collection is not allowed for the former owner', async () => { - await usingApi(async (api, privateKeyWrapper) => { - const collectionId = await createCollectionExpectSuccess(); - const alice = privateKeyWrapper('//Alice'); - const bob = privateKeyWrapper('//Bob'); - - const collection = await queryCollectionExpectSuccess(api, collectionId); - expect(collection.owner.toString()).to.be.deep.eq(alice.address); - const changeOwnerTx = api.tx.unique.changeCollectionOwner(collectionId, bob.address); - await submitTransactionAsync(alice,
changeOwnerTx); + itSub('Changing owner changes owner address', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + const beforeChanging = await helper.collection.getData(collection.collectionId); + expect(beforeChanging?.normalizedOwner).to.be.equal(helper.address.normalizeSubstrate(alice.address)); - const badChangeOwnerTx = api.tx.unique.changeCollectionOwner(collectionId, alice.address); - await expect(submitTransactionExpectFailAsync(alice, badChangeOwnerTx)).to.be.rejected; - - const collectionAfterOwnerChange = await queryCollectionExpectSuccess(api, collectionId); - expect(collectionAfterOwnerChange.owner.toString()).to.be.deep.eq(bob.address); - }); + await collection.changeOwner(alice, bob.address); + const afterChanging = await helper.collection.getData(collection.collectionId); + expect(afterChanging?.normalizedOwner).to.be.equal(helper.address.normalizeSubstrate(bob.address)); }); +}); - it('New collectionOwner has access to sponsorship management operations in the collection', async () => { - await usingApi(async (api, privateKeyWrapper) => { - const collectionId = await createCollectionExpectSuccess(); - const alice = privateKeyWrapper('//Alice'); - const bob = privateKeyWrapper('//Bob'); - const charlie = privateKeyWrapper('//Charlie'); - - const collection = await queryCollectionExpectSuccess(api, collectionId); - expect(collection.owner.toString()).to.be.deep.eq(alice.address); - - const changeOwnerTx = api.tx.unique.changeCollectionOwner(collectionId, bob.address); - await submitTransactionAsync(alice, changeOwnerTx); - - const collectionAfterOwnerChange = await queryCollectionExpectSuccess(api, collectionId); - expect(collectionAfterOwnerChange.owner.toString()).to.be.deep.eq(bob.address); - - // After changing the owner of the collection, all privileged methods are available to the new owner - // The new owner of the collection has access to sponsorship management operations in the collection - await setCollectionSponsorExpectSuccess(collectionId, charlie.address, '//Bob'); - await confirmSponsorshipExpectSuccess(collectionId, '//Charlie'); - await removeCollectionSponsorExpectSuccess(collectionId, '//Bob'); - - // The new owner of the collection has access to operations for managing the collection parameters - const collectionLimits = { - accountTokenOwnershipLimit: 1, - sponsoredMintSize: 1, - tokenLimit: 1, - sponsorTransferTimeout: 1, - ownerCanTransfer: true, - ownerCanDestroy: true, - }; - const tx1 = api.tx.unique.setCollectionLimits( - collectionId, - collectionLimits, - ); - await submitTransactionAsync(bob, tx1); - - await setPublicAccessModeExpectSuccess(bob, collectionId, 'AllowList'); - await enableAllowListExpectSuccess(bob, collectionId); - await setMintPermissionExpectSuccess(bob, collectionId, true); - await destroyCollectionExpectSuccess(collectionId, '//Bob'); +describe('Integration Test changeCollectionOwner(collection_id, new_owner) special checks for exOwner:', () => { + let alice: IKeyringPair; + let bob: IKeyringPair; + let charlie: IKeyringPair; + + before(async () => { + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob, charlie] = await helper.arrange.createAccounts([100n, 100n, 100n], donor); }); }); - it('New collectionOwner has access to changeCollectionOwner', async () => { - await usingApi(async (api, privateKeyWrapper) => { - const collectionId = await 
createCollectionExpectSuccess(); - const alice = privateKeyWrapper('//Alice'); - const bob = privateKeyWrapper('//Bob'); - const charlie = privateKeyWrapper('//Charlie'); + itSub('Changing the owner of the collection is not allowed for the former owner', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); - const collection = await queryCollectionExpectSuccess(api, collectionId); - expect(collection.owner.toString()).to.be.deep.eq(alice.address); + await collection.changeOwner(alice, bob.address); - const changeOwnerTx = api.tx.unique.changeCollectionOwner(collectionId, bob.address); - await submitTransactionAsync(alice, changeOwnerTx); + const changeOwnerTx = async () => collection.changeOwner(alice, alice.address); + await expect(changeOwnerTx()).to.be.rejectedWith(/common\.NoPermission/); - const collectionAfterOwnerChange = await queryCollectionExpectSuccess(api, collectionId); - expect(collectionAfterOwnerChange.owner.toString()).to.be.deep.eq(bob.address); + const afterChanging = await helper.collection.getData(collection.collectionId); + expect(afterChanging?.normalizedOwner).to.be.equal(helper.address.normalizeSubstrate(bob.address)); + }); - const changeOwnerTx2 = api.tx.unique.changeCollectionOwner(collectionId, charlie.address); - await submitTransactionAsync(bob, changeOwnerTx2); + itSub('New collectionOwner has access to sponsorship management operations in the collection', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + await collection.changeOwner(alice, bob.address); + + const afterChanging = await helper.collection.getData(collection.collectionId); + expect(afterChanging?.normalizedOwner).to.be.equal(helper.address.normalizeSubstrate(bob.address)); + + await collection.setSponsor(bob, charlie.address); + await collection.confirmSponsorship(charlie); + await collection.removeSponsor(bob); + const limits = { + accountTokenOwnershipLimit: 1, + tokenLimit: 1, + sponsorTransferTimeout: 1, + ownerCanDestroy: true, + ownerCanTransfer: true, + }; + + await collection.setLimits(bob, limits); + const gotLimits = await collection.getEffectiveLimits(); + expect(gotLimits).to.be.deep.contains(limits); + + await collection.setPermissions(bob, {access: 'AllowList', mintMode: true}); + + await collection.burn(bob); + const collectionData = await helper.collection.getData(collection.collectionId); + expect(collectionData).to.be.null; + }); - // ownership lost - const collectionAfterOwnerChange2 = await queryCollectionExpectSuccess(api, collectionId); - expect(collectionAfterOwnerChange2.owner.toString()).to.be.deep.eq(charlie.address); - }); + itSub('New collectionOwner has access to changeCollectionOwner', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + await collection.changeOwner(alice, bob.address); + await collection.changeOwner(bob, charlie.address); + const collectionData = await collection.getData(); + expect(collectionData?.normalizedOwner).to.be.equal(helper.address.normalizeSubstrate(charlie.address)); }); }); describe('Negative Integration Test changeCollectionOwner(collection_id, new_owner):', () => { - it('Not owner can\'t change owner.', async () => { - await usingApi(async (api, privateKeyWrapper) => { - const collectionId = await createCollectionExpectSuccess(); - const alice = 
privateKeyWrapper('//Alice'); - const bob = privateKeyWrapper('//Bob'); - - const changeOwnerTx = api.tx.unique.changeCollectionOwner(collectionId, bob.address); - await expect(submitTransactionExpectFailAsync(bob, changeOwnerTx)).to.be.rejected; + let alice: IKeyringPair; + let bob: IKeyringPair; + let charlie: IKeyringPair; + + before(async () => { + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob, charlie] = await helper.arrange.createAccounts([100n, 100n, 100n], donor); + }); + }); - const collectionAfterOwnerChange = await queryCollectionExpectSuccess(api, collectionId); - expect(collectionAfterOwnerChange.owner.toString()).to.be.deep.eq(alice.address); + itSub('Not owner can\'t change owner.', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + const changeOwnerTx = async () => collection.changeOwner(bob, bob.address); + await expect(changeOwnerTx()).to.be.rejectedWith(/common\.NoPermission/); + }); - // Verifying that nothing bad happened (network is live, new collections can be created, etc.) - await createCollectionExpectSuccess(); - }); + itSub('Collection admin can\'t change owner.', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + await collection.addAdmin(alice, {Substrate: bob.address}); + const changeOwnerTx = async () => collection.changeOwner(bob, bob.address); + await expect(changeOwnerTx()).to.be.rejectedWith(/common\.NoPermission/); }); - it('Collection admin can\'t change owner.', async () => { - await usingApi(async (api, privateKeyWrapper) => { - const collectionId = await createCollectionExpectSuccess(); - const alice = privateKeyWrapper('//Alice'); - const bob = privateKeyWrapper('//Bob'); + itSub('Can\'t change owner of a non-existing collection.', async ({helper}) => { + const collectionId = (1 << 32) - 1; + const changeOwnerTx = async () => helper.collection.changeOwner(bob, collectionId, bob.address); + await expect(changeOwnerTx()).to.be.rejectedWith(/common\.CollectionNotFound/); + }); - await addCollectionAdminExpectSuccess(alice, collectionId, bob.address); + itSub('Former collectionOwner not allowed to sponsorship management operations in the collection', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + await collection.changeOwner(alice, bob.address); - const changeOwnerTx = api.tx.unique.changeCollectionOwner(collectionId, bob.address); - await expect(submitTransactionExpectFailAsync(bob, changeOwnerTx)).to.be.rejected; + const changeOwnerTx = async () => collection.changeOwner(alice, alice.address); + await expect(changeOwnerTx()).to.be.rejectedWith(/common\.NoPermission/); - const collectionAfterOwnerChange = await queryCollectionExpectSuccess(api, collectionId); - expect(collectionAfterOwnerChange.owner.toString()).to.be.deep.eq(alice.address); + const afterChanging = await helper.collection.getData(collection.collectionId); + expect(afterChanging?.normalizedOwner).to.be.equal(helper.address.normalizeSubstrate(bob.address)); - // Verifying that nothing bad happened (network is live, new collections can be created, etc.) 
- await createCollectionExpectSuccess(); - }); - }); + const setSponsorTx = async () => collection.setSponsor(alice, charlie.address); + const confirmSponsorshipTx = async () => collection.confirmSponsorship(alice); + const removeSponsorTx = async () => collection.removeSponsor(alice); + await expect(setSponsorTx()).to.be.rejectedWith(/common\.NoPermission/); + await expect(confirmSponsorshipTx()).to.be.rejectedWith(/unique\.ConfirmUnsetSponsorFail/); + await expect(removeSponsorTx()).to.be.rejectedWith(/common\.NoPermission/); - it('Can\'t change owner of a non-existing collection.', async () => { - await usingApi(async (api, privateKeyWrapper) => { - const collectionId = (1<<32) - 1; - const alice = privateKeyWrapper('//Alice'); - const bob = privateKeyWrapper('//Bob'); + const limits = { + accountTokenOwnershipLimit: 1, + tokenLimit: 1, + sponsorTransferTimeout: 1, + ownerCanDestroy: true, + ownerCanTransfer: true, + }; - const changeOwnerTx = api.tx.unique.changeCollectionOwner(collectionId, bob.address); - await expect(submitTransactionExpectFailAsync(alice, changeOwnerTx)).to.be.rejected; + const setLimitsTx = async () => collection.setLimits(alice, limits); + await expect(setLimitsTx()).to.be.rejectedWith(/common\.NoPermission/); - // Verifying that nothing bad happened (network is live, new collections can be created, etc.) - await createCollectionExpectSuccess(); - }); - }); + const setPermissionTx = async () => collection.setPermissions(alice, {access: 'AllowList', mintMode: true}); + await expect(setPermissionTx()).to.be.rejectedWith(/common\.NoPermission/); - it('Former collectionOwner not allowed to sponsorship management operations in the collection', async () => { - await usingApi(async (api, privateKeyWrapper) => { - const collectionId = await createCollectionExpectSuccess(); - const alice = privateKeyWrapper('//Alice'); - const bob = privateKeyWrapper('//Bob'); - const charlie = privateKeyWrapper('//Charlie'); - - const collection = await queryCollectionExpectSuccess(api, collectionId); - expect(collection.owner.toString()).to.be.deep.eq(alice.address); - - const changeOwnerTx = api.tx.unique.changeCollectionOwner(collectionId, bob.address); - await submitTransactionAsync(alice, changeOwnerTx); - - const badChangeOwnerTx = api.tx.unique.changeCollectionOwner(collectionId, alice.address); - await expect(submitTransactionExpectFailAsync(alice, badChangeOwnerTx)).to.be.rejected; - - const collectionAfterOwnerChange = await queryCollectionExpectSuccess(api, collectionId); - expect(collectionAfterOwnerChange.owner.toString()).to.be.deep.eq(bob.address); - - await setCollectionSponsorExpectFailure(collectionId, charlie.address, '//Alice'); - await confirmSponsorshipExpectFailure(collectionId, '//Alice'); - await removeCollectionSponsorExpectFailure(collectionId, '//Alice'); - - const collectionLimits = { - accountTokenOwnershipLimit: 1, - sponsoredMintSize: 1, - tokenLimit: 1, - sponsorTransferTimeout: 1, - ownerCanTransfer: true, - ownerCanDestroy: true, - }; - const tx1 = api.tx.unique.setCollectionLimits( - collectionId, - collectionLimits, - ); - await expect(submitTransactionExpectFailAsync(alice, tx1)).to.be.rejected; - - await enableAllowListExpectFail(alice, collectionId); - await setMintPermissionExpectFailure(alice, collectionId, true); - await destroyCollectionExpectFailure(collectionId, '//Alice'); - }); + const burnTx = async () => collection.burn(alice); + await expect(burnTx()).to.be.rejectedWith(/common\.NoPermission/); }); }); diff --git 
a/tests/src/check-event/burnItemEvent.test.ts b/tests/src/check-event/burnItemEvent.test.ts index 39e2b261a8..a14cea604a 100644 --- a/tests/src/check-event/burnItemEvent.test.ts +++ b/tests/src/check-event/burnItemEvent.test.ts @@ -15,36 +15,29 @@ // along with Unique Network. If not, see . // https://unique-network.readthedocs.io/en/latest/jsapi.html#setchainlimits -import {ApiPromise} from '@polkadot/api'; import {IKeyringPair} from '@polkadot/types/types'; -import chai from 'chai'; -import chaiAsPromised from 'chai-as-promised'; -import usingApi, {submitTransactionAsync} from '../substrate/substrate-api'; -import {createCollectionExpectSuccess, createItemExpectSuccess, uniqueEventMessage} from '../util/helpers'; +import {usingPlaygrounds, expect, itSub} from '../util'; +import {IEvent} from '../util/playgrounds/types'; -chai.use(chaiAsPromised); -const expect = chai.expect; describe('Burn Item event ', () => { let alice: IKeyringPair; - const checkSection = 'ItemDestroyed'; - const checkTreasury = 'Deposit'; - const checkSystem = 'ExtrinsicSuccess'; before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice] = await helper.arrange.createAccounts([10n], donor); }); }); - it('Check event from burnItem(): ', async () => { - await usingApi(async (api: ApiPromise) => { - const collectionID = await createCollectionExpectSuccess(); - const itemID = await createItemExpectSuccess(alice, collectionID, 'NFT'); - const burnItem = api.tx.unique.burnItem(collectionID, itemID, 1); - const events = await submitTransactionAsync(alice, burnItem); - const msg = JSON.stringify(uniqueEventMessage(events)); - expect(msg).to.be.contain(checkSection); - expect(msg).to.be.contain(checkTreasury); - expect(msg).to.be.contain(checkSystem); - }); + itSub('Check event from burnItem(): ', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'test', description: 'test', tokenPrefix: 'test'}); + const token = await collection.mintToken(alice, {Substrate: alice.address}); + await token.burn(alice); + + const event = helper.chainLog[helper.chainLog.length - 1].events as IEvent[]; + const eventStrings = event.map(e => `${e.section}.${e.method}`); + + expect(eventStrings).to.contains('common.ItemDestroyed'); + expect(eventStrings).to.contains('treasury.Deposit'); + expect(eventStrings).to.contains('system.ExtrinsicSuccess'); }); }); diff --git a/tests/src/check-event/createCollectionEvent.test.ts b/tests/src/check-event/createCollectionEvent.test.ts index 7c97c9c3ab..82f73009f3 100644 --- a/tests/src/check-event/createCollectionEvent.test.ts +++ b/tests/src/check-event/createCollectionEvent.test.ts @@ -15,34 +15,25 @@ // along with Unique Network. If not, see . 
// https://unique-network.readthedocs.io/en/latest/jsapi.html#setchainlimits -import {ApiPromise} from '@polkadot/api'; import {IKeyringPair} from '@polkadot/types/types'; -import chai from 'chai'; -import chaiAsPromised from 'chai-as-promised'; -import usingApi, {submitTransactionAsync} from '../substrate/substrate-api'; -import {uniqueEventMessage} from '../util/helpers'; - -chai.use(chaiAsPromised); -const expect = chai.expect; +import {usingPlaygrounds, itSub, expect} from '../util'; +import {IEvent} from '../util/playgrounds/types'; describe('Create collection event ', () => { let alice: IKeyringPair; - const checkSection = 'CollectionCreated'; - const checkTreasury = 'Deposit'; - const checkSystem = 'ExtrinsicSuccess'; before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice] = await helper.arrange.createAccounts([10n], donor); }); }); - it('Check event from createCollection(): ', async () => { - await usingApi(async (api: ApiPromise) => { - const tx = api.tx.unique.createCollectionEx({name: [0x31], description: [0x32], tokenPrefix: '0x33', mode: 'NFT'}); - const events = await submitTransactionAsync(alice, tx); - const msg = JSON.stringify(uniqueEventMessage(events)); - expect(msg).to.be.contain(checkSection); - expect(msg).to.be.contain(checkTreasury); - expect(msg).to.be.contain(checkSystem); - }); + itSub('Check event from createCollection(): ', async ({helper}) => { + await helper.nft.mintCollection(alice, {name: 'test', description: 'test', tokenPrefix: 'test'}); + const event = helper.chainLog[helper.chainLog.length - 1].events as IEvent[]; + const eventStrings = event.map(e => `${e.section}.${e.method}`); + + expect(eventStrings).to.contains('common.CollectionCreated'); + expect(eventStrings).to.contains('treasury.Deposit'); + expect(eventStrings).to.contains('system.ExtrinsicSuccess'); }); }); diff --git a/tests/src/check-event/createItemEvent.test.ts b/tests/src/check-event/createItemEvent.test.ts index 08ebee1a92..06c7754b8a 100644 --- a/tests/src/check-event/createItemEvent.test.ts +++ b/tests/src/check-event/createItemEvent.test.ts @@ -15,35 +15,26 @@ // along with Unique Network. If not, see . 
// https://unique-network.readthedocs.io/en/latest/jsapi.html#setchainlimits -import {ApiPromise} from '@polkadot/api'; import {IKeyringPair} from '@polkadot/types/types'; -import chai from 'chai'; -import chaiAsPromised from 'chai-as-promised'; -import usingApi, {submitTransactionAsync} from '../substrate/substrate-api'; -import {createCollectionExpectSuccess, uniqueEventMessage, normalizeAccountId} from '../util/helpers'; - -chai.use(chaiAsPromised); -const expect = chai.expect; +import {itSub, usingPlaygrounds, expect} from '../util'; +import {IEvent} from '../util/playgrounds/types'; describe('Create Item event ', () => { let alice: IKeyringPair; - const checkSection = 'ItemCreated'; - const checkTreasury = 'Deposit'; - const checkSystem = 'ExtrinsicSuccess'; before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice] = await helper.arrange.createAccounts([10n], donor); }); }); - it('Check event from createItem(): ', async () => { - await usingApi(async (api: ApiPromise) => { - const collectionID = await createCollectionExpectSuccess(); - const createItem = api.tx.unique.createItem(collectionID, normalizeAccountId(alice.address), 'NFT'); - const events = await submitTransactionAsync(alice, createItem); - const msg = JSON.stringify(uniqueEventMessage(events)); - expect(msg).to.be.contain(checkSection); - expect(msg).to.be.contain(checkTreasury); - expect(msg).to.be.contain(checkSystem); - }); + itSub('Check event from createItem(): ', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'test', description: 'test', tokenPrefix: 'test'}); + await collection.mintToken(alice, {Substrate: alice.address}); + const event = helper.chainLog[helper.chainLog.length - 1].events as IEvent[]; + const eventStrings = event.map(e => `${e.section}.${e.method}`); + + expect(eventStrings).to.contains('common.ItemCreated'); + expect(eventStrings).to.contains('treasury.Deposit'); + expect(eventStrings).to.contains('system.ExtrinsicSuccess'); }); }); diff --git a/tests/src/check-event/createMultipleItemsEvent.test.ts b/tests/src/check-event/createMultipleItemsEvent.test.ts index 899c4946e1..49dcb1a043 100644 --- a/tests/src/check-event/createMultipleItemsEvent.test.ts +++ b/tests/src/check-event/createMultipleItemsEvent.test.ts @@ -15,36 +15,31 @@ // along with Unique Network. If not, see . 
// https://unique-network.readthedocs.io/en/latest/jsapi.html#setchainlimits -import {ApiPromise} from '@polkadot/api'; import {IKeyringPair} from '@polkadot/types/types'; -import chai from 'chai'; -import chaiAsPromised from 'chai-as-promised'; -import usingApi, {submitTransactionAsync} from '../substrate/substrate-api'; -import {createCollectionExpectSuccess, uniqueEventMessage, normalizeAccountId} from '../util/helpers'; - -chai.use(chaiAsPromised); -const expect = chai.expect; +import {usingPlaygrounds, itSub, expect} from '../util'; +import {IEvent} from '../util/playgrounds/types'; describe('Create Multiple Items Event event ', () => { let alice: IKeyringPair; - const checkSection = 'ItemCreated'; - const checkTreasury = 'Deposit'; - const checkSystem = 'ExtrinsicSuccess'; before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice] = await helper.arrange.createAccounts([10n], donor); }); }); - it('Check event from createMultipleItems(): ', async () => { - await usingApi(async (api: ApiPromise) => { - const collectionID = await createCollectionExpectSuccess(); - const args = [{NFT: {}}, {NFT: {}}, {NFT: {}}]; - const createMultipleItems = api.tx.unique.createMultipleItems(collectionID, normalizeAccountId(alice.address), args); - const events = await submitTransactionAsync(alice, createMultipleItems); - const msg = JSON.stringify(uniqueEventMessage(events)); - expect(msg).to.be.contain(checkSection); - expect(msg).to.be.contain(checkTreasury); - expect(msg).to.be.contain(checkSystem); - }); + itSub('Check event from createMultipleItems(): ', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'test', description: 'test', tokenPrefix: 'test'}); + + await collection.mintMultipleTokens(alice, [ + {owner: {Substrate: alice.address}}, + {owner: {Substrate: alice.address}}, + ]); + + const event = helper.chainLog[helper.chainLog.length - 1].events as IEvent[]; + const eventStrings = event.map(e => `${e.section}.${e.method}`); + + expect(eventStrings).to.contains('common.ItemCreated'); + expect(eventStrings).to.contains('treasury.Deposit'); + expect(eventStrings).to.contains('system.ExtrinsicSuccess'); }); }); diff --git a/tests/src/check-event/destroyCollectionEvent.test.ts b/tests/src/check-event/destroyCollectionEvent.test.ts index 7c97c42c6c..682daf6188 100644 --- a/tests/src/check-event/destroyCollectionEvent.test.ts +++ b/tests/src/check-event/destroyCollectionEvent.test.ts @@ -15,33 +15,27 @@ // along with Unique Network. If not, see . 
// https://unique-network.readthedocs.io/en/latest/jsapi.html#setchainlimits -import {ApiPromise} from '@polkadot/api'; import {IKeyringPair} from '@polkadot/types/types'; -import chai from 'chai'; -import chaiAsPromised from 'chai-as-promised'; -import usingApi, {submitTransactionAsync} from '../substrate/substrate-api'; -import {createCollectionExpectSuccess, uniqueEventMessage} from '../util/helpers'; - -chai.use(chaiAsPromised); -const expect = chai.expect; +import {itSub, usingPlaygrounds, expect} from '../util'; +import {IEvent} from '../util/playgrounds/types'; describe('Destroy collection event ', () => { let alice: IKeyringPair; - const checkTreasury = 'Deposit'; - const checkSystem = 'ExtrinsicSuccess'; before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice] = await helper.arrange.createAccounts([10n], donor); }); }); - it('Check event from destroyCollection(): ', async () => { - await usingApi(async (api: ApiPromise) => { - const collectionID = await createCollectionExpectSuccess(); - const destroyCollection = api.tx.unique.destroyCollection(collectionID); - const events = await submitTransactionAsync(alice, destroyCollection); - const msg = JSON.stringify(uniqueEventMessage(events)); - expect(msg).to.be.contain(checkTreasury); - expect(msg).to.be.contain(checkSystem); - }); + + itSub('Check event from destroyCollection(): ', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'test', description: 'test', tokenPrefix: 'test'}); + await collection.burn(alice); + const event = helper.chainLog[helper.chainLog.length - 1].events as IEvent[]; + const eventStrings = event.map(e => `${e.section}.${e.method}`); + + expect(eventStrings).to.contains('common.CollectionDestroyed'); + expect(eventStrings).to.contains('treasury.Deposit'); + expect(eventStrings).to.contains('system.ExtrinsicSuccess'); }); }); diff --git a/tests/src/check-event/transferEvent.test.ts b/tests/src/check-event/transferEvent.test.ts index 3e8fbb39af..f51e96fdf1 100644 --- a/tests/src/check-event/transferEvent.test.ts +++ b/tests/src/check-event/transferEvent.test.ts @@ -15,38 +15,30 @@ // along with Unique Network. If not, see . 
// https://unique-network.readthedocs.io/en/latest/jsapi.html#setchainlimits -import {ApiPromise} from '@polkadot/api'; import {IKeyringPair} from '@polkadot/types/types'; -import chai from 'chai'; -import chaiAsPromised from 'chai-as-promised'; -import usingApi, {submitTransactionAsync} from '../substrate/substrate-api'; -import {createCollectionExpectSuccess, createItemExpectSuccess, uniqueEventMessage, normalizeAccountId} from '../util/helpers'; - -chai.use(chaiAsPromised); -const expect = chai.expect; +import {usingPlaygrounds, expect, itSub} from '../util'; +import {IEvent} from '../util/playgrounds/types'; describe('Transfer event ', () => { let alice: IKeyringPair; let bob: IKeyringPair; - const checkSection = 'Transfer'; - const checkTreasury = 'Deposit'; - const checkSystem = 'ExtrinsicSuccess'; + before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob] = await helper.arrange.createAccounts([10n, 10n], donor); }); }); - it('Check event from transfer(): ', async () => { - await usingApi(async (api: ApiPromise) => { - const collectionID = await createCollectionExpectSuccess(); - const itemID = await createItemExpectSuccess(alice, collectionID, 'NFT'); - const transfer = api.tx.unique.transfer(normalizeAccountId(bob.address), collectionID, itemID, 1); - const events = await submitTransactionAsync(alice, transfer); - const msg = JSON.stringify(uniqueEventMessage(events)); - expect(msg).to.be.contain(checkSection); - expect(msg).to.be.contain(checkTreasury); - expect(msg).to.be.contain(checkSystem); - }); + + itSub('Check event from transfer(): ', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'test', description: 'test', tokenPrefix: 'test'}); + const token = await collection.mintToken(alice, {Substrate: alice.address}); + await token.transfer(alice, {Substrate: bob.address}); + const event = helper.chainLog[helper.chainLog.length - 1].events as IEvent[]; + const eventStrings = event.map(e => `${e.section}.${e.method}`); + + expect(eventStrings).to.contains('common.Transfer'); + expect(eventStrings).to.contains('treasury.Deposit'); + expect(eventStrings).to.contains('system.ExtrinsicSuccess'); }); }); diff --git a/tests/src/check-event/transferFromEvent.test.ts b/tests/src/check-event/transferFromEvent.test.ts index c4cd4480b7..cb5be98f48 100644 --- a/tests/src/check-event/transferFromEvent.test.ts +++ b/tests/src/check-event/transferFromEvent.test.ts @@ -15,38 +15,29 @@ // along with Unique Network. If not, see . 
// https://unique-network.readthedocs.io/en/latest/jsapi.html#setchainlimits -import {ApiPromise} from '@polkadot/api'; import {IKeyringPair} from '@polkadot/types/types'; -import chai from 'chai'; -import chaiAsPromised from 'chai-as-promised'; -import usingApi, {submitTransactionAsync} from '../substrate/substrate-api'; -import {createCollectionExpectSuccess, createItemExpectSuccess, uniqueEventMessage, normalizeAccountId} from '../util/helpers'; +import {usingPlaygrounds, expect, itSub} from '../util'; +import {IEvent} from '../util/playgrounds/types'; -chai.use(chaiAsPromised); -const expect = chai.expect; - -describe('Transfer from event ', () => { +describe('Transfer from event ', () => { let alice: IKeyringPair; let bob: IKeyringPair; - const checkSection = 'Transfer'; - const checkTreasury = 'Deposit'; - const checkSystem = 'ExtrinsicSuccess'; before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob] = await helper.arrange.createAccounts([10n, 10n], donor); }); }); - it('Check event from transferFrom(): ', async () => { - await usingApi(async (api: ApiPromise) => { - const collectionID = await createCollectionExpectSuccess(); - const itemID = await createItemExpectSuccess(alice, collectionID, 'NFT'); - const transferFrom = api.tx.unique.transferFrom(normalizeAccountId(alice.address), normalizeAccountId(bob.address), collectionID, itemID, 1); - const events = await submitTransactionAsync(alice, transferFrom); - const msg = JSON.stringify(uniqueEventMessage(events)); - expect(msg).to.be.contain(checkSection); - expect(msg).to.be.contain(checkTreasury); - expect(msg).to.be.contain(checkSystem); - }); + + itSub('Check event from transferFrom(): ', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'test', description: 'test', tokenPrefix: 'test'}); + const token = await collection.mintToken(alice, {Substrate: alice.address}); + await token.transferFrom(alice, {Substrate: alice.address}, {Substrate: bob.address}); + const event = helper.chainLog[helper.chainLog.length - 1].events as IEvent[]; + const eventStrings = event.map(e => `${e.section}.${e.method}`); + + expect(eventStrings).to.contains('common.Transfer'); + expect(eventStrings).to.contains('treasury.Deposit'); + expect(eventStrings).to.contains('system.ExtrinsicSuccess'); }); }); diff --git a/tests/src/config.ts b/tests/src/config.ts index 439fbcdb6e..a6ac6b9755 100644 --- a/tests/src/config.ts +++ b/tests/src/config.ts @@ -19,6 +19,12 @@ import process from 'process'; const config = { substrateUrl: process.env.substrateUrl || 'ws://127.0.0.1:9944', frontierUrl: process.env.frontierUrl || 'http://127.0.0.1:9933', + relayUrl: process.env.relayUrl || 'ws://127.0.0.1:9844', + acalaUrl: process.env.acalaUrl || 'ws://127.0.0.1:9946', + karuraUrl: process.env.karuraUrl || 'ws://127.0.0.1:9946', + moonbeamUrl: process.env.moonbeamUrl || 'ws://127.0.0.1:9947', + moonriverUrl: process.env.moonriverUrl || 'ws://127.0.0.1:9947', + westmintUrl: process.env.westmintUrl || 'ws://127.0.0.1:9948', }; -export default config; \ No newline at end of file +export default config; diff --git a/tests/src/confirmSponsorship.test.ts b/tests/src/confirmSponsorship.test.ts index c934f26308..ba28b60694 100644 --- a/tests/src/confirmSponsorship.test.ts +++ b/tests/src/confirmSponsorship.test.ts @@ -14,398
+14,239 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . -import chai from 'chai'; -import chaiAsPromised from 'chai-as-promised'; -import {default as usingApi, submitTransactionAsync, submitTransactionExpectFailAsync} from './substrate/substrate-api'; -import { - createCollectionExpectSuccess, - setCollectionSponsorExpectSuccess, - destroyCollectionExpectSuccess, - confirmSponsorshipExpectSuccess, - confirmSponsorshipExpectFailure, - createItemExpectSuccess, - findUnusedAddress, - getGenericResult, - enableAllowListExpectSuccess, - enablePublicMintingExpectSuccess, - addToAllowListExpectSuccess, - normalizeAccountId, - addCollectionAdminExpectSuccess, - getCreatedCollectionCount, - UNIQUE, -} from './util/helpers'; import {IKeyringPair} from '@polkadot/types/types'; +import {usingPlaygrounds, expect, itSub, Pallets} from './util'; -chai.use(chaiAsPromised); -const expect = chai.expect; +async function setSponsorHelper(collection: any, signer: IKeyringPair, sponsorAddress: string) { + await collection.setSponsor(signer, sponsorAddress); + const raw = (await collection.getData())?.raw; + expect(raw.sponsorship.Unconfirmed).to.be.equal(sponsorAddress); +} -let alice: IKeyringPair; -let bob: IKeyringPair; -let charlie: IKeyringPair; +async function confirmSponsorHelper(collection: any, signer: IKeyringPair) { + await collection.confirmSponsorship(signer); + const raw = (await collection.getData())?.raw; + expect(raw.sponsorship.Confirmed).to.be.equal(signer.address); +} describe('integration test: ext. confirmSponsorship():', () => { + let alice: IKeyringPair; + let bob: IKeyringPair; + let charlie: IKeyringPair; + let zeroBalance: IKeyringPair; before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - charlie = privateKeyWrapper('//Charlie'); + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob, charlie, zeroBalance] = await helper.arrange.createAccounts([100n, 100n, 100n, 0n], donor); }); }); - it('Confirm collection sponsorship', async () => { - const collectionId = await createCollectionExpectSuccess(); - await setCollectionSponsorExpectSuccess(collectionId, bob.address); - await confirmSponsorshipExpectSuccess(collectionId, '//Bob'); + itSub('Confirm collection sponsorship', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + await setSponsorHelper(collection, alice, bob.address); + await confirmSponsorHelper(collection, bob); }); - it('Add sponsor to a collection after the same sponsor was already added and confirmed', async () => { - const collectionId = await createCollectionExpectSuccess(); - await setCollectionSponsorExpectSuccess(collectionId, bob.address); - await confirmSponsorshipExpectSuccess(collectionId, '//Bob'); - await setCollectionSponsorExpectSuccess(collectionId, bob.address); - }); - it('Add new sponsor to a collection after another sponsor was already added and confirmed', async () => { - const collectionId = await createCollectionExpectSuccess(); - await setCollectionSponsorExpectSuccess(collectionId, bob.address); - await confirmSponsorshipExpectSuccess(collectionId, '//Bob'); - await setCollectionSponsorExpectSuccess(collectionId, charlie.address); - }); - - it('NFT: Transfer fees are paid by the sponsor after confirmation', async () => { - 
const collectionId = await createCollectionExpectSuccess(); - await setCollectionSponsorExpectSuccess(collectionId, bob.address); - await confirmSponsorshipExpectSuccess(collectionId, '//Bob'); - - await usingApi(async (api, privateKeyWrapper) => { - const sponsorBalanceBefore = (await api.query.system.account(bob.address)).data.free.toBigInt(); - - // Find unused address - const zeroBalance = await findUnusedAddress(api, privateKeyWrapper); - - // Mint token for unused address - const itemId = await createItemExpectSuccess(alice, collectionId, 'NFT', zeroBalance.address); - - // Transfer this tokens from unused address to Alice - const zeroToAlice = api.tx.unique.transfer(normalizeAccountId(zeroBalance.address), collectionId, itemId, 0); - const events = await submitTransactionAsync(zeroBalance, zeroToAlice); - const result = getGenericResult(events); - expect(result.success).to.be.true; - - const sponsorBalanceAfter = (await api.query.system.account(bob.address)).data.free.toBigInt(); - - expect(sponsorBalanceAfter < sponsorBalanceBefore).to.be.true; - }); + itSub('Add sponsor to a collection after the same sponsor was already added and confirmed', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + await setSponsorHelper(collection, alice, bob.address); + await confirmSponsorHelper(collection, bob); + await setSponsorHelper(collection, alice, bob.address); }); - - it('Fungible: Transfer fees are paid by the sponsor after confirmation', async () => { - const collectionId = await createCollectionExpectSuccess({mode: {type: 'Fungible', decimalPoints: 0}}); - await setCollectionSponsorExpectSuccess(collectionId, bob.address); - await confirmSponsorshipExpectSuccess(collectionId, '//Bob'); - - await usingApi(async (api, privateKeyWrapper) => { - const sponsorBalanceBefore = (await api.query.system.account(bob.address)).data.free.toBigInt(); - - // Find unused address - const zeroBalance = await findUnusedAddress(api, privateKeyWrapper); - - // Mint token for unused address - const itemId = await createItemExpectSuccess(alice, collectionId, 'Fungible', zeroBalance.address); - - // Transfer this tokens from unused address to Alice - const zeroToAlice = api.tx.unique.transfer(normalizeAccountId(zeroBalance.address), collectionId, itemId, 1); - const events1 = await submitTransactionAsync(zeroBalance, zeroToAlice); - const result1 = getGenericResult(events1); - expect(result1.success).to.be.true; - - const sponsorBalanceAfter = (await api.query.system.account(bob.address)).data.free.toBigInt(); - - expect(sponsorBalanceAfter < sponsorBalanceBefore).to.be.true; - }); + itSub('Add new sponsor to a collection after another sponsor was already added and confirmed', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + await setSponsorHelper(collection, alice, bob.address); + await confirmSponsorHelper(collection, bob); + await setSponsorHelper(collection, alice, charlie.address); }); - it('ReFungible: Transfer fees are paid by the sponsor after confirmation', async () => { - const collectionId = await createCollectionExpectSuccess({mode: {type: 'ReFungible'}}); - await setCollectionSponsorExpectSuccess(collectionId, bob.address); - await confirmSponsorshipExpectSuccess(collectionId, '//Bob'); - - await usingApi(async (api, privateKeyWrapper) => { - const sponsorBalanceBefore = (await 
api.query.system.account(bob.address)).data.free.toBigInt(); - - // Find unused address - const zeroBalance = await findUnusedAddress(api, privateKeyWrapper); - - // Mint token for unused address - const itemId = await createItemExpectSuccess(alice, collectionId, 'ReFungible', zeroBalance.address); - - // Transfer this tokens from unused address to Alice - const zeroToAlice = api.tx.unique.transfer(normalizeAccountId(zeroBalance.address), collectionId, itemId, 1); - const events1 = await submitTransactionAsync(zeroBalance, zeroToAlice); - const result1 = getGenericResult(events1); - - const sponsorBalanceAfter = (await api.query.system.account(bob.address)).data.free.toBigInt(); - - expect(result1.success).to.be.true; - expect(sponsorBalanceAfter < sponsorBalanceBefore).to.be.true; - }); + itSub('NFT: Transfer fees are paid by the sponsor after confirmation', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + await collection.setSponsor(alice, bob.address); + await collection.confirmSponsorship(bob); + const bobBalanceBefore = await helper.balance.getSubstrate(bob.address); + const token = await collection.mintToken(alice, {Substrate: zeroBalance.address}); + await token.transfer(zeroBalance, {Substrate: alice.address}); + const bobBalanceAfter = await helper.balance.getSubstrate(bob.address); + expect(bobBalanceAfter < bobBalanceBefore).to.be.true; }); - it('CreateItem fees are paid by the sponsor after confirmation', async () => { - const collectionId = await createCollectionExpectSuccess(); - await setCollectionSponsorExpectSuccess(collectionId, bob.address); - await confirmSponsorshipExpectSuccess(collectionId, '//Bob'); - - // Enable collection allow list - await enableAllowListExpectSuccess(alice, collectionId); - - // Enable public minting - await enablePublicMintingExpectSuccess(alice, collectionId); - - // Create Item - await usingApi(async (api, privateKeyWrapper) => { - const sponsorBalanceBefore = (await api.query.system.account(bob.address)).data.free.toBigInt(); - - // Find unused address - const zeroBalance = await findUnusedAddress(api, privateKeyWrapper); - - // Add zeroBalance address to allow list - await addToAllowListExpectSuccess(alice, collectionId, zeroBalance.address); - - // Mint token using unused address as signer - await createItemExpectSuccess(zeroBalance, collectionId, 'NFT', zeroBalance.address); - - const sponsorBalanceAfter = (await api.query.system.account(bob.address)).data.free.toBigInt(); - - expect(sponsorBalanceAfter < sponsorBalanceBefore).to.be.true; - }); + itSub('Fungible: Transfer fees are paid by the sponsor after confirmation', async ({helper}) => { + const collection = await helper.ft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + await collection.setSponsor(alice, bob.address); + await collection.confirmSponsorship(bob); + const bobBalanceBefore = await helper.balance.getSubstrate(bob.address); + await collection.mint(alice, 100n, {Substrate: zeroBalance.address}); + await collection.transfer(zeroBalance, {Substrate: alice.address}, 1n); + const bobBalanceAfter = await helper.balance.getSubstrate(bob.address); + expect(bobBalanceAfter < bobBalanceBefore).to.be.true; }); - it('NFT: Sponsoring of transfers is rate limited', async () => { - const collectionId = await createCollectionExpectSuccess(); - await setCollectionSponsorExpectSuccess(collectionId, bob.address); - await confirmSponsorshipExpectSuccess(collectionId, 
'//Bob'); - - await usingApi(async (api, privateKeyWrapper) => { - // Find unused address - const zeroBalance = await findUnusedAddress(api, privateKeyWrapper); - - // Mint token for alice - const itemId = await createItemExpectSuccess(alice, collectionId, 'NFT', alice.address); - - // Transfer this token from Alice to unused address and back - // Alice to Zero gets sponsored - const aliceToZero = api.tx.unique.transfer(normalizeAccountId(zeroBalance.address), collectionId, itemId, 0); - const events1 = await submitTransactionAsync(alice, aliceToZero); - const result1 = getGenericResult(events1); - - // Second transfer should fail - const sponsorBalanceBefore = (await api.query.system.account(bob.address)).data.free.toBigInt(); - const zeroToAlice = api.tx.unique.transfer(normalizeAccountId(alice.address), collectionId, itemId, 0); - const badTransaction = async function () { - await submitTransactionExpectFailAsync(zeroBalance, zeroToAlice); - }; - await expect(badTransaction()).to.be.rejectedWith('Inability to pay some fees'); - const sponsorBalanceAfter = (await api.query.system.account(bob.address)).data.free.toBigInt(); - - // Try again after Zero gets some balance - now it should succeed - const balancetx = api.tx.balances.transfer(zeroBalance.address, 1n * UNIQUE); - await submitTransactionAsync(alice, balancetx); - const events2 = await submitTransactionAsync(zeroBalance, zeroToAlice); - const result2 = getGenericResult(events2); - - expect(result1.success).to.be.true; - expect(result2.success).to.be.true; - expect(sponsorBalanceAfter).to.be.equal(sponsorBalanceBefore); - }); + itSub.ifWithPallets('ReFungible: Transfer fees are paid by the sponsor after confirmation', [Pallets.ReFungible], async ({helper}) => { + const collection = await helper.rft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + await collection.setSponsor(alice, bob.address); + await collection.confirmSponsorship(bob); + const bobBalanceBefore = await helper.balance.getSubstrate(bob.address); + const token = await collection.mintToken(alice, 100n, {Substrate: zeroBalance.address}); + await token.transfer(zeroBalance, {Substrate: alice.address}, 1n); + const bobBalanceAfter = await helper.balance.getSubstrate(bob.address); + expect(bobBalanceAfter < bobBalanceBefore).to.be.true; }); - it('Fungible: Sponsoring is rate limited', async () => { - const collectionId = await createCollectionExpectSuccess({mode: {type: 'Fungible', decimalPoints: 0}}); - await setCollectionSponsorExpectSuccess(collectionId, bob.address); - await confirmSponsorshipExpectSuccess(collectionId, '//Bob'); + itSub('CreateItem fees are paid by the sponsor after confirmation', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + await collection.setSponsor(alice, bob.address); + await collection.confirmSponsorship(bob); + await collection.setPermissions(alice, {access: 'AllowList', mintMode: true}); + await collection.addToAllowList(alice, {Substrate: zeroBalance.address}); - await usingApi(async (api, privateKeyWrapper) => { - // Find unused address - const zeroBalance = await findUnusedAddress(api, privateKeyWrapper); + const bobBalanceBefore = await helper.balance.getSubstrate(bob.address); + await collection.mintToken(zeroBalance, {Substrate: zeroBalance.address}); + const bobBalanceAfter = await helper.balance.getSubstrate(bob.address); - // Mint token for unused address - const itemId = await 
createItemExpectSuccess(alice, collectionId, 'Fungible', zeroBalance.address); + expect(bobBalanceAfter < bobBalanceBefore).to.be.true; + }); - // Transfer this tokens in parts from unused address to Alice - const zeroToAlice = api.tx.unique.transfer(normalizeAccountId(zeroBalance.address), collectionId, itemId, 1); - const events1 = await submitTransactionAsync(zeroBalance, zeroToAlice); - const result1 = getGenericResult(events1); - expect(result1.success).to.be.true; + itSub('NFT: Sponsoring of transfers is rate limited', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL', limits: { + sponsorTransferTimeout: 1000, + }}); + await collection.setSponsor(alice, bob.address); + await collection.confirmSponsorship(bob); - const sponsorBalanceBefore = (await api.query.system.account(bob.address)).data.free.toBigInt(); - await expect(submitTransactionExpectFailAsync(zeroBalance, zeroToAlice)).to.be.rejected; - const sponsorBalanceAfter = (await api.query.system.account(bob.address)).data.free.toBigInt(); + const token = await collection.mintToken(alice, {Substrate: alice.address}); + await token.transfer(alice, {Substrate: zeroBalance.address}); + const bobBalanceBefore = await helper.balance.getSubstrate(bob.address); - // Try again after Zero gets some balance - now it should succeed - const balancetx = api.tx.balances.transfer(zeroBalance.address, 1n * UNIQUE); - await submitTransactionAsync(alice, balancetx); - const events2 = await submitTransactionAsync(zeroBalance, zeroToAlice); - const result2 = getGenericResult(events2); - expect(result2.success).to.be.true; + const transferTx = async () => token.transfer(zeroBalance, {Substrate: alice.address}); + await expect(transferTx()).to.be.rejectedWith('Inability to pay some fees'); + const bobBalanceAfter = await helper.balance.getSubstrate(bob.address); - expect(sponsorBalanceAfter).to.be.equal(sponsorBalanceBefore); - }); + expect(bobBalanceAfter === bobBalanceBefore).to.be.true; }); - it('ReFungible: Sponsoring is rate limited', async () => { - const collectionId = await createCollectionExpectSuccess({mode: {type: 'ReFungible'}}); - await setCollectionSponsorExpectSuccess(collectionId, bob.address); - await confirmSponsorshipExpectSuccess(collectionId, '//Bob'); - - await usingApi(async (api, privateKeyWrapper) => { - // Find unused address - const zeroBalance = await findUnusedAddress(api, privateKeyWrapper); - - // Mint token for alice - const itemId = await createItemExpectSuccess(alice, collectionId, 'ReFungible', zeroBalance.address); - - const zeroToAlice = api.tx.unique.transfer(normalizeAccountId(alice.address), collectionId, itemId, 1); - - // Zero to alice gets sponsored - const events1 = await submitTransactionAsync(zeroBalance, zeroToAlice); - const result1 = getGenericResult(events1); - expect(result1.success).to.be.true; - - // Second transfer should fail - const sponsorBalanceBefore = (await api.query.system.account(bob.address)).data.free.toBigInt(); - await expect(submitTransactionExpectFailAsync(zeroBalance, zeroToAlice)).to.be.rejectedWith('Inability to pay some fees'); - const sponsorBalanceAfter = (await api.query.system.account(bob.address)).data.free.toBigInt(); - expect(sponsorBalanceAfter).to.be.equal(sponsorBalanceBefore); - - // Try again after Zero gets some balance - now it should succeed - const balancetx = api.tx.balances.transfer(zeroBalance.address, 1n * UNIQUE); - await submitTransactionAsync(alice, balancetx); - const 
events2 = await submitTransactionAsync(zeroBalance, zeroToAlice); - const result2 = getGenericResult(events2); - expect(result2.success).to.be.true; - }); - }); + itSub('Fungible: Sponsoring is rate limited', async ({helper}) => { + const collection = await helper.ft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL', limits: { + sponsorTransferTimeout: 1000, + }}); + await collection.setSponsor(alice, bob.address); + await collection.confirmSponsorship(bob); - it('NFT: Sponsoring of createItem is rate limited', async () => { - const collectionId = await createCollectionExpectSuccess(); - await setCollectionSponsorExpectSuccess(collectionId, bob.address); - await confirmSponsorshipExpectSuccess(collectionId, '//Bob'); + await collection.mint(alice, 100n, {Substrate: zeroBalance.address}); + await collection.transfer(zeroBalance, {Substrate: zeroBalance.address}, 1n); + const bobBalanceBefore = await helper.balance.getSubstrate(bob.address); - // Enable collection allow list - await enableAllowListExpectSuccess(alice, collectionId); + const transferTx = async () => collection.transfer(zeroBalance, {Substrate: zeroBalance.address}); + await expect(transferTx()).to.be.rejected; + const bobBalanceAfter = await helper.balance.getSubstrate(bob.address); - // Enable public minting - await enablePublicMintingExpectSuccess(alice, collectionId); + expect(bobBalanceAfter === bobBalanceBefore).to.be.true; + }); - await usingApi(async (api, privateKeyWrapper) => { - // Find unused address - const zeroBalance = await findUnusedAddress(api, privateKeyWrapper); + itSub.ifWithPallets('ReFungible: Sponsoring is rate limited', [Pallets.ReFungible], async ({helper}) => { + const collection = await helper.rft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL', limits: { + sponsorTransferTimeout: 1000, + }}); + await collection.setSponsor(alice, bob.address); + await collection.confirmSponsorship(bob); - // Add zeroBalance address to allow list - await addToAllowListExpectSuccess(alice, collectionId, zeroBalance.address); + const token = await collection.mintToken(alice, 100n, {Substrate: zeroBalance.address}); + await token.transfer(zeroBalance, {Substrate: alice.address}); - // Mint token using unused address as signer - gets sponsored - await createItemExpectSuccess(zeroBalance, collectionId, 'NFT', zeroBalance.address); + const bobBalanceBefore = await helper.balance.getSubstrate(bob.address); + const transferTx = async () => token.transfer(zeroBalance, {Substrate: alice.address}); + await expect(transferTx()).to.be.rejectedWith('Inability to pay some fees'); + const bobBalanceAfter = await helper.balance.getSubstrate(bob.address); - // Second mint should fail - const sponsorBalanceBefore = (await api.query.system.account(bob.address)).data.free.toBigInt(); + expect(bobBalanceAfter === bobBalanceBefore).to.be.true; + }); + + itSub('NFT: Sponsoring of createItem is rate limited', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL', limits: { + sponsoredDataRateLimit: {blocks: 1000}, + sponsorTransferTimeout: 1000, + }}); + await collection.setSponsor(alice, bob.address); + await collection.confirmSponsorship(bob); + await collection.setPermissions(alice, {mintMode: true, access: 'AllowList'}); + await collection.addToAllowList(alice, {Substrate: zeroBalance.address}); - const badTransaction = async function () { - await createItemExpectSuccess(zeroBalance, collectionId, 
'NFT', zeroBalance.address); - }; - await expect(badTransaction()).to.be.rejectedWith('Inability to pay some fees'); - const sponsorBalanceAfter = (await api.query.system.account(bob.address)).data.free.toBigInt(); + await collection.mintToken(zeroBalance, {Substrate: zeroBalance.address}); - // Try again after Zero gets some balance - now it should succeed - const balancetx = api.tx.balances.transfer(zeroBalance.address, 1n * UNIQUE); - await submitTransactionAsync(alice, balancetx); - await createItemExpectSuccess(zeroBalance, collectionId, 'NFT', zeroBalance.address); + const bobBalanceBefore = await helper.balance.getSubstrate(bob.address); + const mintTx = async () => collection.mintToken(zeroBalance, {Substrate: zeroBalance.address}); + await expect(mintTx()).to.be.rejectedWith('Inability to pay some fees'); + const bobBalanceAfter = await helper.balance.getSubstrate(bob.address); - expect(sponsorBalanceAfter).to.be.equal(sponsorBalanceBefore); - }); + expect(bobBalanceAfter === bobBalanceBefore).to.be.true; }); - }); describe('(!negative test!) integration test: ext. confirmSponsorship():', () => { + let alice: IKeyringPair; + let bob: IKeyringPair; + let charlie: IKeyringPair; + let ownerZeroBalance: IKeyringPair; + let senderZeroBalance: IKeyringPair; + before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - charlie = privateKeyWrapper('//Charlie'); + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob, charlie, ownerZeroBalance, senderZeroBalance] = await helper.arrange.createAccounts([100n, 100n, 100n, 0n, 0n], donor); }); }); - it('(!negative test!) Confirm sponsorship for a collection that never existed', async () => { - // Find the collection that never existed - let collectionId = 0; - await usingApi(async (api) => { - collectionId = await getCreatedCollectionCount(api) + 1; - }); - - await confirmSponsorshipExpectFailure(collectionId, '//Bob'); + itSub('(!negative test!) Confirm sponsorship for a collection that never existed', async ({helper}) => { + const collectionId = (1 << 32) - 1; + const confirmSponsorshipTx = async () => helper.collection.confirmSponsorship(bob, collectionId); + await expect(confirmSponsorshipTx()).to.be.rejectedWith(/common\.CollectionNotFound/); }); - it('(!negative test!) Confirm sponsorship using a non-sponsor address', async () => { - const collectionId = await createCollectionExpectSuccess(); - await setCollectionSponsorExpectSuccess(collectionId, bob.address); - - await usingApi(async (api) => { - const transfer = api.tx.balances.transfer(charlie.address, 1e15); - await submitTransactionAsync(alice, transfer); - }); - - await confirmSponsorshipExpectFailure(collectionId, '//Charlie'); + itSub('(!negative test!) Confirm sponsorship using a non-sponsor address', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + await collection.setSponsor(alice, bob.address); + const confirmSponsorshipTx = async () => collection.confirmSponsorship(charlie); + await expect(confirmSponsorshipTx()).to.be.rejectedWith(/unique\.ConfirmUnsetSponsorFail/); }); - it('(!negative test!) 
Confirm sponsorship using owner address', async () => { - const collectionId = await createCollectionExpectSuccess(); - await setCollectionSponsorExpectSuccess(collectionId, bob.address); - await confirmSponsorshipExpectFailure(collectionId, '//Alice'); + itSub('(!negative test!) Confirm sponsorship using owner address', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + await collection.setSponsor(alice, bob.address); + const confirmSponsorshipTx = async () => collection.confirmSponsorship(alice); + await expect(confirmSponsorshipTx()).to.be.rejectedWith(/unique\.ConfirmUnsetSponsorFail/); }); - it('(!negative test!) Confirm sponsorship by collection admin', async () => { - const collectionId = await createCollectionExpectSuccess(); - await setCollectionSponsorExpectSuccess(collectionId, bob.address); - await addCollectionAdminExpectSuccess(alice, collectionId, charlie.address); - await confirmSponsorshipExpectFailure(collectionId, '//Charlie'); + itSub('(!negative test!) Confirm sponsorship by collection admin', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + await collection.setSponsor(alice, bob.address); + await collection.addAdmin(alice, {Substrate: charlie.address}); + const confirmSponsorshipTx = async () => collection.confirmSponsorship(charlie); + await expect(confirmSponsorshipTx()).to.be.rejectedWith(/unique\.ConfirmUnsetSponsorFail/); }); - it('(!negative test!) Confirm sponsorship without sponsor being set with setCollectionSponsor', async () => { - const collectionId = await createCollectionExpectSuccess(); - await confirmSponsorshipExpectFailure(collectionId, '//Bob'); + itSub('(!negative test!) Confirm sponsorship without sponsor being set with setCollectionSponsor', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + const confirmSponsorshipTx = async () => collection.confirmSponsorship(charlie); + await expect(confirmSponsorshipTx()).to.be.rejectedWith(/unique\.ConfirmUnsetSponsorFail/); }); - it('(!negative test!) Confirm sponsorship in a collection that was destroyed', async () => { - const collectionId = await createCollectionExpectSuccess(); - await destroyCollectionExpectSuccess(collectionId); - await confirmSponsorshipExpectFailure(collectionId, '//Bob'); + itSub('(!negative test!) Confirm sponsorship in a collection that was destroyed', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + await collection.burn(alice); + const confirmSponsorshipTx = async () => collection.confirmSponsorship(charlie); + await expect(confirmSponsorshipTx()).to.be.rejectedWith(/common\.CollectionNotFound/); }); - it('(!negative test!) 
Transfer fees are not paid by the sponsor if the transfer failed', async () => { - const collectionId = await createCollectionExpectSuccess(); - await setCollectionSponsorExpectSuccess(collectionId, bob.address); - await confirmSponsorshipExpectSuccess(collectionId, '//Bob'); - - await usingApi(async (api, privateKeyWrapper) => { - // Find unused address - const ownerZeroBalance = await findUnusedAddress(api, privateKeyWrapper); - - // Find another unused address - const senderZeroBalance = await findUnusedAddress(api, privateKeyWrapper); - - // Mint token for an unused address - const itemId = await createItemExpectSuccess(alice, collectionId, 'NFT', ownerZeroBalance.address); - - const sponsorBalanceBeforeTx = (await api.query.system.account(bob.address)).data.free.toBigInt(); - - // Try to transfer this token from an unsponsored unused adress to Alice - const zeroToAlice = api.tx.unique.transfer(normalizeAccountId(alice.address), collectionId, itemId, 0); - await expect(submitTransactionExpectFailAsync(senderZeroBalance, zeroToAlice)).to.be.rejected; - - const sponsorBalanceAfterTx = (await api.query.system.account(bob.address)).data.free.toBigInt(); - - expect(sponsorBalanceAfterTx).to.equal(sponsorBalanceBeforeTx); - }); + itSub('(!negative test!) Transfer fees are not paid by the sponsor if the transfer failed', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + await collection.setSponsor(alice, bob.address); + await collection.confirmSponsorship(bob); + const token = await collection.mintToken(alice, {Substrate: ownerZeroBalance.address}); + const sponsorBalanceBefore = await helper.balance.getSubstrate(bob.address); + const transferTx = async () => token.transfer(senderZeroBalance, {Substrate: alice.address}); + await expect(transferTx()).to.be.rejectedWith('Inability to pay some fees'); + const sponsorBalanceAfter = await helper.balance.getSubstrate(bob.address); + expect(sponsorBalanceAfter).to.equal(sponsorBalanceBefore); }); }); diff --git a/tests/src/connection.test.ts b/tests/src/connection.test.ts index a365a05c64..07a5f0a7aa 100644 --- a/tests/src/connection.test.ts +++ b/tests/src/connection.test.ts @@ -14,29 +14,19 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . 
-import usingApi from './substrate/substrate-api'; -import {WsProvider} from '@polkadot/api'; -import * as chai from 'chai'; -import chaiAsPromised from 'chai-as-promised'; - -chai.use(chaiAsPromised); - -const expect = chai.expect; +import {itSub, expect, usingPlaygrounds} from './util'; describe('Connection smoke test', () => { - it('Connection can be established', async () => { - await usingApi(async api => { - const health = await api.rpc.system.health(); - expect(health).to.be.not.empty; - }); + itSub('Connection can be established', async ({helper}) => { + const health = (await helper.callRpc('api.rpc.system.health')).toJSON(); + expect(health).to.be.not.empty; }); it('Cannot connect to 255.255.255.255', async () => { - const neverConnectProvider = new WsProvider('ws://255.255.255.255:9944'); await expect((async () => { - await usingApi(async api => { - await api.rpc.system.health(); - }, {provider: neverConnectProvider}); + await usingPlaygrounds(async helper => { + await helper.callRpc('api.rpc.system.health'); + }, 'ws://255.255.255.255:9944'); })()).to.be.eventually.rejected; }); }); diff --git a/tests/src/createCollection.test.ts b/tests/src/createCollection.test.ts index 4102f18527..f0e6c393c0 100644 --- a/tests/src/createCollection.test.ts +++ b/tests/src/createCollection.test.ts @@ -14,128 +14,143 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . -import {expect} from 'chai'; -import usingApi, {executeTransaction, submitTransactionAsync} from './substrate/substrate-api'; -import {createCollectionWithPropsExpectFailure, createCollectionExpectFailure, createCollectionExpectSuccess, getCreateCollectionResult, getDetailedCollectionInfo, createCollectionWithPropsExpectSuccess} from './util/helpers'; +import {IKeyringPair} from '@polkadot/types/types'; +import {usingPlaygrounds, expect, itSub, Pallets} from './util'; +import {ICollectionCreationOptions, IProperty} from './util/playgrounds/types'; +import {UniqueHelper} from './util/playgrounds/unique'; + +async function mintCollectionHelper(helper: UniqueHelper, signer: IKeyringPair, options: ICollectionCreationOptions, type?: 'nft' | 'fungible' | 'refungible') { + let collection; + if (type === 'nft') { + collection = await helper.nft.mintCollection(signer, options); + } else if (type === 'fungible') { + collection = await helper.ft.mintCollection(signer, options, 0); + } else { + collection = await helper.rft.mintCollection(signer, options); + } + const data = await collection.getData(); + expect(data?.normalizedOwner).to.be.equal(helper.address.normalizeSubstrate(signer.address)); + expect(data?.name).to.be.equal(options.name); + expect(data?.description).to.be.equal(options.description); + expect(data?.raw.tokenPrefix).to.be.equal(options.tokenPrefix); + if (options.properties) { + expect(data?.raw.properties).to.be.deep.equal(options.properties); + } + + if (options.tokenPropertyPermissions) { + expect(data?.raw.tokenPropertyPermissions).to.be.deep.equal(options.tokenPropertyPermissions); + } + + return collection; +} describe('integration test: ext. 
createCollection():', () => { - it('Create new NFT collection', async () => { - await createCollectionExpectSuccess({name: 'A', description: 'B', tokenPrefix: 'C', mode: {type: 'NFT'}}); - }); - it('Create new NFT collection whith collection_name of maximum length (64 bytes)', async () => { - await createCollectionExpectSuccess({name: 'A'.repeat(64)}); + let alice: IKeyringPair; + + before(async () => { + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice] = await helper.arrange.createAccounts([100n], donor); + }); }); - it('Create new NFT collection whith collection_description of maximum length (256 bytes)', async () => { - await createCollectionExpectSuccess({description: 'A'.repeat(256)}); + itSub('Create new NFT collection', async ({helper}) => { + await mintCollectionHelper(helper, alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}, 'nft'); }); - it('Create new NFT collection whith token_prefix of maximum length (16 bytes)', async () => { - await createCollectionExpectSuccess({tokenPrefix: 'A'.repeat(16)}); + itSub('Create new NFT collection with collection_name of maximum length (64 bytes)', async ({helper}) => { + await mintCollectionHelper(helper, alice, {name: 'A'.repeat(64), description: 'descr', tokenPrefix: 'COL'}, 'nft'); }); - it('Create new Fungible collection', async () => { - await createCollectionExpectSuccess({mode: {type: 'Fungible', decimalPoints: 0}}); + itSub('Create new NFT collection with collection_description of maximum length (256 bytes)', async ({helper}) => { + await mintCollectionHelper(helper, alice, {name: 'name', description: 'A'.repeat(256), tokenPrefix: 'COL'}, 'nft'); }); - it('Create new ReFungible collection', async () => { - await createCollectionExpectSuccess({mode: {type: 'ReFungible'}}); + itSub('Create new NFT collection with token_prefix of maximum length (16 bytes)', async ({helper}) => { + await mintCollectionHelper(helper, alice, {name: 'name', description: 'descr', tokenPrefix: 'A'.repeat(16)}, 'nft'); }); - it('create new collection with properties #1', async () => { - await createCollectionWithPropsExpectSuccess({name: 'A', description: 'B', tokenPrefix: 'C', mode: {type: 'NFT'}, - properties: [{key: 'key1', value: 'val1'}], - propPerm: [{key: 'key1', permission: {tokenOwner: true, mutable: false, collectionAdmin: true}}]}); + itSub('Create new Fungible collection', async ({helper}) => { + await mintCollectionHelper(helper, alice, {name: 'name', description: 'descr', tokenPrefix: 'COL'}, 'fungible'); }); - it('create new collection with properties #2', async () => { - await createCollectionWithPropsExpectSuccess({name: 'A', description: 'B', tokenPrefix: 'C', mode: {type: 'NFT'}, - properties: [{key: 'key1', value: 'val1'}], - propPerm: [{key: 'key1', permission: {tokenOwner: true, mutable: false, collectionAdmin: true}}]}); + itSub.ifWithPallets('Create new ReFungible collection', [Pallets.ReFungible], async ({helper}) => { + await mintCollectionHelper(helper, alice, {name: 'name', description: 'descr', tokenPrefix: 'COL'}, 'refungible'); }); - it('create new collection with properties #3', async () => { - await createCollectionWithPropsExpectSuccess({name: 'A', description: 'B', tokenPrefix: 'C', mode: {type: 'NFT'}, + itSub('create new collection with properties', async ({helper}) => { + await mintCollectionHelper(helper, alice, { + name: 'name', description: 'descr', tokenPrefix: 'COL', properties: [{key: 'key1', value: 'val1'}], - propPerm: [{key:
'key1', permission: {tokenOwner: true, mutable: false, collectionAdmin: true}}]}); - }); - - it('Create new collection with extra fields', async () => { - await usingApi(async (api, privateKeyWrapper) => { - const alice = privateKeyWrapper('//Alice'); - const bob = privateKeyWrapper('//Bob'); - const tx = api.tx.unique.createCollectionEx({ - mode: {Fungible: 8}, - permissions: { - access: 'AllowList', - }, - name: [1], - description: [2], - tokenPrefix: '0x000000', - pendingSponsor: bob.address, - limits: { - accountTokenOwnershipLimit: 3, - }, - }); - const events = await submitTransactionAsync(alice, tx); - const result = getCreateCollectionResult(events); - - const collection = (await getDetailedCollectionInfo(api, result.collectionId))!; - expect(collection.owner.toString()).to.equal(alice.address); - expect(collection.mode.asFungible.toNumber()).to.equal(8); - expect(collection.permissions.access.toHuman()).to.equal('AllowList'); - expect(collection.name.map(v => v.toNumber())).to.deep.equal([1]); - expect(collection.description.map(v => v.toNumber())).to.deep.equal([2]); - expect(collection.tokenPrefix.toString()).to.equal('0x000000'); - expect(collection.sponsorship.asUnconfirmed.toString()).to.equal(bob.address); - expect(collection.limits.accountTokenOwnershipLimit.unwrap().toNumber()).to.equal(3); - }); + tokenPropertyPermissions: [{key: 'key1', permission: {tokenOwner: true, mutable: false, collectionAdmin: true}}], + }, 'nft'); }); - it('New collection is not external', async () => { - await usingApi(async (api, privateKeyWrapper) => { - const alice = privateKeyWrapper('//Alice'); - const tx = api.tx.unique.createCollectionEx({ }); - const events = await submitTransactionAsync(alice, tx); - const result = getCreateCollectionResult(events); + itSub('Create new collection with extra fields', async ({helper}) => { + const collection = await mintCollectionHelper(helper, alice, {name: 'name', description: 'descr', tokenPrefix: 'COL'}, 'fungible'); + await collection.setPermissions(alice, {access: 'AllowList'}); + await collection.setLimits(alice, {accountTokenOwnershipLimit: 3}); + const data = await collection.getData(); + const limits = await collection.getEffectiveLimits(); + const raw = data?.raw; - const collection = (await getDetailedCollectionInfo(api, result.collectionId))!; - expect(collection.readOnly.toHuman()).to.be.false; - }); + expect(data?.normalizedOwner).to.be.equal(helper.address.normalizeSubstrate(alice.address)); + expect(data?.name).to.be.equal('name'); + expect(data?.description).to.be.equal('descr'); + expect(raw.permissions.access).to.be.equal('AllowList'); + expect(raw.mode).to.be.deep.equal({Fungible: '0'}); + expect(limits.accountTokenOwnershipLimit).to.be.equal(3); + }); + + itSub('New collection is not external', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'name', description: 'descr', tokenPrefix: 'COL'}); + const data = await collection.getData(); + expect(data?.raw.readOnly).to.be.false; }); }); describe('(!negative test!) integration test: ext. createCollection():', () => { - it('(!negative test!) 
create new NFT collection whith incorrect data (collection_name)', async () => { - await createCollectionExpectFailure({name: 'A'.repeat(65), mode: {type: 'NFT'}}); + let alice: IKeyringPair; + + before(async () => { + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice] = await helper.arrange.createAccounts([100n], donor); + }); }); - it('(!negative test!) create new NFT collection whith incorrect data (collection_description)', async () => { - await createCollectionExpectFailure({description: 'A'.repeat(257), mode: {type: 'NFT'}}); + + itSub('(!negative test!) create new NFT collection with incorrect data (collection_name)', async ({helper}) => { + const mintCollectionTx = async () => helper.nft.mintCollection(alice, {name: 'A'.repeat(65), description: 'descr', tokenPrefix: 'COL'}); + await expect(mintCollectionTx()).to.be.rejectedWith('Verification Error'); }); - it('(!negative test!) create new NFT collection whith incorrect data (token_prefix)', async () => { - await createCollectionExpectFailure({tokenPrefix: 'A'.repeat(17), mode: {type: 'NFT'}}); + itSub('(!negative test!) create new NFT collection with incorrect data (collection_description)', async ({helper}) => { + const mintCollectionTx = async () => helper.nft.mintCollection(alice, {name: 'name', description: 'A'.repeat(257), tokenPrefix: 'COL'}); + await expect(mintCollectionTx()).to.be.rejectedWith('Verification Error'); }); - it('fails when bad limits are set', async () => { - await usingApi(async (api, privateKeyWrapper) => { - const alice = privateKeyWrapper('//Alice'); - const tx = api.tx.unique.createCollectionEx({mode: 'NFT', limits: {tokenLimit: 0}}); - await expect(executeTransaction(api, alice, tx)).to.be.rejectedWith(/^common.CollectionTokenLimitExceeded$/); - }); + itSub('(!negative test!) create new NFT collection with incorrect data (token_prefix)', async ({helper}) => { + const mintCollectionTx = async () => helper.nft.mintCollection(alice, {name: 'name', description: 'descr', tokenPrefix: 'A'.repeat(17)}); + await expect(mintCollectionTx()).to.be.rejectedWith('Verification Error'); + }); + + itSub('(!negative test!) fails when bad limits are set', async ({helper}) => { + const mintCollectionTx = async () => helper.nft.mintCollection(alice, {name: 'name', description: 'descr', tokenPrefix: 'COL', limits: {tokenLimit: 0}}); + await expect(mintCollectionTx()).to.be.rejectedWith(/common\.CollectionTokenLimitExceeded/); }); - it('(!negative test!) create collection with incorrect property limit (64 elements)', async () => { - const props = []; + itSub('(!negative test!) create collection with incorrect property limit (64 elements)', async ({helper}) => { + const props: IProperty[] = []; for (let i = 0; i < 65; i++) { props.push({key: `key${i}`, value: `value${i}`}); } - - await createCollectionWithPropsExpectFailure({name: 'A', description: 'B', tokenPrefix: 'C', mode: {type: 'NFT'}, properties: props}); + const mintCollectionTx = async () => helper.nft.mintCollection(alice, {name: 'name', description: 'descr', tokenPrefix: 'COL', properties: props}); + await expect(mintCollectionTx()).to.be.rejectedWith('Verification Error'); }); - it('(!negative test!) create collection with incorrect property limit (40 kb)', async () => { - const props = []; + itSub('(!negative test!)
create collection with incorrect property limit (40 kb)', async ({helper}) => { + const props: IProperty[] = []; for (let i = 0; i < 32; i++) { props.push({key: `key${i}`.repeat(80), value: `value${i}`.repeat(80)}); } - await createCollectionWithPropsExpectFailure({name: 'A', description: 'B', tokenPrefix: 'C', mode: {type: 'NFT'}, properties: props}); + const mintCollectionTx = async () => helper.nft.mintCollection(alice, {name: 'name', description: 'descr', tokenPrefix: 'COL', properties: props}); + await expect(mintCollectionTx()).to.be.rejectedWith('Verification Error'); }); }); diff --git a/tests/src/createItem.test.ts b/tests/src/createItem.test.ts index ef1e5a99b0..505a1abcb5 100644 --- a/tests/src/createItem.test.ts +++ b/tests/src/createItem.test.ts @@ -14,159 +14,259 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . -import {default as usingApi} from './substrate/substrate-api'; -import chai from 'chai'; import {IKeyringPair} from '@polkadot/types/types'; -import { - createCollectionExpectSuccess, - createItemExpectSuccess, - addCollectionAdminExpectSuccess, - createCollectionWithPropsExpectSuccess, - createItemWithPropsExpectSuccess, - createItemWithPropsExpectFailure, -} from './util/helpers'; - -const expect = chai.expect; -let alice: IKeyringPair; -let bob: IKeyringPair; +import {usingPlaygrounds, expect, itSub, Pallets} from './util'; +import {IProperty, ICrossAccountId} from './util/playgrounds/types'; +import {UniqueHelper} from './util/playgrounds/unique'; + +async function mintTokenHelper(helper: UniqueHelper, collection: any, signer: IKeyringPair, owner: ICrossAccountId, type: 'nft' | 'fungible' | 'refungible'='nft', properties?: IProperty[]) { + let token; + const itemCountBefore = await helper.collection.getLastTokenId(collection.collectionId); + const itemBalanceBefore = (await helper.callRpc('api.rpc.unique.balance', [collection.collectionId, owner, 0])).toBigInt(); + if (type === 'nft') { + token = await collection.mintToken(signer, owner, properties); + } else if (type === 'fungible') { + await collection.mint(signer, 10n, owner); + } else { + token = await collection.mintToken(signer, 100n, owner, properties); + } + + const itemCountAfter = await helper.collection.getLastTokenId(collection.collectionId); + const itemBalanceAfter = (await helper.callRpc('api.rpc.unique.balance', [collection.collectionId, owner, 0])).toBigInt(); + + if (type === 'fungible') { + expect(itemBalanceAfter - itemBalanceBefore).to.be.equal(10n); + } else { + expect(itemCountAfter).to.be.equal(itemCountBefore + 1); + } + + return token; +} + describe('integration test: ext. 
():', () => { + let alice: IKeyringPair; + let bob: IKeyringPair; + before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob] = await helper.arrange.createAccounts([100n, 100n], donor); }); }); - it('Create new item in NFT collection', async () => { - const createMode = 'NFT'; - const newCollectionID = await createCollectionExpectSuccess({mode: {type: createMode}}); - await createItemExpectSuccess(alice, newCollectionID, createMode); + itSub('Create new item in NFT collection', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + await mintTokenHelper(helper, collection, alice, {Substrate: alice.address}); + }); + itSub('Create new item in Fungible collection', async ({helper}) => { + const collection = await helper.ft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}, 0); + await mintTokenHelper(helper, collection, alice, {Substrate: alice.address}, 'fungible'); }); - it('Create new item in Fungible collection', async () => { - const createMode = 'Fungible'; - const newCollectionID = await createCollectionExpectSuccess({mode: {type: createMode, decimalPoints: 0}}); - await createItemExpectSuccess(alice, newCollectionID, createMode); + itSub('Check events on create new item in Fungible collection', async ({helper}) => { + const {collectionId} = await helper.ft.mintCollection(alice, {name: 'test', description: 'test', tokenPrefix: 'test'}, 0); + const to = {Substrate: alice.address}; + { + const createData = {fungible: {value: 100}}; + const events = await helper.executeExtrinsic(alice, 'api.tx.unique.createItem', [collectionId, to, createData as any]); + const result = helper.util.extractTokensFromCreationResult(events); + expect(result.tokens[0].amount).to.be.equal(100n); + expect(result.tokens[0].collectionId).to.be.equal(collectionId); + expect(result.tokens[0].owner).to.be.deep.equal(to); + } + { + const createData = {fungible: {value: 50}}; + const events = await helper.executeExtrinsic(alice, 'api.tx.unique.createItem', [collectionId, to, createData as any]); + const result = helper.util.extractTokensFromCreationResult(events); + expect(result.tokens[0].amount).to.be.equal(50n); + expect(result.tokens[0].collectionId).to.be.equal(collectionId); + expect(result.tokens[0].owner).to.be.deep.equal(to); + } }); - it('Create new item in ReFungible collection', async () => { - const createMode = 'ReFungible'; - const newCollectionID = await createCollectionExpectSuccess({mode: {type: createMode}}); - await createItemExpectSuccess(alice, newCollectionID, createMode); + itSub.ifWithPallets('Create new item in ReFungible collection', [Pallets.ReFungible], async ({helper}) => { + const collection = await helper.rft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + await mintTokenHelper(helper, collection, alice, {Substrate: alice.address}, 'refungible'); }); - it('Create new item in NFT collection with collection admin permissions', async () => { - const createMode = 'NFT'; - const newCollectionID = await createCollectionExpectSuccess({mode: {type: createMode}}); - await addCollectionAdminExpectSuccess(alice, newCollectionID, bob.address); - await createItemExpectSuccess(bob, newCollectionID, createMode); + itSub('Create new item 
in NFT collection with collection admin permissions', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + await collection.addAdmin(alice, {Substrate: bob.address}); + await mintTokenHelper(helper, collection, bob, {Substrate: alice.address}); }); - it('Create new item in Fungible collection with collection admin permissions', async () => { - const createMode = 'Fungible'; - const newCollectionID = await createCollectionExpectSuccess({mode: {type: createMode, decimalPoints: 0}}); - await addCollectionAdminExpectSuccess(alice, newCollectionID, bob.address); - await createItemExpectSuccess(bob, newCollectionID, createMode); + itSub('Create new item in Fungible collection with collection admin permissions', async ({helper}) => { + const collection = await helper.ft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}, 0); + await collection.addAdmin(alice, {Substrate: bob.address}); + await mintTokenHelper(helper, collection, bob, {Substrate: alice.address}, 'fungible'); + }); + itSub.ifWithPallets('Create new item in ReFungible collection with collection admin permissions', [Pallets.ReFungible], async ({helper}) => { + const collection = await helper.rft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + await collection.addAdmin(alice, {Substrate: bob.address}); + await mintTokenHelper(helper, collection, bob, {Substrate: alice.address}, 'refungible'); + }); + + itSub('Set property Admin', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'name', description: 'descr', tokenPrefix: 'COL', + properties: [{key: 'k', value: 'v'}], + tokenPropertyPermissions: [{key: 'k', permission: {tokenOwner: false, mutable: true, collectionAdmin: true}}], + }); + await mintTokenHelper(helper, collection, alice, {Substrate: bob.address}, 'nft', [{key: 'k', value: 'v'}]); + }); + + itSub('Set property AdminConst', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'name', description: 'descr', tokenPrefix: 'COL', + properties: [{key: 'k', value: 'v'}], + tokenPropertyPermissions: [{key: 'k', permission: {tokenOwner: false, mutable: false, collectionAdmin: true}}], + }); + await mintTokenHelper(helper, collection, alice, {Substrate: bob.address}, 'nft', [{key: 'k', value: 'v'}]); }); - it('Create new item in ReFungible collection with collection admin permissions', async () => { - const createMode = 'ReFungible'; - const newCollectionID = await createCollectionExpectSuccess({mode: {type: createMode}}); - await addCollectionAdminExpectSuccess(alice, newCollectionID, bob.address); - await createItemExpectSuccess(bob, newCollectionID, createMode); + + itSub('Set property itemOwnerOrAdmin', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'name', description: 'descr', tokenPrefix: 'COL', + properties: [{key: 'k', value: 'v'}], + tokenPropertyPermissions: [{key: 'k', permission: {tokenOwner: true, mutable: true, collectionAdmin: true}}], + }); + await mintTokenHelper(helper, collection, alice, {Substrate: bob.address}, 'nft', [{key: 'k', value: 'v'}]); }); - it('Set property Admin', async () => { - const createMode = 'NFT'; - const newCollectionID = await createCollectionWithPropsExpectSuccess({mode: {type: createMode}, - propPerm: [{key: 'k', permission: {mutable: true, collectionAdmin: true, tokenOwner: false}}]}); - - await 
createItemWithPropsExpectSuccess(alice, newCollectionID, createMode, [{key: 'k', value: 't2'}]); + itSub('Check total pieces of Fungible token', async ({helper}) => { + const collection = await helper.ft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}, 0); + const amount = 10n; + await mintTokenHelper(helper, collection, alice, {Substrate: bob.address}, 'fungible'); + { + const totalPieces = await collection.getTotalPieces(); + expect(totalPieces).to.be.equal(amount); + } + await collection.transfer(bob, {Substrate: alice.address}, 1n); + { + const totalPieces = await collection.getTotalPieces(); + expect(totalPieces).to.be.equal(amount); + } }); - it('Set property AdminConst', async () => { - const createMode = 'NFT'; - const newCollectionID = await createCollectionWithPropsExpectSuccess({mode: {type: createMode}, - propPerm: [{key: 'key1', permission: {mutable: false, collectionAdmin: true, tokenOwner: false}}]}); - - await createItemWithPropsExpectSuccess(alice, newCollectionID, createMode, [{key: 'key1', value: 'val1'}]); + itSub('Check total pieces of NFT token', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + const amount = 1n; + const token = await mintTokenHelper(helper, collection, alice, {Substrate: bob.address}); + { + const totalPieces = await helper.callRpc('api.rpc.unique.totalPieces', [collection.collectionId, token.tokenId]); + expect(totalPieces?.unwrap().toBigInt()).to.be.equal(amount); + } + await token.transfer(bob, {Substrate: alice.address}); + { + const totalPieces = await helper.callRpc('api.rpc.unique.totalPieces', [collection.collectionId, token.tokenId]); + expect(totalPieces?.unwrap().toBigInt()).to.be.equal(amount); + } }); - it('Set property itemOwnerOrAdmin', async () => { - const createMode = 'NFT'; - const newCollectionID = await createCollectionWithPropsExpectSuccess({mode: {type: createMode}, - propPerm: [{key: 'key1', permission: {mutable: true, collectionAdmin: true, tokenOwner: true}}]}); - - await createItemWithPropsExpectSuccess(alice, newCollectionID, createMode, [{key: 'key1', value: 'val1'}]); + itSub.ifWithPallets('Check total pieces of ReFungible token', [Pallets.ReFungible], async ({helper}) => { + const collection = await helper.rft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + const amount = 100n; + const token = await mintTokenHelper(helper, collection, alice, {Substrate: bob.address}, 'refungible'); + { + const totalPieces = await token.getTotalPieces(); + expect(totalPieces).to.be.equal(amount); + } + await token.transfer(bob, {Substrate: alice.address}, 60n); + { + const totalPieces = await token.getTotalPieces(); + expect(totalPieces).to.be.equal(amount); + } }); }); describe('Negative integration test: ext. 
createItem():', () => { + let alice: IKeyringPair; + let bob: IKeyringPair; + before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob] = await helper.arrange.createAccounts([100n, 100n], donor); }); }); - it('Regular user cannot create new item in NFT collection', async () => { - const createMode = 'NFT'; - const newCollectionID = await createCollectionExpectSuccess({mode: {type: createMode}}); - await expect(createItemExpectSuccess(bob, newCollectionID, createMode)).to.be.rejected; + itSub('Regular user cannot create new item in NFT collection', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + const mintTx = async () => collection.mintToken(bob, {Substrate: bob.address}); + await expect(mintTx()).to.be.rejectedWith(/common\.PublicMintingNotAllowed/); }); - it('Regular user cannot create new item in Fungible collection', async () => { - const createMode = 'Fungible'; - const newCollectionID = await createCollectionExpectSuccess({mode: {type: createMode, decimalPoints: 0}}); - await expect(createItemExpectSuccess(bob, newCollectionID, createMode)).to.be.rejected; + itSub('Regular user cannot create new item in Fungible collection', async ({helper}) => { + const collection = await helper.ft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}, 0); + const mintTx = async () => collection.mint(bob, 10n, {Substrate: bob.address}); + await expect(mintTx()).to.be.rejectedWith(/common\.PublicMintingNotAllowed/); }); - it('Regular user cannot create new item in ReFungible collection', async () => { - const createMode = 'ReFungible'; - const newCollectionID = await createCollectionExpectSuccess({mode: {type: createMode}}); - await expect(createItemExpectSuccess(bob, newCollectionID, createMode)).to.be.rejected; + itSub.ifWithPallets('Regular user cannot create new item in ReFungible collection', [Pallets.ReFungible], async ({helper}) => { + const collection = await helper.rft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + const mintTx = async () => collection.mintToken(bob, 100n, {Substrate: bob.address}); + await expect(mintTx()).to.be.rejectedWith(/common\.PublicMintingNotAllowed/); }); - it('No editing rights', async () => { - await usingApi(async () => { - const createMode = 'NFT'; - const newCollectionID = await createCollectionWithPropsExpectSuccess({mode: {type: createMode}, - propPerm: [{key: 'key1', permission: {mutable: false, collectionAdmin: false, tokenOwner: false}}]}); - await addCollectionAdminExpectSuccess(alice, newCollectionID, bob.address); - - await createItemWithPropsExpectFailure(bob, newCollectionID, 'NFT', [{key: 'key1', value: 'v'}]); + itSub('No editing rights', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL', + tokenPropertyPermissions: [{key: 'k', permission: {mutable: false, collectionAdmin: false, tokenOwner: false}}], }); + const mintTx = async () => collection.mintToken(alice, {Substrate: bob.address}, [{key: 'k', value: 'v'}]); + await expect(mintTx()).to.be.rejectedWith(/common\.NoPermission/); }); - it('User doesnt have editing rights', async () => { - await usingApi(async () => { - const newCollectionID = 
await createCollectionWithPropsExpectSuccess({propPerm: [{key: 'key1', permission: {mutable: true, collectionAdmin: false, tokenOwner: false}}]}); - await createItemWithPropsExpectFailure(bob, newCollectionID, 'NFT', [{key: 'key1', value: 'v'}]); + itSub('User doesnt have editing rights', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL', + tokenPropertyPermissions: [{key: 'k', permission: {mutable: true, collectionAdmin: false, tokenOwner: false}}], }); + const mintTx = async () => collection.mintToken(alice, {Substrate: bob.address}, [{key: 'k', value: 'v'}]); + await expect(mintTx()).to.be.rejectedWith(/common\.NoPermission/); }); - it('Adding property without access rights', async () => { - await usingApi(async () => { - const newCollectionID = await createCollectionWithPropsExpectSuccess(); - await addCollectionAdminExpectSuccess(alice, newCollectionID, bob.address); - - await createItemWithPropsExpectFailure(bob, newCollectionID, 'NFT', [{key: 'k', value: 'v'}]); - }); + itSub('Adding property without access rights', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + const mintTx = async () => collection.mintToken(alice, {Substrate: bob.address}, [{key: 'k', value: 'v'}]); + await expect(mintTx()).to.be.rejectedWith(/common\.NoPermission/); }); - it('Adding more than 64 prps', async () => { - await usingApi(async () => { - const prps = []; + itSub('Adding more than 64 prps', async ({helper}) => { + const props: IProperty[] = []; - for (let i = 0; i < 65; i++) { - prps.push({key: `key${i}`, value: `value${i}`}); - } + for (let i = 0; i < 65; i++) { + props.push({key: `key${i}`, value: `value${i}`}); + } - const newCollectionID = await createCollectionWithPropsExpectSuccess(); - - await createItemWithPropsExpectFailure(alice, newCollectionID, 'NFT', prps); - }); + + const collection = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + const mintTx = async () => collection.mintToken(alice, {Substrate: bob.address}, props); + await expect(mintTx()).to.be.rejectedWith('Verification Error'); }); - it('Trying to add bigger property than allowed', async () => { - await usingApi(async () => { - const newCollectionID = await createCollectionWithPropsExpectSuccess(); - - await createItemWithPropsExpectFailure(alice, newCollectionID, 'NFT', [{key: 'k', value: 'vvvvvv'.repeat(5000)}, {key: 'k2', value: 'vvv'.repeat(5000)}]); + itSub('Trying to add bigger property than allowed', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL', + tokenPropertyPermissions: [ + {key: 'k1', permission: {mutable: true, collectionAdmin: true, tokenOwner: true}}, + {key: 'k2', permission: {mutable: true, collectionAdmin: true, tokenOwner: true}}, + ], }); + const mintTx = async () => collection.mintToken(alice, {Substrate: bob.address}, [ + {key: 'k1', value: 'vvvvvv'.repeat(5000)}, + {key: 'k2', value: 'vvv'.repeat(5000)}, + ]); + await expect(mintTx()).to.be.rejectedWith(/common\.NoSpaceForProperty/); + }); + + itSub('Check total pieces for invalid Fungible token', async ({helper}) => { + const collection = await helper.ft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}, 0); + const invalidTokenId = 1_000_000; + expect((await helper.callRpc('api.rpc.unique.totalPieces', 
[collection.collectionId, invalidTokenId]))?.isNone).to.be.true; + expect((await helper.callRpc('api.rpc.unique.tokenData', [collection.collectionId, invalidTokenId]))?.pieces.toBigInt()).to.be.equal(0n); + }); + + itSub('Check total pieces for invalid NFT token', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + const invalidTokenId = 1_000_000; + expect((await helper.callRpc('api.rpc.unique.totalPieces', [collection.collectionId, invalidTokenId]))?.isNone).to.be.true; + expect((await helper.callRpc('api.rpc.unique.tokenData', [collection.collectionId, invalidTokenId]))?.pieces.toBigInt()).to.be.equal(0n); + }); + + itSub.ifWithPallets('Check total pieces for invalid Refungible token', [Pallets.ReFungible], async ({helper}) => { + const collection = await helper.rft.mintCollection(alice, {name: 'col', description: 'descr', tokenPrefix: 'COL'}); + const invalidTokenId = 1_000_000; + expect((await helper.callRpc('api.rpc.unique.totalPieces', [collection.collectionId, invalidTokenId]))?.isNone).to.be.true; + expect((await helper.callRpc('api.rpc.unique.tokenData', [collection.collectionId, invalidTokenId]))?.pieces.toBigInt()).to.be.equal(0n); }); }); diff --git a/tests/src/createMultipleItems.test.ts b/tests/src/createMultipleItems.test.ts index f1a7d88540..b9f3820bcf 100644 --- a/tests/src/createMultipleItems.test.ts +++ b/tests/src/createMultipleItems.test.ts @@ -14,285 +14,151 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . -import {ApiPromise} from '@polkadot/api'; import {IKeyringPair} from '@polkadot/types/types'; -import chai from 'chai'; -import chaiAsPromised from 'chai-as-promised'; -import {default as usingApi, submitTransactionAsync, submitTransactionExpectFailAsync, executeTransaction} from './substrate/substrate-api'; -import { - createCollectionExpectSuccess, - destroyCollectionExpectSuccess, - getGenericResult, - normalizeAccountId, - setCollectionLimitsExpectSuccess, - addCollectionAdminExpectSuccess, - getBalance, - getTokenOwner, - getLastTokenId, - getCreatedCollectionCount, - createCollectionWithPropsExpectSuccess, - createMultipleItemsWithPropsExpectSuccess, - getTokenProperties, -} from './util/helpers'; - -chai.use(chaiAsPromised); -const expect = chai.expect; +import {usingPlaygrounds, expect, Pallets, itSub} from './util'; describe('Integration Test createMultipleItems(collection_id, owner, items_data):', () => { - it('Create 0x31, 0x32, 0x33 items in active NFT collection and verify tokens data in chain', async () => { - await usingApi(async (api, privateKeyWrapper) => { - const collectionId = await createCollectionExpectSuccess(); - const itemsListIndexBefore = await getLastTokenId(api, collectionId); - expect(itemsListIndexBefore).to.be.equal(0); - - const alice = privateKeyWrapper('//Alice'); - await submitTransactionAsync( - alice, - api.tx.unique.setTokenPropertyPermissions(collectionId, [{key: 'data', permission: {tokenOwner: true}}]), - ); - - const args = [ - {NFT: {properties: [{key: 'data', value: '1'}]}}, - {NFT: {properties: [{key: 'data', value: '2'}]}}, - {NFT: {properties: [{key: 'data', value: '3'}]}}, - ]; - const createMultipleItemsTx = api.tx.unique.createMultipleItems(collectionId, normalizeAccountId(alice.address), args); - await submitTransactionAsync(alice, createMultipleItemsTx); - const itemsListIndexAfter = await getLastTokenId(api, collectionId); - 
expect(itemsListIndexAfter).to.be.equal(3); - - expect(await getTokenOwner(api, collectionId, 1)).to.be.deep.equal(normalizeAccountId(alice.address)); - expect(await getTokenOwner(api, collectionId, 2)).to.be.deep.equal(normalizeAccountId(alice.address)); - expect(await getTokenOwner(api, collectionId, 3)).to.be.deep.equal(normalizeAccountId(alice.address)); - - expect((await getTokenProperties(api, collectionId, 1, ['data']))[0].value).to.be.equal('1'); - expect((await getTokenProperties(api, collectionId, 2, ['data']))[0].value).to.be.equal('2'); - expect((await getTokenProperties(api, collectionId, 3, ['data']))[0].value).to.be.equal('3'); - }); - }); - - it('Create 0x01, 0x02, 0x03 items in active Fungible collection and verify tokens data in chain', async () => { - await usingApi(async (api, privateKeyWrapper) => { - const collectionId = await createCollectionExpectSuccess({mode: {type: 'Fungible', decimalPoints: 0}}); - const itemsListIndexBefore = await getLastTokenId(api, collectionId); - expect(itemsListIndexBefore).to.be.equal(0); - const alice = privateKeyWrapper('//Alice'); - const args = [ - {Fungible: {value: 1}}, - {Fungible: {value: 2}}, - {Fungible: {value: 3}}, - ]; - const createMultipleItemsTx = api.tx.unique - .createMultipleItems(collectionId, normalizeAccountId(alice.address), args); - await submitTransactionAsync(alice, createMultipleItemsTx); - const token1Data = await getBalance(api, collectionId, alice.address, 0); - - expect(token1Data).to.be.equal(6n); // 1 + 2 + 3 - }); - }); - - it('Create 0x31, 0x32, 0x33 items in active ReFungible collection and verify tokens data in chain', async () => { - await usingApi(async (api, privateKeyWrapper) => { - const collectionId = await createCollectionExpectSuccess({mode: {type: 'ReFungible'}}); - const itemsListIndexBefore = await getLastTokenId(api, collectionId); - expect(itemsListIndexBefore).to.be.equal(0); - const alice = privateKeyWrapper('//Alice'); - const args = [ - {ReFungible: {pieces: 1}}, - {ReFungible: {pieces: 2}}, - {ReFungible: {pieces: 3}}, - ]; - const createMultipleItemsTx = api.tx.unique - .createMultipleItems(collectionId, normalizeAccountId(alice.address), args); - await submitTransactionAsync(alice, createMultipleItemsTx); - const itemsListIndexAfter = await getLastTokenId(api, collectionId); - expect(itemsListIndexAfter).to.be.equal(3); - - expect(await getBalance(api, collectionId, alice.address, 1)).to.be.equal(1n); - expect(await getBalance(api, collectionId, alice.address, 2)).to.be.equal(2n); - expect(await getBalance(api, collectionId, alice.address, 3)).to.be.equal(3n); - }); - }); - - it('Can mint amount of items equals to collection limits', async () => { - await usingApi(async (api, privateKeyWrapper) => { - const alice = privateKeyWrapper('//Alice'); + let alice: IKeyringPair; - const collectionId = await createCollectionExpectSuccess(); - await setCollectionLimitsExpectSuccess(alice, collectionId, { - tokenLimit: 2, - }); - const args = [ - {NFT: {}}, - {NFT: {}}, - ]; - const createMultipleItemsTx = api.tx.unique.createMultipleItems(collectionId, normalizeAccountId(alice.address), args); - const events = await submitTransactionAsync(alice, createMultipleItemsTx); - const result = getGenericResult(events); - expect(result.success).to.be.true; + before(async () => { + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice] = await helper.arrange.createAccounts([100n], donor); }); }); - it('Create 0x31, 0x32, 0x33 items in 
active NFT with property Admin', async () => { - await usingApi(async (api, privateKeyWrapper) => { - const collectionId = await createCollectionWithPropsExpectSuccess({propPerm: [{key: 'k', permission: {mutable: true, collectionAdmin: true, tokenOwner: false}}]}); - const itemsListIndexBefore = await getLastTokenId(api, collectionId); - expect(itemsListIndexBefore).to.be.equal(0); - const alice = privateKeyWrapper('//Alice'); - const args = [ - {NFT: {properties: [{key: 'k', value: 'v1'}]}}, - {NFT: {properties: [{key: 'k', value: 'v2'}]}}, - {NFT: {properties: [{key: 'k', value: 'v3'}]}}, - ]; - - await createMultipleItemsWithPropsExpectSuccess(alice, collectionId, args); - const itemsListIndexAfter = await getLastTokenId(api, collectionId); - expect(itemsListIndexAfter).to.be.equal(3); - - expect(await getTokenOwner(api, collectionId, 1)).to.be.deep.equal(normalizeAccountId(alice.address)); - expect(await getTokenOwner(api, collectionId, 2)).to.be.deep.equal(normalizeAccountId(alice.address)); - expect(await getTokenOwner(api, collectionId, 3)).to.be.deep.equal(normalizeAccountId(alice.address)); - - expect((await getTokenProperties(api, collectionId, 1, ['k']))[0].value).to.be.equal('v1'); - expect((await getTokenProperties(api, collectionId, 2, ['k']))[0].value).to.be.equal('v2'); - expect((await getTokenProperties(api, collectionId, 3, ['k']))[0].value).to.be.equal('v3'); + itSub('Create 0x31, 0x32, 0x33 items in active NFT collection and verify tokens data in chain', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, { + name: 'name', + description: 'descr', + tokenPrefix: 'COL', + tokenPropertyPermissions: [ + {key: 'data', permission: {tokenOwner: true, mutable: false, collectionAdmin: false}}, + ], }); + const args = [ + {properties: [{key: 'data', value: '1'}]}, + {properties: [{key: 'data', value: '2'}]}, + {properties: [{key: 'data', value: '3'}]}, + ]; + const tokens = await helper.nft.mintMultipleTokensWithOneOwner(alice, collection.collectionId, {Substrate: alice.address}, args); + for (const [i, token] of tokens.entries()) { + const tokenData = await token.getData(); + expect(tokenData?.normalizedOwner.Substrate).to.be.deep.equal(helper.address.normalizeSubstrate(alice.address)); + expect(tokenData?.properties[0].value).to.be.equal(args[i].properties[0].value); + } }); - it('Create 0x31, 0x32, 0x33 items in active NFT with property AdminConst', async () => { - await usingApi(async (api, privateKeyWrapper) => { - const collectionId = await createCollectionWithPropsExpectSuccess({propPerm: [{key: 'k', permission: {mutable: false, collectionAdmin: true, tokenOwner: false}}]}); - const itemsListIndexBefore = await getLastTokenId(api, collectionId); - expect(itemsListIndexBefore).to.be.equal(0); - const alice = privateKeyWrapper('//Alice'); - const bob = privateKeyWrapper('//Bob'); - await addCollectionAdminExpectSuccess(alice, collectionId, bob.address); - const args = [ - {NFT: {properties: [{key: 'k', value: 'v1'}]}}, - {NFT: {properties: [{key: 'k', value: 'v2'}]}}, - {NFT: {properties: [{key: 'k', value: 'v3'}]}}, - ]; - - await createMultipleItemsWithPropsExpectSuccess(alice, collectionId, args); - const itemsListIndexAfter = await getLastTokenId(api, collectionId); - expect(itemsListIndexAfter).to.be.equal(3); - - expect(await getTokenOwner(api, collectionId, 1)).to.be.deep.equal(normalizeAccountId(alice.address)); - expect(await getTokenOwner(api, collectionId, 2)).to.be.deep.equal(normalizeAccountId(alice.address)); - expect(await 
getTokenOwner(api, collectionId, 3)).to.be.deep.equal(normalizeAccountId(alice.address)); - - expect((await getTokenProperties(api, collectionId, 1, ['k']))[0].value).to.be.equal('v1'); - expect((await getTokenProperties(api, collectionId, 2, ['k']))[0].value).to.be.equal('v2'); - expect((await getTokenProperties(api, collectionId, 3, ['k']))[0].value).to.be.equal('v3'); + itSub('Create 0x01, 0x02, 0x03 items in active Fungible collection and verify tokens data in chain', async ({helper}) => { + const collection = await helper.ft.mintCollection(alice, { + name: 'name', + description: 'descr', + tokenPrefix: 'COL', }); + const args = [ + {value: 1n}, + {value: 2n}, + {value: 3n}, + ]; + await helper.ft.mintMultipleTokensWithOneOwner(alice, collection.collectionId, args, {Substrate: alice.address}); + expect(await collection.getBalance({Substrate: alice.address})).to.be.equal(6n); }); - it('Create 0x31, 0x32, 0x33 items in active NFT with property itemOwnerOrAdmin', async () => { - await usingApi(async (api, privateKeyWrapper) => { - const collectionId = await createCollectionWithPropsExpectSuccess({propPerm: [{key: 'k', permission: {mutable: true, collectionAdmin: true, tokenOwner: true}}]}); - const itemsListIndexBefore = await getLastTokenId(api, collectionId); - expect(itemsListIndexBefore).to.be.equal(0); - const alice = privateKeyWrapper('//Alice'); - const args = [ - {NFT: {properties: [{key: 'k', value: 'v1'}]}}, - {NFT: {properties: [{key: 'k', value: 'v2'}]}}, - {NFT: {properties: [{key: 'k', value: 'v3'}]}}, - ]; - - await createMultipleItemsWithPropsExpectSuccess(alice, collectionId, args); - const itemsListIndexAfter = await getLastTokenId(api, collectionId); - expect(itemsListIndexAfter).to.be.equal(3); - - expect(await getTokenOwner(api, collectionId, 1)).to.be.deep.equal(normalizeAccountId(alice.address)); - expect(await getTokenOwner(api, collectionId, 2)).to.be.deep.equal(normalizeAccountId(alice.address)); - expect(await getTokenOwner(api, collectionId, 3)).to.be.deep.equal(normalizeAccountId(alice.address)); - - expect((await getTokenProperties(api, collectionId, 1, ['k']))[0].value).to.be.equal('v1'); - expect((await getTokenProperties(api, collectionId, 2, ['k']))[0].value).to.be.equal('v2'); - expect((await getTokenProperties(api, collectionId, 3, ['k']))[0].value).to.be.equal('v3'); + itSub.ifWithPallets('Create 0x31, 0x32, 0x33 items in active ReFungible collection and verify tokens data in chain', [Pallets.ReFungible], async ({helper}) => { + const collection = await helper.rft.mintCollection(alice, { + name: 'name', + description: 'descr', + tokenPrefix: 'COL', }); + const args = [ + {pieces: 1n}, + {pieces: 2n}, + {pieces: 3n}, + ]; + const tokens = await helper.rft.mintMultipleTokensWithOneOwner(alice, collection.collectionId, {Substrate: alice.address}, args); + + for (const [i, token] of tokens.entries()) { + expect(await token.getBalance({Substrate: alice.address})).to.be.equal(BigInt(i + 1)); + } }); -}); - -describe('Integration Test createMultipleItems(collection_id, owner, items_data) with collection admin permissions:', () => { - let alice: IKeyringPair; - let bob: IKeyringPair; - before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); + itSub('Can mint amount of items equals to collection limits', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, { + name: 'name', + description: 'descr', + tokenPrefix: 'COL', + limits: { + 
tokenLimit: 2, + }, }); + const args = [{}, {}]; + await helper.nft.mintMultipleTokensWithOneOwner(alice, collection.collectionId, {Substrate: alice.address}, args); }); - it('Create 0x31, 0x32, 0x33 items in active NFT collection and verify tokens data in chain', async () => { - await usingApi(async (api: ApiPromise) => { - const collectionId = await createCollectionWithPropsExpectSuccess({propPerm: [{key: 'data', permission: {mutable: true, collectionAdmin: true, tokenOwner: true}}]}); - const itemsListIndexBefore = await getLastTokenId(api, collectionId); - expect(itemsListIndexBefore).to.be.equal(0); - await addCollectionAdminExpectSuccess(alice, collectionId, bob.address); - const args = [ - {NFT: {properties: [{key: 'data', value: 'v1'}]}}, - {NFT: {properties: [{key: 'data', value: 'v2'}]}}, - {NFT: {properties: [{key: 'data', value: 'v3'}]}}, - ]; - const createMultipleItemsTx = api.tx.unique - .createMultipleItems(collectionId, normalizeAccountId(bob.address), args); - await submitTransactionAsync(bob, createMultipleItemsTx); - const itemsListIndexAfter = await getLastTokenId(api, collectionId); - expect(itemsListIndexAfter).to.be.equal(3); - - expect(await getTokenOwner(api, collectionId, 1)).to.be.deep.equal(normalizeAccountId(bob.address)); - expect(await getTokenOwner(api, collectionId, 2)).to.be.deep.equal(normalizeAccountId(bob.address)); - expect(await getTokenOwner(api, collectionId, 3)).to.be.deep.equal(normalizeAccountId(bob.address)); - - expect((await getTokenProperties(api, collectionId, 1, ['data']))[0].value).to.be.equal('v1'); - expect((await getTokenProperties(api, collectionId, 2, ['data']))[0].value).to.be.equal('v2'); - expect((await getTokenProperties(api, collectionId, 3, ['data']))[0].value).to.be.equal('v3'); + itSub('Create 0x31, 0x32, 0x33 items in active NFT with property Admin', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, { + name: 'name', + description: 'descr', + tokenPrefix: 'COL', + tokenPropertyPermissions: [ + {key: 'data', permission: {tokenOwner: false, mutable: true, collectionAdmin: true}}, + ], }); + const args = [ + {properties: [{key: 'data', value: '1'}]}, + {properties: [{key: 'data', value: '2'}]}, + {properties: [{key: 'data', value: '3'}]}, + ]; + const tokens = await helper.nft.mintMultipleTokensWithOneOwner(alice, collection.collectionId, {Substrate: alice.address}, args); + for (const [i, token] of tokens.entries()) { + const tokenData = await token.getData(); + expect(tokenData?.normalizedOwner.Substrate).to.be.deep.equal(helper.address.normalizeSubstrate(alice.address)); + expect(tokenData?.properties[0].value).to.be.equal(args[i].properties[0].value); + } }); - it('Create 0x01, 0x02, 0x03 items in active Fungible collection and verify tokens data in chain', async () => { - await usingApi(async (api: ApiPromise) => { - const collectionId = await createCollectionExpectSuccess({mode: {type: 'Fungible', decimalPoints: 0}}); - const itemsListIndexBefore = await getLastTokenId(api, collectionId); - expect(itemsListIndexBefore).to.be.equal(0); - await addCollectionAdminExpectSuccess(alice, collectionId, bob.address); - const args = [ - {Fungible: {value: 1}}, - {Fungible: {value: 2}}, - {Fungible: {value: 3}}, - ]; - const createMultipleItemsTx = api.tx.unique - .createMultipleItems(collectionId, normalizeAccountId(bob.address), args); - await submitTransactionAsync(bob, createMultipleItemsTx); - const token1Data = await getBalance(api, collectionId, bob.address, 0); - - 
expect(token1Data).to.be.equal(6n); // 1 + 2 + 3 + itSub('Create 0x31, 0x32, 0x33 items in active NFT with property AdminConst', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, { + name: 'name', + description: 'descr', + tokenPrefix: 'COL', + tokenPropertyPermissions: [ + {key: 'data', permission: {tokenOwner: false, mutable: false, collectionAdmin: true}}, + ], }); + const args = [ + {properties: [{key: 'data', value: '1'}]}, + {properties: [{key: 'data', value: '2'}]}, + {properties: [{key: 'data', value: '3'}]}, + ]; + const tokens = await helper.nft.mintMultipleTokensWithOneOwner(alice, collection.collectionId, {Substrate: alice.address}, args); + for (const [i, token] of tokens.entries()) { + const tokenData = await token.getData(); + expect(tokenData?.normalizedOwner.Substrate).to.be.deep.equal(helper.address.normalizeSubstrate(alice.address)); + expect(tokenData?.properties[0].value).to.be.equal(args[i].properties[0].value); + } }); - it('Create 0x31, 0x32, 0x33 items in active ReFungible collection and verify tokens data in chain', async () => { - await usingApi(async (api: ApiPromise) => { - const collectionId = await createCollectionExpectSuccess({mode: {type: 'ReFungible'}}); - const itemsListIndexBefore = await getLastTokenId(api, collectionId); - expect(itemsListIndexBefore).to.be.equal(0); - await addCollectionAdminExpectSuccess(alice, collectionId, bob.address); - const args = [ - {ReFungible: {pieces: 1}}, - {ReFungible: {pieces: 2}}, - {ReFungible: {pieces: 3}}, - ]; - const createMultipleItemsTx = api.tx.unique - .createMultipleItems(collectionId, normalizeAccountId(bob.address), args); - await submitTransactionAsync(bob, createMultipleItemsTx); - const itemsListIndexAfter = await getLastTokenId(api, collectionId); - expect(itemsListIndexAfter).to.be.equal(3); - - expect(await getBalance(api, collectionId, bob.address, 1)).to.be.equal(1n); - expect(await getBalance(api, collectionId, bob.address, 2)).to.be.equal(2n); - expect(await getBalance(api, collectionId, bob.address, 3)).to.be.equal(3n); + itSub('Create 0x31, 0x32, 0x33 items in active NFT with property itemOwnerOrAdmin', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, { + name: 'name', + description: 'descr', + tokenPrefix: 'COL', + tokenPropertyPermissions: [ + {key: 'data', permission: {tokenOwner: true, mutable: true, collectionAdmin: true}}, + ], }); + const args = [ + {properties: [{key: 'data', value: '1'}]}, + {properties: [{key: 'data', value: '2'}]}, + {properties: [{key: 'data', value: '3'}]}, + ]; + const tokens = await helper.nft.mintMultipleTokensWithOneOwner(alice, collection.collectionId, {Substrate: alice.address}, args); + for (const [i, token] of tokens.entries()) { + const tokenData = await token.getData(); + expect(tokenData?.normalizedOwner.Substrate).to.be.equal(helper.address.normalizeSubstrate(alice.address)); + expect(tokenData?.properties[0].value).to.be.equal(args[i].properties[0].value); + } }); }); @@ -301,213 +167,208 @@ describe('Negative Integration Test createMultipleItems(collection_id, owner, it let bob: IKeyringPair; before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob] = await helper.arrange.createAccounts([100n, 100n], donor); }); }); - it('Regular user cannot create items in active NFT 
collection', async () => { - await usingApi(async (api: ApiPromise) => { - const collectionId = await createCollectionExpectSuccess(); - const itemsListIndexBefore = await getLastTokenId(api, collectionId); - expect(itemsListIndexBefore).to.be.equal(0); - const args = [{NFT: {}}, - {NFT: {}}, - {NFT: {}}]; - const createMultipleItemsTx = api.tx.unique.createMultipleItems(collectionId, normalizeAccountId(alice.address), args); - await expect(executeTransaction(api, bob, createMultipleItemsTx)).to.be.rejectedWith(/common\.PublicMintingNotAllowed/); + itSub('Regular user cannot create items in active NFT collection', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, { + name: 'name', + description: 'descr', + tokenPrefix: 'COL', }); + const args = [ + {}, + {}, + ]; + const mintTx = async () => helper.nft.mintMultipleTokensWithOneOwner(bob, collection.collectionId, {Substrate: alice.address}, args); + await expect(mintTx()).to.be.rejectedWith(/common\.PublicMintingNotAllowed/); }); - it('Regular user cannot create items in active Fungible collection', async () => { - await usingApi(async (api: ApiPromise) => { - const collectionId = await createCollectionExpectSuccess({mode: {type: 'Fungible', decimalPoints: 0}}); - const itemsListIndexBefore = await getLastTokenId(api, collectionId); - expect(itemsListIndexBefore).to.be.equal(0); - const args = [ - {Fungible: {value: 1}}, - {Fungible: {value: 2}}, - {Fungible: {value: 3}}, - ]; - const createMultipleItemsTx = api.tx.unique.createMultipleItems(collectionId, normalizeAccountId(alice.address), args); - await expect(executeTransaction(api, bob, createMultipleItemsTx)).to.be.rejectedWith(/common\.PublicMintingNotAllowed/); + itSub('Regular user cannot create items in active Fungible collection', async ({helper}) => { + const collection = await helper.ft.mintCollection(alice, { + name: 'name', + description: 'descr', + tokenPrefix: 'COL', }); + const args = [ + {value: 1n}, + {value: 2n}, + {value: 3n}, + ]; + const mintTx = async () => helper.ft.mintMultipleTokensWithOneOwner(bob, collection.collectionId, args, {Substrate: alice.address}); + await expect(mintTx()).to.be.rejectedWith(/common\.PublicMintingNotAllowed/); }); - it('Regular user cannot create items in active ReFungible collection', async () => { - await usingApi(async (api: ApiPromise) => { - const collectionId = await createCollectionExpectSuccess({mode: {type: 'ReFungible'}}); - const itemsListIndexBefore = await getLastTokenId(api, collectionId); - expect(itemsListIndexBefore).to.be.equal(0); - const args = [ - {ReFungible: {pieces: 1}}, - {ReFungible: {pieces: 1}}, - {ReFungible: {pieces: 1}}, - ]; - const createMultipleItemsTx = api.tx.unique.createMultipleItems(collectionId, normalizeAccountId(alice.address), args); - await expect(executeTransaction(api, bob, createMultipleItemsTx)).to.be.rejectedWith(/common\.PublicMintingNotAllowed/); + itSub.ifWithPallets('Regular user cannot create items in active ReFungible collection', [Pallets.ReFungible], async ({helper}) => { + const collection = await helper.rft.mintCollection(alice, { + name: 'name', + description: 'descr', + tokenPrefix: 'COL', }); + const args = [ + {pieces: 1n}, + {pieces: 1n}, + {pieces: 1n}, + ]; + const mintTx = async () => helper.rft.mintMultipleTokensWithOneOwner(bob, collection.collectionId, {Substrate: alice.address}, args); + await expect(mintTx()).to.be.rejectedWith(/common\.PublicMintingNotAllowed/); + }); + + itSub('Create token in not existing collection', async 
({helper}) => { + const collectionId = 1_000_000; + const args = [ + {}, + {}, + ]; + const mintTx = async () => helper.nft.mintMultipleTokensWithOneOwner(bob, collectionId, {Substrate: alice.address}, args); + await expect(mintTx()).to.be.rejectedWith(/common\.CollectionNotFound/); }); - it('Create token in not existing collection', async () => { - await usingApi(async (api: ApiPromise) => { - const collectionId = await getCreatedCollectionCount(api) + 1; - const createMultipleItemsTx = api.tx.unique.createMultipleItems(collectionId, normalizeAccountId(alice.address), ['NFT', 'NFT', 'NFT']); - await expect(executeTransaction(api, alice, createMultipleItemsTx)).to.be.rejectedWith(/common\.CollectionNotFound/); + itSub('Create NFTs that have reached the maximum data limit', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, { + name: 'name', + description: 'descr', + tokenPrefix: 'COL', + tokenPropertyPermissions: [ + {key: 'data', permission: {tokenOwner: true, mutable: true, collectionAdmin: true}}, + ], }); + const args = [ + {properties: [{key: 'data', value: 'A'.repeat(32769)}]}, + {properties: [{key: 'data', value: 'B'.repeat(32769)}]}, + {properties: [{key: 'data', value: 'C'.repeat(32769)}]}, + ]; + const mintTx = async () => helper.nft.mintMultipleTokensWithOneOwner(alice, collection.collectionId, {Substrate: alice.address}, args); + await expect(mintTx()).to.be.rejectedWith('Verification Error'); }); - it('Create NFT and Re-fungible tokens that has reached the maximum data limit', async () => { - await usingApi(async (api, privateKeyWrapper) => { - // NFT - const collectionId = await createCollectionWithPropsExpectSuccess({ - propPerm: [{key: 'key', permission: {mutable: true, collectionAdmin: true, tokenOwner: true}}], - }); - const alice = privateKeyWrapper('//Alice'); - const args = [ - {NFT: {properties: [{key: 'key', value: 'A'.repeat(32769)}]}}, - {NFT: {properties: [{key: 'key', value: 'B'.repeat(32769)}]}}, - {NFT: {properties: [{key: 'key', value: 'C'.repeat(32769)}]}}, - ]; - const createMultipleItemsTx = api.tx.unique.createMultipleItems(collectionId, normalizeAccountId(alice.address), args); - await expect(submitTransactionExpectFailAsync(alice, createMultipleItemsTx)).to.be.rejected; - - // ReFungible - const collectionIdReFungible = - await createCollectionExpectSuccess({mode: {type: 'ReFungible'}}); - const argsReFungible = [ - {ReFungible: ['1'.repeat(2049), 10]}, - {ReFungible: ['2'.repeat(2049), 10]}, - {ReFungible: ['3'.repeat(2049), 10]}, - ]; - const createMultipleItemsTxFungible = api.tx.unique - .createMultipleItems(collectionIdReFungible, normalizeAccountId(alice.address), argsReFungible); - await expect(submitTransactionExpectFailAsync(alice, createMultipleItemsTxFungible)).to.be.rejected; + itSub.ifWithPallets('Create Refungible tokens that have reached the maximum data limit', [Pallets.ReFungible], async ({helper}) => { + const collection = await helper.rft.mintCollection(alice, { + name: 'name', + description: 'descr', + tokenPrefix: 'COL', + tokenPropertyPermissions: [ + {key: 'data', permission: {tokenOwner: true, mutable: true, collectionAdmin: true}}, + ], }); + const args = [ + {pieces: 10n, properties: [{key: 'data', value: 'A'.repeat(32769)}]}, + {pieces: 10n, properties: [{key: 'data', value: 'B'.repeat(32769)}]}, + {pieces: 10n, properties: [{key: 'data', value: 'C'.repeat(32769)}]}, + ]; + const mintTx = async () => helper.rft.mintMultipleTokensWithOneOwner(alice, collection.collectionId, {Substrate: 
alice.address}, args); + await expect(mintTx()).to.be.rejectedWith('Verification Error'); }); - it('Create tokens with different types', async () => { - await usingApi(async (api: ApiPromise) => { - const collectionId = await createCollectionExpectSuccess(); - const createMultipleItemsTx = api.tx.unique - .createMultipleItems(collectionId, normalizeAccountId(alice.address), ['NFT', 'Fungible', 'ReFungible']); - await expect(executeTransaction(api, alice, createMultipleItemsTx)).to.be.rejectedWith(/nonfungible\.NotNonfungibleDataUsedToMintFungibleCollectionToken/); - // garbage collection :-D // lol - await destroyCollectionExpectSuccess(collectionId); + itSub.ifWithPallets('Create tokens with different types', [Pallets.ReFungible], async ({helper}) => { + const {collectionId} = await helper.nft.mintCollection(alice, { + name: 'name', + description: 'descr', + tokenPrefix: 'COL', }); + + const types = ['NFT', 'Fungible', 'ReFungible']; + await expect(helper.executeExtrinsic( + alice, + 'api.tx.unique.createMultipleItems', + [collectionId, {Substrate: alice.address}, types], + )).to.be.rejectedWith(/nonfungible\.NotNonfungibleDataUsedToMintFungibleCollectionToken/); }); - it('Create tokens with different data limits <> maximum data limit', async () => { - await usingApi(async (api: ApiPromise) => { - const collectionId = await createCollectionWithPropsExpectSuccess({ - propPerm: [{key: 'key', permission: {mutable: true, collectionAdmin: true, tokenOwner: true}}], - }); - const args = [ - {NFT: {properties: [{key: 'key', value: 'A'}]}}, - {NFT: {properties: [{key: 'key', value: 'B'.repeat(32769)}]}}, - ]; - const createMultipleItemsTx = api.tx.unique.createMultipleItems(collectionId, normalizeAccountId(alice.address), args); - await expect(submitTransactionExpectFailAsync(alice, createMultipleItemsTx)).to.be.rejected; + itSub('Create tokens with different data limits <> maximum data limit', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, { + name: 'name', + description: 'descr', + tokenPrefix: 'COL', + tokenPropertyPermissions: [ + {key: 'data', permission: {tokenOwner: true, mutable: true, collectionAdmin: true}}, + ], }); + const args = [ + {properties: [{key: 'data', value: 'A'}]}, + {properties: [{key: 'data', value: 'B'.repeat(32769)}]}, + ]; + const mintTx = async () => helper.nft.mintMultipleTokensWithOneOwner(alice, collection.collectionId, {Substrate: alice.address}, args); + await expect(mintTx()).to.be.rejectedWith('Verification Error'); }); - it('Fails when minting tokens exceeds collectionLimits amount', async () => { - await usingApi(async (api) => { - const collectionId = await createCollectionExpectSuccess(); - await setCollectionLimitsExpectSuccess(alice, collectionId, { + itSub('Fails when minting tokens exceeds collectionLimits amount', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, { + name: 'name', + description: 'descr', + tokenPrefix: 'COL', + tokenPropertyPermissions: [ + {key: 'data', permission: {tokenOwner: true, mutable: true, collectionAdmin: true}}, + ], + limits: { tokenLimit: 1, - }); - const args = [ - {NFT: {}}, - {NFT: {}}, - ]; - const createMultipleItemsTx = api.tx.unique.createMultipleItems(collectionId, normalizeAccountId(alice.address), args); - await expect(executeTransaction(api, alice, createMultipleItemsTx)).to.be.rejectedWith(/common\.CollectionTokenLimitExceeded/); + }, }); + const args = [ + {}, + {}, + ]; + const mintTx = async () => 
helper.nft.mintMultipleTokensWithOneOwner(alice, collection.collectionId, {Substrate: alice.address}, args); + await expect(mintTx()).to.be.rejectedWith(/common\.CollectionTokenLimitExceeded/); }); - it('User doesnt have editing rights', async () => { - await usingApi(async (api: ApiPromise) => { - const collectionId = await createCollectionWithPropsExpectSuccess({ - propPerm: [{key: 'key1', permission: {mutable: true, collectionAdmin: false, tokenOwner: false}}], - }); - const itemsListIndexBefore = await getLastTokenId(api, collectionId); - expect(itemsListIndexBefore).to.be.equal(0); - await addCollectionAdminExpectSuccess(alice, collectionId, bob.address); - const args = [ - {NFT: {properties: [{key: 'key1', value: 'v2'}]}}, - {NFT: {}}, - {NFT: {}}, - ]; - - const tx = api.tx.unique.createMultipleItems(collectionId, normalizeAccountId(bob.address), args); - await expect(executeTransaction(api, bob, tx)).to.be.rejectedWith(/common\.NoPermission/); + itSub('User doesnt have editing rights', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, { + name: 'name', + description: 'descr', + tokenPrefix: 'COL', + tokenPropertyPermissions: [ + {key: 'data', permission: {tokenOwner: false, mutable: true, collectionAdmin: false}}, + ], }); + const args = [ + {properties: [{key: 'data', value: 'A'}]}, + ]; + const mintTx = async () => helper.nft.mintMultipleTokensWithOneOwner(alice, collection.collectionId, {Substrate: alice.address}, args); + await expect(mintTx()).to.be.rejectedWith(/common\.NoPermission/); }); - it('Adding property without access rights', async () => { - await usingApi(async (api: ApiPromise) => { - const collectionId = await createCollectionWithPropsExpectSuccess({properties: [{key: 'k', value: 'v1'}]}); - const itemsListIndexBefore = await getLastTokenId(api, collectionId); - await addCollectionAdminExpectSuccess(alice, collectionId, bob.address); - expect(itemsListIndexBefore).to.be.equal(0); - const args = [{NFT: {properties: [{key: 'k', value: 'v'}]}}, - {NFT: {}}, - {NFT: {}}]; - - const tx = api.tx.unique.createMultipleItems(collectionId, normalizeAccountId(bob.address), args); - await expect(executeTransaction(api, bob, tx)).to.be.rejectedWith(/common\.NoPermission/); + itSub('Adding property without access rights', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, { + name: 'name', + description: 'descr', + tokenPrefix: 'COL', + properties: [ + { + key: 'data', + value: 'v', + }, + ], }); + const args = [ + {properties: [{key: 'data', value: 'A'}]}, + ]; + const mintTx = async () => helper.nft.mintMultipleTokensWithOneOwner(alice, collection.collectionId, {Substrate: alice.address}, args); + await expect(mintTx()).to.be.rejectedWith(/common\.NoPermission/); }); - it('Adding more than 64 prps', async () => { - await usingApi(async (api: ApiPromise) => { - const propPerms = [{key: 'key', permission: {mutable: true, collectionAdmin: true, tokenOwner: true}}]; - for (let i = 0; i < 65; i++) { - propPerms.push({key: `key${i}`, permission: {mutable: true, collectionAdmin: true, tokenOwner: true}}); - } - - const collectionId = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - - const tx1 = api.tx.unique.setTokenPropertyPermissions(collectionId, propPerms); - await expect(executeTransaction(api, alice, tx1)).to.be.rejectedWith(/common\.PropertyLimitReached/); - - const itemsListIndexBefore = await getLastTokenId(api, collectionId); - expect(itemsListIndexBefore).to.be.equal(0); - await 
addCollectionAdminExpectSuccess(alice, collectionId, bob.address); - - const prps = []; + itSub('Adding more than 64 prps', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, { + name: 'name', + description: 'descr', + tokenPrefix: 'COL', + }); + const prps = []; - for (let i = 0; i < 65; i++) { - prps.push({key: `key${i}`, value: `value${i}`}); - } + for (let i = 0; i < 65; i++) { + prps.push({key: `key${i}`, value: `value${i}`}); + } - const args = [ - {NFT: {properties: prps}}, - {NFT: {properties: prps}}, - {NFT: {properties: prps}}, - ]; + const args = [ + {properties: prps}, + {properties: prps}, + {properties: prps}, + ]; - // there are no permissions, but will fail anyway because of too much weight for a block - const tx2 = api.tx.unique.createMultipleItems(collectionId, normalizeAccountId(alice.address), args); - await expect(submitTransactionExpectFailAsync(alice, tx2)).to.be.rejected; - }); - }); - - it('Trying to add bigger property than allowed', async () => { - await usingApi(async (api: ApiPromise) => { - const collectionId = await createCollectionWithPropsExpectSuccess({ - propPerm: [{key: 'k', permission: {mutable: true, collectionAdmin: true, tokenOwner: true}}], - }); - const itemsListIndexBefore = await getLastTokenId(api, collectionId); - expect(itemsListIndexBefore).to.be.equal(0); - const args = [{NFT: {properties: [{key: 'k', value: 'vvvvvv'.repeat(5000)}, {key: 'k2', value: 'vvv'.repeat(5000)}]}}, - {NFT: {properties: [{key: 'k', value: 'vvvvvv'.repeat(5000)}, {key: 'k2', value: 'vvv'.repeat(5000)}]}}, - {NFT: {properties: [{key: 'k', value: 'vvvvvv'.repeat(5000)}, {key: 'k2', value: 'vvv'.repeat(5000)}]}}]; - - const tx = api.tx.unique.createMultipleItems(collectionId, normalizeAccountId(alice.address), args); - await expect(executeTransaction(api, alice, tx)).to.be.rejectedWith(/common\.NoPermission/); - }); + const mintTx = async () => helper.nft.mintMultipleTokensWithOneOwner(alice, collection.collectionId, {Substrate: alice.address}, args); + await expect(mintTx()).to.be.rejectedWith('Verification Error'); }); }); diff --git a/tests/src/createMultipleItemsEx.test.ts b/tests/src/createMultipleItemsEx.test.ts index 18250fa0ce..350c386ac1 100644 --- a/tests/src/createMultipleItemsEx.test.ts +++ b/tests/src/createMultipleItemsEx.test.ts @@ -14,381 +14,429 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . 
-import {expect} from 'chai'; -import usingApi, {executeTransaction} from './substrate/substrate-api'; -import {addCollectionAdminExpectSuccess, createCollectionExpectSuccess, createCollectionWithPropsExpectSuccess, getBalance, getLastTokenId} from './util/helpers'; +import {IKeyringPair} from '@polkadot/types/types'; +import {usingPlaygrounds, expect, Pallets, itSub} from './util'; +import {IProperty} from './util/playgrounds/types'; describe('Integration Test: createMultipleItemsEx', () => { - it('can initialize multiple NFT with different owners', async () => { - const collection = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await usingApi(async (api, privateKeyWrapper) => { - const alice = privateKeyWrapper('//Alice'); - const bob = privateKeyWrapper('//Bob'); - const charlie = privateKeyWrapper('//Charlie'); - const data = [ - { - owner: {substrate: alice.address}, - }, { - owner: {substrate: bob.address}, - }, { - owner: {substrate: charlie.address}, - }, - ]; - - await executeTransaction(api, alice, api.tx.unique.createMultipleItemsEx(collection, { - NFT: data, - })); - const tokens = await api.query.nonfungible.tokenData.entries(collection); - const json = tokens.map(([, token]) => token.toJSON()); - expect(json).to.be.deep.equal(data); + let alice: IKeyringPair; + let bob: IKeyringPair; + let charlie: IKeyringPair; + + before(async () => { + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob, charlie] = await helper.arrange.createAccounts([100n, 100n, 100n], donor); }); }); - it('createMultipleItemsEx with property Admin', async () => { - const collection = await createCollectionWithPropsExpectSuccess({mode: {type: 'NFT'}, propPerm: [{key: 'k', permission: {mutable: true, collectionAdmin: true, tokenOwner: false}}]}); - await usingApi(async (api, privateKeyWrapper) => { - const alice = privateKeyWrapper('//Alice'); - const bob = privateKeyWrapper('//Bob'); - const charlie = privateKeyWrapper('//Charlie'); - const data = [ - { - owner: {substrate: alice.address}, - properties: [{key: 'k', value: 'v1'}], - }, { - owner: {substrate: bob.address}, - properties: [{key: 'k', value: 'v2'}], - }, { - owner: {substrate: charlie.address}, - properties: [{key: 'k', value: 'v3'}], - }, - ]; - - await executeTransaction(api, alice, api.tx.unique.createMultipleItemsEx(collection, { - NFT: data, - })); - for (let i = 1; i < 4; i++) { - expect(await api.rpc.unique.tokenProperties(collection, i)).not.to.be.empty; - } + itSub('can initialize multiple NFT with different owners', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, { + name: 'name', + description: 'descr', + tokenPrefix: 'COL', }); + const args = [ + { + owner: {Substrate: alice.address}, + }, + { + owner: {Substrate: bob.address}, + }, + { + owner: {Substrate: charlie.address}, + }, + ]; + + const tokens = await collection.mintMultipleTokens(alice, args); + for (const [i, token] of tokens.entries()) { + expect(await token.getOwner()).to.be.deep.equal(args[i].owner); + } }); - it('createMultipleItemsEx with property AdminConst', async () => { - const collection = await createCollectionWithPropsExpectSuccess({mode: {type: 'NFT'}, propPerm: [{key: 'k', permission: {mutable: false, collectionAdmin: true, tokenOwner: false}}]}); - await usingApi(async (api, privateKeyWrapper) => { - const alice = privateKeyWrapper('//Alice'); - const bob = privateKeyWrapper('//Bob'); - const charlie = privateKeyWrapper('//Charlie'); 
- const data = [ + itSub('createMultipleItemsEx with property Admin', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, { + name: 'name', + description: 'descr', + tokenPrefix: 'COL', + tokenPropertyPermissions: [ { - owner: {substrate: alice.address}, - properties: [{key: 'k', value: 'v1'}], - }, { - owner: {substrate: bob.address}, - properties: [{key: 'k', value: 'v2'}], - }, { - owner: {substrate: charlie.address}, - properties: [{key: 'k', value: 'v3'}], + key: 'k', + permission: { + mutable: true, + collectionAdmin: true, + tokenOwner: false, + }, }, - ]; - - await executeTransaction(api, alice, api.tx.unique.createMultipleItemsEx(collection, { - NFT: data, - })); - for (let i = 1; i < 4; i++) { - expect(await api.rpc.unique.tokenProperties(collection, i)).not.to.be.empty; - } + ], }); + + const args = [ + { + owner: {Substrate: alice.address}, + properties: [{key: 'k', value: 'v1'}], + }, + { + owner: {Substrate: bob.address}, + properties: [{key: 'k', value: 'v2'}], + }, + { + owner: {Substrate: charlie.address}, + properties: [{key: 'k', value: 'v3'}], + }, + ]; + + const tokens = await collection.mintMultipleTokens(alice, args); + for (const [i, token] of tokens.entries()) { + expect(await token.getOwner()).to.be.deep.equal(args[i].owner); + expect(await token.getData()).to.not.be.empty; + } }); - it('createMultipleItemsEx with property itemOwnerOrAdmin', async () => { - const collection = await createCollectionWithPropsExpectSuccess({mode: {type: 'NFT'}, propPerm: [{key: 'k', permission: {mutable: false, collectionAdmin: true, tokenOwner: true}}]}); - await usingApi(async (api, privateKeyWrapper) => { - const alice = privateKeyWrapper('//Alice'); - const bob = privateKeyWrapper('//Bob'); - const charlie = privateKeyWrapper('//Charlie'); - const data = [ + itSub('createMultipleItemsEx with property AdminConst', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, { + name: 'name', + description: 'descr', + tokenPrefix: 'COL', + tokenPropertyPermissions: [ { - owner: {substrate: alice.address}, - properties: [{key: 'k', value: 'v1'}], - }, { - owner: {substrate: bob.address}, - properties: [{key: 'k', value: 'v2'}], - }, { - owner: {substrate: charlie.address}, - properties: [{key: 'k', value: 'v3'}], + key: 'k', + permission: { + mutable: false, + collectionAdmin: true, + tokenOwner: false, + }, }, - ]; - - await executeTransaction(api, alice, api.tx.unique.createMultipleItemsEx(collection, { - NFT: data, - })); - for (let i = 1; i < 4; i++) { - expect(await api.rpc.unique.tokenProperties(collection, i)).not.to.be.empty; - } + ], }); - }); - - it('can initialize fungible with multiple owners', async () => { - const collection = await createCollectionExpectSuccess({mode: {type: 'Fungible', decimalPoints: 0}}); - await usingApi(async (api, privateKeyWrapper) => { - const alice = privateKeyWrapper('//Alice'); - const bob = privateKeyWrapper('//Bob'); - - const users = new Map(); - users.set(JSON.stringify({Substrate: alice.address}), 50); - users.set(JSON.stringify({Substrate: bob.address}), 100); - - await executeTransaction(api, alice, api.tx.unique.createMultipleItemsEx(collection, { - Fungible: users, - })); - expect(await getBalance(api, collection, alice.address, 0)).to.equal(50n); - expect(await getBalance(api, collection, bob.address, 0)).to.equal(100n); - }); + const args = [ + { + owner: {Substrate: alice.address}, + properties: [{key: 'k', value: 'v1'}], + }, + { + owner: {Substrate: bob.address}, + properties: 
[{key: 'k', value: 'v2'}], + }, + { + owner: {Substrate: charlie.address}, + properties: [{key: 'k', value: 'v3'}], + }, + ]; + + const tokens = await collection.mintMultipleTokens(alice, args); + for (const [i, token] of tokens.entries()) { + expect(await token.getOwner()).to.be.deep.equal(args[i].owner); + expect(await token.getData()).to.not.be.empty; + } }); - it('can initialize an RFT with multiple owners', async () => { - const collection = await createCollectionExpectSuccess({mode: {type: 'ReFungible'}}); - await usingApi(async (api, privateKeyWrapper) => { - const alice = privateKeyWrapper('//Alice'); - const bob = privateKeyWrapper('//Bob'); - - const users = new Map(); - users.set(JSON.stringify({Substrate: alice.address}), 1); - users.set(JSON.stringify({Substrate: bob.address}), 2); - - await executeTransaction(api, alice, api.tx.unique.createMultipleItemsEx(collection, { - RefungibleMultipleOwners: { - users: users, + itSub('createMultipleItemsEx with property itemOwnerOrAdmin', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, { + name: 'name', + description: 'descr', + tokenPrefix: 'COL', + tokenPropertyPermissions: [ + { + key: 'k', + permission: { + mutable: false, + collectionAdmin: true, + tokenOwner: true, + }, }, - })); - - const itemsListIndexAfter = await getLastTokenId(api, collection); - expect(itemsListIndexAfter).to.be.equal(1); - - expect(await getBalance(api, collection, alice.address, 1)).to.be.equal(1n); - expect(await getBalance(api, collection, bob.address, 1)).to.be.equal(2n); + ], }); - }); - it('can initialize multiple RFTs with the same owner', async () => { - const collection = await createCollectionExpectSuccess({mode: {type: 'ReFungible'}}); - await usingApi(async (api, privateKeyWrapper) => { - const alice = privateKeyWrapper('//Alice'); + const args = [ + { + owner: {Substrate: alice.address}, + properties: [{key: 'k', value: 'v1'}], + }, + { + owner: {Substrate: bob.address}, + properties: [{key: 'k', value: 'v2'}], + }, + { + owner: {Substrate: charlie.address}, + properties: [{key: 'k', value: 'v3'}], + }, + ]; + + const tokens = await collection.mintMultipleTokens(alice, args); + for (const [i, token] of tokens.entries()) { + expect(await token.getOwner()).to.be.deep.equal(args[i].owner); + expect(await token.getData()).to.not.be.empty; + } + }); - const item1User = new Map(); - item1User.set(JSON.stringify({Substrate: alice.address}), 1); + itSub('can initialize fungible with multiple owners', async ({helper}) => { + const collection = await helper.ft.mintCollection(alice, { + name: 'name', + description: 'descr', + tokenPrefix: 'COL', + }, 0); + + await helper.executeExtrinsic(alice, 'api.tx.unique.createMultipleItemsEx',[collection.collectionId, { + Fungible: new Map([ + [JSON.stringify({Substrate: alice.address}), 50], + [JSON.stringify({Substrate: bob.address}), 100], + ]), + }], true); + + expect(await collection.getBalance({Substrate: alice.address})).to.be.equal(50n); + expect(await collection.getBalance({Substrate: bob.address})).to.be.equal(100n); + }); - const item2User = new Map(); - item2User.set(JSON.stringify({Substrate: alice.address}), 3); + itSub.ifWithPallets('can initialize an RFT with multiple owners', [Pallets.ReFungible], async ({helper}) => { + const collection = await helper.rft.mintCollection(alice, { + name: 'name', + description: 'descr', + tokenPrefix: 'COL', + tokenPropertyPermissions: [ + {key: 'k', permission: {tokenOwner: true, mutable: false, collectionAdmin: false}}, + ], + }); 
- await executeTransaction(api, alice, api.tx.unique.createMultipleItemsEx(collection, { - RefungibleMultipleItems: [ - {users: item1User}, - {users: item2User}, + await helper.executeExtrinsic(alice, 'api.tx.unique.createMultipleItemsEx', [collection.collectionId, { + RefungibleMultipleOwners: { + users: new Map([ + [JSON.stringify({Substrate: alice.address}), 1], + [JSON.stringify({Substrate: bob.address}), 2], + ]), + properties: [ + {key: 'k', value: 'v'}, ], - })); - - const itemsListIndexAfter = await getLastTokenId(api, collection); - expect(itemsListIndexAfter).to.be.equal(2); + }, + }], true); + const tokenId = await collection.getLastTokenId(); + expect(tokenId).to.be.equal(1); + expect(await collection.getTokenBalance(1, {Substrate: alice.address})).to.be.equal(1n); + expect(await collection.getTokenBalance(1, {Substrate: bob.address})).to.be.equal(2n); + }); - expect(await getBalance(api, collection, alice.address, 1)).to.be.equal(1n); - expect(await getBalance(api, collection, alice.address, 2)).to.be.equal(3n); + itSub.ifWithPallets('can initialize multiple RFTs with the same owner', [Pallets.ReFungible], async ({helper}) => { + const collection = await helper.rft.mintCollection(alice, { + name: 'name', + description: 'descr', + tokenPrefix: 'COL', + tokenPropertyPermissions: [ + {key: 'k', permission: {tokenOwner: true, mutable: false, collectionAdmin: false}}, + ], }); - }); -}); -describe('Negative test: createMultipleItemsEx', () => { - it('No editing rights', async () => { - const collection = await createCollectionWithPropsExpectSuccess({properties: [{key: 'key1', value: 'v'}], - propPerm: [{key: 'key1', permission: {mutable: true, collectionAdmin: false, tokenOwner: false}}]}); - await usingApi(async (api, privateKeyWrapper) => { - const alice = privateKeyWrapper('//Alice'); - const bob = privateKeyWrapper('//Bob'); - const charlie = privateKeyWrapper('//Charlie'); - await addCollectionAdminExpectSuccess(alice, collection, bob.address); - const data = [ + await helper.executeExtrinsic(alice, 'api.tx.unique.createMultipleItemsEx', [collection.collectionId, { + RefungibleMultipleItems: [ { - owner: {substrate: alice.address}, - properties: [{key: 'key1', value: 'v2'}], - }, { - owner: {substrate: bob.address}, - properties: [{key: 'key1', value: 'v2'}], - }, { - owner: {substrate: charlie.address}, - properties: [{key: 'key1', value: 'v2'}], + user: {Substrate: alice.address}, pieces: 1, + properties: [ + {key: 'k', value: 'v1'}, + ], }, - ]; - - const tx = api.tx.unique.createMultipleItemsEx(collection, {NFT: data}); - // await executeTransaction(api, alice, tx); - - //await submitTransactionExpectFailAsync(alice, tx); - await expect(executeTransaction(api, alice, tx)).to.be.rejectedWith(/common\.NoPermission/); - }); - }); - - it('User doesnt have editing rights', async () => { - const collection = await createCollectionWithPropsExpectSuccess({properties: [{key: 'key1', value: 'v'}], - propPerm: [{key: 'key1', permission: {mutable: false, collectionAdmin: false, tokenOwner: false}}]}); - await usingApi(async (api, privateKeyWrapper) => { - const alice = privateKeyWrapper('//Alice'); - const bob = privateKeyWrapper('//Bob'); - await addCollectionAdminExpectSuccess(alice, collection, bob.address); - const data = [ { - owner: {substrate: alice.address}, - properties: [{key: 'key1', value: 'v2'}], - }, { - owner: {substrate: alice.address}, - properties: [{key: 'key1', value: 'v2'}], - }, { - owner: {substrate: alice.address}, - properties: [{key: 'key1', value: 'v2'}], + 
user: {Substrate: alice.address}, pieces: 3, + properties: [ + {key: 'k', value: 'v2'}, + ], }, - ]; + ], + }], true); + + expect(await collection.getLastTokenId()).to.be.equal(2); + expect(await collection.getTokenBalance(1, {Substrate: alice.address})).to.be.equal(1n); + expect(await collection.getTokenBalance(2, {Substrate: alice.address})).to.be.equal(3n); - const tx = api.tx.unique.createMultipleItemsEx(collection, {NFT: data}); - // await executeTransaction(api, alice, tx); + const tokenData1 = await helper.rft.getToken(collection.collectionId, 1); + expect(tokenData1).to.not.be.null; + expect(tokenData1?.properties[0]).to.be.deep.equal({key: 'k', value: 'v1'}); + + const tokenData2 = await helper.rft.getToken(collection.collectionId, 2); + expect(tokenData2).to.not.be.null; + expect(tokenData2?.properties[0]).to.be.deep.equal({key: 'k', value: 'v2'}); + }); +}); - //await submitTransactionExpectFailAsync(alice, tx); - await expect(executeTransaction(api, alice, tx)).to.be.rejectedWith(/common\.NoPermission/); +describe('Negative test: createMultipleItemsEx', () => { + let alice: IKeyringPair; + let bob: IKeyringPair; + let charlie: IKeyringPair; + + before(async () => { + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob, charlie] = await helper.arrange.createAccounts([100n, 100n, 100n], donor); }); }); - it('Adding property without access rights', async () => { - const collection = await createCollectionWithPropsExpectSuccess({properties: [{key: 'key1', value: 'v'}]}); - await usingApi(async (api, privateKeyWrapper) => { - const alice = privateKeyWrapper('//Alice'); - const bob = privateKeyWrapper('//Bob'); - const charlie = privateKeyWrapper('//Charlie'); - await addCollectionAdminExpectSuccess(alice, collection, bob.address); - const data = [ + itSub('No editing rights', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, { + name: 'name', + description: 'descr', + tokenPrefix: 'COL', + tokenPropertyPermissions: [ { - owner: {substrate: alice.address}, - properties: [{key: 'key1', value: 'v2'}], - }, { - owner: {substrate: bob.address}, - properties: [{key: 'key1', value: 'v2'}], - }, { - owner: {substrate: charlie.address}, - properties: [{key: 'key1', value: 'v2'}], + key: 'k', + permission: { + mutable: true, + collectionAdmin: false, + tokenOwner: false, + }, }, - ]; - - const tx = api.tx.unique.createMultipleItemsEx(collection, {NFT: data}); - - await expect(executeTransaction(api, alice, tx)).to.be.rejectedWith(/common\.NoPermission/); - //await submitTransactionExpectFailAsync(alice, tx); + ], }); - }); - it('Adding more than 64 properties', async () => { - const propPerms = [{key: 'key', permission: {mutable: true, collectionAdmin: true, tokenOwner: true}}]; - - for (let i = 0; i < 65; i++) { - propPerms.push({key: `key${i}`, permission: {mutable: true, collectionAdmin: true, tokenOwner: true}}); - } - - const collection = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await usingApi(async (api, privateKeyWrapper) => { - const alice = privateKeyWrapper('//Alice'); - await expect(executeTransaction(api, alice, api.tx.unique.setTokenPropertyPermissions(collection, propPerms))).to.be.rejectedWith(/common\.PropertyLimitReached/); - }); + const args = [ + { + owner: {Substrate: alice.address}, + properties: [{key: 'k', value: 'v1'}], + }, + { + owner: {Substrate: bob.address}, + properties: [{key: 'k', value: 'v2'}], + }, + { + owner: {Substrate: 
charlie.address}, + properties: [{key: 'k', value: 'v3'}], + }, + ]; + + await expect(collection.mintMultipleTokens(alice, args)).to.be.rejectedWith(/common\.NoPermission/); }); - it('Trying to add bigger property than allowed', async () => { - const collection = await createCollectionWithPropsExpectSuccess({propPerm: [{key: 'k', permission: {mutable: true, collectionAdmin: true, tokenOwner: true}}]}); - await usingApi(async (api, privateKeyWrapper) => { - const alice = privateKeyWrapper('//Alice'); - const bob = privateKeyWrapper('//Bob'); - const charlie = privateKeyWrapper('//Charlie'); - await addCollectionAdminExpectSuccess(alice, collection, bob.address); - const data = [ + itSub('User doesnt have editing rights', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, { + name: 'name', + description: 'descr', + tokenPrefix: 'COL', + tokenPropertyPermissions: [ { - owner: {substrate: alice.address}, properties: [{key: 'k', value: 'vvvvvv'.repeat(5000)}, {key: 'k2', value: 'vvv'.repeat(5000)}], - }, { - owner: {substrate: bob.address}, properties: [{key: 'k', value: 'vvvvvv'.repeat(5000)}, {key: 'k2', value: 'vvv'.repeat(5000)}], - }, { - owner: {substrate: charlie.address}, properties: [{key: 'k', value: 'vvvvvv'.repeat(5000)}, {key: 'k2', value: 'vvv'.repeat(5000)}], + key: 'k', + permission: { + mutable: false, + collectionAdmin: false, + tokenOwner: false, + }, }, - ]; + ], + }); - const tx = api.tx.unique.createMultipleItemsEx(collection, {NFT: data}); + const args = [ + { + owner: {Substrate: alice.address}, + properties: [{key: 'k', value: 'v1'}], + }, + { + owner: {Substrate: bob.address}, + properties: [{key: 'k', value: 'v2'}], + }, + { + owner: {Substrate: charlie.address}, + properties: [{key: 'k', value: 'v3'}], + }, + ]; + + await expect(collection.mintMultipleTokens(alice, args)).to.be.rejectedWith(/common\.NoPermission/); + }); - //await submitTransactionExpectFailAsync(alice, tx); - await expect(executeTransaction(api, alice, tx)).to.be.rejectedWith(/common\.NoPermission/); + itSub('Adding property without access rights', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, { + name: 'name', + description: 'descr', + tokenPrefix: 'COL', }); + + const args = [ + { + owner: {Substrate: alice.address}, + properties: [{key: 'k', value: 'v1'}], + }, + { + owner: {Substrate: bob.address}, + properties: [{key: 'k', value: 'v2'}], + }, + { + owner: {Substrate: charlie.address}, + properties: [{key: 'k', value: 'v3'}], + }, + ]; + + await expect(collection.mintMultipleTokens(alice, args)).to.be.rejectedWith(/common\.NoPermission/); }); - it('can initialize multiple NFT with different owners', async () => { - const collection = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await usingApi(async (api, privateKeyWrapper) => { - const alice = privateKeyWrapper('//Alice'); - const bob = privateKeyWrapper('//Bob'); - const charlie = privateKeyWrapper('//Charlie'); - const data = [ + itSub('Adding more than 64 properties', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, { + name: 'name', + description: 'descr', + tokenPrefix: 'COL', + tokenPropertyPermissions: [ { - owner: {substrate: alice.address}, - }, { - owner: {substrate: bob.address}, - }, { - owner: {substrate: charlie.address}, + key: 'k', + permission: { + mutable: true, + collectionAdmin: true, + tokenOwner: true, + }, }, - ]; - - await executeTransaction(api, alice, api.tx.unique.createMultipleItemsEx(collection, { - 
NFT: data, - })); - const tokens = await api.query.nonfungible.tokenData.entries(collection); - const json = tokens.map(([, token]) => token.toJSON()); - expect(json).to.be.deep.equal(data); + ], }); + + const properties: IProperty[] = []; + + for (let i = 0; i < 65; i++) { + properties.push({key: `k${i}`, value: `v${i}`}); + } + + const args = [ + { + owner: {Substrate: alice.address}, + properties: properties, + }, + { + owner: {Substrate: bob.address}, + properties: properties, + }, + { + owner: {Substrate: charlie.address}, + properties: properties, + }, + ]; + + await expect(collection.mintMultipleTokens(alice, args)).to.be.rejectedWith('Verification Error'); }); - it('can initialize multiple NFT with different owners', async () => { - const collection = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await usingApi(async (api, privateKeyWrapper) => { - const alice = privateKeyWrapper('//Alice'); - const bob = privateKeyWrapper('//Bob'); - const charlie = privateKeyWrapper('//Charlie'); - const data = [ + itSub('Trying to add bigger property than allowed', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, { + name: 'name', + description: 'descr', + tokenPrefix: 'COL', + tokenPropertyPermissions: [ { - owner: {substrate: alice.address}, - }, { - owner: {substrate: bob.address}, - }, { - owner: {substrate: charlie.address}, + key: 'k', + permission: { + mutable: true, + collectionAdmin: true, + tokenOwner: true, + }, }, - ]; - - await executeTransaction(api, alice, api.tx.unique.createMultipleItemsEx(collection, { - NFT: data, - })); - const tokens = await api.query.nonfungible.tokenData.entries(collection); - const json = tokens.map(([, token]) => token.toJSON()); - expect(json).to.be.deep.equal(data); + ], }); - }); - it('fails when trying to set multiple owners when creating multiple refungibles', async () => { - const collection = await createCollectionExpectSuccess({mode: {type: 'ReFungible'}}); - - await usingApi(async (api, privateKeyWrapper) => { - const alice = privateKeyWrapper('//Alice'); - const bob = privateKeyWrapper('//Bob'); - // Polkadot requires map, and yet requires keys to be JSON encoded - const users = new Map(); - users.set(JSON.stringify({substrate: alice.address}), 1); - users.set(JSON.stringify({substrate: bob.address}), 1); - - // TODO: better error message? - await expect(executeTransaction(api, alice, api.tx.unique.createMultipleItemsEx(collection, { - RefungibleMultipleItems: [ - {users}, - {users}, - ], - }))).to.be.rejectedWith(/^refungible\.NotRefungibleDataUsedToMintFungibleCollectionToken$/); - }); + const args = [ + { + owner: {Substrate: alice.address}, + properties: [{key: 'k', value: 'A'.repeat(32769)}], + }, + { + owner: {Substrate: bob.address}, + properties: [{key: 'k', value: 'A'.repeat(32769)}], + }, + { + owner: {Substrate: charlie.address}, + properties: [{key: 'k', value: 'A'.repeat(32769)}], + }, + ]; + + await expect(collection.mintMultipleTokens(alice, args)).to.be.rejectedWith('Verification Error'); }); }); diff --git a/tests/src/creditFeesToTreasury.seqtest.ts b/tests/src/creditFeesToTreasury.seqtest.ts new file mode 100644 index 0000000000..95e120d261 --- /dev/null +++ b/tests/src/creditFeesToTreasury.seqtest.ts @@ -0,0 +1,165 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. 
+ +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +import './interfaces/augment-api-consts'; +import {IKeyringPair} from '@polkadot/types/types'; +import {ApiPromise} from '@polkadot/api'; +import {usingPlaygrounds, expect, itSub} from './util'; + +const TREASURY = '5EYCAe5ijiYfyeZ2JJCGq56LmPyNRAKzpG4QkoQkkQNB5e6Z'; +const saneMinimumFee = 0.05; +const saneMaximumFee = 0.5; +const createCollectionDeposit = 100; + +// Skip the inflation block pauses if the block is close to inflation block +// until the inflation happens +/*eslint no-async-promise-executor: "off"*/ +function skipInflationBlock(api: ApiPromise): Promise { + const promise = new Promise(async (resolve) => { + const blockInterval = api.consts.inflation.inflationBlockInterval.toNumber(); + const unsubscribe = await api.rpc.chain.subscribeNewHeads(head => { + const currentBlock = head.number.toNumber(); + if (currentBlock % blockInterval < blockInterval - 10) { + unsubscribe(); + resolve(); + } else { + console.log(`Skipping inflation block, current block: ${currentBlock}`); + } + }); + }); + + return promise; +} + +describe('integration test: Fees must be credited to Treasury:', () => { + let alice: IKeyringPair; + let bob: IKeyringPair; + + before(async () => { + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob] = await helper.arrange.createAccounts([100n, 100n], donor); + }); + }); + + itSub('Total issuance does not change', async ({helper}) => { + const api = helper.getApi(); + await skipInflationBlock(api); + await helper.wait.newBlocks(1); + + const totalBefore = (await helper.callRpc('api.query.balances.totalIssuance', [])).toBigInt(); + + await helper.balance.transferToSubstrate(alice, bob.address, 1n); + + const totalAfter = (await helper.callRpc('api.query.balances.totalIssuance', [])).toBigInt(); + + expect(totalAfter).to.be.equal(totalBefore); + }); + + itSub('Sender balance decreased by fee+sent amount, Treasury balance increased by fee', async ({helper}) => { + await skipInflationBlock(helper.getApi()); + await helper.wait.newBlocks(1); + + const treasuryBalanceBefore = await helper.balance.getSubstrate(TREASURY); + const aliceBalanceBefore = await helper.balance.getSubstrate(alice.address); + + const amount = 1n; + await helper.balance.transferToSubstrate(alice, bob.address, amount); + + const treasuryBalanceAfter = await helper.balance.getSubstrate(TREASURY); + const aliceBalanceAfter = await helper.balance.getSubstrate(alice.address); + + const fee = aliceBalanceBefore - aliceBalanceAfter - amount; + const treasuryIncrease = treasuryBalanceAfter - treasuryBalanceBefore; + + expect(treasuryIncrease).to.be.equal(fee); + }); + + itSub('Treasury balance increased by failed tx fee', async ({helper}) => { + const api = helper.getApi(); + await helper.wait.newBlocks(1); + + const treasuryBalanceBefore = await helper.balance.getSubstrate(TREASURY); + const bobBalanceBefore 
= await helper.balance.getSubstrate(bob.address); + + await expect(helper.signTransaction(bob, api.tx.balances.setBalance(alice.address, 0, 0))).to.be.rejected; + + const treasuryBalanceAfter = await helper.balance.getSubstrate(TREASURY); + const bobBalanceAfter = await helper.balance.getSubstrate(bob.address); + + const fee = bobBalanceBefore - bobBalanceAfter; + const treasuryIncrease = treasuryBalanceAfter - treasuryBalanceBefore; + + expect(treasuryIncrease).to.be.equal(fee); + }); + + itSub('NFT Transactions also send fees to Treasury', async ({helper}) => { + await skipInflationBlock(helper.getApi()); + await helper.wait.newBlocks(1); + + const treasuryBalanceBefore = await helper.balance.getSubstrate(TREASURY); + const aliceBalanceBefore = await helper.balance.getSubstrate(alice.address); + + await helper.nft.mintCollection(alice, {name: 'test', description: 'test', tokenPrefix: 'test'}); + + const treasuryBalanceAfter = await helper.balance.getSubstrate(TREASURY); + const aliceBalanceAfter = await helper.balance.getSubstrate(alice.address); + const fee = aliceBalanceBefore - aliceBalanceAfter; + const treasuryIncrease = treasuryBalanceAfter - treasuryBalanceBefore; + + expect(treasuryIncrease).to.be.equal(fee); + }); + + itSub('Fees are sane', async ({helper}) => { + const unique = helper.balance.getOneTokenNominal(); + await skipInflationBlock(helper.getApi()); + await helper.wait.newBlocks(1); + + const aliceBalanceBefore = await helper.balance.getSubstrate(alice.address); + + await helper.nft.mintCollection(alice, {name: 'test', description: 'test', tokenPrefix: 'test'}); + + const aliceBalanceAfter = await helper.balance.getSubstrate(alice.address); + const fee = aliceBalanceBefore - aliceBalanceAfter; + + expect(fee / unique < BigInt(Math.ceil(saneMaximumFee + createCollectionDeposit))).to.be.true; + expect(fee / unique < BigInt(Math.ceil(saneMinimumFee + createCollectionDeposit))).to.be.true; + }); + + itSub('NFT Transfer fee is close to 0.1 Unique', async ({helper}) => { + await skipInflationBlock(helper.getApi()); + await helper.wait.newBlocks(1); + + const collection = await helper.nft.mintCollection(alice, { + name: 'test', + description: 'test', + tokenPrefix: 'test', + }); + // const tokenId = await createItemExpectSuccess(alice, collectionId, 'NFT'); + const token = await collection.mintToken(alice, {Substrate: alice.address}); + + const aliceBalanceBefore = await helper.balance.getSubstrate(alice.address); + await token.transfer(alice, {Substrate: bob.address}); + const aliceBalanceAfter = await helper.balance.getSubstrate(alice.address); + + const fee = Number(aliceBalanceBefore - aliceBalanceAfter) / Number(helper.balance.getOneTokenNominal()); + const expectedTransferFee = 0.1; + // fee drifts because of NextFeeMultiplier + const tolerance = 0.001; + + expect(Math.abs(fee - expectedTransferFee)).to.be.lessThan(tolerance); + }); +}); diff --git a/tests/src/creditFeesToTreasury.test.ts b/tests/src/creditFeesToTreasury.test.ts deleted file mode 100644 index a9487fa26b..0000000000 --- a/tests/src/creditFeesToTreasury.test.ts +++ /dev/null @@ -1,190 +0,0 @@ -// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. -// This file is part of Unique Network. - -// Unique Network is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// Unique Network is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Unique Network. If not, see . - -import './interfaces/augment-api-consts'; -import chai from 'chai'; -import chaiAsPromised from 'chai-as-promised'; -import {default as usingApi, submitTransactionAsync, submitTransactionExpectFailAsync} from './substrate/substrate-api'; -import {IKeyringPair} from '@polkadot/types/types'; -import { - createCollectionExpectSuccess, - createItemExpectSuccess, - getGenericResult, - transferExpectSuccess, - UNIQUE, -} from './util/helpers'; - -import {default as waitNewBlocks} from './substrate/wait-new-blocks'; -import {ApiPromise} from '@polkadot/api'; - -chai.use(chaiAsPromised); -const expect = chai.expect; - -const TREASURY = '5EYCAe5ijiYfyeZ2JJCGq56LmPyNRAKzpG4QkoQkkQNB5e6Z'; -const saneMinimumFee = 0.05; -const saneMaximumFee = 0.5; -const createCollectionDeposit = 100; - -let alice: IKeyringPair; -let bob: IKeyringPair; - -// Skip the inflation block pauses if the block is close to inflation block -// until the inflation happens -/*eslint no-async-promise-executor: "off"*/ -function skipInflationBlock(api: ApiPromise): Promise { - const promise = new Promise(async (resolve) => { - const blockInterval = (await api.consts.inflation.inflationBlockInterval).toNumber(); - const unsubscribe = await api.rpc.chain.subscribeNewHeads(head => { - const currentBlock = head.number.toNumber(); - if (currentBlock % blockInterval < blockInterval - 10) { - unsubscribe(); - resolve(); - } else { - console.log(`Skipping inflation block, current block: ${currentBlock}`); - } - }); - }); - - return promise; -} - -describe('integration test: Fees must be credited to Treasury:', () => { - before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - }); - }); - - it('Total issuance does not change', async () => { - await usingApi(async (api) => { - await skipInflationBlock(api); - await waitNewBlocks(api, 1); - - const totalBefore = (await api.query.balances.totalIssuance()).toBigInt(); - - const amount = 1n; - const transfer = api.tx.balances.transfer(bob.address, amount); - - const result = getGenericResult(await submitTransactionAsync(alice, transfer)); - - const totalAfter = (await api.query.balances.totalIssuance()).toBigInt(); - - expect(result.success).to.be.true; - expect(totalAfter).to.be.equal(totalBefore); - }); - }); - - it('Sender balance decreased by fee+sent amount, Treasury balance increased by fee', async () => { - await usingApi(async (api) => { - await skipInflationBlock(api); - await waitNewBlocks(api, 1); - - const treasuryBalanceBefore: bigint = (await api.query.system.account(TREASURY)).data.free.toBigInt(); - const aliceBalanceBefore: bigint = (await api.query.system.account(alice.address)).data.free.toBigInt(); - - const amount = 1n; - const transfer = api.tx.balances.transfer(bob.address, amount); - const result = getGenericResult(await submitTransactionAsync(alice, transfer)); - - const treasuryBalanceAfter: bigint = (await api.query.system.account(TREASURY)).data.free.toBigInt(); - const aliceBalanceAfter: bigint = (await api.query.system.account(alice.address)).data.free.toBigInt(); - const fee = aliceBalanceBefore - 
aliceBalanceAfter - amount; - const treasuryIncrease = treasuryBalanceAfter - treasuryBalanceBefore; - - expect(result.success).to.be.true; - expect(treasuryIncrease).to.be.equal(fee); - }); - }); - - it('Treasury balance increased by failed tx fee', async () => { - await usingApi(async (api) => { - //await skipInflationBlock(api); - await waitNewBlocks(api, 1); - - const treasuryBalanceBefore = (await api.query.system.account(TREASURY)).data.free.toBigInt(); - const bobBalanceBefore = (await api.query.system.account(bob.address)).data.free.toBigInt(); - - const badTx = api.tx.balances.setBalance(alice.address, 0, 0); - await expect(submitTransactionExpectFailAsync(bob, badTx)).to.be.rejected; - - const treasuryBalanceAfter = (await api.query.system.account(TREASURY)).data.free.toBigInt(); - const bobBalanceAfter = (await api.query.system.account(bob.address)).data.free.toBigInt(); - const fee = bobBalanceBefore - bobBalanceAfter; - const treasuryIncrease = treasuryBalanceAfter - treasuryBalanceBefore; - - expect(treasuryIncrease).to.be.equal(fee); - }); - }); - - it('NFT Transactions also send fees to Treasury', async () => { - await usingApi(async (api) => { - await skipInflationBlock(api); - await waitNewBlocks(api, 1); - - const treasuryBalanceBefore = (await api.query.system.account(TREASURY)).data.free.toBigInt(); - const aliceBalanceBefore = (await api.query.system.account(alice.address)).data.free.toBigInt(); - - await createCollectionExpectSuccess(); - - const treasuryBalanceAfter = (await api.query.system.account(TREASURY)).data.free.toBigInt(); - const aliceBalanceAfter = (await api.query.system.account(alice.address)).data.free.toBigInt(); - const fee = aliceBalanceBefore - aliceBalanceAfter; - const treasuryIncrease = treasuryBalanceAfter - treasuryBalanceBefore; - - expect(treasuryIncrease).to.be.equal(fee); - }); - }); - - it('Fees are sane', async () => { - await usingApi(async (api) => { - await skipInflationBlock(api); - await waitNewBlocks(api, 1); - - const aliceBalanceBefore: bigint = (await api.query.system.account(alice.address)).data.free.toBigInt(); - - await createCollectionExpectSuccess(); - - const aliceBalanceAfter: bigint = (await api.query.system.account(alice.address)).data.free.toBigInt(); - const fee = aliceBalanceBefore - aliceBalanceAfter; - - expect(fee / UNIQUE < BigInt(Math.ceil(saneMaximumFee + createCollectionDeposit))).to.be.true; - expect(fee / UNIQUE < BigInt(Math.ceil(saneMinimumFee + createCollectionDeposit))).to.be.true; - }); - }); - - it('NFT Transfer fee is close to 0.1 Unique', async () => { - await usingApi(async (api) => { - await skipInflationBlock(api); - await waitNewBlocks(api, 1); - - const collectionId = await createCollectionExpectSuccess(); - const tokenId = await createItemExpectSuccess(alice, collectionId, 'NFT'); - - const aliceBalanceBefore: bigint = (await api.query.system.account(alice.address)).data.free.toBigInt(); - await transferExpectSuccess(collectionId, tokenId, alice, bob, 1, 'NFT'); - const aliceBalanceAfter: bigint = (await api.query.system.account(alice.address)).data.free.toBigInt(); - - const fee = Number(aliceBalanceBefore - aliceBalanceAfter) / Number(UNIQUE); - const expectedTransferFee = 0.1; - // fee drifts because of NextFeeMultiplier - const tolerance = 0.001; - - expect(Math.abs(fee - expectedTransferFee)).to.be.lessThan(tolerance); - }); - }); - -}); diff --git a/tests/src/destroyCollection.test.ts b/tests/src/destroyCollection.test.ts index 9f3cfdbcfe..d51c9f142a 100644 --- 
a/tests/src/destroyCollection.test.ts +++ b/tests/src/destroyCollection.test.ts @@ -15,32 +15,44 @@ // along with Unique Network. If not, see . import {IKeyringPair} from '@polkadot/types/types'; -import chai from 'chai'; -import chaiAsPromised from 'chai-as-promised'; -import {default as usingApi} from './substrate/substrate-api'; -import {createCollectionExpectSuccess, - destroyCollectionExpectSuccess, - destroyCollectionExpectFailure, - setCollectionLimitsExpectSuccess, - addCollectionAdminExpectSuccess, - getCreatedCollectionCount, - createItemExpectSuccess, -} from './util/helpers'; - -chai.use(chaiAsPromised); +import {itSub, expect, usingPlaygrounds, Pallets} from './util'; describe('integration test: ext. destroyCollection():', () => { - it('NFT collection can be destroyed', async () => { - const collectionId = await createCollectionExpectSuccess(); - await destroyCollectionExpectSuccess(collectionId); + let alice: IKeyringPair; + + before(async () => { + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice] = await helper.arrange.createAccounts([100n], donor); + }); }); - it('Fungible collection can be destroyed', async () => { - const collectionId = await createCollectionExpectSuccess({mode: {type: 'Fungible', decimalPoints: 0}}); - await destroyCollectionExpectSuccess(collectionId); + + itSub('NFT collection can be destroyed', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, { + name: 'test', + description: 'test', + tokenPrefix: 'test', + }); + await collection.burn(alice); + expect(await collection.getData()).to.be.null; }); - it('ReFungible collection can be destroyed', async () => { - const collectionId = await createCollectionExpectSuccess({mode: {type: 'ReFungible'}}); - await destroyCollectionExpectSuccess(collectionId); + itSub('Fungible collection can be destroyed', async ({helper}) => { + const collection = await helper.ft.mintCollection(alice, { + name: 'test', + description: 'test', + tokenPrefix: 'test', + }, 0); + await collection.burn(alice); + expect(await collection.getData()).to.be.null; + }); + itSub.ifWithPallets('ReFungible collection can be destroyed', [Pallets.ReFungible], async ({helper}) => { + const collection = await helper.rft.mintCollection(alice, { + name: 'test', + description: 'test', + tokenPrefix: 'test', + }); + await collection.burn(alice); + expect(await collection.getData()).to.be.null; }); }); @@ -49,44 +61,60 @@ describe('(!negative test!) integration test: ext. destroyCollection():', () => let bob: IKeyringPair; before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob] = await helper.arrange.createAccounts([100n, 100n], donor); }); }); - it('(!negative test!) Destroy a collection that never existed', async () => { - await usingApi(async (api) => { - // Find the collection that never existed - const collectionId = await getCreatedCollectionCount(api) + 1; - await destroyCollectionExpectFailure(collectionId); - }); + itSub('(!negative test!) Destroy a collection that never existed', async ({helper}) => { + const collectionId = 1_000_000; + await expect(helper.collection.burn(alice, collectionId)).to.be.rejectedWith(/common\.CollectionNotFound/); }); - it('(!negative test!) 
Destroy a collection that has already been destroyed', async () => { - const collectionId = await createCollectionExpectSuccess(); - await destroyCollectionExpectSuccess(collectionId); - await destroyCollectionExpectFailure(collectionId); + itSub('(!negative test!) Destroy a collection that has already been destroyed', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, { + name: 'test', + description: 'test', + tokenPrefix: 'test', + }); + await collection.burn(alice); + await expect(collection.burn(alice)).to.be.rejectedWith(/common\.CollectionNotFound/); }); - it('(!negative test!) Destroy a collection using non-owner account', async () => { - const collectionId = await createCollectionExpectSuccess(); - await destroyCollectionExpectFailure(collectionId, '//Bob'); - await destroyCollectionExpectSuccess(collectionId, '//Alice'); + itSub('(!negative test!) Destroy a collection using non-owner account', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, { + name: 'test', + description: 'test', + tokenPrefix: 'test', + }); + await expect(collection.burn(bob)).to.be.rejectedWith(/common\.NoPermission/); }); - it('(!negative test!) Destroy a collection using collection admin account', async () => { - const collectionId = await createCollectionExpectSuccess(); - await addCollectionAdminExpectSuccess(alice, collectionId, bob.address); - await destroyCollectionExpectFailure(collectionId, '//Bob'); + itSub('(!negative test!) Destroy a collection using collection admin account', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, { + name: 'test', + description: 'test', + tokenPrefix: 'test', + }); + await collection.addAdmin(alice, {Substrate: bob.address}); + await expect(collection.burn(bob)).to.be.rejectedWith(/common\.NoPermission/); }); - it('fails when OwnerCanDestroy == false', async () => { - const collectionId = await createCollectionExpectSuccess(); - await setCollectionLimitsExpectSuccess(alice, collectionId, {ownerCanDestroy: false}); - - await destroyCollectionExpectFailure(collectionId, '//Alice'); + itSub('fails when OwnerCanDestroy == false', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, { + name: 'test', + description: 'test', + tokenPrefix: 'test', + limits: { + ownerCanDestroy: false, + }, + }); + await expect(collection.burn(alice)).to.be.rejectedWith(/common\.NoPermission/); }); - it('fails when a collection still has a token', async () => { - const collectionId = await createCollectionExpectSuccess(); - await createItemExpectSuccess(alice, collectionId, 'NFT'); - - await destroyCollectionExpectFailure(collectionId, '//Alice'); + itSub('fails when a collection still has a token', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, { + name: 'test', + description: 'test', + tokenPrefix: 'test', + }); + await collection.mintToken(alice, {Substrate: alice.address}); + await expect(collection.burn(alice)).to.be.rejectedWith(/common\.CantDestroyNotEmptyCollection/); }); }); diff --git a/tests/src/enableDisableTransfer.test.ts b/tests/src/enableDisableTransfer.test.ts index ac644c462d..401c1d67d7 100644 --- a/tests/src/enableDisableTransfer.test.ts +++ b/tests/src/enableDisableTransfer.test.ts @@ -14,61 +14,69 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . 
-import chai from 'chai'; -import chaiAsPromised from 'chai-as-promised'; -import usingApi from './substrate/substrate-api'; -import { - createItemExpectSuccess, - createCollectionExpectSuccess, - transferExpectSuccess, - transferExpectFailure, - setTransferFlagExpectSuccess, - setTransferFlagExpectFailure, -} from './util/helpers'; - -chai.use(chaiAsPromised); +import {IKeyringPair} from '@polkadot/types/types'; +import {itSub, usingPlaygrounds, expect} from './util'; describe('Enable/Disable Transfers', () => { - it('User can transfer token with enabled transfer flag', async () => { - await usingApi(async (api, privateKeyWrapper) => { - const alice = privateKeyWrapper('//Alice'); - const bob = privateKeyWrapper('//Bob'); - // nft - const nftCollectionId = await createCollectionExpectSuccess(); - const newNftTokenId = await createItemExpectSuccess(alice, nftCollectionId, 'NFT'); - - // explicitely set transfer flag - await setTransferFlagExpectSuccess(alice, nftCollectionId, true); + let alice: IKeyringPair; + let bob: IKeyringPair; - await transferExpectSuccess(nftCollectionId, newNftTokenId, alice, bob, 1); + before(async () => { + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob] = await helper.arrange.createAccounts([100n, 100n], donor); }); }); - it('User can\'n transfer token with disabled transfer flag', async () => { - await usingApi(async (api, privateKeyWrapper) => { - const alice = privateKeyWrapper('//Alice'); - const bob = privateKeyWrapper('//Bob'); - // nft - const nftCollectionId = await createCollectionExpectSuccess(); - const newNftTokenId = await createItemExpectSuccess(alice, nftCollectionId, 'NFT'); - - // explicitely set transfer flag - await setTransferFlagExpectSuccess(alice, nftCollectionId, false); + itSub('User can transfer token with enabled transfer flag', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, { + name: 'test', + description: 'test', + tokenPrefix: 'test', + limits: { + transfersEnabled: true, + }, + }); + const token = await collection.mintToken(alice, {Substrate: alice.address}); + await token.transfer(alice, {Substrate: bob.address}); + expect(await token.getOwner()).to.be.deep.equal({Substrate: bob.address}); + }); - await transferExpectFailure(nftCollectionId, newNftTokenId, alice, bob, 1); + itSub('User can\'n transfer token with disabled transfer flag', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, { + name: 'test', + description: 'test', + tokenPrefix: 'test', + limits: { + transfersEnabled: false, + }, }); + const token = await collection.mintToken(alice, {Substrate: alice.address}); + await expect(token.transfer(alice, {Substrate: bob.address})).to.be.rejectedWith(/common\.TransferNotAllowed/); }); }); describe('Negative Enable/Disable Transfers', () => { - it('Non-owner cannot change transfer flag', async () => { - await usingApi(async (api, privateKeyWrapper) => { - const bob = privateKeyWrapper('//Bob'); - // nft - const nftCollectionId = await createCollectionExpectSuccess(); + let alice: IKeyringPair; + let bob: IKeyringPair; - // Change transfer flag - await setTransferFlagExpectFailure(bob, nftCollectionId, false); + before(async () => { + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob] = await helper.arrange.createAccounts([100n, 100n], donor); }); }); + + itSub('Non-owner cannot change transfer flag', async 
({helper}) => { + const collection = await helper.nft.mintCollection(alice, { + name: 'test', + description: 'test', + tokenPrefix: 'test', + limits: { + transfersEnabled: true, + }, + }); + + await expect(collection.setLimits(bob, {transfersEnabled: false})).to.be.rejectedWith(/common\.NoPermission/); + }); }); diff --git a/tests/src/eth/allowlist.test.ts b/tests/src/eth/allowlist.test.ts index 14c4414979..76c5cfbde7 100644 --- a/tests/src/eth/allowlist.test.ts +++ b/tests/src/eth/allowlist.test.ts @@ -14,15 +14,22 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . -import {expect} from 'chai'; -import {contractHelpers, createEthAccountWithBalance, deployFlipper, itWeb3} from './util/helpers'; +import {IKeyringPair} from '@polkadot/types/types'; +import {itEth, usingEthPlaygrounds, expect} from './util'; -describe('EVM allowlist', () => { - itWeb3('Contract allowlist can be toggled', async ({api, web3, privateKeyWrapper}) => { - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const flipper = await deployFlipper(web3, owner); +describe('EVM contract allowlist', () => { + let donor: IKeyringPair; - const helpers = contractHelpers(web3, owner); + before(async function() { + await usingEthPlaygrounds(async (_helper, privateKey) => { + donor = await privateKey({filename: __filename}); + }); + }); + + itEth('Contract allowlist can be toggled', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const flipper = await helper.eth.deployFlipper(owner); + const helpers = helper.ethNativeContract.contractHelpers(owner); // Any user is allowed by default expect(await helpers.methods.allowlistEnabled(flipper.options.address).call()).to.be.false; @@ -36,12 +43,11 @@ describe('EVM allowlist', () => { expect(await helpers.methods.allowlistEnabled(flipper.options.address).call()).to.be.false; }); - itWeb3('Non-allowlisted user can\'t call contract with allowlist enabled', async ({api, web3, privateKeyWrapper}) => { - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const flipper = await deployFlipper(web3, owner); - const caller = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - - const helpers = contractHelpers(web3, owner); + itEth('Non-allowlisted user can\'t call contract with allowlist enabled', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const caller = await helper.eth.createAccountWithBalance(donor); + const flipper = await helper.eth.deployFlipper(owner); + const helpers = helper.ethNativeContract.contractHelpers(owner); // User can flip with allowlist disabled await flipper.methods.flip().send({from: caller}); @@ -58,3 +64,81 @@ describe('EVM allowlist', () => { expect(await flipper.methods.getValue().call()).to.be.false; }); }); + +describe('EVM collection allowlist', () => { + let donor: IKeyringPair; + + before(async function() { + await usingEthPlaygrounds(async (_helper, privateKey) => { + donor = await privateKey({filename: __filename}); + }); + }); + + itEth('Collection allowlist can be added and removed by [eth] address', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const user = helper.eth.createAccount(); + + const {collectionAddress} = await helper.eth.createNFTCollection(owner, 'A', 'B', 'C'); + const collectionEvm = helper.ethNativeContract.collection(collectionAddress, 'nft', owner); + + expect(await 
collectionEvm.methods.allowed(user).call({from: owner})).to.be.false; + await collectionEvm.methods.addToCollectionAllowList(user).send({from: owner}); + expect(await collectionEvm.methods.allowed(user).call({from: owner})).to.be.true; + + await collectionEvm.methods.removeFromCollectionAllowList(user).send({from: owner}); + expect(await collectionEvm.methods.allowed(user).call({from: owner})).to.be.false; + }); + + // TODO: Temporarily off. Need refactor + // itEth('Collection allowlist can be added and removed by [sub] address', async ({helper}) => { + // const owner = await helper.eth.createAccountWithBalance(donor); + // const user = donor; + + // const {collectionAddress, collectionId} = await helper.eth.createNFTCollection(owner, 'A', 'B', 'C'); + // const collectionEvm = helper.ethNativeContract.collection(collectionAddress, 'nft', owner); + + // expect(await helper.collection.allowed(collectionId, {Substrate: user.address})).to.be.false; + // await collectionEvm.methods.addToCollectionAllowListSubstrate(user.addressRaw).send({from: owner}); + // expect(await helper.collection.allowed(collectionId, {Substrate: user.address})).to.be.true; + + // await collectionEvm.methods.removeFromCollectionAllowListSubstrate(user.addressRaw).send({from: owner}); + // expect(await helper.collection.allowed(collectionId, {Substrate: user.address})).to.be.false; + // }); + + itEth('Collection allowlist cannot have an [eth] address added or removed by non-owner', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const notOwner = await helper.eth.createAccountWithBalance(donor); + const user = helper.eth.createAccount(); + + const {collectionAddress} = await helper.eth.createNFTCollection(owner, 'A', 'B', 'C'); + const collectionEvm = helper.ethNativeContract.collection(collectionAddress, 'nft', owner); + + expect(await collectionEvm.methods.allowed(user).call({from: owner})).to.be.false; + await expect(collectionEvm.methods.addToCollectionAllowList(user).call({from: notOwner})).to.be.rejectedWith('NoPermission'); + expect(await collectionEvm.methods.allowed(user).call({from: owner})).to.be.false; + await collectionEvm.methods.addToCollectionAllowList(user).send({from: owner}); + + expect(await collectionEvm.methods.allowed(user).call({from: owner})).to.be.true; + await expect(collectionEvm.methods.removeFromCollectionAllowList(user).call({from: notOwner})).to.be.rejectedWith('NoPermission'); + expect(await collectionEvm.methods.allowed(user).call({from: owner})).to.be.true; + }); + + // TODO: Temporarily off.
Need refactor + // itEth('Collection allowlist can not be add and remove [sub] address by not owner', async ({helper}) => { + // const owner = await helper.eth.createAccountWithBalance(donor); + // const notOwner = await helper.eth.createAccountWithBalance(donor); + // const user = donor; + + // const {collectionAddress, collectionId} = await helper.eth.createNFTCollection(owner, 'A', 'B', 'C'); + // const collectionEvm = helper.ethNativeContract.collection(collectionAddress, 'nft', owner); + + // expect(await helper.collection.allowed(collectionId, {Substrate: user.address})).to.be.false; + // await expect(collectionEvm.methods.addToCollectionAllowListSubstrate(user.addressRaw).call({from: notOwner})).to.be.rejectedWith('NoPermission'); + // expect(await helper.collection.allowed(collectionId, {Substrate: user.address})).to.be.false; + // await collectionEvm.methods.addToCollectionAllowListSubstrate(user.addressRaw).send({from: owner}); + + // expect(await helper.collection.allowed(collectionId, {Substrate: user.address})).to.be.true; + // await expect(collectionEvm.methods.removeFromCollectionAllowListSubstrate(user.addressRaw).call({from: notOwner})).to.be.rejectedWith('NoPermission'); + // expect(await helper.collection.allowed(collectionId, {Substrate: user.address})).to.be.true; + // }); +}); diff --git a/tests/src/eth/api/CollectionHelpers.sol b/tests/src/eth/api/CollectionHelpers.sol index ddd545deb4..327c86159a 100644 --- a/tests/src/eth/api/CollectionHelpers.sol +++ b/tests/src/eth/api/CollectionHelpers.sol @@ -3,7 +3,7 @@ pragma solidity >=0.8.0 <0.9.0; -// Common stubs holder +/// @dev common stubs holder interface Dummy { } @@ -12,26 +12,56 @@ interface ERC165 is Dummy { function supportsInterface(bytes4 interfaceID) external view returns (bool); } -// Inline +/// @dev inlined interface interface CollectionHelpersEvents { - event CollectionCreated( - address indexed owner, - address indexed collectionId - ); + event CollectionCreated(address indexed owner, address indexed collectionId); } -// Selector: 20947cd0 +/// @title Contract, which allows users to operate with collections +/// @dev the ERC-165 identifier for this interface is 0x58918631 interface CollectionHelpers is Dummy, ERC165, CollectionHelpersEvents { - // Selector: createNonfungibleCollection(string,string,string) e34a6844 - function createNonfungibleCollection( + /// Create an NFT collection + /// @param name Name of the collection + /// @param description Informative description of the collection + /// @param tokenPrefix Token prefix to represent the collection tokens in UI and user applications + /// @return address Address of the newly created collection + /// @dev EVM selector for this function is: 0x844af658, + /// or in textual repr: createNFTCollection(string,string,string) + function createNFTCollection( string memory name, string memory description, string memory tokenPrefix - ) external returns (address); + ) external payable returns (address); - // Selector: isCollectionExist(address) c3de1494 - function isCollectionExist(address collectionAddress) - external - view - returns (bool); + // /// Create an NFT collection + // /// @param name Name of the collection + // /// @param description Informative description of the collection + // /// @param tokenPrefix Token prefix to represent the collection tokens in UI and user applications + // /// @return address Address of the newly created collection + // /// @dev EVM selector for this function is: 0xe34a6844, + // /// or in textual repr: 
createNonfungibleCollection(string,string,string) + // function createNonfungibleCollection(string memory name, string memory description, string memory tokenPrefix) external payable returns (address); + + /// @dev EVM selector for this function is: 0xab173450, + /// or in textual repr: createRFTCollection(string,string,string) + function createRFTCollection( + string memory name, + string memory description, + string memory tokenPrefix + ) external payable returns (address); + + /// @dev EVM selector for this function is: 0x85624258, + /// or in textual repr: makeCollectionERC721MetadataCompatible(address,string) + function makeCollectionERC721MetadataCompatible(address collection, string memory baseUri) external; + + /// Check if a collection exists + /// @param collectionAddress Address of the collection in question + /// @return bool Does the collection exist? + /// @dev EVM selector for this function is: 0xc3de1494, + /// or in textual repr: isCollectionExist(address) + function isCollectionExist(address collectionAddress) external view returns (bool); + + /// @dev EVM selector for this function is: 0xd23a7ab1, + /// or in textual repr: collectionCreationFee() + function collectionCreationFee() external view returns (uint256); } diff --git a/tests/src/eth/api/ContractHelpers.sol b/tests/src/eth/api/ContractHelpers.sol index 0205993a7e..dd0970b96b 100644 --- a/tests/src/eth/api/ContractHelpers.sol +++ b/tests/src/eth/api/ContractHelpers.sol @@ -3,7 +3,7 @@ pragma solidity >=0.8.0 <0.9.0; -// Common stubs holder +/// @dev common stubs holder interface Dummy { } @@ -12,63 +12,167 @@ interface ERC165 is Dummy { function supportsInterface(bytes4 interfaceID) external view returns (bool); } -// Selector: 7b4866f9 -interface ContractHelpers is Dummy, ERC165 { - // Selector: contractOwner(address) 5152b14c - function contractOwner(address contractAddress) - external - view - returns (address); - - // Selector: sponsoringEnabled(address) 6027dc61 - function sponsoringEnabled(address contractAddress) - external - view - returns (bool); - - // Deprecated - // - // Selector: toggleSponsoring(address,bool) fcac6d86 - function toggleSponsoring(address contractAddress, bool enabled) external; - - // Selector: setSponsoringMode(address,uint8) fde8a560 +/// @dev inlined interface +interface ContractHelpersEvents { + event ContractSponsorSet(address indexed contractAddress, address sponsor); + event ContractSponsorshipConfirmed(address indexed contractAddress, address sponsor); + event ContractSponsorRemoved(address indexed contractAddress); +} + +/// @title Magic contract, which allows users to reconfigure other contracts +/// @dev the ERC-165 identifier for this interface is 0x30afad04 +interface ContractHelpers is Dummy, ERC165, ContractHelpersEvents { + /// Get the user who deployed the specified contract + /// @dev May return the zero address if the contract was deployed + /// using the uniquenetwork evm-migration pallet, or by other means not + /// intended by pallet-evm + /// @dev Returns the zero address if the contract does not exist + /// @param contractAddress Contract to get owner of + /// @return address Owner of contract + /// @dev EVM selector for this function is: 0x5152b14c, + /// or in textual repr: contractOwner(address) + function contractOwner(address contractAddress) external view returns (address); + + /// Set sponsor. + /// @param contractAddress Contract for which a sponsor is being established. + /// @param sponsor Address of the user to be set as the pending sponsor.
+ /// @dev EVM selector for this function is: 0xf01fba93, + /// or in textual repr: setSponsor(address,address) + function setSponsor(address contractAddress, address sponsor) external; + + /// Set contract as self-sponsored. + /// + /// @param contractAddress Contract for which self-sponsoring is being enabled. + /// @dev EVM selector for this function is: 0x89f7d9ae, + /// or in textual repr: selfSponsoredEnable(address) + function selfSponsoredEnable(address contractAddress) external; + + /// Remove sponsor. + /// + /// @param contractAddress Contract for which a sponsorship is being removed. + /// @dev EVM selector for this function is: 0xef784250, + /// or in textual repr: removeSponsor(address) + function removeSponsor(address contractAddress) external; + + /// Confirm sponsorship. + /// + /// @dev Caller must be the same address that was set via [`setSponsor`]. + /// + /// @param contractAddress Contract for which sponsorship needs to be confirmed. + /// @dev EVM selector for this function is: 0xabc00001, + /// or in textual repr: confirmSponsorship(address) + function confirmSponsorship(address contractAddress) external; + + /// Get current sponsor. + /// + /// @param contractAddress The contract for which a sponsor is requested. + /// @return Tuple with the sponsor address and its substrate mirror. Throws the error "Contract has no sponsor" if there is no confirmed sponsor. + /// @dev EVM selector for this function is: 0x766c4f37, + /// or in textual repr: sponsor(address) + function sponsor(address contractAddress) external view returns (Tuple0 memory); + + /// Check that the contract has a confirmed sponsor. + /// + /// @param contractAddress The contract for which the presence of a confirmed sponsor is checked. + /// @return **true** if the contract has a confirmed sponsor. + /// @dev EVM selector for this function is: 0x97418603, + /// or in textual repr: hasSponsor(address) + function hasSponsor(address contractAddress) external view returns (bool); + + /// Check that the contract has a pending sponsor. + /// + /// @param contractAddress The contract for which the presence of a pending sponsor is checked. + /// @return **true** if the contract has a pending sponsor.
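To make the sponsorship lifecycle above concrete, here is a minimal test-style sketch in the same style as the TypeScript tests earlier in this diff. It assumes the `itEth`/`helper`/`donor` scaffolding those tests use, plus two hypothetical names: a web3 contract instance `contractHelpers` bound to the ContractHelpers interface (the precompile address and ABI wiring are chain-specific and not shown in this diff) and `deployedContract`, some contract previously deployed by `owner`. Only the setSponsor / confirmSponsorship / hasPendingSponsor / hasSponsor calls themselves come from the interface declared here.

itEth('Sponsorship lifecycle sketch', async ({helper}) => {
  const owner = await helper.eth.createAccountWithBalance(donor);
  const sponsor = await helper.eth.createAccountWithBalance(donor);
  // Hypothetical: any contract previously deployed from `owner`.
  const target = deployedContract.options.address;

  // 1. The contract owner nominates a sponsor; it stays pending until confirmed.
  await contractHelpers.methods.setSponsor(target, sponsor).send({from: owner});
  expect(await contractHelpers.methods.hasPendingSponsor(target).call()).to.be.true;

  // 2. The nominated account confirms the sponsorship from its own address.
  await contractHelpers.methods.confirmSponsorship(target).send({from: sponsor});

  // 3. The sponsorship is now active and can be queried.
  expect(await contractHelpers.methods.hasSponsor(target).call()).to.be.true;
});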
+ /// @dev EVM selector for this function is: 0x39b9b242, + /// or in textual repr: hasPendingSponsor(address) + function hasPendingSponsor(address contractAddress) external view returns (bool); + + /// @dev EVM selector for this function is: 0x6027dc61, + /// or in textual repr: sponsoringEnabled(address) + function sponsoringEnabled(address contractAddress) external view returns (bool); + + /// @dev EVM selector for this function is: 0xfde8a560, + /// or in textual repr: setSponsoringMode(address,uint8) function setSponsoringMode(address contractAddress, uint8 mode) external; - // Selector: sponsoringMode(address) b70c7267 - function sponsoringMode(address contractAddress) - external - view - returns (uint8); - - // Selector: setSponsoringRateLimit(address,uint32) 77b6c908 - function setSponsoringRateLimit(address contractAddress, uint32 rateLimit) - external; - - // Selector: getSponsoringRateLimit(address) 610cfabd - function getSponsoringRateLimit(address contractAddress) - external - view - returns (uint32); - - // Selector: allowed(address,address) 5c658165 - function allowed(address contractAddress, address user) - external - view - returns (bool); - - // Selector: allowlistEnabled(address) c772ef6c - function allowlistEnabled(address contractAddress) - external - view - returns (bool); - - // Selector: toggleAllowlist(address,bool) 36de20f5 - function toggleAllowlist(address contractAddress, bool enabled) external; + /// Get current contract sponsoring rate limit + /// @param contractAddress Contract to get sponsoring rate limit of + /// @return uint32 Amount of blocks between two sponsored transactions + /// @dev EVM selector for this function is: 0xf29694d8, + /// or in textual repr: sponsoringRateLimit(address) + function sponsoringRateLimit(address contractAddress) external view returns (uint32); + + /// Set contract sponsoring rate limit + /// @dev Sponsoring rate limit - is a minimum amount of blocks that should + /// pass between two sponsored transactions + /// @param contractAddress Contract to change sponsoring rate limit of + /// @param rateLimit Target rate limit + /// @dev Only contract owner can change this setting + /// @dev EVM selector for this function is: 0x77b6c908, + /// or in textual repr: setSponsoringRateLimit(address,uint32) + function setSponsoringRateLimit(address contractAddress, uint32 rateLimit) external; + + /// Set contract sponsoring fee limit + /// @dev Sponsoring fee limit - is maximum fee that could be spent by + /// single transaction + /// @param contractAddress Contract to change sponsoring fee limit of + /// @param feeLimit Fee limit + /// @dev Only contract owner can change this setting + /// @dev EVM selector for this function is: 0x03aed665, + /// or in textual repr: setSponsoringFeeLimit(address,uint256) + function setSponsoringFeeLimit(address contractAddress, uint256 feeLimit) external; + + /// Get current contract sponsoring fee limit + /// @param contractAddress Contract to get sponsoring fee limit of + /// @return uint256 Maximum amount of fee that could be spent by single + /// transaction + /// @dev EVM selector for this function is: 0x75b73606, + /// or in textual repr: sponsoringFeeLimit(address) + function sponsoringFeeLimit(address contractAddress) external view returns (uint256); + + /// Is specified user present in contract allow list + /// @dev Contract owner always implicitly included + /// @param contractAddress Contract to check allowlist of + /// @param user User to check + /// @return bool Is specified users exists in 
contract allowlist + /// @dev EVM selector for this function is: 0x5c658165, + /// or in textual repr: allowed(address,address) + function allowed(address contractAddress, address user) external view returns (bool); - // Selector: toggleAllowed(address,address,bool) 4706cc1c + /// Toggle user presence in contract allowlist + /// @param contractAddress Contract to change allowlist of + /// @param user Which user presence should be toggled + /// @param isAllowed `true` if user should be allowed to be sponsored + /// or call this contract, `false` otherwise + /// @dev Only contract owner can change this setting + /// @dev EVM selector for this function is: 0x4706cc1c, + /// or in textual repr: toggleAllowed(address,address,bool) function toggleAllowed( address contractAddress, address user, - bool allowed + bool isAllowed ) external; + + /// Is this contract has allowlist access enabled + /// @dev Allowlist always can have users, and it is used for two purposes: + /// in case of allowlist sponsoring mode, users will be sponsored if they exist in allowlist + /// in case of allowlist access enabled, only users from allowlist may call this contract + /// @param contractAddress Contract to get allowlist access of + /// @return bool Is specified contract has allowlist access enabled + /// @dev EVM selector for this function is: 0xc772ef6c, + /// or in textual repr: allowlistEnabled(address) + function allowlistEnabled(address contractAddress) external view returns (bool); + + /// Toggle contract allowlist access + /// @param contractAddress Contract to change allowlist access of + /// @param enabled Should allowlist access to be enabled? + /// @dev EVM selector for this function is: 0x36de20f5, + /// or in textual repr: toggleAllowlist(address,bool) + function toggleAllowlist(address contractAddress, bool enabled) external; +} + +/// @dev anonymous struct +struct Tuple0 { + address field_0; + uint256 field_1; } diff --git a/tests/src/eth/api/UniqueFungible.sol b/tests/src/eth/api/UniqueFungible.sol index 0f9d1e7a80..a0445eb0cf 100644 --- a/tests/src/eth/api/UniqueFungible.sol +++ b/tests/src/eth/api/UniqueFungible.sol @@ -3,7 +3,7 @@ pragma solidity >=0.8.0 <0.9.0; -// Common stubs holder +/// @dev common stubs holder interface Dummy { } @@ -12,96 +12,274 @@ interface ERC165 is Dummy { function supportsInterface(bytes4 interfaceID) external view returns (bool); } -// Inline -interface ERC20Events { - event Transfer(address indexed from, address indexed to, uint256 value); - event Approval( - address indexed owner, - address indexed spender, - uint256 value - ); +/// @title A contract that allows you to work with collections. +/// @dev the ERC-165 identifier for this interface is 0x62e22290 +interface Collection is Dummy, ERC165 { + /// Set collection property. + /// + /// @param key Property key. + /// @param value Propery value. + /// @dev EVM selector for this function is: 0x2f073f66, + /// or in textual repr: setCollectionProperty(string,bytes) + function setCollectionProperty(string memory key, bytes memory value) external; + + /// Delete collection property. + /// + /// @param key Property key. + /// @dev EVM selector for this function is: 0x7b7debce, + /// or in textual repr: deleteCollectionProperty(string) + function deleteCollectionProperty(string memory key) external; + + /// Get collection property. + /// + /// @dev Throws error if key not found. + /// + /// @param key Property key. + /// @return bytes The property corresponding to the key. 
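As a quick illustration of the property setters just described, a sketch of a collection-property round trip, using the same `itEth`/`helper`/`donor` scaffolding as the tests at the top of this diff. The key and value are arbitrary examples (the bytes value is hex-encoded, as web3 expects); an NFT collection is used simply because that is what those tests create, while the same Collection interface is mixed into the fungible and refungible contracts as well.

itEth('Collection property round-trip sketch', async ({helper}) => {
  const owner = await helper.eth.createAccountWithBalance(donor);

  const {collectionAddress} = await helper.eth.createNFTCollection(owner, 'A', 'B', 'C');
  const collectionEvm = helper.ethNativeContract.collection(collectionAddress, 'nft', owner);

  // Write a property, read it back, then delete it again.
  await collectionEvm.methods.setCollectionProperty('testKey', '0x01').send({from: owner});
  expect(await collectionEvm.methods.collectionProperty('testKey').call()).to.equal('0x01');
  await collectionEvm.methods.deleteCollectionProperty('testKey').send({from: owner});
});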
+ /// @dev EVM selector for this function is: 0xcf24fd6d, + /// or in textual repr: collectionProperty(string) + function collectionProperty(string memory key) external view returns (bytes memory); + + /// Set the sponsor of the collection. + /// + /// @dev In order for sponsorship to work, it must be confirmed on behalf of the sponsor. + /// + /// @param sponsor Address of the sponsor from whose account funds will be debited for operations with the contract. + /// @dev EVM selector for this function is: 0x7623402e, + /// or in textual repr: setCollectionSponsor(address) + function setCollectionSponsor(address sponsor) external; + + /// Whether there is a pending sponsor. + /// @dev EVM selector for this function is: 0x058ac185, + /// or in textual repr: hasCollectionPendingSponsor() + function hasCollectionPendingSponsor() external view returns (bool); + + /// Collection sponsorship confirmation. + /// + /// @dev After setting the sponsor for the collection, it must be confirmed with this function. + /// @dev EVM selector for this function is: 0x3c50e97a, + /// or in textual repr: confirmCollectionSponsorship() + function confirmCollectionSponsorship() external; + + /// Remove collection sponsor. + /// @dev EVM selector for this function is: 0x6e0326a3, + /// or in textual repr: removeCollectionSponsor() + function removeCollectionSponsor() external; + + /// Get current sponsor. + /// + /// @return Tuble with sponsor address and his substrate mirror. If there is no confirmed sponsor error "Contract has no sponsor" throw. + /// @dev EVM selector for this function is: 0x6ec0a9f1, + /// or in textual repr: collectionSponsor() + function collectionSponsor() external view returns (Tuple6 memory); + + /// Set limits for the collection. + /// @dev Throws error if limit not found. + /// @param limit Name of the limit. Valid names: + /// "accountTokenOwnershipLimit", + /// "sponsoredDataSize", + /// "sponsoredDataRateLimit", + /// "tokenLimit", + /// "sponsorTransferTimeout", + /// "sponsorApproveTimeout" + /// @param value Value of the limit. + /// @dev EVM selector for this function is: 0x6a3841db, + /// or in textual repr: setCollectionLimit(string,uint32) + function setCollectionLimit(string memory limit, uint32 value) external; + + /// Set limits for the collection. + /// @dev Throws error if limit not found. + /// @param limit Name of the limit. Valid names: + /// "ownerCanTransfer", + /// "ownerCanDestroy", + /// "transfersEnabled" + /// @param value Value of the limit. + /// @dev EVM selector for this function is: 0x993b7fba, + /// or in textual repr: setCollectionLimit(string,bool) + function setCollectionLimit(string memory limit, bool value) external; + + /// Get contract address. + /// @dev EVM selector for this function is: 0xf6b4dfb4, + /// or in textual repr: contractAddress() + function contractAddress() external view returns (address); + + /// Add collection admin. + /// @param newAdmin Address of the added administrator. + /// @dev EVM selector for this function is: 0x92e462c7, + /// or in textual repr: addCollectionAdmin(address) + function addCollectionAdmin(address newAdmin) external; + + /// Remove collection admin. + /// + /// @param admin Address of the removed administrator. + /// @dev EVM selector for this function is: 0xfafd7b42, + /// or in textual repr: removeCollectionAdmin(address) + function removeCollectionAdmin(address admin) external; + + /// Toggle accessibility of collection nesting. 
+ /// + /// @param enable If "true" degenerates to nesting: 'Owner' else to nesting: 'Disabled' + /// @dev EVM selector for this function is: 0x112d4586, + /// or in textual repr: setCollectionNesting(bool) + function setCollectionNesting(bool enable) external; + + /// Toggle accessibility of collection nesting. + /// + /// @param enable If "true" degenerates to nesting: {OwnerRestricted: [1, 2, 3]} else to nesting: 'Disabled' + /// @param collections Addresses of collections that will be available for nesting. + /// @dev EVM selector for this function is: 0x64872396, + /// or in textual repr: setCollectionNesting(bool,address[]) + function setCollectionNesting(bool enable, address[] memory collections) external; + + /// Set the collection access method. + /// @param mode Access mode + /// 0 for Normal + /// 1 for AllowList + /// @dev EVM selector for this function is: 0x41835d4c, + /// or in textual repr: setCollectionAccess(uint8) + function setCollectionAccess(uint8 mode) external; + + /// Checks that user allowed to operate with collection. + /// + /// @param user User address to check. + /// @dev EVM selector for this function is: 0xd63a8e11, + /// or in textual repr: allowed(address) + function allowed(address user) external view returns (bool); + + /// Add the user to the allowed list. + /// + /// @param user Address of a trusted user. + /// @dev EVM selector for this function is: 0x67844fe6, + /// or in textual repr: addToCollectionAllowList(address) + function addToCollectionAllowList(address user) external; + + /// Remove the user from the allowed list. + /// + /// @param user Address of a removed user. + /// @dev EVM selector for this function is: 0x85c51acb, + /// or in textual repr: removeFromCollectionAllowList(address) + function removeFromCollectionAllowList(address user) external; + + /// Switch permission for minting. + /// + /// @param mode Enable if "true". + /// @dev EVM selector for this function is: 0x00018e84, + /// or in textual repr: setCollectionMintMode(bool) + function setCollectionMintMode(bool mode) external; + + /// Check that account is the owner or admin of the collection + /// + /// @param user account to verify + /// @return "true" if account is the owner or admin + /// @dev EVM selector for this function is: 0x9811b0c7, + /// or in textual repr: isOwnerOrAdmin(address) + function isOwnerOrAdmin(address user) external view returns (bool); + + /// Returns collection type + /// + /// @return `Fungible` or `NFT` or `ReFungible` + /// @dev EVM selector for this function is: 0xd34b55b8, + /// or in textual repr: uniqueCollectionType() + function uniqueCollectionType() external view returns (string memory); + + /// Get collection owner. + /// + /// @return Tuble with sponsor address and his substrate mirror. + /// If address is canonical then substrate mirror is zero and vice versa. 
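Tying together the access-mode, allow-list and mint-mode setters above, a sketch of switching a collection to AllowList access (same test scaffolding assumed as before; the mode constant follows the setCollectionAccess comment: 0 for Normal, 1 for AllowList):

itEth('AllowList access configuration sketch', async ({helper}) => {
  const owner = await helper.eth.createAccountWithBalance(donor);
  const user = helper.eth.createAccount();

  const {collectionAddress} = await helper.eth.createNFTCollection(owner, 'A', 'B', 'C');
  const collectionEvm = helper.ethNativeContract.collection(collectionAddress, 'nft', owner);

  // Restrict the collection to allow-listed users and switch minting on for them.
  await collectionEvm.methods.setCollectionAccess(1).send({from: owner});
  await collectionEvm.methods.setCollectionMintMode(true).send({from: owner});
  await collectionEvm.methods.addToCollectionAllowList(user).send({from: owner});

  expect(await collectionEvm.methods.allowed(user).call({from: owner})).to.be.true;
  expect(await collectionEvm.methods.isOwnerOrAdmin(user).call({from: owner})).to.be.false;
});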
+ /// @dev EVM selector for this function is: 0xdf727d3b, + /// or in textual repr: collectionOwner() + function collectionOwner() external view returns (Tuple6 memory); + + /// Changes collection owner to another account + /// + /// @dev Owner can be changed only by current owner + /// @param newOwner new owner account + /// @dev EVM selector for this function is: 0x4f53e226, + /// or in textual repr: changeCollectionOwner(address) + function changeCollectionOwner(address newOwner) external; } -// Selector: 79cc6790 +/// @dev the ERC-165 identifier for this interface is 0x63034ac5 interface ERC20UniqueExtensions is Dummy, ERC165 { - // Selector: burnFrom(address,uint256) 79cc6790 + /// Burn tokens from account + /// @dev Function that burns an `amount` of the tokens of a given account, + /// deducting from the sender's allowance for said account. + /// @param from The account whose tokens will be burnt. + /// @param amount The amount that will be burnt. + /// @dev EVM selector for this function is: 0x79cc6790, + /// or in textual repr: burnFrom(address,uint256) function burnFrom(address from, uint256 amount) external returns (bool); + + /// Mint tokens for multiple accounts. + /// @param amounts array of pairs of account address and amount + /// @dev EVM selector for this function is: 0x1acf2d55, + /// or in textual repr: mintBulk((address,uint256)[]) + function mintBulk(Tuple6[] memory amounts) external returns (bool); +} + +/// @dev anonymous struct +struct Tuple6 { + address field_0; + uint256 field_1; +} + +/// @dev the ERC-165 identifier for this interface is 0x40c10f19 +interface ERC20Mintable is Dummy, ERC165 { + /// Mint tokens for `to` account. + /// @param to account that will receive minted tokens + /// @param amount amount of tokens to mint + /// @dev EVM selector for this function is: 0x40c10f19, + /// or in textual repr: mint(address,uint256) + function mint(address to, uint256 amount) external returns (bool); +} + +/// @dev inlined interface +interface ERC20Events { + event Transfer(address indexed from, address indexed to, uint256 value); + event Approval(address indexed owner, address indexed spender, uint256 value); } -// Selector: 942e8b22 +/// @dev the ERC-165 identifier for this interface is 0x942e8b22 interface ERC20 is Dummy, ERC165, ERC20Events { - // Selector: name() 06fdde03 + /// @dev EVM selector for this function is: 0x06fdde03, + /// or in textual repr: name() function name() external view returns (string memory); - // Selector: symbol() 95d89b41 + /// @dev EVM selector for this function is: 0x95d89b41, + /// or in textual repr: symbol() function symbol() external view returns (string memory); - // Selector: totalSupply() 18160ddd + /// @dev EVM selector for this function is: 0x18160ddd, + /// or in textual repr: totalSupply() function totalSupply() external view returns (uint256); - // Selector: decimals() 313ce567 + /// @dev EVM selector for this function is: 0x313ce567, + /// or in textual repr: decimals() function decimals() external view returns (uint8); - // Selector: balanceOf(address) 70a08231 + /// @dev EVM selector for this function is: 0x70a08231, + /// or in textual repr: balanceOf(address) function balanceOf(address owner) external view returns (uint256); - // Selector: transfer(address,uint256) a9059cbb + /// @dev EVM selector for this function is: 0xa9059cbb, + /// or in textual repr: transfer(address,uint256) function transfer(address to, uint256 amount) external returns (bool); - // Selector: transferFrom(address,address,uint256) 23b872dd + 
/// @dev EVM selector for this function is: 0x23b872dd, + /// or in textual repr: transferFrom(address,address,uint256) function transferFrom( address from, address to, uint256 amount ) external returns (bool); - // Selector: approve(address,uint256) 095ea7b3 + /// @dev EVM selector for this function is: 0x095ea7b3, + /// or in textual repr: approve(address,uint256) function approve(address spender, uint256 amount) external returns (bool); - // Selector: allowance(address,address) dd62ed3e - function allowance(address owner, address spender) - external - view - returns (uint256); -} - -// Selector: c894dc35 -interface Collection is Dummy, ERC165 { - // Selector: setCollectionProperty(string,bytes) 2f073f66 - function setCollectionProperty(string memory key, bytes memory value) - external; - - // Selector: deleteCollectionProperty(string) 7b7debce - function deleteCollectionProperty(string memory key) external; - - // Throws error if key not found - // - // Selector: collectionProperty(string) cf24fd6d - function collectionProperty(string memory key) - external - view - returns (bytes memory); - - // Selector: ethSetSponsor(address) 8f9af356 - function ethSetSponsor(address sponsor) external; - - // Selector: ethConfirmSponsorship() a8580d1a - function ethConfirmSponsorship() external; - - // Selector: setLimit(string,uint32) 68db30ca - function setLimit(string memory limit, uint32 value) external; - - // Selector: setLimit(string,bool) ea67e4c2 - function setLimit(string memory limit, bool value) external; - - // Selector: contractAddress() f6b4dfb4 - function contractAddress() external view returns (address); + /// @dev EVM selector for this function is: 0xdd62ed3e, + /// or in textual repr: allowance(address,address) + function allowance(address owner, address spender) external view returns (uint256); } -interface UniqueFungible is - Dummy, - ERC165, - ERC20, - ERC20UniqueExtensions, - Collection -{} +interface UniqueFungible is Dummy, ERC165, ERC20, ERC20Mintable, ERC20UniqueExtensions, Collection {} diff --git a/tests/src/eth/api/UniqueNFT.sol b/tests/src/eth/api/UniqueNFT.sol index 924eae2d5d..fc19cd00b6 100644 --- a/tests/src/eth/api/UniqueNFT.sol +++ b/tests/src/eth/api/UniqueNFT.sol @@ -3,13 +3,7 @@ pragma solidity >=0.8.0 <0.9.0; -// Anonymous struct -struct Tuple0 { - uint256 field_0; - string field_1; -} - -// Common stubs holder +/// @dev common stubs holder interface Dummy { } @@ -18,33 +12,17 @@ interface ERC165 is Dummy { function supportsInterface(bytes4 interfaceID) external view returns (bool); } -// Inline -interface ERC721Events { - event Transfer( - address indexed from, - address indexed to, - uint256 indexed tokenId - ); - event Approval( - address indexed owner, - address indexed approved, - uint256 indexed tokenId - ); - event ApprovalForAll( - address indexed owner, - address indexed operator, - bool approved - ); -} - -// Inline -interface ERC721MintableEvents { - event MintingFinished(); -} - -// Selector: 41369377 +/// @title A contract that allows to set and delete token properties and change token property permissions. +/// @dev the ERC-165 identifier for this interface is 0x41369377 interface TokenProperties is Dummy, ERC165 { - // Selector: setTokenPropertyPermission(string,bool,bool,bool) 222d97fa + /// @notice Set permissions for token property. + /// @dev Throws error if `msg.sender` is not admin or owner of the collection. + /// @param key Property key. + /// @param isMutable Permission to mutate property. 
+ /// @param collectionAdmin Permission to mutate property by collection admin if property is mutable. + /// @param tokenOwner Permission to mutate property by token owner if property is mutable. + /// @dev EVM selector for this function is: 0x222d97fa, + /// or in textual repr: setTokenPropertyPermission(string,bool,bool,bool) function setTokenPropertyPermission( string memory key, bool isMutable, @@ -52,240 +30,511 @@ interface TokenProperties is Dummy, ERC165 { bool tokenOwner ) external; - // Selector: setProperty(uint256,string,bytes) 1752d67b + /// @notice Set token property value. + /// @dev Throws error if `msg.sender` has no permission to edit the property. + /// @param tokenId ID of the token. + /// @param key Property key. + /// @param value Property value. + /// @dev EVM selector for this function is: 0x1752d67b, + /// or in textual repr: setProperty(uint256,string,bytes) function setProperty( uint256 tokenId, string memory key, bytes memory value ) external; - // Selector: deleteProperty(uint256,string) 066111d1 + /// @notice Delete token property value. + /// @dev Throws error if `msg.sender` has no permission to edit the property. + /// @param tokenId ID of the token. + /// @param key Property key. + /// @dev EVM selector for this function is: 0x066111d1, + /// or in textual repr: deleteProperty(uint256,string) function deleteProperty(uint256 tokenId, string memory key) external; - // Throws error if key not found - // - // Selector: property(uint256,string) 7228c327 - function property(uint256 tokenId, string memory key) - external - view - returns (bytes memory); + /// @notice Get token property value. + /// @dev Throws error if key not found + /// @param tokenId ID of the token. + /// @param key Property key. + /// @return Property value bytes + /// @dev EVM selector for this function is: 0x7228c327, + /// or in textual repr: property(uint256,string) + function property(uint256 tokenId, string memory key) external view returns (bytes memory); } -// Selector: 42966c68 -interface ERC721Burnable is Dummy, ERC165 { - // Selector: burn(uint256) 42966c68 - function burn(uint256 tokenId) external; -} +/// @title A contract that allows you to work with collections. +/// @dev the ERC-165 identifier for this interface is 0x62e22290 +interface Collection is Dummy, ERC165 { + /// Set collection property. + /// + /// @param key Property key. + /// @param value Propery value. + /// @dev EVM selector for this function is: 0x2f073f66, + /// or in textual repr: setCollectionProperty(string,bytes) + function setCollectionProperty(string memory key, bytes memory value) external; + + /// Delete collection property. + /// + /// @param key Property key. + /// @dev EVM selector for this function is: 0x7b7debce, + /// or in textual repr: deleteCollectionProperty(string) + function deleteCollectionProperty(string memory key) external; -// Selector: 58800161 -interface ERC721 is Dummy, ERC165, ERC721Events { - // Selector: balanceOf(address) 70a08231 - function balanceOf(address owner) external view returns (uint256); + /// Get collection property. + /// + /// @dev Throws error if key not found. + /// + /// @param key Property key. + /// @return bytes The property corresponding to the key. + /// @dev EVM selector for this function is: 0xcf24fd6d, + /// or in textual repr: collectionProperty(string) + function collectionProperty(string memory key) external view returns (bytes memory); + + /// Set the sponsor of the collection. 
+ /// + /// @dev In order for sponsorship to work, it must be confirmed on behalf of the sponsor. + /// + /// @param sponsor Address of the sponsor from whose account funds will be debited for operations with the contract. + /// @dev EVM selector for this function is: 0x7623402e, + /// or in textual repr: setCollectionSponsor(address) + function setCollectionSponsor(address sponsor) external; - // Selector: ownerOf(uint256) 6352211e - function ownerOf(uint256 tokenId) external view returns (address); + /// Whether there is a pending sponsor. + /// @dev EVM selector for this function is: 0x058ac185, + /// or in textual repr: hasCollectionPendingSponsor() + function hasCollectionPendingSponsor() external view returns (bool); - // Not implemented - // - // Selector: safeTransferFromWithData(address,address,uint256,bytes) 60a11672 - function safeTransferFromWithData( - address from, - address to, - uint256 tokenId, - bytes memory data - ) external; + /// Collection sponsorship confirmation. + /// + /// @dev After setting the sponsor for the collection, it must be confirmed with this function. + /// @dev EVM selector for this function is: 0x3c50e97a, + /// or in textual repr: confirmCollectionSponsorship() + function confirmCollectionSponsorship() external; - // Not implemented - // - // Selector: safeTransferFrom(address,address,uint256) 42842e0e - function safeTransferFrom( - address from, - address to, - uint256 tokenId - ) external; + /// Remove collection sponsor. + /// @dev EVM selector for this function is: 0x6e0326a3, + /// or in textual repr: removeCollectionSponsor() + function removeCollectionSponsor() external; + + /// Get current sponsor. + /// + /// @return Tuble with sponsor address and his substrate mirror. If there is no confirmed sponsor error "Contract has no sponsor" throw. + /// @dev EVM selector for this function is: 0x6ec0a9f1, + /// or in textual repr: collectionSponsor() + function collectionSponsor() external view returns (Tuple17 memory); + + /// Set limits for the collection. + /// @dev Throws error if limit not found. + /// @param limit Name of the limit. Valid names: + /// "accountTokenOwnershipLimit", + /// "sponsoredDataSize", + /// "sponsoredDataRateLimit", + /// "tokenLimit", + /// "sponsorTransferTimeout", + /// "sponsorApproveTimeout" + /// @param value Value of the limit. + /// @dev EVM selector for this function is: 0x6a3841db, + /// or in textual repr: setCollectionLimit(string,uint32) + function setCollectionLimit(string memory limit, uint32 value) external; - // Selector: transferFrom(address,address,uint256) 23b872dd - function transferFrom( - address from, - address to, - uint256 tokenId - ) external; + /// Set limits for the collection. + /// @dev Throws error if limit not found. + /// @param limit Name of the limit. Valid names: + /// "ownerCanTransfer", + /// "ownerCanDestroy", + /// "transfersEnabled" + /// @param value Value of the limit. + /// @dev EVM selector for this function is: 0x993b7fba, + /// or in textual repr: setCollectionLimit(string,bool) + function setCollectionLimit(string memory limit, bool value) external; - // Selector: approve(address,uint256) 095ea7b3 - function approve(address approved, uint256 tokenId) external; + /// Get contract address. 
+ /// @dev EVM selector for this function is: 0xf6b4dfb4, + /// or in textual repr: contractAddress() + function contractAddress() external view returns (address); - // Not implemented - // - // Selector: setApprovalForAll(address,bool) a22cb465 - function setApprovalForAll(address operator, bool approved) external; + /// Add collection admin. + /// @param newAdmin Address of the added administrator. + /// @dev EVM selector for this function is: 0x92e462c7, + /// or in textual repr: addCollectionAdmin(address) + function addCollectionAdmin(address newAdmin) external; + + /// Remove collection admin. + /// + /// @param admin Address of the removed administrator. + /// @dev EVM selector for this function is: 0xfafd7b42, + /// or in textual repr: removeCollectionAdmin(address) + function removeCollectionAdmin(address admin) external; + + /// Toggle accessibility of collection nesting. + /// + /// @param enable If "true" degenerates to nesting: 'Owner' else to nesting: 'Disabled' + /// @dev EVM selector for this function is: 0x112d4586, + /// or in textual repr: setCollectionNesting(bool) + function setCollectionNesting(bool enable) external; - // Not implemented - // - // Selector: getApproved(uint256) 081812fc - function getApproved(uint256 tokenId) external view returns (address); + /// Toggle accessibility of collection nesting. + /// + /// @param enable If "true" degenerates to nesting: {OwnerRestricted: [1, 2, 3]} else to nesting: 'Disabled' + /// @param collections Addresses of collections that will be available for nesting. + /// @dev EVM selector for this function is: 0x64872396, + /// or in textual repr: setCollectionNesting(bool,address[]) + function setCollectionNesting(bool enable, address[] memory collections) external; + + /// Set the collection access method. + /// @param mode Access mode + /// 0 for Normal + /// 1 for AllowList + /// @dev EVM selector for this function is: 0x41835d4c, + /// or in textual repr: setCollectionAccess(uint8) + function setCollectionAccess(uint8 mode) external; - // Not implemented - // - // Selector: isApprovedForAll(address,address) e985e9c5 - function isApprovedForAll(address owner, address operator) - external - view - returns (address); -} + /// Checks that user allowed to operate with collection. + /// + /// @param user User address to check. + /// @dev EVM selector for this function is: 0xd63a8e11, + /// or in textual repr: allowed(address) + function allowed(address user) external view returns (bool); + + /// Add the user to the allowed list. + /// + /// @param user Address of a trusted user. + /// @dev EVM selector for this function is: 0x67844fe6, + /// or in textual repr: addToCollectionAllowList(address) + function addToCollectionAllowList(address user) external; + + /// Remove the user from the allowed list. + /// + /// @param user Address of a removed user. + /// @dev EVM selector for this function is: 0x85c51acb, + /// or in textual repr: removeFromCollectionAllowList(address) + function removeFromCollectionAllowList(address user) external; + + /// Switch permission for minting. + /// + /// @param mode Enable if "true". 
+ /// @dev EVM selector for this function is: 0x00018e84, + /// or in textual repr: setCollectionMintMode(bool) + function setCollectionMintMode(bool mode) external; -// Selector: 5b5e139f -interface ERC721Metadata is Dummy, ERC165 { - // Selector: name() 06fdde03 - function name() external view returns (string memory); + /// Check that account is the owner or admin of the collection + /// + /// @param user account to verify + /// @return "true" if account is the owner or admin + /// @dev EVM selector for this function is: 0x9811b0c7, + /// or in textual repr: isOwnerOrAdmin(address) + function isOwnerOrAdmin(address user) external view returns (bool); + + /// Returns collection type + /// + /// @return `Fungible` or `NFT` or `ReFungible` + /// @dev EVM selector for this function is: 0xd34b55b8, + /// or in textual repr: uniqueCollectionType() + function uniqueCollectionType() external view returns (string memory); + + /// Get collection owner. + /// + /// @return Tuble with sponsor address and his substrate mirror. + /// If address is canonical then substrate mirror is zero and vice versa. + /// @dev EVM selector for this function is: 0xdf727d3b, + /// or in textual repr: collectionOwner() + function collectionOwner() external view returns (Tuple17 memory); + + /// Changes collection owner to another account + /// + /// @dev Owner can be changed only by current owner + /// @param newOwner new owner account + /// @dev EVM selector for this function is: 0x4f53e226, + /// or in textual repr: changeCollectionOwner(address) + function changeCollectionOwner(address newOwner) external; +} - // Selector: symbol() 95d89b41 - function symbol() external view returns (string memory); +/// @dev anonymous struct +struct Tuple17 { + address field_0; + uint256 field_1; +} - // Returns token's const_metadata - // - // Selector: tokenURI(uint256) c87b56dd +/// @title ERC-721 Non-Fungible Token Standard, optional metadata extension +/// @dev See https://eips.ethereum.org/EIPS/eip-721 +/// @dev the ERC-165 identifier for this interface is 0x5b5e139f +interface ERC721Metadata is Dummy, ERC165 { + // /// @notice A descriptive name for a collection of NFTs in this contract + // /// @dev real implementation of this function lies in `ERC721UniqueExtensions` + // /// @dev EVM selector for this function is: 0x06fdde03, + // /// or in textual repr: name() + // function name() external view returns (string memory); + + // /// @notice An abbreviated name for NFTs in this contract + // /// @dev real implementation of this function lies in `ERC721UniqueExtensions` + // /// @dev EVM selector for this function is: 0x95d89b41, + // /// or in textual repr: symbol() + // function symbol() external view returns (string memory); + + /// @notice A distinct Uniform Resource Identifier (URI) for a given asset. + /// + /// @dev If the token has a `url` property and it is not empty, it is returned. + /// Else If the collection does not have a property with key `schemaName` or its value is not equal to `ERC721Metadata`, it return an error `tokenURI not set`. + /// If the collection property `baseURI` is empty or absent, return "" (empty string) + /// otherwise, if token property `suffix` present and is non-empty, return concatenation of baseURI and suffix + /// otherwise, return concatenation of `baseURI` and stringified token id (decimal stringifying, without paddings). 
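The resolution order described in the comment above can be restated as a small pure function. This is only a readability aid that paraphrases the comment, with the property names (`url`, `schemaName`, `baseURI`, `suffix`) taken verbatim from it; it is not the on-chain implementation.

// Paraphrase of the tokenURI fallback rules documented above.
function resolveTokenURI(
  tokenProps: {url?: string; suffix?: string},
  collectionProps: {schemaName?: string; baseURI?: string},
  tokenId: bigint,
): string {
  if (tokenProps.url) return tokenProps.url;
  if (collectionProps.schemaName !== 'ERC721Metadata') throw new Error('tokenURI not set');
  if (!collectionProps.baseURI) return '';
  if (tokenProps.suffix) return collectionProps.baseURI + tokenProps.suffix;
  return collectionProps.baseURI + tokenId.toString(); // decimal, no padding
}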
+ /// + /// @return token's const_metadata + /// @dev EVM selector for this function is: 0xc87b56dd, + /// or in textual repr: tokenURI(uint256) function tokenURI(uint256 tokenId) external view returns (string memory); } -// Selector: 68ccfe89 -interface ERC721Mintable is Dummy, ERC165, ERC721MintableEvents { - // Selector: mintingFinished() 05d2035b - function mintingFinished() external view returns (bool); - - // `token_id` should be obtained with `next_token_id` method, - // unlike standard, you can't specify it manually - // - // Selector: mint(address,uint256) 40c10f19 - function mint(address to, uint256 tokenId) external returns (bool); - - // `token_id` should be obtained with `next_token_id` method, - // unlike standard, you can't specify it manually - // - // Selector: mintWithTokenURI(address,uint256,string) 50bb4e7f - function mintWithTokenURI( - address to, - uint256 tokenId, - string memory tokenUri - ) external returns (bool); - - // Not implemented - // - // Selector: finishMinting() 7d64bcb4 - function finishMinting() external returns (bool); +/// @title ERC721 Token that can be irreversibly burned (destroyed). +/// @dev the ERC-165 identifier for this interface is 0x42966c68 +interface ERC721Burnable is Dummy, ERC165 { + /// @notice Burns a specific ERC721 token. + /// @dev Throws unless `msg.sender` is the current NFT owner, or an authorized + /// operator of the current owner. + /// @param tokenId The NFT to approve + /// @dev EVM selector for this function is: 0x42966c68, + /// or in textual repr: burn(uint256) + function burn(uint256 tokenId) external; } -// Selector: 780e9d63 -interface ERC721Enumerable is Dummy, ERC165 { - // Selector: tokenByIndex(uint256) 4f6ccce7 - function tokenByIndex(uint256 index) external view returns (uint256); - - // Not implemented - // - // Selector: tokenOfOwnerByIndex(address,uint256) 2f745c59 - function tokenOfOwnerByIndex(address owner, uint256 index) - external - view - returns (uint256); - - // Selector: totalSupply() 18160ddd - function totalSupply() external view returns (uint256); +/// @dev inlined interface +interface ERC721UniqueMintableEvents { + event MintingFinished(); } -// Selector: 7d9262e6 -interface Collection is Dummy, ERC165 { - // Selector: setCollectionProperty(string,bytes) 2f073f66 - function setCollectionProperty(string memory key, bytes memory value) - external; - - // Selector: deleteCollectionProperty(string) 7b7debce - function deleteCollectionProperty(string memory key) external; +/// @title ERC721 minting logic. +/// @dev the ERC-165 identifier for this interface is 0x476ff149 +interface ERC721UniqueMintable is Dummy, ERC165, ERC721UniqueMintableEvents { + /// @dev EVM selector for this function is: 0x05d2035b, + /// or in textual repr: mintingFinished() + function mintingFinished() external view returns (bool); - // Throws error if key not found - // - // Selector: collectionProperty(string) cf24fd6d - function collectionProperty(string memory key) - external - view - returns (bytes memory); + /// @notice Function to mint token. + /// @param to The new owner + /// @return uint256 The id of the newly minted token + /// @dev EVM selector for this function is: 0x6a627842, + /// or in textual repr: mint(address) + function mint(address to) external returns (uint256); + + // /// @notice Function to mint token. 
+ // /// @dev `tokenId` should be obtained with `nextTokenId` method, + // /// unlike standard, you can't specify it manually + // /// @param to The new owner + // /// @param tokenId ID of the minted NFT + // /// @dev EVM selector for this function is: 0x40c10f19, + // /// or in textual repr: mint(address,uint256) + // function mint(address to, uint256 tokenId) external returns (bool); + + /// @notice Function to mint token with the given tokenUri. + /// @param to The new owner + /// @param tokenUri Token URI that would be stored in the NFT properties + /// @return uint256 The id of the newly minted token + /// @dev EVM selector for this function is: 0x45c17782, + /// or in textual repr: mintWithTokenURI(address,string) + function mintWithTokenURI(address to, string memory tokenUri) external returns (uint256); + + // /// @notice Function to mint token with the given tokenUri. + // /// @dev `tokenId` should be obtained with `nextTokenId` method, + // /// unlike standard, you can't specify it manually + // /// @param to The new owner + // /// @param tokenId ID of the minted NFT + // /// @param tokenUri Token URI that would be stored in the NFT properties + // /// @dev EVM selector for this function is: 0x50bb4e7f, + // /// or in textual repr: mintWithTokenURI(address,uint256,string) + // function mintWithTokenURI(address to, uint256 tokenId, string memory tokenUri) external returns (bool); + + /// @dev Not implemented + /// @dev EVM selector for this function is: 0x7d64bcb4, + /// or in textual repr: finishMinting() + function finishMinting() external returns (bool); +} - // Selector: setCollectionSponsor(address) 7623402e - function setCollectionSponsor(address sponsor) external; +/// @title Unique extensions for ERC721. +/// @dev the ERC-165 identifier for this interface is 0x4468500d +interface ERC721UniqueExtensions is Dummy, ERC165 { + /// @notice A descriptive name for a collection of NFTs in this contract + /// @dev EVM selector for this function is: 0x06fdde03, + /// or in textual repr: name() + function name() external view returns (string memory); - // Selector: confirmCollectionSponsorship() 3c50e97a - function confirmCollectionSponsorship() external; + /// @notice An abbreviated name for NFTs in this contract + /// @dev EVM selector for this function is: 0x95d89b41, + /// or in textual repr: symbol() + function symbol() external view returns (string memory); - // Selector: setCollectionLimit(string,uint32) 6a3841db - function setCollectionLimit(string memory limit, uint32 value) external; + /// @notice Transfer ownership of an NFT + /// @dev Throws unless `msg.sender` is the current owner. Throws if `to` + /// is the zero address. Throws if `tokenId` is not a valid NFT. + /// @param to The new owner + /// @param tokenId The NFT to transfer + /// @dev EVM selector for this function is: 0xa9059cbb, + /// or in textual repr: transfer(address,uint256) + function transfer(address to, uint256 tokenId) external; - // Selector: setCollectionLimit(string,bool) 993b7fba - function setCollectionLimit(string memory limit, bool value) external; + /// @notice Burns a specific ERC721 token. + /// @dev Throws unless `msg.sender` is the current owner or an authorized + /// operator for this NFT. Throws if `from` is not the current owner. Throws + /// if `to` is the zero address. Throws if `tokenId` is not a valid NFT. 
+ /// @param from The current owner of the NFT + /// @param tokenId The NFT to transfer + /// @dev EVM selector for this function is: 0x79cc6790, + /// or in textual repr: burnFrom(address,uint256) + function burnFrom(address from, uint256 tokenId) external; - // Selector: contractAddress() f6b4dfb4 - function contractAddress() external view returns (address); + /// @notice Returns next free NFT ID. + /// @dev EVM selector for this function is: 0x75794a3c, + /// or in textual repr: nextTokenId() + function nextTokenId() external view returns (uint256); + // /// @notice Function to mint multiple tokens. + // /// @dev `tokenIds` should be an array of consecutive numbers and first number + // /// should be obtained with `nextTokenId` method + // /// @param to The new owner + // /// @param tokenIds IDs of the minted NFTs + // /// @dev EVM selector for this function is: 0x44a9945e, + // /// or in textual repr: mintBulk(address,uint256[]) + // function mintBulk(address to, uint256[] memory tokenIds) external returns (bool); + + // /// @notice Function to mint multiple tokens with the given tokenUris. + // /// @dev `tokenIds` is array of pairs of token ID and token URI. Token IDs should be consecutive + // /// numbers and first number should be obtained with `nextTokenId` method + // /// @param to The new owner + // /// @param tokens array of pairs of token ID and token URI for minted tokens + // /// @dev EVM selector for this function is: 0x36543006, + // /// or in textual repr: mintBulkWithTokenURI(address,(uint256,string)[]) + // function mintBulkWithTokenURI(address to, Tuple6[] memory tokens) external returns (bool); - // Selector: addCollectionAdminSubstrate(uint256) 5730062b - function addCollectionAdminSubstrate(uint256 newAdmin) external view; +} - // Selector: removeCollectionAdminSubstrate(uint256) 4048fcf9 - function removeCollectionAdminSubstrate(uint256 newAdmin) external view; +/// @dev anonymous struct +struct Tuple6 { + uint256 field_0; + string field_1; +} - // Selector: addCollectionAdmin(address) 92e462c7 - function addCollectionAdmin(address newAdmin) external view; +/// @title ERC-721 Non-Fungible Token Standard, optional enumeration extension +/// @dev See https://eips.ethereum.org/EIPS/eip-721 +/// @dev the ERC-165 identifier for this interface is 0x780e9d63 +interface ERC721Enumerable is Dummy, ERC165 { + /// @notice Enumerate valid NFTs + /// @param index A counter less than `totalSupply()` + /// @return The token identifier for the `index`th NFT, + /// (sort order not specified) + /// @dev EVM selector for this function is: 0x4f6ccce7, + /// or in textual repr: tokenByIndex(uint256) + function tokenByIndex(uint256 index) external view returns (uint256); - // Selector: removeCollectionAdmin(address) fafd7b42 - function removeCollectionAdmin(address admin) external view; + /// @dev Not implemented + /// @dev EVM selector for this function is: 0x2f745c59, + /// or in textual repr: tokenOfOwnerByIndex(address,uint256) + function tokenOfOwnerByIndex(address owner, uint256 index) external view returns (uint256); - // Selector: setCollectionNesting(bool) 112d4586 - function setCollectionNesting(bool enable) external; + /// @notice Count NFTs tracked by this contract + /// @return A count of valid NFTs tracked by this contract, where each one of + /// them has an assigned and queryable owner not equal to the zero address + /// @dev EVM selector for this function is: 0x18160ddd, + /// or in textual repr: totalSupply() + function totalSupply() external view returns (uint256); +} 
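Since the mint functions above differ from the commented-out legacy variants mainly in that token IDs are now assigned automatically, here is a short sketch of the new flow, again assuming the `itEth`/`helper`/`donor` scaffolding from the tests in this diff (mintWithTokenURI additionally stores the URI in the token's properties and is not exercised here):

itEth('Auto-ID minting sketch', async ({helper}) => {
  const owner = await helper.eth.createAccountWithBalance(donor);
  const receiver = helper.eth.createAccount();

  const {collectionAddress} = await helper.eth.createNFTCollection(owner, 'A', 'B', 'C');
  const collectionEvm = helper.ethNativeContract.collection(collectionAddress, 'nft', owner);

  // The next free ID can be observed, but cannot be chosen by the caller.
  const expectedId = await collectionEvm.methods.nextTokenId().call();

  // mint(address) assigns that ID itself and returns it.
  await collectionEvm.methods.mint(receiver).send({from: owner});

  expect(await collectionEvm.methods.ownerOf(expectedId).call()).to.be.ok;
  expect(Number(await collectionEvm.methods.totalSupply().call())).to.equal(1);
});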
- // Selector: setCollectionNesting(bool,address[]) 64872396 - function setCollectionNesting(bool enable, address[] memory collections) - external; +/// @dev inlined interface +interface ERC721Events { + event Transfer(address indexed from, address indexed to, uint256 indexed tokenId); + event Approval(address indexed owner, address indexed approved, uint256 indexed tokenId); + event ApprovalForAll(address indexed owner, address indexed operator, bool approved); +} - // Selector: setCollectionAccess(uint8) 41835d4c - function setCollectionAccess(uint8 mode) external; +/// @title ERC-721 Non-Fungible Token Standard +/// @dev See https://github.com/ethereum/EIPs/blob/master/EIPS/eip-721.md +/// @dev the ERC-165 identifier for this interface is 0x80ac58cd +interface ERC721 is Dummy, ERC165, ERC721Events { + /// @notice Count all NFTs assigned to an owner + /// @dev NFTs assigned to the zero address are considered invalid, and this + /// function throws for queries about the zero address. + /// @param owner An address for whom to query the balance + /// @return The number of NFTs owned by `owner`, possibly zero + /// @dev EVM selector for this function is: 0x70a08231, + /// or in textual repr: balanceOf(address) + function balanceOf(address owner) external view returns (uint256); - // Selector: addToCollectionAllowList(address) 67844fe6 - function addToCollectionAllowList(address user) external view; + /// @notice Find the owner of an NFT + /// @dev NFTs assigned to zero address are considered invalid, and queries + /// about them do throw. + /// @param tokenId The identifier for an NFT + /// @return The address of the owner of the NFT + /// @dev EVM selector for this function is: 0x6352211e, + /// or in textual repr: ownerOf(uint256) + function ownerOf(uint256 tokenId) external view returns (address); - // Selector: removeFromCollectionAllowList(address) 85c51acb - function removeFromCollectionAllowList(address user) external view; + /// @dev Not implemented + /// @dev EVM selector for this function is: 0xb88d4fde, + /// or in textual repr: safeTransferFrom(address,address,uint256,bytes) + function safeTransferFrom( + address from, + address to, + uint256 tokenId, + bytes memory data + ) external; - // Selector: setCollectionMintMode(bool) 00018e84 - function setCollectionMintMode(bool mode) external; -} + /// @dev Not implemented + /// @dev EVM selector for this function is: 0x42842e0e, + /// or in textual repr: safeTransferFrom(address,address,uint256) + function safeTransferFrom( + address from, + address to, + uint256 tokenId + ) external; -// Selector: d74d154f -interface ERC721UniqueExtensions is Dummy, ERC165 { - // Selector: transfer(address,uint256) a9059cbb - function transfer(address to, uint256 tokenId) external; + /// @notice Transfer ownership of an NFT -- THE CALLER IS RESPONSIBLE + /// TO CONFIRM THAT `to` IS CAPABLE OF RECEIVING NFTS OR ELSE + /// THEY MAY BE PERMANENTLY LOST + /// @dev Throws unless `msg.sender` is the current owner or an authorized + /// operator for this NFT. Throws if `from` is not the current owner. Throws + /// if `to` is the zero address. Throws if `tokenId` is not a valid NFT. 
+ /// @param from The current owner of the NFT + /// @param to The new owner + /// @param tokenId The NFT to transfer + /// @dev EVM selector for this function is: 0x23b872dd, + /// or in textual repr: transferFrom(address,address,uint256) + function transferFrom( + address from, + address to, + uint256 tokenId + ) external; - // Selector: burnFrom(address,uint256) 79cc6790 - function burnFrom(address from, uint256 tokenId) external; + /// @notice Set or reaffirm the approved address for an NFT + /// @dev The zero address indicates there is no approved address. + /// @dev Throws unless `msg.sender` is the current NFT owner, or an authorized + /// operator of the current owner. + /// @param approved The new approved NFT controller + /// @param tokenId The NFT to approve + /// @dev EVM selector for this function is: 0x095ea7b3, + /// or in textual repr: approve(address,uint256) + function approve(address approved, uint256 tokenId) external; - // Selector: nextTokenId() 75794a3c - function nextTokenId() external view returns (uint256); + /// @dev Not implemented + /// @dev EVM selector for this function is: 0xa22cb465, + /// or in textual repr: setApprovalForAll(address,bool) + function setApprovalForAll(address operator, bool approved) external; - // Selector: mintBulk(address,uint256[]) 44a9945e - function mintBulk(address to, uint256[] memory tokenIds) - external - returns (bool); + /// @dev Not implemented + /// @dev EVM selector for this function is: 0x081812fc, + /// or in textual repr: getApproved(uint256) + function getApproved(uint256 tokenId) external view returns (address); - // Selector: mintBulkWithTokenURI(address,(uint256,string)[]) 36543006 - function mintBulkWithTokenURI(address to, Tuple0[] memory tokens) - external - returns (bool); + /// @dev Not implemented + /// @dev EVM selector for this function is: 0xe985e9c5, + /// or in textual repr: isApprovedForAll(address,address) + function isApprovedForAll(address owner, address operator) external view returns (address); } interface UniqueNFT is Dummy, ERC165, ERC721, - ERC721Metadata, ERC721Enumerable, ERC721UniqueExtensions, - ERC721Mintable, + ERC721UniqueMintable, ERC721Burnable, + ERC721Metadata, Collection, TokenProperties {} diff --git a/tests/src/eth/api/UniqueRefungible.sol b/tests/src/eth/api/UniqueRefungible.sol new file mode 100644 index 0000000000..7ffb9d302e --- /dev/null +++ b/tests/src/eth/api/UniqueRefungible.sol @@ -0,0 +1,545 @@ +// SPDX-License-Identifier: OTHER +// This code is automatically generated + +pragma solidity >=0.8.0 <0.9.0; + +/// @dev common stubs holder +interface Dummy { + +} + +interface ERC165 is Dummy { + function supportsInterface(bytes4 interfaceID) external view returns (bool); +} + +/// @title A contract that allows to set and delete token properties and change token property permissions. +/// @dev the ERC-165 identifier for this interface is 0x41369377 +interface TokenProperties is Dummy, ERC165 { + /// @notice Set permissions for token property. + /// @dev Throws error if `msg.sender` is not admin or owner of the collection. + /// @param key Property key. + /// @param isMutable Permission to mutate property. + /// @param collectionAdmin Permission to mutate property by collection admin if property is mutable. + /// @param tokenOwner Permission to mutate property by token owner if property is mutable. 
+ /// @dev EVM selector for this function is: 0x222d97fa, + /// or in textual repr: setTokenPropertyPermission(string,bool,bool,bool) + function setTokenPropertyPermission( + string memory key, + bool isMutable, + bool collectionAdmin, + bool tokenOwner + ) external; + + /// @notice Set token property value. + /// @dev Throws error if `msg.sender` has no permission to edit the property. + /// @param tokenId ID of the token. + /// @param key Property key. + /// @param value Property value. + /// @dev EVM selector for this function is: 0x1752d67b, + /// or in textual repr: setProperty(uint256,string,bytes) + function setProperty( + uint256 tokenId, + string memory key, + bytes memory value + ) external; + + /// @notice Delete token property value. + /// @dev Throws error if `msg.sender` has no permission to edit the property. + /// @param tokenId ID of the token. + /// @param key Property key. + /// @dev EVM selector for this function is: 0x066111d1, + /// or in textual repr: deleteProperty(uint256,string) + function deleteProperty(uint256 tokenId, string memory key) external; + + /// @notice Get token property value. + /// @dev Throws error if key not found + /// @param tokenId ID of the token. + /// @param key Property key. + /// @return Property value bytes + /// @dev EVM selector for this function is: 0x7228c327, + /// or in textual repr: property(uint256,string) + function property(uint256 tokenId, string memory key) external view returns (bytes memory); +} + +/// @title A contract that allows you to work with collections. +/// @dev the ERC-165 identifier for this interface is 0x62e22290 +interface Collection is Dummy, ERC165 { + /// Set collection property. + /// + /// @param key Property key. + /// @param value Propery value. + /// @dev EVM selector for this function is: 0x2f073f66, + /// or in textual repr: setCollectionProperty(string,bytes) + function setCollectionProperty(string memory key, bytes memory value) external; + + /// Delete collection property. + /// + /// @param key Property key. + /// @dev EVM selector for this function is: 0x7b7debce, + /// or in textual repr: deleteCollectionProperty(string) + function deleteCollectionProperty(string memory key) external; + + /// Get collection property. + /// + /// @dev Throws error if key not found. + /// + /// @param key Property key. + /// @return bytes The property corresponding to the key. + /// @dev EVM selector for this function is: 0xcf24fd6d, + /// or in textual repr: collectionProperty(string) + function collectionProperty(string memory key) external view returns (bytes memory); + + /// Set the sponsor of the collection. + /// + /// @dev In order for sponsorship to work, it must be confirmed on behalf of the sponsor. + /// + /// @param sponsor Address of the sponsor from whose account funds will be debited for operations with the contract. + /// @dev EVM selector for this function is: 0x7623402e, + /// or in textual repr: setCollectionSponsor(address) + function setCollectionSponsor(address sponsor) external; + + /// Whether there is a pending sponsor. + /// @dev EVM selector for this function is: 0x058ac185, + /// or in textual repr: hasCollectionPendingSponsor() + function hasCollectionPendingSponsor() external view returns (bool); + + /// Collection sponsorship confirmation. + /// + /// @dev After setting the sponsor for the collection, it must be confirmed with this function. 
+ /// Collection sponsorship confirmation. + /// + /// @dev After setting the sponsor for the collection, it must be confirmed with this function. + /// @dev EVM selector for this function is: 0x3c50e97a, + /// or in textual repr: confirmCollectionSponsorship() + function confirmCollectionSponsorship() external; + + /// Remove collection sponsor. + /// @dev EVM selector for this function is: 0x6e0326a3, + /// or in textual repr: removeCollectionSponsor() + function removeCollectionSponsor() external; + + /// Get current sponsor. + /// + /// @return Tuple with the sponsor address and its substrate mirror. If there is no confirmed sponsor, the error "Contract has no sponsor" is thrown. + /// @dev EVM selector for this function is: 0x6ec0a9f1, + /// or in textual repr: collectionSponsor() + function collectionSponsor() external view returns (Tuple17 memory); + + /// Set limits for the collection. + /// @dev Throws error if limit not found. + /// @param limit Name of the limit. Valid names: + /// "accountTokenOwnershipLimit", + /// "sponsoredDataSize", + /// "sponsoredDataRateLimit", + /// "tokenLimit", + /// "sponsorTransferTimeout", + /// "sponsorApproveTimeout" + /// @param value Value of the limit. + /// @dev EVM selector for this function is: 0x6a3841db, + /// or in textual repr: setCollectionLimit(string,uint32) + function setCollectionLimit(string memory limit, uint32 value) external; + + /// Set limits for the collection. + /// @dev Throws error if limit not found. + /// @param limit Name of the limit. Valid names: + /// "ownerCanTransfer", + /// "ownerCanDestroy", + /// "transfersEnabled" + /// @param value Value of the limit. + /// @dev EVM selector for this function is: 0x993b7fba, + /// or in textual repr: setCollectionLimit(string,bool) + function setCollectionLimit(string memory limit, bool value) external; + + /// Get contract address. + /// @dev EVM selector for this function is: 0xf6b4dfb4, + /// or in textual repr: contractAddress() + function contractAddress() external view returns (address); + + /// Add collection admin. + /// @param newAdmin Address of the added administrator. + /// @dev EVM selector for this function is: 0x92e462c7, + /// or in textual repr: addCollectionAdmin(address) + function addCollectionAdmin(address newAdmin) external; + + /// Remove collection admin. + /// + /// @param admin Address of the removed administrator. + /// @dev EVM selector for this function is: 0xfafd7b42, + /// or in textual repr: removeCollectionAdmin(address) + function removeCollectionAdmin(address admin) external; + + /// Toggle accessibility of collection nesting. + /// + /// @param enable If "true" degenerates to nesting: 'Owner' else to nesting: 'Disabled' + /// @dev EVM selector for this function is: 0x112d4586, + /// or in textual repr: setCollectionNesting(bool) + function setCollectionNesting(bool enable) external; + + /// Toggle accessibility of collection nesting. + /// + /// @param enable If "true" degenerates to nesting: {OwnerRestricted: [1, 2, 3]} else to nesting: 'Disabled' + /// @param collections Addresses of collections that will be available for nesting. + /// @dev EVM selector for this function is: 0x64872396, + /// or in textual repr: setCollectionNesting(bool,address[]) + function setCollectionNesting(bool enable, address[] memory collections) external; + + /// Set the collection access method. + /// @param mode Access mode + /// 0 for Normal + /// 1 for AllowList + /// @dev EVM selector for this function is: 0x41835d4c, + /// or in textual repr: setCollectionAccess(uint8) + function setCollectionAccess(uint8 mode) external; +
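To make the two `setCollectionLimit` overloads and `setCollectionAccess` above concrete, here is a short usage sketch in the style of the tests later in this diff. Because `setCollectionLimit` is overloaded, web3.js is addressed by full signature to avoid ambiguous resolution; the `owner`/`donor` setup mirrors the tests and is an assumption, as is the presence of these methods in the collection ABI used by `ethNativeContract`.

```typescript
itEth('Collection limits and access mode (sketch)', async ({helper}) => {
  const owner = await helper.eth.createAccountWithBalance(donor);
  const {collectionAddress} = await helper.eth.createNFTCollection(owner, 'A', 'B', 'C');
  const collectionEvm = helper.ethNativeContract.collection(collectionAddress, 'nft', owner);

  // uint32 overload: numeric limits such as "tokenLimit" (selector 0x6a3841db)
  await collectionEvm.methods['setCollectionLimit(string,uint32)']('tokenLimit', 100).send();
  // bool overload: flag limits such as "transfersEnabled" (selector 0x993b7fba)
  await collectionEvm.methods['setCollectionLimit(string,bool)']('transfersEnabled', false).send();
  // Access mode: 0 for Normal, 1 for AllowList
  await collectionEvm.methods.setCollectionAccess(1).send();
});
```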
+ /// Checks that the user is allowed to operate with the collection. + /// + /// @param user User address to check. + /// @dev EVM selector for this function is: 0xd63a8e11, + /// or in textual repr: allowed(address) + function allowed(address user) external view returns (bool); + + /// Add the user to the allowed list. + /// + /// @param user Address of a trusted user. + /// @dev EVM selector for this function is: 0x67844fe6, + /// or in textual repr: addToCollectionAllowList(address) + function addToCollectionAllowList(address user) external; + + /// Remove the user from the allowed list. + /// + /// @param user Address of a removed user. + /// @dev EVM selector for this function is: 0x85c51acb, + /// or in textual repr: removeFromCollectionAllowList(address) + function removeFromCollectionAllowList(address user) external; + + /// Switch permission for minting. + /// + /// @param mode Enable if "true". + /// @dev EVM selector for this function is: 0x00018e84, + /// or in textual repr: setCollectionMintMode(bool) + function setCollectionMintMode(bool mode) external; + + /// Check that account is the owner or admin of the collection + /// + /// @param user account to verify + /// @return "true" if account is the owner or admin + /// @dev EVM selector for this function is: 0x9811b0c7, + /// or in textual repr: isOwnerOrAdmin(address) + function isOwnerOrAdmin(address user) external view returns (bool); + + /// Returns collection type + /// + /// @return `Fungible` or `NFT` or `ReFungible` + /// @dev EVM selector for this function is: 0xd34b55b8, + /// or in textual repr: uniqueCollectionType() + function uniqueCollectionType() external view returns (string memory); + + /// Get collection owner. + /// + /// @return Tuple with the owner address and its substrate mirror. + /// If address is canonical then substrate mirror is zero and vice versa. + /// @dev EVM selector for this function is: 0xdf727d3b, + /// or in textual repr: collectionOwner() + function collectionOwner() external view returns (Tuple17 memory); + + /// Changes collection owner to another account + /// + /// @dev Owner can be changed only by current owner + /// @param newOwner new owner account + /// @dev EVM selector for this function is: 0x4f53e226, + /// or in textual repr: changeCollectionOwner(address) + function changeCollectionOwner(address newOwner) external; +} + +/// @dev anonymous struct +struct Tuple17 { + address field_0; + uint256 field_1; +} + +/// @dev the ERC-165 identifier for this interface is 0x5b5e139f +interface ERC721Metadata is Dummy, ERC165 { + // /// @notice A descriptive name for a collection of NFTs in this contract + // /// @dev real implementation of this function lies in `ERC721UniqueExtensions` + // /// @dev EVM selector for this function is: 0x06fdde03, + // /// or in textual repr: name() + // function name() external view returns (string memory); + + // /// @notice An abbreviated name for NFTs in this contract + // /// @dev real implementation of this function lies in `ERC721UniqueExtensions` + // /// @dev EVM selector for this function is: 0x95d89b41, + // /// or in textual repr: symbol() + // function symbol() external view returns (string memory); + + /// @notice A distinct Uniform Resource Identifier (URI) for a given asset. + /// + /// @dev If the token has a `url` property and it is not empty, it is returned. + /// Else If the collection does not have a property with key `schemaName` or its value is not equal to `ERC721Metadata`, it returns an error `tokenURI not set`.
+ /// If the collection property `baseURI` is empty or absent, return "" (empty string) + /// otherwise, if token property `suffix` present and is non-empty, return concatenation of baseURI and suffix + /// otherwise, return concatenation of `baseURI` and stringified token id (decimal stringifying, without paddings). + /// + /// @return token's const_metadata + /// @dev EVM selector for this function is: 0xc87b56dd, + /// or in textual repr: tokenURI(uint256) + function tokenURI(uint256 tokenId) external view returns (string memory); +} + +/// @title ERC721 Token that can be irreversibly burned (destroyed). +/// @dev the ERC-165 identifier for this interface is 0x42966c68 +interface ERC721Burnable is Dummy, ERC165 { + /// @notice Burns a specific ERC721 token. + /// @dev Throws unless `msg.sender` is the current RFT owner, or an authorized + /// operator of the current owner. + /// @param tokenId The RFT to approve + /// @dev EVM selector for this function is: 0x42966c68, + /// or in textual repr: burn(uint256) + function burn(uint256 tokenId) external; +} + +/// @dev inlined interface +interface ERC721UniqueMintableEvents { + event MintingFinished(); +} + +/// @title ERC721 minting logic. +/// @dev the ERC-165 identifier for this interface is 0x476ff149 +interface ERC721UniqueMintable is Dummy, ERC165, ERC721UniqueMintableEvents { + /// @dev EVM selector for this function is: 0x05d2035b, + /// or in textual repr: mintingFinished() + function mintingFinished() external view returns (bool); + + /// @notice Function to mint token. + /// @param to The new owner + /// @return uint256 The id of the newly minted token + /// @dev EVM selector for this function is: 0x6a627842, + /// or in textual repr: mint(address) + function mint(address to) external returns (uint256); + + // /// @notice Function to mint token. + // /// @dev `tokenId` should be obtained with `nextTokenId` method, + // /// unlike standard, you can't specify it manually + // /// @param to The new owner + // /// @param tokenId ID of the minted RFT + // /// @dev EVM selector for this function is: 0x40c10f19, + // /// or in textual repr: mint(address,uint256) + // function mint(address to, uint256 tokenId) external returns (bool); + + /// @notice Function to mint token with the given tokenUri. + /// @param to The new owner + /// @param tokenUri Token URI that would be stored in the NFT properties + /// @return uint256 The id of the newly minted token + /// @dev EVM selector for this function is: 0x45c17782, + /// or in textual repr: mintWithTokenURI(address,string) + function mintWithTokenURI(address to, string memory tokenUri) external returns (uint256); + + // /// @notice Function to mint token with the given tokenUri. + // /// @dev `tokenId` should be obtained with `nextTokenId` method, + // /// unlike standard, you can't specify it manually + // /// @param to The new owner + // /// @param tokenId ID of the minted RFT + // /// @param tokenUri Token URI that would be stored in the RFT properties + // /// @dev EVM selector for this function is: 0x50bb4e7f, + // /// or in textual repr: mintWithTokenURI(address,uint256,string) + // function mintWithTokenURI(address to, uint256 tokenId, string memory tokenUri) external returns (bool); + + /// @dev Not implemented + /// @dev EVM selector for this function is: 0x7d64bcb4, + /// or in textual repr: finishMinting() + function finishMinting() external returns (bool); +} + +/// @title Unique extensions for ERC721. 
+/// @dev the ERC-165 identifier for this interface is 0xef1eaacb +interface ERC721UniqueExtensions is Dummy, ERC165 { + /// @notice A descriptive name for a collection of NFTs in this contract + /// @dev EVM selector for this function is: 0x06fdde03, + /// or in textual repr: name() + function name() external view returns (string memory); + + /// @notice An abbreviated name for NFTs in this contract + /// @dev EVM selector for this function is: 0x95d89b41, + /// or in textual repr: symbol() + function symbol() external view returns (string memory); + + /// @notice Transfer ownership of an RFT + /// @dev Throws unless `msg.sender` is the current owner. Throws if `to` + /// is the zero address. Throws if `tokenId` is not a valid RFT. + /// Throws if RFT pieces have multiple owners. + /// @param to The new owner + /// @param tokenId The RFT to transfer + /// @dev EVM selector for this function is: 0xa9059cbb, + /// or in textual repr: transfer(address,uint256) + function transfer(address to, uint256 tokenId) external; + + /// @notice Burns a specific ERC721 token. + /// @dev Throws unless `msg.sender` is the current owner or an authorized + /// operator for this RFT. Throws if `from` is not the current owner. Throws + /// if `to` is the zero address. Throws if `tokenId` is not a valid RFT. + /// Throws if RFT pieces have multiple owners. + /// @param from The current owner of the RFT + /// @param tokenId The RFT to transfer + /// @dev EVM selector for this function is: 0x79cc6790, + /// or in textual repr: burnFrom(address,uint256) + function burnFrom(address from, uint256 tokenId) external; + + /// @notice Returns next free RFT ID. + /// @dev EVM selector for this function is: 0x75794a3c, + /// or in textual repr: nextTokenId() + function nextTokenId() external view returns (uint256); + + // /// @notice Function to mint multiple tokens. + // /// @dev `tokenIds` should be an array of consecutive numbers and first number + // /// should be obtained with `nextTokenId` method + // /// @param to The new owner + // /// @param tokenIds IDs of the minted RFTs + // /// @dev EVM selector for this function is: 0x44a9945e, + // /// or in textual repr: mintBulk(address,uint256[]) + // function mintBulk(address to, uint256[] memory tokenIds) external returns (bool); + + // /// @notice Function to mint multiple tokens with the given tokenUris. + // /// @dev `tokenIds` is array of pairs of token ID and token URI. 
Token IDs should be consecutive + // /// numbers and first number should be obtained with `nextTokenId` method + // /// @param to The new owner + // /// @param tokens array of pairs of token ID and token URI for minted tokens + // /// @dev EVM selector for this function is: 0x36543006, + // /// or in textual repr: mintBulkWithTokenURI(address,(uint256,string)[]) + // function mintBulkWithTokenURI(address to, Tuple6[] memory tokens) external returns (bool); + + /// Returns EVM address for refungible token + /// + /// @param token ID of the token + /// @dev EVM selector for this function is: 0xab76fac6, + /// or in textual repr: tokenContractAddress(uint256) + function tokenContractAddress(uint256 token) external view returns (address); +} + +/// @dev anonymous struct +struct Tuple6 { + uint256 field_0; + string field_1; +} + +/// @title ERC-721 Non-Fungible Token Standard, optional enumeration extension +/// @dev See https://eips.ethereum.org/EIPS/eip-721 +/// @dev the ERC-165 identifier for this interface is 0x780e9d63 +interface ERC721Enumerable is Dummy, ERC165 { + /// @notice Enumerate valid RFTs + /// @param index A counter less than `totalSupply()` + /// @return The token identifier for the `index`th NFT, + /// (sort order not specified) + /// @dev EVM selector for this function is: 0x4f6ccce7, + /// or in textual repr: tokenByIndex(uint256) + function tokenByIndex(uint256 index) external view returns (uint256); + + /// Not implemented + /// @dev EVM selector for this function is: 0x2f745c59, + /// or in textual repr: tokenOfOwnerByIndex(address,uint256) + function tokenOfOwnerByIndex(address owner, uint256 index) external view returns (uint256); + + /// @notice Count RFTs tracked by this contract + /// @return A count of valid RFTs tracked by this contract, where each one of + /// them has an assigned and queryable owner not equal to the zero address + /// @dev EVM selector for this function is: 0x18160ddd, + /// or in textual repr: totalSupply() + function totalSupply() external view returns (uint256); +} + +/// @dev inlined interface +interface ERC721Events { + event Transfer(address indexed from, address indexed to, uint256 indexed tokenId); + event Approval(address indexed owner, address indexed approved, uint256 indexed tokenId); + event ApprovalForAll(address indexed owner, address indexed operator, bool approved); +} + +/// @title ERC-721 Non-Fungible Token Standard +/// @dev See https://github.com/ethereum/EIPs/blob/master/EIPS/eip-721.md +/// @dev the ERC-165 identifier for this interface is 0x58800161 +interface ERC721 is Dummy, ERC165, ERC721Events { + /// @notice Count all RFTs assigned to an owner + /// @dev RFTs assigned to the zero address are considered invalid, and this + /// function throws for queries about the zero address. + /// @param owner An address for whom to query the balance + /// @return The number of RFTs owned by `owner`, possibly zero + /// @dev EVM selector for this function is: 0x70a08231, + /// or in textual repr: balanceOf(address) + function balanceOf(address owner) external view returns (uint256); + + /// @notice Find the owner of an RFT + /// @dev RFTs assigned to zero address are considered invalid, and queries + /// about them do throw. + /// Returns special 0xffffffffffffffffffffffffffffffffffffffff address for + /// the tokens that are partially owned. 
+ /// @param tokenId The identifier for an RFT + /// @return The address of the owner of the RFT + /// @dev EVM selector for this function is: 0x6352211e, + /// or in textual repr: ownerOf(uint256) + function ownerOf(uint256 tokenId) external view returns (address); + + /// @dev Not implemented + /// @dev EVM selector for this function is: 0x60a11672, + /// or in textual repr: safeTransferFromWithData(address,address,uint256,bytes) + function safeTransferFromWithData( + address from, + address to, + uint256 tokenId, + bytes memory data + ) external; + + /// @dev Not implemented + /// @dev EVM selector for this function is: 0x42842e0e, + /// or in textual repr: safeTransferFrom(address,address,uint256) + function safeTransferFrom( + address from, + address to, + uint256 tokenId + ) external; + + /// @notice Transfer ownership of an RFT -- THE CALLER IS RESPONSIBLE + /// TO CONFIRM THAT `to` IS CAPABLE OF RECEIVING NFTS OR ELSE + /// THEY MAY BE PERMANENTLY LOST + /// @dev Throws unless `msg.sender` is the current owner or an authorized + /// operator for this RFT. Throws if `from` is not the current owner. Throws + /// if `to` is the zero address. Throws if `tokenId` is not a valid RFT. + /// Throws if RFT pieces have multiple owners. + /// @param from The current owner of the NFT + /// @param to The new owner + /// @param tokenId The NFT to transfer + /// @dev EVM selector for this function is: 0x23b872dd, + /// or in textual repr: transferFrom(address,address,uint256) + function transferFrom( + address from, + address to, + uint256 tokenId + ) external; + + /// @dev Not implemented + /// @dev EVM selector for this function is: 0x095ea7b3, + /// or in textual repr: approve(address,uint256) + function approve(address approved, uint256 tokenId) external; + + /// @dev Not implemented + /// @dev EVM selector for this function is: 0xa22cb465, + /// or in textual repr: setApprovalForAll(address,bool) + function setApprovalForAll(address operator, bool approved) external; + + /// @dev Not implemented + /// @dev EVM selector for this function is: 0x081812fc, + /// or in textual repr: getApproved(uint256) + function getApproved(uint256 tokenId) external view returns (address); + + /// @dev Not implemented + /// @dev EVM selector for this function is: 0xe985e9c5, + /// or in textual repr: isApprovedForAll(address,address) + function isApprovedForAll(address owner, address operator) external view returns (address); +} + +interface UniqueRefungible is + Dummy, + ERC165, + ERC721, + ERC721Enumerable, + ERC721UniqueExtensions, + ERC721UniqueMintable, + ERC721Burnable, + ERC721Metadata, + Collection, + TokenProperties +{} diff --git a/tests/src/eth/api/UniqueRefungibleToken.sol b/tests/src/eth/api/UniqueRefungibleToken.sol new file mode 100644 index 0000000000..eb5058127f --- /dev/null +++ b/tests/src/eth/api/UniqueRefungibleToken.sol @@ -0,0 +1,122 @@ +// SPDX-License-Identifier: OTHER +// This code is automatically generated + +pragma solidity >=0.8.0 <0.9.0; + +/// @dev common stubs holder +interface Dummy { + +} + +interface ERC165 is Dummy { + function supportsInterface(bytes4 interfaceID) external view returns (bool); +} + +/// @dev the ERC-165 identifier for this interface is 0x5755c3f2 +interface ERC1633 is Dummy, ERC165 { + /// @dev EVM selector for this function is: 0x80a54001, + /// or in textual repr: parentToken() + function parentToken() external view returns (address); + + /// @dev EVM selector for this function is: 0xd7f083f3, + /// or in textual repr: parentTokenId() + function 
parentTokenId() external view returns (uint256); +} + +/// @dev the ERC-165 identifier for this interface is 0xab8deb37 +interface ERC20UniqueExtensions is Dummy, ERC165 { + /// @dev Function that burns an amount of the token of a given account, + /// deducting from the sender's allowance for said account. + /// @param from The account whose tokens will be burnt. + /// @param amount The amount that will be burnt. + /// @dev EVM selector for this function is: 0x79cc6790, + /// or in textual repr: burnFrom(address,uint256) + function burnFrom(address from, uint256 amount) external returns (bool); + + /// @dev Function that changes total amount of the tokens. + /// Throws if `msg.sender` doesn't own all of the tokens. + /// @param amount New total amount of the tokens. + /// @dev EVM selector for this function is: 0xd2418ca7, + /// or in textual repr: repartition(uint256) + function repartition(uint256 amount) external returns (bool); +} + +/// @dev inlined interface +interface ERC20Events { + event Transfer(address indexed from, address indexed to, uint256 value); + event Approval(address indexed owner, address indexed spender, uint256 value); +} + +/// @title Standard ERC20 token +/// +/// @dev Implementation of the basic standard token. +/// https://github.com/ethereum/EIPs/blob/master/EIPS/eip-20.md +/// @dev the ERC-165 identifier for this interface is 0x942e8b22 +interface ERC20 is Dummy, ERC165, ERC20Events { + /// @return the name of the token. + /// @dev EVM selector for this function is: 0x06fdde03, + /// or in textual repr: name() + function name() external view returns (string memory); + + /// @return the symbol of the token. + /// @dev EVM selector for this function is: 0x95d89b41, + /// or in textual repr: symbol() + function symbol() external view returns (string memory); + + /// @dev Total number of tokens in existence + /// @dev EVM selector for this function is: 0x18160ddd, + /// or in textual repr: totalSupply() + function totalSupply() external view returns (uint256); + + /// @dev Not supported + /// @dev EVM selector for this function is: 0x313ce567, + /// or in textual repr: decimals() + function decimals() external view returns (uint8); + + /// @dev Gets the balance of the specified address. + /// @param owner The address to query the balance of. + /// @return A uint256 representing the amount owned by the passed address. + /// @dev EVM selector for this function is: 0x70a08231, + /// or in textual repr: balanceOf(address) + function balanceOf(address owner) external view returns (uint256); + + /// @dev Transfer token for a specified address + /// @param to The address to transfer to. + /// @param amount The amount to be transferred. + /// @dev EVM selector for this function is: 0xa9059cbb, + /// or in textual repr: transfer(address,uint256) + function transfer(address to, uint256 amount) external returns (bool); + + /// @dev Transfer tokens from one address to another + /// @param from address The address which you want to send tokens from + /// @param to address The address which you want to transfer to + /// @param amount uint256 the amount of tokens to be transferred + /// @dev EVM selector for this function is: 0x23b872dd, + /// or in textual repr: transferFrom(address,address,uint256) + function transferFrom( + address from, + address to, + uint256 amount + ) external returns (bool); + + /// @dev Approve the passed address to spend the specified amount of tokens on behalf of `msg.sender`.
+ /// Beware that changing an allowance with this method brings the risk that someone may use both the old + /// and the new allowance by unfortunate transaction ordering. One possible solution to mitigate this + /// race condition is to first reduce the spender's allowance to 0 and set the desired value afterwards: + /// https://github.com/ethereum/EIPs/issues/20#issuecomment-263524729 + /// @param spender The address which will spend the funds. + /// @param amount The amount of tokens to be spent. + /// @dev EVM selector for this function is: 0x095ea7b3, + /// or in textual repr: approve(address,uint256) + function approve(address spender, uint256 amount) external returns (bool); + + /// @dev Function to check the amount of tokens that an owner allowed to a spender. + /// @param owner address The address which owns the funds. + /// @param spender address The address which will spend the funds. + /// @return A uint256 specifying the amount of tokens still available for the spender. + /// @dev EVM selector for this function is: 0xdd62ed3e, + /// or in textual repr: allowance(address,address) + function allowance(address owner, address spender) external view returns (uint256); +} + +interface UniqueRefungibleToken is Dummy, ERC165, ERC20, ERC20UniqueExtensions, ERC1633 {} diff --git a/tests/src/eth/base.test.ts b/tests/src/eth/base.test.ts index 3fff286af8..5a2287181e 100644 --- a/tests/src/eth/base.test.ts +++ b/tests/src/eth/base.test.ts @@ -14,59 +14,56 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . -import { - collectionIdToAddress, - createEthAccount, - createEthAccountWithBalance, - deployFlipper, - ethBalanceViaSub, - GAS_ARGS, - itWeb3, - recordEthFee, - usingWeb3, -} from './util/helpers'; -import {expect} from 'chai'; -import {createCollectionExpectSuccess, createItemExpectSuccess, UNIQUE} from '../util/helpers'; -import nonFungibleAbi from './nonFungibleAbi.json'; -import {Contract} from 'web3-eth-contract'; -import Web3 from 'web3'; +import {IKeyringPair} from '@polkadot/types/types'; +import {EthUniqueHelper, itEth, usingEthPlaygrounds, expect} from './util'; + describe('Contract calls', () => { - itWeb3('Call of simple contract fee is less than 0.2 UNQ', async ({web3, api, privateKeyWrapper}) => { - const deployer = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const flipper = await deployFlipper(web3, deployer); + let donor: IKeyringPair; - const cost = await recordEthFee(api, deployer, () => flipper.methods.flip().send({from: deployer})); - expect(cost < BigInt(0.2 * Number(UNIQUE))).to.be.true; + before(async function () { + await usingEthPlaygrounds(async (_helper, privateKey) => { + donor = await privateKey({filename: __filename}); + }); }); - itWeb3('Balance transfer fee is less than 0.2 UNQ', async ({web3, api, privateKeyWrapper}) => { - const userA = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const userB = createEthAccount(web3); + itEth('Call of simple contract fee is less than 0.2 UNQ', async ({helper}) => { + const deployer = await helper.eth.createAccountWithBalance(donor); + const flipper = await helper.eth.deployFlipper(deployer); - const cost = await recordEthFee(api, userA, () => web3.eth.sendTransaction({from: userA, to: userB, value: '1000000', ...GAS_ARGS})); - const balanceB = await ethBalanceViaSub(api, userB); - expect(cost - balanceB < BigInt(0.2 * Number(UNIQUE))).to.be.true; + const cost = await helper.eth.calculateFee({Ethereum: deployer}, 
() => flipper.methods.flip().send({from: deployer})); + expect(cost < BigInt(0.2 * Number(helper.balance.getOneTokenNominal()))).to.be.true; }); - itWeb3('NFT transfer is close to 0.15 UNQ', async ({web3, api, privateKeyWrapper}) => { - const caller = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const receiver = createEthAccount(web3); + itEth('Balance transfer fee is less than 0.2 UNQ', async ({helper}) => { + const userA = await helper.eth.createAccountWithBalance(donor); + const userB = helper.eth.createAccount(); + const cost = await helper.eth.calculateFee({Ethereum: userA}, () => helper.getWeb3().eth.sendTransaction({ + from: userA, + to: userB, + value: '1000000', + gas: helper.eth.DEFAULT_GAS, + })); + const balanceB = await helper.balance.getEthereum(userB); + expect(cost - balanceB < BigInt(0.2 * Number(helper.balance.getOneTokenNominal()))).to.be.true; + }); - const alice = privateKeyWrapper('//Alice'); - const collection = await createCollectionExpectSuccess({ - mode: {type: 'NFT'}, - }); - const itemId = await createItemExpectSuccess(alice, collection, 'NFT', {Ethereum: caller}); + itEth('NFT transfer is close to 0.15 UNQ', async ({helper}) => { + const caller = await helper.eth.createAccountWithBalance(donor); + const receiver = helper.eth.createAccount(); - const address = collectionIdToAddress(collection); - const contract = new web3.eth.Contract(nonFungibleAbi as any, address, {from: caller, ...GAS_ARGS}); + const [alice] = await helper.arrange.createAccounts([10n], donor); + const collection = await helper.nft.mintCollection(alice, {name: 'test', description: 'test', tokenPrefix: 'test'}); + const {tokenId} = await collection.mintToken(alice, {Ethereum: caller}); - const cost = await recordEthFee(api, caller, () => contract.methods.transfer(receiver, itemId).send(caller)); + const address = helper.ethAddress.fromCollectionId(collection.collectionId); + const contract = helper.ethNativeContract.collection(address, 'nft', caller); - const fee = Number(cost) / Number(UNIQUE); + const cost = await helper.eth.calculateFee({Ethereum: caller}, () => contract.methods.transfer(receiver, tokenId).send(caller)); + + const fee = Number(cost) / Number(helper.balance.getOneTokenNominal()); const expectedFee = 0.15; - const tolerance = 0.00002; + const tolerance = 0.001; expect(Math.abs(fee - expectedFee)).to.be.lessThan(tolerance); }); @@ -75,49 +72,59 @@ describe('Contract calls', () => { describe('ERC165 tests', async () => { // https://eips.ethereum.org/EIPS/eip-165 - let collection: number; + let erc721MetadataCompatibleNftCollectionId: number; + let simpleNftCollectionId: number; let minter: string; - function contract(web3: Web3): Contract { - return new web3.eth.Contract(nonFungibleAbi as any, collectionIdToAddress(collection), {from: minter, ...GAS_ARGS}); + const BASE_URI = 'base/'; + + async function checkInterface(helper: EthUniqueHelper, interfaceId: string, simpleResult: boolean, compatibleResult: boolean) { + const simple = helper.ethNativeContract.collection(helper.ethAddress.fromCollectionId(simpleNftCollectionId), 'nft', minter); + const compatible = helper.ethNativeContract.collection(helper.ethAddress.fromCollectionId(erc721MetadataCompatibleNftCollectionId), 'nft', minter); + + expect(await simple.methods.supportsInterface(interfaceId).call()).to.equal(simpleResult, `empty (not ERC721Metadata compatible) NFT collection returns not ${simpleResult}`); + expect(await 
compatible.methods.supportsInterface(interfaceId).call()).to.equal(compatibleResult, `ERC721Metadata compatible NFT collection returns not ${compatibleResult}`); } before(async () => { - await usingWeb3 (async (web3) => { - collection = await createCollectionExpectSuccess(); - minter = createEthAccount(web3); + await usingEthPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + const [alice] = await helper.arrange.createAccounts([10n], donor); + ({collectionId: simpleNftCollectionId} = await helper.nft.mintCollection(alice, {name: 'test', description: 'test', tokenPrefix: 'test'})); + minter = await helper.eth.createAccountWithBalance(donor); + ({collectionId: erc721MetadataCompatibleNftCollectionId} = await helper.eth.createERC721MetadataCompatibleNFTCollection(minter, 'n', 'd', 'p', BASE_URI)); }); }); - itWeb3('interfaceID == 0xffffffff always false', async ({web3}) => { - expect(await contract(web3).methods.supportsInterface('0xffffffff').call()).to.be.false; + itEth('nonexistent interfaceID - 0xffffffff - always false', async ({helper}) => { + await checkInterface(helper, '0xffffffff', false, false); }); - itWeb3('ERC721 support', async ({web3}) => { - expect(await contract(web3).methods.supportsInterface('0x58800161').call()).to.be.true; + itEth('ERC721 - 0x780e9d63 - support', async ({helper}) => { + await checkInterface(helper, '0x780e9d63', true, true); }); - itWeb3('ERC721Metadata support', async ({web3}) => { - expect(await contract(web3).methods.supportsInterface('0x5b5e139f').call()).to.be.true; + itEth('ERC721Metadata - 0x5b5e139f - support', async ({helper}) => { + await checkInterface(helper, '0x5b5e139f', false, true); }); - itWeb3('ERC721Mintable support', async ({web3}) => { - expect(await contract(web3).methods.supportsInterface('0x68ccfe89').call()).to.be.true; + itEth('ERC721UniqueMintable - 0x476ff149 - support', async ({helper}) => { + await checkInterface(helper, '0x476ff149', true, true); }); - itWeb3('ERC721Enumerable support', async ({web3}) => { - expect(await contract(web3).methods.supportsInterface('0x780e9d63').call()).to.be.true; + itEth('ERC721Enumerable - 0x780e9d63 - support', async ({helper}) => { + await checkInterface(helper, '0x780e9d63', true, true); }); - itWeb3('ERC721UniqueExtensions support', async ({web3}) => { - expect(await contract(web3).methods.supportsInterface('0xd74d154f').call()).to.be.true; + itEth('ERC721UniqueExtensions - 0x4468500d - support', async ({helper}) => { + await checkInterface(helper, '0x4468500d', true, true); }); - itWeb3('ERC721Burnable support', async ({web3}) => { - expect(await contract(web3).methods.supportsInterface('0x42966c68').call()).to.be.true; + itEth('ERC721Burnable - 0x42966c68 - support', async ({helper}) => { + await checkInterface(helper, '0x42966c68', true, true); }); - itWeb3('ERC165 support', async ({web3}) => { - expect(await contract(web3).methods.supportsInterface('0x01ffc9a7').call()).to.be.true; + itEth('ERC165 - 0x01ffc9a7 - support', async ({helper}) => { + await checkInterface(helper, '0x01ffc9a7', true, true); }); }); diff --git a/tests/src/eth/collectionAdmin.test.ts b/tests/src/eth/collectionAdmin.test.ts index 7085867636..f30b2b2222 100644 --- a/tests/src/eth/collectionAdmin.test.ts +++ b/tests/src/eth/collectionAdmin.test.ts @@ -13,207 +13,194 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . 
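As a side note on the `supportsInterface` checks in the ERC165 tests above: per EIP-165, an interface identifier is the XOR of the 4-byte selectors of the interface's functions, which is why the IDs checked here can differ from the canonical EIP-721 values whenever the function set or a signature differs. Below is a hedged sketch of computing such an identifier off-chain; it assumes only the `web3` package already used in these tests, and the signature list shown is illustrative.

```typescript
import Web3 from 'web3';

// EIP-165: interface ID = XOR of the first 4 bytes of keccak256 of each function signature.
function interfaceId(web3: Web3, signatures: string[]): string {
  const id = signatures
    .map(sig => BigInt(web3.utils.keccak256(sig).slice(0, 10))) // 4-byte selector
    .reduce((acc, selector) => acc ^ selector, 0n);
  return '0x' + id.toString(16).padStart(8, '0');
}

// ERC165 itself has a single function, so its ID equals that selector:
// interfaceId(new Web3(), ['supportsInterface(bytes4)']) === '0x01ffc9a7'
```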
-import {expect} from 'chai'; -import privateKey from '../substrate/privateKey'; -import { - createEthAccount, - createEthAccountWithBalance, - evmCollection, - evmCollectionHelpers, - getCollectionAddressFromResult, - itWeb3, -} from './util/helpers'; +import {IKeyringPair} from '@polkadot/types/types'; +import {usingEthPlaygrounds, itEth, expect, EthUniqueHelper} from './util'; + +async function recordEthFee(helper: EthUniqueHelper, userAddress: string, call: () => Promise) { + const before = await helper.balance.getSubstrate(helper.address.ethToSubstrate(userAddress)); + await call(); + await helper.wait.newBlocks(1); + const after = await helper.balance.getSubstrate(helper.address.ethToSubstrate(userAddress)); + + expect(after < before).to.be.true; + + return before - after; +} describe('Add collection admins', () => { - itWeb3('Add admin by owner', async ({api, web3, privateKeyWrapper}) => { - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const collectionHelper = evmCollectionHelpers(web3, owner); - - const result = await collectionHelper.methods - .createNonfungibleCollection('A', 'B', 'C') - .send(); - const {collectionIdAddress, collectionId} = await getCollectionAddressFromResult(api, result); - - const newAdmin = await createEthAccount(web3); - const collectionEvm = evmCollection(web3, owner, collectionIdAddress); + let donor: IKeyringPair; + + before(async function() { + await usingEthPlaygrounds(async (_helper, privateKey) => { + donor = await privateKey({filename: __filename}); + }); + }); + + itEth('Add admin by owner', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const {collectionAddress, collectionId} = await helper.eth.createNFTCollection(owner, 'A', 'B', 'C'); + const collectionEvm = helper.ethNativeContract.collection(collectionAddress, 'nft', owner); + + const newAdmin = helper.eth.createAccount(); + await collectionEvm.methods.addCollectionAdmin(newAdmin).send(); - const adminList = await api.rpc.unique.adminlist(collectionId); + const adminList = await helper.callRpc('api.rpc.unique.adminlist', [collectionId]); expect(adminList[0].asEthereum.toString().toLocaleLowerCase()) .to.be.eq(newAdmin.toLocaleLowerCase()); }); - itWeb3('Add substrate admin by owner', async ({api, web3, privateKeyWrapper}) => { - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const collectionHelper = evmCollectionHelpers(web3, owner); - - const result = await collectionHelper.methods - .createNonfungibleCollection('A', 'B', 'C') - .send(); - const {collectionIdAddress, collectionId} = await getCollectionAddressFromResult(api, result); - - const newAdmin = privateKeyWrapper('//Alice'); - const collectionEvm = evmCollection(web3, owner, collectionIdAddress); + itEth.skip('Add substrate admin by owner', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const {collectionAddress, collectionId} = await helper.eth.createNFTCollection(owner, 'A', 'B', 'C'); + const collectionEvm = helper.ethNativeContract.collection(collectionAddress, 'nft', owner); + + const [newAdmin] = await helper.arrange.createAccounts([10n], donor); await collectionEvm.methods.addCollectionAdminSubstrate(newAdmin.addressRaw).send(); - const adminList = await api.rpc.unique.adminlist(collectionId); + const adminList = await helper.callRpc('api.rpc.unique.adminlist', [collectionId]); expect(adminList[0].asSubstrate.toString().toLocaleLowerCase()) 
.to.be.eq(newAdmin.address.toLocaleLowerCase()); }); - itWeb3('(!negative tests!) Add admin by ADMIN is not allowed', async ({api, web3, privateKeyWrapper}) => { - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const collectionHelper = evmCollectionHelpers(web3, owner); - - const result = await collectionHelper.methods - .createNonfungibleCollection('A', 'B', 'C') - .send(); - const {collectionIdAddress, collectionId} = await getCollectionAddressFromResult(api, result); - - const admin = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const collectionEvm = evmCollection(web3, owner, collectionIdAddress); + itEth('Verify owner or admin', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const {collectionAddress} = await helper.eth.createNFTCollection(owner, 'A', 'B', 'C'); + + const newAdmin = helper.eth.createAccount(); + const collectionEvm = helper.ethNativeContract.collection(collectionAddress, 'nft', owner); + expect(await collectionEvm.methods.isOwnerOrAdmin(newAdmin).call()).to.be.false; + await collectionEvm.methods.addCollectionAdmin(newAdmin).send(); + expect(await collectionEvm.methods.isOwnerOrAdmin(newAdmin).call()).to.be.true; + }); + + itEth('(!negative tests!) Add admin by ADMIN is not allowed', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const {collectionAddress, collectionId} = await helper.eth.createNFTCollection(owner, 'A', 'B', 'C'); + + const admin = await helper.eth.createAccountWithBalance(donor); + const collectionEvm = helper.ethNativeContract.collection(collectionAddress, 'nft', owner); await collectionEvm.methods.addCollectionAdmin(admin).send(); - - const user = await createEthAccount(web3); + + const user = helper.eth.createAccount(); await expect(collectionEvm.methods.addCollectionAdmin(user).call({from: admin})) .to.be.rejectedWith('NoPermission'); - const adminList = await api.rpc.unique.adminlist(collectionId); + const adminList = await helper.callRpc('api.rpc.unique.adminlist', [collectionId]); expect(adminList.length).to.be.eq(1); expect(adminList[0].asEthereum.toString().toLocaleLowerCase()) .to.be.eq(admin.toLocaleLowerCase()); }); - itWeb3('(!negative tests!) Add admin by USER is not allowed', async ({api, web3, privateKeyWrapper}) => { - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const collectionHelper = evmCollectionHelpers(web3, owner); - - const result = await collectionHelper.methods - .createNonfungibleCollection('A', 'B', 'C') - .send(); - const {collectionIdAddress, collectionId} = await getCollectionAddressFromResult(api, result); - - const notAdmin = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const collectionEvm = evmCollection(web3, owner, collectionIdAddress); - - const user = await createEthAccount(web3); + itEth('(!negative tests!) 
Add admin by USER is not allowed', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const {collectionAddress, collectionId} = await helper.eth.createNFTCollection(owner, 'A', 'B', 'C'); + + const notAdmin = await helper.eth.createAccountWithBalance(donor); + const collectionEvm = helper.ethNativeContract.collection(collectionAddress, 'nft', owner); + + const user = helper.eth.createAccount(); await expect(collectionEvm.methods.addCollectionAdmin(user).call({from: notAdmin})) .to.be.rejectedWith('NoPermission'); - const adminList = await api.rpc.unique.adminlist(collectionId); + const adminList = await helper.callRpc('api.rpc.unique.adminlist', [collectionId]); expect(adminList.length).to.be.eq(0); }); - itWeb3('(!negative tests!) Add substrate admin by ADMIN is not allowed', async ({api, web3, privateKeyWrapper}) => { - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const collectionHelper = evmCollectionHelpers(web3, owner); - - const result = await collectionHelper.methods - .createNonfungibleCollection('A', 'B', 'C') - .send(); - const {collectionIdAddress, collectionId} = await getCollectionAddressFromResult(api, result); - - const admin = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const collectionEvm = evmCollection(web3, owner, collectionIdAddress); + itEth.skip('(!negative tests!) Add substrate admin by ADMIN is not allowed', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const {collectionAddress, collectionId} = await helper.eth.createNFTCollection(owner, 'A', 'B', 'C'); + + const admin = await helper.eth.createAccountWithBalance(donor); + const collectionEvm = helper.ethNativeContract.collection(collectionAddress, 'nft', owner); await collectionEvm.methods.addCollectionAdmin(admin).send(); - const notAdmin = privateKey('//Alice'); + const [notAdmin] = await helper.arrange.createAccounts([10n], donor); await expect(collectionEvm.methods.addCollectionAdminSubstrate(notAdmin.addressRaw).call({from: admin})) .to.be.rejectedWith('NoPermission'); - const adminList = await api.rpc.unique.adminlist(collectionId); + const adminList = await helper.callRpc('api.rpc.unique.adminlist', [collectionId]); expect(adminList.length).to.be.eq(1); expect(adminList[0].asEthereum.toString().toLocaleLowerCase()) .to.be.eq(admin.toLocaleLowerCase()); }); - - itWeb3('(!negative tests!) Add substrate admin by USER is not allowed', async ({api, web3, privateKeyWrapper}) => { - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const collectionHelper = evmCollectionHelpers(web3, owner); - - const result = await collectionHelper.methods - .createNonfungibleCollection('A', 'B', 'C') - .send(); - const {collectionIdAddress, collectionId} = await getCollectionAddressFromResult(api, result); - - const notAdmin0 = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const collectionEvm = evmCollection(web3, owner, collectionIdAddress); - const notAdmin1 = privateKey('//Alice'); + + itEth.skip('(!negative tests!) 
Add substrate admin by USER is not allowed', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const {collectionAddress, collectionId} = await helper.eth.createNFTCollection(owner, 'A', 'B', 'C'); + + const notAdmin0 = await helper.eth.createAccountWithBalance(donor); + const collectionEvm = helper.ethNativeContract.collection(collectionAddress, 'nft', owner); + const [notAdmin1] = await helper.arrange.createAccounts([10n], donor); await expect(collectionEvm.methods.addCollectionAdminSubstrate(notAdmin1.addressRaw).call({from: notAdmin0})) .to.be.rejectedWith('NoPermission'); - const adminList = await api.rpc.unique.adminlist(collectionId); + const adminList = await helper.callRpc('api.rpc.unique.adminlist', [collectionId]); expect(adminList.length).to.be.eq(0); }); }); describe('Remove collection admins', () => { - itWeb3('Remove admin by owner', async ({api, web3, privateKeyWrapper}) => { - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const collectionHelper = evmCollectionHelpers(web3, owner); - - const result = await collectionHelper.methods - .createNonfungibleCollection('A', 'B', 'C') - .send(); - const {collectionIdAddress, collectionId} = await getCollectionAddressFromResult(api, result); - - const newAdmin = await createEthAccount(web3); - const collectionEvm = evmCollection(web3, owner, collectionIdAddress); + let donor: IKeyringPair; + + before(async function() { + await usingEthPlaygrounds(async (_helper, privateKey) => { + donor = await privateKey({filename: __filename}); + }); + }); + + itEth('Remove admin by owner', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const {collectionAddress, collectionId} = await helper.eth.createNFTCollection(owner, 'A', 'B', 'C'); + + const newAdmin = helper.eth.createAccount(); + const collectionEvm = helper.ethNativeContract.collection(collectionAddress, 'nft', owner); await collectionEvm.methods.addCollectionAdmin(newAdmin).send(); + { - const adminList = await api.rpc.unique.adminlist(collectionId); + const adminList = await helper.callRpc('api.rpc.unique.adminlist', [collectionId]); expect(adminList.length).to.be.eq(1); expect(adminList[0].asEthereum.toString().toLocaleLowerCase()) .to.be.eq(newAdmin.toLocaleLowerCase()); } await collectionEvm.methods.removeCollectionAdmin(newAdmin).send(); - const adminList = await api.rpc.unique.adminlist(collectionId); + const adminList = await helper.callRpc('api.rpc.unique.adminlist', [collectionId]); expect(adminList.length).to.be.eq(0); }); - itWeb3('Remove substrate admin by owner', async ({api, web3, privateKeyWrapper}) => { - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const collectionHelper = evmCollectionHelpers(web3, owner); - - const result = await collectionHelper.methods - .createNonfungibleCollection('A', 'B', 'C') - .send(); - const {collectionIdAddress, collectionId} = await getCollectionAddressFromResult(api, result); - - const newAdmin = privateKeyWrapper('//Alice'); - const collectionEvm = evmCollection(web3, owner, collectionIdAddress); + itEth.skip('Remove substrate admin by owner', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const {collectionAddress, collectionId} = await helper.eth.createNFTCollection(owner, 'A', 'B', 'C'); + + const [newAdmin] = await helper.arrange.createAccounts([10n], donor); + const collectionEvm = helper.ethNativeContract.collection(collectionAddress, 'nft', 
owner); await collectionEvm.methods.addCollectionAdminSubstrate(newAdmin.addressRaw).send(); { - const adminList = await api.rpc.unique.adminlist(collectionId); + const adminList = await helper.callRpc('api.rpc.unique.adminlist', [collectionId]); expect(adminList[0].asSubstrate.toString().toLocaleLowerCase()) .to.be.eq(newAdmin.address.toLocaleLowerCase()); } - + await collectionEvm.methods.removeCollectionAdminSubstrate(newAdmin.addressRaw).send(); - const adminList = await api.rpc.unique.adminlist(collectionId); + const adminList = await helper.callRpc('api.rpc.unique.adminlist', [collectionId]); expect(adminList.length).to.be.eq(0); }); - itWeb3('(!negative tests!) Remove admin by ADMIN is not allowed', async ({api, web3, privateKeyWrapper}) => { - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const collectionHelper = evmCollectionHelpers(web3, owner); - - const result = await collectionHelper.methods - .createNonfungibleCollection('A', 'B', 'C') - .send(); - const {collectionIdAddress, collectionId} = await getCollectionAddressFromResult(api, result); + itEth('(!negative tests!) Remove admin by ADMIN is not allowed', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const {collectionAddress, collectionId} = await helper.eth.createNFTCollection(owner, 'A', 'B', 'C'); - const collectionEvm = evmCollection(web3, owner, collectionIdAddress); + const collectionEvm = helper.ethNativeContract.collection(collectionAddress, 'nft', owner); - const admin0 = await createEthAccountWithBalance(api, web3, privateKeyWrapper); + const admin0 = await helper.eth.createAccountWithBalance(donor); await collectionEvm.methods.addCollectionAdmin(admin0).send(); - const admin1 = await createEthAccount(web3); + const admin1 = await helper.eth.createAccountWithBalance(donor); await collectionEvm.methods.addCollectionAdmin(admin1).send(); await expect(collectionEvm.methods.removeCollectionAdmin(admin1).call({from: admin0})) .to.be.rejectedWith('NoPermission'); { - const adminList = await api.rpc.unique.adminlist(collectionId); + const adminList = await helper.callRpc('api.rpc.unique.adminlist', [collectionId]); expect(adminList.length).to.be.eq(2); expect(adminList.toString().toLocaleLowerCase()) .to.be.deep.contains(admin0.toLocaleLowerCase()) @@ -221,76 +208,150 @@ describe('Remove collection admins', () => { } }); - itWeb3('(!negative tests!) Remove admin by USER is not allowed', async ({api, web3, privateKeyWrapper}) => { - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const collectionHelper = evmCollectionHelpers(web3, owner); - - const result = await collectionHelper.methods - .createNonfungibleCollection('A', 'B', 'C') - .send(); - const {collectionIdAddress, collectionId} = await getCollectionAddressFromResult(api, result); + itEth('(!negative tests!) 
Remove admin by USER is not allowed', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const {collectionAddress, collectionId} = await helper.eth.createNFTCollection(owner, 'A', 'B', 'C'); - const collectionEvm = evmCollection(web3, owner, collectionIdAddress); + const collectionEvm = helper.ethNativeContract.collection(collectionAddress, 'nft', owner); - const admin = await createEthAccountWithBalance(api, web3, privateKeyWrapper); + const admin = await helper.eth.createAccountWithBalance(donor); await collectionEvm.methods.addCollectionAdmin(admin).send(); - const notAdmin = await createEthAccount(web3); + const notAdmin = helper.eth.createAccount(); await expect(collectionEvm.methods.removeCollectionAdmin(admin).call({from: notAdmin})) .to.be.rejectedWith('NoPermission'); { - const adminList = await api.rpc.unique.adminlist(collectionId); + const adminList = await helper.callRpc('api.rpc.unique.adminlist', [collectionId]); expect(adminList[0].asEthereum.toString().toLocaleLowerCase()) .to.be.eq(admin.toLocaleLowerCase()); expect(adminList.length).to.be.eq(1); } }); - itWeb3('(!negative tests!) Remove substrate admin by ADMIN is not allowed', async ({api, web3, privateKeyWrapper}) => { - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const collectionHelper = evmCollectionHelpers(web3, owner); - - const result = await collectionHelper.methods - .createNonfungibleCollection('A', 'B', 'C') - .send(); - const {collectionIdAddress, collectionId} = await getCollectionAddressFromResult(api, result); - - const adminSub = privateKeyWrapper('//Alice'); - const collectionEvm = evmCollection(web3, owner, collectionIdAddress); + itEth.skip('(!negative tests!) Remove substrate admin by ADMIN is not allowed', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const {collectionAddress, collectionId} = await helper.eth.createNFTCollection(owner, 'A', 'B', 'C'); + + const [adminSub] = await helper.arrange.createAccounts([10n], donor); + const collectionEvm = helper.ethNativeContract.collection(collectionAddress, 'nft', owner); await collectionEvm.methods.addCollectionAdminSubstrate(adminSub.addressRaw).send(); - const adminEth = await createEthAccountWithBalance(api, web3, privateKeyWrapper); + const adminEth = await helper.eth.createAccountWithBalance(donor); await collectionEvm.methods.addCollectionAdmin(adminEth).send(); await expect(collectionEvm.methods.removeCollectionAdminSubstrate(adminSub.addressRaw).call({from: adminEth})) .to.be.rejectedWith('NoPermission'); - const adminList = await api.rpc.unique.adminlist(collectionId); + const adminList = await helper.callRpc('api.rpc.unique.adminlist', [collectionId]); expect(adminList.length).to.be.eq(2); expect(adminList.toString().toLocaleLowerCase()) .to.be.deep.contains(adminSub.address.toLocaleLowerCase()) .to.be.deep.contains(adminEth.toLocaleLowerCase()); }); - itWeb3('(!negative tests!) 
Remove substrate admin by USER is not allowed', async ({api, web3, privateKeyWrapper}) => { - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const collectionHelper = evmCollectionHelpers(web3, owner); - - const result = await collectionHelper.methods - .createNonfungibleCollection('A', 'B', 'C') - .send(); - const {collectionIdAddress, collectionId} = await getCollectionAddressFromResult(api, result); - - const adminSub = privateKeyWrapper('//Alice'); - const collectionEvm = evmCollection(web3, owner, collectionIdAddress); + itEth.skip('(!negative tests!) Remove substrate admin by USER is not allowed', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const {collectionAddress, collectionId} = await helper.eth.createNFTCollection(owner, 'A', 'B', 'C'); + + const [adminSub] = await helper.arrange.createAccounts([10n], donor); + const collectionEvm = helper.ethNativeContract.collection(collectionAddress, 'nft', owner); await collectionEvm.methods.addCollectionAdminSubstrate(adminSub.addressRaw).send(); - const notAdminEth = await createEthAccountWithBalance(api, web3, privateKeyWrapper); + const notAdminEth = await helper.eth.createAccountWithBalance(donor); await expect(collectionEvm.methods.removeCollectionAdminSubstrate(adminSub.addressRaw).call({from: notAdminEth})) .to.be.rejectedWith('NoPermission'); - const adminList = await api.rpc.unique.adminlist(collectionId); + const adminList = await helper.callRpc('api.rpc.unique.adminlist', [collectionId]); expect(adminList.length).to.be.eq(1); expect(adminList[0].asSubstrate.toString().toLocaleLowerCase()) .to.be.eq(adminSub.address.toLocaleLowerCase()); }); -}); \ No newline at end of file +}); + +describe('Change owner tests', () => { + let donor: IKeyringPair; + + before(async function() { + await usingEthPlaygrounds(async (_helper, privateKey) => { + donor = await privateKey({filename: __filename}); + }); + }); + + itEth('Change owner', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const newOwner = await helper.eth.createAccountWithBalance(donor); + const {collectionAddress} = await helper.eth.createNFTCollection(owner, 'A', 'B', 'C'); + const collectionEvm = helper.ethNativeContract.collection(collectionAddress, 'nft', owner); + + await collectionEvm.methods.changeCollectionOwner(newOwner).send(); + + expect(await collectionEvm.methods.isOwnerOrAdmin(owner).call()).to.be.false; + expect(await collectionEvm.methods.isOwnerOrAdmin(newOwner).call()).to.be.true; + }); + + itEth('change owner call fee', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const newOwner = await helper.eth.createAccountWithBalance(donor); + const {collectionAddress} = await helper.eth.createNFTCollection(owner, 'A', 'B', 'C'); + const collectionEvm = helper.ethNativeContract.collection(collectionAddress, 'nft', owner); + const cost = await recordEthFee(helper, owner, () => collectionEvm.methods.changeCollectionOwner(newOwner).send()); + expect(cost < BigInt(0.2 * Number(helper.balance.getOneTokenNominal()))); + expect(cost > 0); + }); + + itEth('(!negative tests!) 
call setOwner by non owner', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const newOwner = await helper.eth.createAccountWithBalance(donor); + const {collectionAddress} = await helper.eth.createNFTCollection(owner, 'A', 'B', 'C'); + const collectionEvm = helper.ethNativeContract.collection(collectionAddress, 'nft', owner); + + await expect(collectionEvm.methods.changeCollectionOwner(newOwner).send({from: newOwner})).to.be.rejected; + expect(await collectionEvm.methods.isOwnerOrAdmin(newOwner).call()).to.be.false; + }); +}); + +describe('Change substrate owner tests', () => { + let donor: IKeyringPair; + + before(async function() { + await usingEthPlaygrounds(async (_helper, privateKey) => { + donor = await privateKey({filename: __filename}); + }); + }); + + itEth.skip('Change owner', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const [newOwner] = await helper.arrange.createAccounts([10n], donor); + const {collectionAddress} = await helper.eth.createNFTCollection(owner, 'A', 'B', 'C'); + const collectionEvm = helper.ethNativeContract.collection(collectionAddress, 'nft', owner); + + expect(await collectionEvm.methods.isOwnerOrAdmin(owner).call()).to.be.true; + expect(await collectionEvm.methods.isOwnerOrAdminSubstrate(newOwner.addressRaw).call()).to.be.false; + + await collectionEvm.methods.setOwnerSubstrate(newOwner.addressRaw).send(); + + expect(await collectionEvm.methods.isOwnerOrAdmin(owner).call()).to.be.false; + expect(await collectionEvm.methods.isOwnerOrAdminSubstrate(newOwner.addressRaw).call()).to.be.true; + }); + + itEth.skip('change owner call fee', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const [newOwner] = await helper.arrange.createAccounts([10n], donor); + const {collectionAddress} = await helper.eth.createNFTCollection(owner, 'A', 'B', 'C'); + const collectionEvm = helper.ethNativeContract.collection(collectionAddress, 'nft', owner); + + const cost = await recordEthFee(helper, owner, () => collectionEvm.methods.setOwnerSubstrate(newOwner.addressRaw).send()); + expect(cost < BigInt(0.2 * Number(helper.balance.getOneTokenNominal()))); + expect(cost > 0); + }); + + itEth.skip('(!negative tests!) 
call setOwner by non owner', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const otherReceiver = await helper.eth.createAccountWithBalance(donor); + const [newOwner] = await helper.arrange.createAccounts([10n], donor); + const {collectionAddress} = await helper.eth.createNFTCollection(owner, 'A', 'B', 'C'); + const collectionEvm = helper.ethNativeContract.collection(collectionAddress, 'nft', owner); + + await expect(collectionEvm.methods.setOwnerSubstrate(newOwner.addressRaw).send({from: otherReceiver})).to.be.rejected; + expect(await collectionEvm.methods.isOwnerOrAdminSubstrate(newOwner.addressRaw).call()).to.be.false; + }); +}); diff --git a/tests/src/eth/collectionHelpersAbi.json b/tests/src/eth/collectionHelpersAbi.json index 93d388eb22..3817f09624 100644 --- a/tests/src/eth/collectionHelpersAbi.json +++ b/tests/src/eth/collectionHelpersAbi.json @@ -18,15 +18,33 @@ "name": "CollectionCreated", "type": "event" }, + { + "inputs": [], + "name": "collectionCreationFee", + "outputs": [{ "internalType": "uint256", "name": "", "type": "uint256" }], + "stateMutability": "view", + "type": "function" + }, { "inputs": [ { "internalType": "string", "name": "name", "type": "string" }, { "internalType": "string", "name": "description", "type": "string" }, { "internalType": "string", "name": "tokenPrefix", "type": "string" } ], - "name": "createNonfungibleCollection", + "name": "createNFTCollection", "outputs": [{ "internalType": "address", "name": "", "type": "address" }], - "stateMutability": "nonpayable", + "stateMutability": "payable", + "type": "function" + }, + { + "inputs": [ + { "internalType": "string", "name": "name", "type": "string" }, + { "internalType": "string", "name": "description", "type": "string" }, + { "internalType": "string", "name": "tokenPrefix", "type": "string" } + ], + "name": "createRFTCollection", + "outputs": [{ "internalType": "address", "name": "", "type": "address" }], + "stateMutability": "payable", "type": "function" }, { @@ -42,6 +60,16 @@ "stateMutability": "view", "type": "function" }, + { + "inputs": [ + { "internalType": "address", "name": "collection", "type": "address" }, + { "internalType": "string", "name": "baseUri", "type": "string" } + ], + "name": "makeCollectionERC721MetadataCompatible", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, { "inputs": [ { "internalType": "bytes4", "name": "interfaceID", "type": "bytes4" } diff --git a/tests/src/eth/collectionProperties.test.ts b/tests/src/eth/collectionProperties.test.ts index 582e0ba217..6829627da2 100644 --- a/tests/src/eth/collectionProperties.test.ts +++ b/tests/src/eth/collectionProperties.test.ts @@ -1,53 +1,163 @@ -import {addCollectionAdminExpectSuccess, createCollectionExpectSuccess} from '../util/helpers'; -import {collectionIdToAddress, createEthAccount, createEthAccountWithBalance, GAS_ARGS, itWeb3} from './util/helpers'; -import nonFungibleAbi from './nonFungibleAbi.json'; -import {expect} from 'chai'; -import {executeTransaction} from '../substrate/substrate-api'; +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. 
+ +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +import {itEth, usingEthPlaygrounds, expect, EthUniqueHelper} from './util'; +import {Pallets} from '../util'; +import {IProperty, ITokenPropertyPermission} from '../util/playgrounds/types'; +import {IKeyringPair} from '@polkadot/types/types'; describe('EVM collection properties', () => { - itWeb3('Can be set', async({web3, api, privateKeyWrapper}) => { - const alice = privateKeyWrapper('//Alice'); - const caller = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const collection = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); + let donor: IKeyringPair; + let alice: IKeyringPair; + + before(async function() { + await usingEthPlaygrounds(async (_helper, privateKey) => { + donor = await privateKey({filename: __filename}); + [alice] = await _helper.arrange.createAccounts([10n], donor); + }); + }); - await addCollectionAdminExpectSuccess(alice, collection, {Ethereum: caller}); + itEth('Can be set', async({helper}) => { + const caller = await helper.eth.createAccountWithBalance(donor); + const collection = await helper.nft.mintCollection(alice, {name: 'name', description: 'test', tokenPrefix: 'test', properties: []}); + await collection.addAdmin(alice, {Ethereum: caller}); - const address = collectionIdToAddress(collection); - const contract = new web3.eth.Contract(nonFungibleAbi as any, address, {from: caller, ...GAS_ARGS}); + const address = helper.ethAddress.fromCollectionId(collection.collectionId); + const contract = helper.ethNativeContract.collection(address, 'nft', caller); await contract.methods.setCollectionProperty('testKey', Buffer.from('testValue')).send({from: caller}); - const [{value}] = (await api.rpc.unique.collectionProperties(collection, ['testKey'])).toHuman()! 
as any; - expect(value).to.equal('testValue'); + const raw = (await collection.getData())?.raw; + + expect(raw.properties[0].value).to.equal('testValue'); }); - itWeb3('Can be deleted', async({web3, api, privateKeyWrapper}) => { - const alice = privateKeyWrapper('//Alice'); - const caller = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const collection = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await executeTransaction(api, alice, api.tx.unique.setCollectionProperties(collection, [{key: 'testKey', value: 'testValue'}])); + itEth('Can be deleted', async({helper}) => { + const caller = await helper.eth.createAccountWithBalance(donor); + const collection = await helper.nft.mintCollection(alice, {name: 'name', description: 'test', tokenPrefix: 'test', properties: [{key: 'testKey', value: 'testValue'}]}); - await addCollectionAdminExpectSuccess(alice, collection, {Ethereum: caller}); + await collection.addAdmin(alice, {Ethereum: caller}); - const address = collectionIdToAddress(collection); - const contract = new web3.eth.Contract(nonFungibleAbi as any, address, {from: caller, ...GAS_ARGS}); + const address = helper.ethAddress.fromCollectionId(collection.collectionId); + const contract = helper.ethNativeContract.collection(address, 'nft', caller); await contract.methods.deleteCollectionProperty('testKey').send({from: caller}); - const result = (await api.rpc.unique.collectionProperties(collection, ['testKey'])).toJSON()! as any; - expect(result.length).to.equal(0); + const raw = (await collection.getData())?.raw; + + expect(raw.properties.length).to.equal(0); }); - itWeb3('Can be read', async({web3, api, privateKeyWrapper}) => { - const alice = privateKeyWrapper('//Alice'); - const caller = createEthAccount(web3); - const collection = await createCollectionExpectSuccess({mode: {type:'NFT'}}); - await executeTransaction(api, alice, api.tx.unique.setCollectionProperties(collection, [{key: 'testKey', value: 'testValue'}])); + itEth('Can be read', async({helper}) => { + const caller = helper.eth.createAccount(); + const collection = await helper.nft.mintCollection(alice, {name: 'name', description: 'test', tokenPrefix: 'test', properties: [{key: 'testKey', value: 'testValue'}]}); - const address = collectionIdToAddress(collection); - const contract = new web3.eth.Contract(nonFungibleAbi as any, address, {from: caller, ...GAS_ARGS}); + const address = helper.ethAddress.fromCollectionId(collection.collectionId); + const contract = helper.ethNativeContract.collection(address, 'nft', caller); const value = await contract.methods.collectionProperty('testKey').call(); - expect(value).to.equal(web3.utils.toHex('testValue')); + expect(value).to.equal(helper.getWeb3().utils.toHex('testValue')); + }); +}); + +describe('Supports ERC721Metadata', () => { + let donor: IKeyringPair; + + before(async function() { + await usingEthPlaygrounds(async (_helper, privateKey) => { + donor = await privateKey({filename: __filename}); + }); + }); + + const checkERC721Metadata = async (helper: EthUniqueHelper, mode: 'nft' | 'rft') => { + const caller = await helper.eth.createAccountWithBalance(donor); + const bruh = await helper.eth.createAccountWithBalance(donor); + + const BASE_URI = 'base/'; + const SUFFIX = 'suffix1'; + const URI = 'uri1'; + + const collectionHelpers = helper.ethNativeContract.collectionHelpers(caller); + const creatorMethod = mode === 'rft' ? 
'createRFTCollection' : 'createNFTCollection'; + + const {collectionId, collectionAddress} = await helper.eth[creatorMethod](caller, 'n', 'd', 'p'); + + const contract = helper.ethNativeContract.collectionById(collectionId, mode, caller); + await contract.methods.addCollectionAdmin(bruh).send(); // to check that admin will work too + + const collection1 = helper.nft.getCollectionObject(collectionId); + const data1 = await collection1.getData(); + expect(data1?.raw.flags.erc721metadata).to.be.false; + expect(await contract.methods.supportsInterface('0x5b5e139f').call()).to.be.false; + + await collectionHelpers.methods.makeCollectionERC721MetadataCompatible(collectionAddress, BASE_URI) + .send({from: bruh}); + + expect(await contract.methods.supportsInterface('0x5b5e139f').call()).to.be.true; + + const collection2 = helper.nft.getCollectionObject(collectionId); + const data2 = await collection2.getData(); + expect(data2?.raw.flags.erc721metadata).to.be.true; + + const propertyPermissions = data2?.raw.tokenPropertyPermissions; + expect(propertyPermissions?.length).to.equal(2); + + expect(propertyPermissions.find((tpp: ITokenPropertyPermission) => { + return tpp.key === 'URI' && tpp.permission.mutable && tpp.permission.collectionAdmin && !tpp.permission.tokenOwner; + })).to.be.not.undefined; + + expect(propertyPermissions.find((tpp: ITokenPropertyPermission) => { + return tpp.key === 'URISuffix' && tpp.permission.mutable && tpp.permission.collectionAdmin && !tpp.permission.tokenOwner; + })).to.be.not.undefined; + + expect(data2?.raw.properties?.find((property: IProperty) => { + return property.key === 'baseURI' && property.value === BASE_URI; + })).to.be.not.undefined; + + const token1Result = await contract.methods.mint(bruh).send(); + const tokenId1 = token1Result.events.Transfer.returnValues.tokenId; + + expect(await contract.methods.tokenURI(tokenId1).call()).to.equal(BASE_URI); + + await contract.methods.setProperty(tokenId1, 'URISuffix', Buffer.from(SUFFIX)).send(); + expect(await contract.methods.tokenURI(tokenId1).call()).to.equal(BASE_URI + SUFFIX); + + await contract.methods.setProperty(tokenId1, 'URI', Buffer.from(URI)).send(); + expect(await contract.methods.tokenURI(tokenId1).call()).to.equal(URI); + + await contract.methods.deleteProperty(tokenId1, 'URI').send(); + expect(await contract.methods.tokenURI(tokenId1).call()).to.equal(BASE_URI + SUFFIX); + + const token2Result = await contract.methods.mintWithTokenURI(bruh, URI).send(); + const tokenId2 = token2Result.events.Transfer.returnValues.tokenId; + + expect(await contract.methods.tokenURI(tokenId2).call()).to.equal(URI); + + await contract.methods.deleteProperty(tokenId2, 'URI').send(); + expect(await contract.methods.tokenURI(tokenId2).call()).to.equal(BASE_URI); + + await contract.methods.setProperty(tokenId2, 'URISuffix', Buffer.from(SUFFIX)).send(); + expect(await contract.methods.tokenURI(tokenId2).call()).to.equal(BASE_URI + SUFFIX); + }; + + itEth('ERC721Metadata property can be set for NFT collection', async({helper}) => { + await checkERC721Metadata(helper, 'nft'); + }); + + itEth.ifWithPallets('ERC721Metadata property can be set for RFT collection', [Pallets.ReFungible], async({helper}) => { + await checkERC721Metadata(helper, 'rft'); + }); }); diff --git a/tests/src/eth/collectionSponsoring.test.ts b/tests/src/eth/collectionSponsoring.test.ts index 61045fb924..df04d80d8d 100644 --- a/tests/src/eth/collectionSponsoring.test.ts +++ b/tests/src/eth/collectionSponsoring.test.ts @@ -1,39 +1,274 @@ -import {addToAllowListExpectSuccess, 
confirmSponsorshipExpectSuccess, createCollectionExpectSuccess, enablePublicMintingExpectSuccess, setCollectionSponsorExpectSuccess} from '../util/helpers'; -import {itWeb3, createEthAccount, collectionIdToAddress, GAS_ARGS, normalizeEvents} from './util/helpers'; -import nonFungibleAbi from './nonFungibleAbi.json'; -import {expect} from 'chai'; +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +import {IKeyringPair} from '@polkadot/types/types'; +import {usingPlaygrounds} from '../util/index'; +import {itEth, expect} from './util'; describe('evm collection sponsoring', () => { - itWeb3('sponsors mint transactions', async ({web3, privateKeyWrapper}) => { - const alice = privateKeyWrapper('//Alice'); + let donor: IKeyringPair; + let alice: IKeyringPair; + let nominal: bigint; - const collection = await createCollectionExpectSuccess(); - await setCollectionSponsorExpectSuccess(collection, alice.address); - await confirmSponsorshipExpectSuccess(collection); + before(async () => { + await usingPlaygrounds(async (helper, privateKey) => { + donor = await privateKey({filename: __filename}); + [alice] = await helper.arrange.createAccounts([100n], donor); + nominal = helper.balance.getOneTokenNominal(); + }); + }); + + itEth('sponsors mint transactions', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {tokenPrefix: 'spnr', permissions: {mintMode: true}}); + await collection.setSponsor(alice, alice.address); + await collection.confirmSponsorship(alice); - const minter = createEthAccount(web3); - expect(await web3.eth.getBalance(minter)).to.equal('0'); + const minter = helper.eth.createAccount(); + expect(await helper.balance.getEthereum(minter)).to.equal(0n); - const address = collectionIdToAddress(collection); - const contract = new web3.eth.Contract(nonFungibleAbi as any, collectionIdToAddress(collection), {from: minter, ...GAS_ARGS}); + const collectionAddress = helper.ethAddress.fromCollectionId(collection.collectionId); + const contract = helper.ethNativeContract.collection(collectionAddress, 'nft', minter); - await enablePublicMintingExpectSuccess(alice, collection); - await addToAllowListExpectSuccess(alice, collection, {Ethereum: minter}); + await collection.addToAllowList(alice, {Ethereum: minter}); - const nextTokenId = await contract.methods.nextTokenId().call(); - expect(nextTokenId).to.equal('1'); - const result = await contract.methods.mint(minter, nextTokenId).send(); - const events = normalizeEvents(result.events); + const result = await contract.methods.mint(minter).send(); + + const events = helper.eth.normalizeEvents(result.events); expect(events).to.be.deep.equal([ { - address, + address: collectionAddress, event: 'Transfer', args: { from: '0x0000000000000000000000000000000000000000', to: minter, - tokenId: nextTokenId, + tokenId: '1', }, }, ]); }); + + // TODO: Temprorary off. 
Need refactor + // itWeb3('Set substrate sponsor', async ({api, web3, privateKeyWrapper}) => { + // const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); + // const collectionHelpers = evmCollectionHelpers(web3, owner); + // let result = await collectionHelpers.methods.createNFTCollection('Sponsor collection', '1', '1').send(); + // const {collectionIdAddress, collectionId} = await getCollectionAddressFromResult(api, result); + // const sponsor = privateKeyWrapper('//Alice'); + // const collectionEvm = evmCollection(web3, owner, collectionIdAddress); + + // expect(await collectionEvm.methods.hasCollectionPendingSponsor().call({from: owner})).to.be.false; + // result = await collectionEvm.methods.setCollectionSponsorSubstrate(sponsor.addressRaw).send({from: owner}); + // expect(await collectionEvm.methods.hasCollectionPendingSponsor().call({from: owner})).to.be.true; + + // const confirmTx = await api.tx.unique.confirmSponsorship(collectionId); + // await submitTransactionAsync(sponsor, confirmTx); + // expect(await collectionEvm.methods.hasCollectionPendingSponsor().call({from: owner})).to.be.false; + + // const sponsorTuple = await collectionEvm.methods.collectionSponsor().call({from: owner}); + // expect(bigIntToSub(api, BigInt(sponsorTuple[1]))).to.be.eq(sponsor.address); + // }); + + itEth('Remove sponsor', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const collectionHelpers = helper.ethNativeContract.collectionHelpers(owner); + + let result = await collectionHelpers.methods.createNFTCollection('Sponsor collection', '1', '1').send({value: Number(2n * nominal)}); + const collectionIdAddress = helper.ethAddress.normalizeAddress(result.events.CollectionCreated.returnValues.collectionId); + const sponsor = await helper.eth.createAccountWithBalance(donor); + const collectionEvm = helper.ethNativeContract.collection(collectionIdAddress, 'nft', owner); + + expect(await collectionEvm.methods.hasCollectionPendingSponsor().call({from: owner})).to.be.false; + result = await collectionEvm.methods.setCollectionSponsor(sponsor).send({from: owner}); + expect(await collectionEvm.methods.hasCollectionPendingSponsor().call({from: owner})).to.be.true; + + await collectionEvm.methods.confirmCollectionSponsorship().send({from: sponsor}); + expect(await collectionEvm.methods.hasCollectionPendingSponsor().call({from: owner})).to.be.false; + + await collectionEvm.methods.removeCollectionSponsor().send({from: owner}); + + const sponsorTuple = await collectionEvm.methods.collectionSponsor().call({from: owner}); + expect(sponsorTuple.field_0).to.be.eq('0x0000000000000000000000000000000000000000'); + }); + + itEth('Sponsoring collection from evm address via access list', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + + const {collectionId, collectionAddress} = await helper.eth.createERC721MetadataCompatibleNFTCollection(owner, 'Sponsor collection', '1', '1', ''); + + const collection = helper.nft.getCollectionObject(collectionId); + const sponsor = await helper.eth.createAccountWithBalance(donor); + const collectionEvm = helper.ethNativeContract.collection(collectionAddress, 'nft', owner); + + await collectionEvm.methods.setCollectionSponsor(sponsor).send({from: owner}); + let collectionData = (await collection.getData())!; + expect(collectionData.raw.sponsorship.Unconfirmed).to.be.eq(helper.address.ethToSubstrate(sponsor, true)); + await 
expect(collectionEvm.methods.confirmCollectionSponsorship().call()).to.be.rejectedWith('caller is not set as sponsor'); + + await collectionEvm.methods.confirmCollectionSponsorship().send({from: sponsor}); + collectionData = (await collection.getData())!; + expect(collectionData.raw.sponsorship.Confirmed).to.be.eq(helper.address.ethToSubstrate(sponsor, true)); + + const user = helper.eth.createAccount(); + const nextTokenId = await collectionEvm.methods.nextTokenId().call(); + expect(nextTokenId).to.be.equal('1'); + + const oldPermissions = (await collection.getData())!.raw.permissions; // (await getDetailedCollectionInfo(api, collectionId))!.permissions.toHuman(); + expect(oldPermissions.mintMode).to.be.false; + expect(oldPermissions.access).to.be.equal('Normal'); + + await collectionEvm.methods.setCollectionAccess(1 /*'AllowList'*/).send({from: owner}); + await collectionEvm.methods.addToCollectionAllowList(user).send({from: owner}); + await collectionEvm.methods.setCollectionMintMode(true).send({from: owner}); + + const newPermissions = (await collection.getData())!.raw.permissions; // (await getDetailedCollectionInfo(api, collectionId))!.permissions.toHuman(); + expect(newPermissions.mintMode).to.be.true; + expect(newPermissions.access).to.be.equal('AllowList'); + + const ownerBalanceBefore = await helper.balance.getSubstrate(helper.address.ethToSubstrate(owner)); + const sponsorBalanceBefore = await helper.balance.getSubstrate(helper.address.ethToSubstrate(sponsor)); + + { + const result = await collectionEvm.methods.mintWithTokenURI(user, 'Test URI').send({from: user}); + const events = helper.eth.normalizeEvents(result.events); + + expect(events).to.be.deep.equal([ + { + address: collectionAddress, + event: 'Transfer', + args: { + from: '0x0000000000000000000000000000000000000000', + to: user, + tokenId: '1', + }, + }, + ]); + + const ownerBalanceAfter = await helper.balance.getSubstrate(await helper.address.ethToSubstrate(owner)); + const sponsorBalanceAfter = await helper.balance.getSubstrate(await helper.address.ethToSubstrate(sponsor)); + + expect(await collectionEvm.methods.tokenURI(nextTokenId).call()).to.be.equal('Test URI'); + expect(ownerBalanceBefore).to.be.eq(ownerBalanceAfter); + expect(sponsorBalanceBefore > sponsorBalanceAfter).to.be.true; + } + }); + + // TODO: Temprorary off. 
Need refactor + // itWeb3('Sponsoring collection from substrate address via access list', async ({api, web3, privateKeyWrapper}) => { + // const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); + // const collectionHelpers = evmCollectionHelpers(web3, owner); + // const result = await collectionHelpers.methods.createERC721MetadataCompatibleNFTCollection('Sponsor collection', '1', '1', '').send(); + // const {collectionIdAddress, collectionId} = await getCollectionAddressFromResult(api, result); + // const sponsor = privateKeyWrapper('//Alice'); + // const collectionEvm = evmCollection(web3, owner, collectionIdAddress); + + // await collectionEvm.methods.setCollectionSponsorSubstrate(sponsor.addressRaw).send({from: owner}); + + // const confirmTx = await api.tx.unique.confirmSponsorship(collectionId); + // await submitTransactionAsync(sponsor, confirmTx); + + // const user = createEthAccount(web3); + // const nextTokenId = await collectionEvm.methods.nextTokenId().call(); + // expect(nextTokenId).to.be.equal('1'); + + // await collectionEvm.methods.setCollectionAccess(1 /*'AllowList'*/).send({from: owner}); + // await collectionEvm.methods.addToCollectionAllowList(user).send({from: owner}); + // await collectionEvm.methods.setCollectionMintMode(true).send({from: owner}); + + // const ownerBalanceBefore = await ethBalanceViaSub(api, owner); + // const sponsorBalanceBefore = (await getBalance(api, [sponsor.address]))[0]; + + // { + // const nextTokenId = await collectionEvm.methods.nextTokenId().call(); + // expect(nextTokenId).to.be.equal('1'); + // const result = await collectionEvm.methods.mintWithTokenURI( + // user, + // nextTokenId, + // 'Test URI', + // ).send({from: user}); + // const events = normalizeEvents(result.events); + + // expect(events).to.be.deep.equal([ + // { + // address: collectionIdAddress, + // event: 'Transfer', + // args: { + // from: '0x0000000000000000000000000000000000000000', + // to: user, + // tokenId: nextTokenId, + // }, + // }, + // ]); + + // const ownerBalanceAfter = await ethBalanceViaSub(api, owner); + // const sponsorBalanceAfter = (await getBalance(api, [sponsor.address]))[0]; + + // expect(await collectionEvm.methods.tokenURI(nextTokenId).call()).to.be.equal('Test URI'); + // expect(ownerBalanceBefore).to.be.eq(ownerBalanceAfter); + // expect(sponsorBalanceBefore > sponsorBalanceAfter).to.be.true; + // } + // }); + + itEth('Check that transaction via EVM spend money from sponsor address', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + + const {collectionAddress, collectionId} = await helper.eth.createERC721MetadataCompatibleNFTCollection(owner,'Sponsor collection', '1', '1', ''); + const collection = helper.nft.getCollectionObject(collectionId); + const sponsor = await helper.eth.createAccountWithBalance(donor); + const collectionEvm = helper.ethNativeContract.collection(collectionAddress, 'nft', owner); + + await collectionEvm.methods.setCollectionSponsor(sponsor).send(); + let collectionData = (await collection.getData())!; + expect(collectionData.raw.sponsorship.Unconfirmed).to.be.eq(helper.address.ethToSubstrate(sponsor, true)); + await expect(collectionEvm.methods.confirmCollectionSponsorship().call()).to.be.rejectedWith('caller is not set as sponsor'); + + const sponsorCollection = helper.ethNativeContract.collection(collectionAddress, 'nft', sponsor); + await sponsorCollection.methods.confirmCollectionSponsorship().send(); + collectionData = (await collection.getData())!; + 
expect(collectionData.raw.sponsorship.Confirmed).to.be.eq(helper.address.ethToSubstrate(sponsor, true)); + + const user = helper.eth.createAccount(); + await collectionEvm.methods.addCollectionAdmin(user).send(); + + const ownerBalanceBefore = await helper.balance.getSubstrate(helper.address.ethToSubstrate(owner)); + const sponsorBalanceBefore = await helper.balance.getSubstrate(helper.address.ethToSubstrate(sponsor)); + + const userCollectionEvm = helper.ethNativeContract.collection(collectionAddress, 'nft', user); + + const result = await userCollectionEvm.methods.mintWithTokenURI(user, 'Test URI').send(); + const tokenId = result.events.Transfer.returnValues.tokenId; + + const events = helper.eth.normalizeEvents(result.events); + const address = helper.ethAddress.fromCollectionId(collectionId); + + expect(events).to.be.deep.equal([ + { + address, + event: 'Transfer', + args: { + from: '0x0000000000000000000000000000000000000000', + to: user, + tokenId: '1', + }, + }, + ]); + expect(await userCollectionEvm.methods.tokenURI(tokenId).call()).to.be.equal('Test URI'); + + const ownerBalanceAfter = await helper.balance.getSubstrate(helper.address.ethToSubstrate(owner)); + expect(ownerBalanceAfter).to.be.eq(ownerBalanceBefore); + const sponsorBalanceAfter = await helper.balance.getSubstrate(helper.address.ethToSubstrate(sponsor)); + expect(sponsorBalanceAfter < sponsorBalanceBefore).to.be.true; + }); }); diff --git a/tests/src/eth/contractSponsoring.test.ts b/tests/src/eth/contractSponsoring.test.ts index d74e5790f0..597bf27e13 100644 --- a/tests/src/eth/contractSponsoring.test.ts +++ b/tests/src/eth/contractSponsoring.test.ts @@ -14,336 +14,577 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . 
-import {expect} from 'chai'; -import { - contractHelpers, - createEthAccountWithBalance, - transferBalanceToEth, - deployFlipper, - itWeb3, - SponsoringMode, - createEthAccount, - collectionIdToAddress, - GAS_ARGS, - normalizeEvents, - subToEth, - executeEthTxOnSub, - evmCollectionHelpers, - getCollectionAddressFromResult, - evmCollection, - ethBalanceViaSub, -} from './util/helpers'; -import { - addCollectionAdminExpectSuccess, - createCollectionExpectSuccess, - getDetailedCollectionInfo, - transferBalanceTo, -} from '../util/helpers'; -import nonFungibleAbi from './nonFungibleAbi.json'; -import getBalance from '../substrate/get-balance'; -import {evmToAddress} from '@polkadot/util-crypto'; +import {IKeyringPair} from '@polkadot/types/types'; +import {EthUniqueHelper} from './util/playgrounds/unique.dev'; +import {itEth, expect, SponsoringMode, usingEthPlaygrounds} from './util'; +import {usingPlaygrounds} from '../util'; +import {CompiledContract} from './util/playgrounds/types'; describe('Sponsoring EVM contracts', () => { - itWeb3('Sponsoring can be set by the address that has deployed the contract', async ({api, web3, privateKeyWrapper}) => { - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const flipper = await deployFlipper(web3, owner); - const helpers = contractHelpers(web3, owner); + let donor: IKeyringPair; + + before(async () => { + await usingPlaygrounds(async (_helper, privateKey) => { + donor = await privateKey({filename: __filename}); + }); + }); + + itEth('Self sponsored can be set by the address that deployed the contract', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const flipper = await helper.eth.deployFlipper(owner); + const helpers = helper.ethNativeContract.contractHelpers(owner); + + expect(await helpers.methods.hasSponsor(flipper.options.address).call()).to.be.false; + await expect(helpers.methods.selfSponsoredEnable(flipper.options.address).send()).to.be.not.rejected; + expect(await helpers.methods.hasSponsor(flipper.options.address).call()).to.be.true; + }); + + itEth('Set self sponsored events', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const flipper = await helper.eth.deployFlipper(owner); + const helpers = helper.ethNativeContract.contractHelpers(owner); + + const result = await helpers.methods.selfSponsoredEnable(flipper.options.address).send(); + const ethEvents = helper.eth.normalizeEvents(result.events); + expect(ethEvents).to.be.deep.equal([ + { + address: flipper.options.address, + event: 'ContractSponsorSet', + args: { + contractAddress: flipper.options.address, + sponsor: flipper.options.address, + }, + }, + { + address: flipper.options.address, + event: 'ContractSponsorshipConfirmed', + args: { + contractAddress: flipper.options.address, + sponsor: flipper.options.address, + }, + }, + ]); + }); + + itEth('Self sponsored can not be set by the address that did not deployed the contract', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const notOwner = await helper.eth.createAccountWithBalance(donor); + const helpers = helper.ethNativeContract.contractHelpers(owner); + const flipper = await helper.eth.deployFlipper(owner); + + expect(await helpers.methods.hasSponsor(flipper.options.address).call()).to.be.false; + await expect(helpers.methods.selfSponsoredEnable(flipper.options.address).call({from: notOwner})).to.be.rejectedWith('NoPermission'); + expect(await 
helpers.methods.hasSponsor(flipper.options.address).call()).to.be.false; + }); + + itEth('Sponsoring can be set by the address that has deployed the contract', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const helpers = helper.ethNativeContract.contractHelpers(owner); + const flipper = await helper.eth.deployFlipper(owner); + expect(await helpers.methods.sponsoringEnabled(flipper.options.address).call()).to.be.false; - await helpers.methods.setSponsoringMode(flipper.options.address, SponsoringMode.Allowlisted).send({from: owner}); + await expect(helpers.methods.setSponsoringMode(flipper.options.address, SponsoringMode.Allowlisted).send({from: owner})).to.be.not.rejected; expect(await helpers.methods.sponsoringEnabled(flipper.options.address).call()).to.be.true; }); - itWeb3('Sponsoring cannot be set by the address that did not deployed the contract', async ({api, web3, privateKeyWrapper}) => { - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const notOwner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const flipper = await deployFlipper(web3, owner); - const helpers = contractHelpers(web3, owner); + itEth('Sponsoring cannot be set by the address that did not deployed the contract', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const notOwner = await helper.eth.createAccountWithBalance(donor); + const helpers = helper.ethNativeContract.contractHelpers(owner); + const flipper = await helper.eth.deployFlipper(owner); + expect(await helpers.methods.sponsoringEnabled(flipper.options.address).call()).to.be.false; - await expect(helpers.methods.setSponsoringMode(notOwner, SponsoringMode.Allowlisted).send({from: notOwner})).to.rejected; + await expect(helpers.methods.setSponsoringMode(notOwner, SponsoringMode.Allowlisted).call({from: notOwner})).to.be.rejectedWith('NoPermission'); expect(await helpers.methods.sponsoringEnabled(flipper.options.address).call()).to.be.false; }); + + itEth('Sponsor can be set by the address that deployed the contract', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const sponsor = await helper.eth.createAccountWithBalance(donor); + const helpers = helper.ethNativeContract.contractHelpers(owner); + const flipper = await helper.eth.deployFlipper(owner); + + expect(await helpers.methods.hasPendingSponsor(flipper.options.address).call()).to.be.false; + await expect(helpers.methods.setSponsor(flipper.options.address, sponsor).send()).to.be.not.rejected; + expect(await helpers.methods.hasPendingSponsor(flipper.options.address).call()).to.be.true; + }); + + itEth('Set sponsor event', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const sponsor = await helper.eth.createAccountWithBalance(donor); + const helpers = helper.ethNativeContract.contractHelpers(owner); + const flipper = await helper.eth.deployFlipper(owner); + + const result = await helpers.methods.setSponsor(flipper.options.address, sponsor).send(); + const events = helper.eth.normalizeEvents(result.events); + expect(events).to.be.deep.equal([ + { + address: flipper.options.address, + event: 'ContractSponsorSet', + args: { + contractAddress: flipper.options.address, + sponsor: sponsor, + }, + }, + ]); + }); + + itEth('Sponsor can not be set by the address that did not deployed the contract', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const 
sponsor = await helper.eth.createAccountWithBalance(donor); + const notOwner = await helper.eth.createAccountWithBalance(donor); + const helpers = helper.ethNativeContract.contractHelpers(owner); + const flipper = await helper.eth.deployFlipper(owner); + + expect(await helpers.methods.hasPendingSponsor(flipper.options.address).call()).to.be.false; + await expect(helpers.methods.setSponsor(flipper.options.address, sponsor).call({from: notOwner})).to.be.rejectedWith('NoPermission'); + expect(await helpers.methods.hasPendingSponsor(flipper.options.address).call()).to.be.false; + }); + + itEth('Sponsorship can be confirmed by the address that pending as sponsor', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const sponsor = await helper.eth.createAccountWithBalance(donor); + const helpers = helper.ethNativeContract.contractHelpers(owner); + const flipper = await helper.eth.deployFlipper(owner); + + expect(await helpers.methods.hasSponsor(flipper.options.address).call()).to.be.false; + await expect(helpers.methods.setSponsor(flipper.options.address, sponsor).send()).to.be.not.rejected; + await expect(helpers.methods.confirmSponsorship(flipper.options.address).send({from: sponsor})).to.be.not.rejected; + expect(await helpers.methods.hasSponsor(flipper.options.address).call()).to.be.true; + }); - itWeb3('In generous mode, non-allowlisted user transaction will be sponsored', async ({api, web3, privateKeyWrapper}) => { - const alice = privateKeyWrapper('//Alice'); + itEth('Confirm sponsorship event', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const sponsor = await helper.eth.createAccountWithBalance(donor); + const helpers = helper.ethNativeContract.contractHelpers(owner); + const flipper = await helper.eth.deployFlipper(owner); - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const caller = await createEthAccountWithBalance(api, web3, privateKeyWrapper); + await expect(helpers.methods.setSponsor(flipper.options.address, sponsor).send()).to.be.not.rejected; + const result = await helpers.methods.confirmSponsorship(flipper.options.address).send({from: sponsor}); + const events = helper.eth.normalizeEvents(result.events); + expect(events).to.be.deep.equal([ + { + address: flipper.options.address, + event: 'ContractSponsorshipConfirmed', + args: { + contractAddress: flipper.options.address, + sponsor: sponsor, + }, + }, + ]); + }); - const flipper = await deployFlipper(web3, owner); + itEth('Sponsorship can not be confirmed by the address that not pending as sponsor', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const sponsor = await helper.eth.createAccountWithBalance(donor); + const notSponsor = await helper.eth.createAccountWithBalance(donor); + const helpers = helper.ethNativeContract.contractHelpers(owner); + const flipper = await helper.eth.deployFlipper(owner); + + expect(await helpers.methods.hasSponsor(flipper.options.address).call()).to.be.false; + await expect(helpers.methods.setSponsor(flipper.options.address, sponsor).send()).to.be.not.rejected; + await expect(helpers.methods.confirmSponsorship(flipper.options.address).call({from: notSponsor})).to.be.rejectedWith('NoPermission'); + expect(await helpers.methods.hasSponsor(flipper.options.address).call()).to.be.false; + }); - const helpers = contractHelpers(web3, owner); + itEth('Sponsorship can not be confirmed by the address that not set as sponsor', async ({helper}) => { 
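+ // No sponsor has been proposed for this contract, so confirming sponsorship must fail with 'NoPendingSponsor'.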
+ const owner = await helper.eth.createAccountWithBalance(donor); + const notSponsor = await helper.eth.createAccountWithBalance(donor); + const helpers = helper.ethNativeContract.contractHelpers(owner); + const flipper = await helper.eth.deployFlipper(owner); + + expect(await helpers.methods.hasSponsor(flipper.options.address).call()).to.be.false; + await expect(helpers.methods.confirmSponsorship(flipper.options.address).call({from: notSponsor})).to.be.rejectedWith('NoPendingSponsor'); + expect(await helpers.methods.hasSponsor(flipper.options.address).call()).to.be.false; + }); + + itEth('Get self sponsored sponsor', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const helpers = helper.ethNativeContract.contractHelpers(owner); + const flipper = await helper.eth.deployFlipper(owner); + + await helpers.methods.selfSponsoredEnable(flipper.options.address).send(); + + const result = await helpers.methods.sponsor(flipper.options.address).call(); + + expect(result[0]).to.be.eq(flipper.options.address); + expect(result[1]).to.be.eq('0'); + }); + + itEth('Get confirmed sponsor', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const sponsor = await helper.eth.createAccountWithBalance(donor); + const helpers = helper.ethNativeContract.contractHelpers(owner); + const flipper = await helper.eth.deployFlipper(owner); + + await helpers.methods.setSponsor(flipper.options.address, sponsor).send(); + await helpers.methods.confirmSponsorship(flipper.options.address).send({from: sponsor}); + + const result = await helpers.methods.sponsor(flipper.options.address).call(); + + expect(result[0]).to.be.eq(sponsor); + expect(result[1]).to.be.eq('0'); + }); + + itEth('Sponsor can be removed by the address that deployed the contract', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const sponsor = await helper.eth.createAccountWithBalance(donor); + const helpers = helper.ethNativeContract.contractHelpers(owner); + const flipper = await helper.eth.deployFlipper(owner); + + expect(await helpers.methods.hasSponsor(flipper.options.address).call()).to.be.false; + await helpers.methods.setSponsor(flipper.options.address, sponsor).send(); + await helpers.methods.confirmSponsorship(flipper.options.address).send({from: sponsor}); + expect(await helpers.methods.hasSponsor(flipper.options.address).call()).to.be.true; + + await helpers.methods.removeSponsor(flipper.options.address).send(); + expect(await helpers.methods.hasSponsor(flipper.options.address).call()).to.be.false; + }); + + itEth('Remove sponsor event', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const sponsor = await helper.eth.createAccountWithBalance(donor); + const helpers = helper.ethNativeContract.contractHelpers(owner); + const flipper = await helper.eth.deployFlipper(owner); + + await helpers.methods.setSponsor(flipper.options.address, sponsor).send(); + await helpers.methods.confirmSponsorship(flipper.options.address).send({from: sponsor}); + + const result = await helpers.methods.removeSponsor(flipper.options.address).send(); + const events = helper.eth.normalizeEvents(result.events); + expect(events).to.be.deep.equal([ + { + address: flipper.options.address, + event: 'ContractSponsorRemoved', + args: { + contractAddress: flipper.options.address, + }, + }, + ]); + }); + + itEth('Sponsor can not be removed by the address that did not deployed the contract', async ({helper}) => { + const 
owner = await helper.eth.createAccountWithBalance(donor); + const notOwner = await helper.eth.createAccountWithBalance(donor); + const sponsor = await helper.eth.createAccountWithBalance(donor); + const helpers = helper.ethNativeContract.contractHelpers(owner); + const flipper = await helper.eth.deployFlipper(owner); + + expect(await helpers.methods.hasSponsor(flipper.options.address).call()).to.be.false; + await helpers.methods.setSponsor(flipper.options.address, sponsor).send(); + await helpers.methods.confirmSponsorship(flipper.options.address).send({from: sponsor}); + expect(await helpers.methods.hasSponsor(flipper.options.address).call()).to.be.true; + + await expect(helpers.methods.removeSponsor(flipper.options.address).call({from: notOwner})).to.be.rejectedWith('NoPermission'); + expect(await helpers.methods.hasSponsor(flipper.options.address).call()).to.be.true; + }); + + itEth('In generous mode, non-allowlisted user transaction will be sponsored', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const sponsor = await helper.eth.createAccountWithBalance(donor); + const caller = await helper.eth.createAccountWithBalance(donor); + const helpers = helper.ethNativeContract.contractHelpers(owner); + const flipper = await helper.eth.deployFlipper(owner); + + await helpers.methods.setSponsor(flipper.options.address, sponsor).send(); + await helpers.methods.confirmSponsorship(flipper.options.address).send({from: sponsor}); - expect(await helpers.methods.sponsoringEnabled(flipper.options.address).call()).to.be.false; await helpers.methods.setSponsoringMode(flipper.options.address, SponsoringMode.Generous).send({from: owner}); await helpers.methods.setSponsoringRateLimit(flipper.options.address, 0).send({from: owner}); - expect(await helpers.methods.sponsoringEnabled(flipper.options.address).call()).to.be.true; - await transferBalanceToEth(api, alice, flipper.options.address); - - const originalFlipperBalance = await web3.eth.getBalance(flipper.options.address); - expect(originalFlipperBalance).to.be.not.equal('0'); + const sponsorBalanceBefore = await helper.balance.getSubstrate(await helper.address.ethToSubstrate(sponsor)); + const callerBalanceBefore = await helper.balance.getSubstrate(await helper.address.ethToSubstrate(caller)); await flipper.methods.flip().send({from: caller}); expect(await flipper.methods.getValue().call()).to.be.true; - // Balance should be taken from flipper instead of caller - const balanceAfter = await web3.eth.getBalance(flipper.options.address); - expect(+balanceAfter).to.be.lessThan(+originalFlipperBalance); + // Balance should be taken from sponsor instead of caller + const sponsorBalanceAfter = await helper.balance.getSubstrate(await helper.address.ethToSubstrate(sponsor)); + const callerBalanceAfter = await helper.balance.getSubstrate(await helper.address.ethToSubstrate(caller)); + expect(sponsorBalanceAfter < sponsorBalanceBefore).to.be.true; + expect(callerBalanceAfter).to.be.eq(callerBalanceBefore); }); - itWeb3('Sponsoring is set, an address that has no UNQ can send a transaction and it works. 
Sponsor balance should decrease (allowlisted)', async ({api, web3, privateKeyWrapper}) => { - const alice = privateKeyWrapper('//Alice'); + itEth('In generous mode, non-allowlisted user transaction will be self sponsored', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const caller = await helper.eth.createAccountWithBalance(donor); + const helpers = helper.ethNativeContract.contractHelpers(owner); + const flipper = await helper.eth.deployFlipper(owner); + + await helpers.methods.selfSponsoredEnable(flipper.options.address).send(); - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const caller = createEthAccount(web3); + await helpers.methods.setSponsoringMode(flipper.options.address, SponsoringMode.Generous).send({from: owner}); + await helpers.methods.setSponsoringRateLimit(flipper.options.address, 0).send({from: owner}); + + await helper.eth.transferBalanceFromSubstrate(donor, flipper.options.address); + + const contractBalanceBefore = await helper.balance.getSubstrate(await helper.address.ethToSubstrate(flipper.options.address)); + const callerBalanceBefore = await helper.balance.getSubstrate(await helper.address.ethToSubstrate(caller)); + + await flipper.methods.flip().send({from: caller}); + expect(await flipper.methods.getValue().call()).to.be.true; + + // Balance should be taken from sponsor instead of caller + const contractBalanceAfter = await helper.balance.getSubstrate(await helper.address.ethToSubstrate(flipper.options.address)); + const callerBalanceAfter = await helper.balance.getSubstrate(await helper.address.ethToSubstrate(caller)); + expect(contractBalanceAfter < contractBalanceBefore).to.be.true; + expect(callerBalanceAfter).to.be.eq(callerBalanceBefore); + }); - const flipper = await deployFlipper(web3, owner); + itEth('Sponsoring is set, an address that has no UNQ can send a transaction and it works. 
Sponsor balance should decrease (allowlisted)', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const sponsor = await helper.eth.createAccountWithBalance(donor); + const caller = helper.eth.createAccount(); + const helpers = helper.ethNativeContract.contractHelpers(owner); + const flipper = await helper.eth.deployFlipper(owner); - const helpers = contractHelpers(web3, owner); await helpers.methods.toggleAllowlist(flipper.options.address, true).send({from: owner}); await helpers.methods.toggleAllowed(flipper.options.address, caller, true).send({from: owner}); - expect(await helpers.methods.sponsoringEnabled(flipper.options.address).call()).to.be.false; await helpers.methods.setSponsoringMode(flipper.options.address, SponsoringMode.Allowlisted).send({from: owner}); await helpers.methods.setSponsoringRateLimit(flipper.options.address, 0).send({from: owner}); - expect(await helpers.methods.sponsoringEnabled(flipper.options.address).call()).to.be.true; - await transferBalanceToEth(api, alice, flipper.options.address); + await helpers.methods.setSponsor(flipper.options.address, sponsor).send(); + await helpers.methods.confirmSponsorship(flipper.options.address).send({from: sponsor}); - const originalFlipperBalance = await web3.eth.getBalance(flipper.options.address); - expect(originalFlipperBalance).to.be.not.equal('0'); + const sponsorBalanceBefore = await helper.balance.getSubstrate(await helper.address.ethToSubstrate(sponsor)); + expect(sponsorBalanceBefore).to.be.not.equal('0'); await flipper.methods.flip().send({from: caller}); expect(await flipper.methods.getValue().call()).to.be.true; // Balance should be taken from flipper instead of caller - const balanceAfter = await web3.eth.getBalance(flipper.options.address); - expect(+balanceAfter).to.be.lessThan(+originalFlipperBalance); + const sponsorBalanceAfter = await helper.balance.getSubstrate(await helper.address.ethToSubstrate(sponsor)); + expect(sponsorBalanceAfter < sponsorBalanceBefore).to.be.true; }); - itWeb3('Sponsoring is set, an address that has no UNQ can send a transaction and it works. Sponsor balance should not decrease (non-allowlisted)', async ({api, web3, privateKeyWrapper}) => { - const alice = privateKeyWrapper('//Alice'); - - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const caller = createEthAccount(web3); + itEth('Sponsoring is set, an address that has no UNQ can send a transaction and it works. 
Sponsor balance should not decrease (non-allowlisted)', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const caller = helper.eth.createAccount(); + const helpers = helper.ethNativeContract.contractHelpers(owner); + const flipper = await helper.eth.deployFlipper(owner); - const flipper = await deployFlipper(web3, owner); - - const helpers = contractHelpers(web3, owner); - - expect(await helpers.methods.sponsoringEnabled(flipper.options.address).call()).to.be.false; await helpers.methods.setSponsoringMode(flipper.options.address, SponsoringMode.Allowlisted).send({from: owner}); await helpers.methods.setSponsoringRateLimit(flipper.options.address, 0).send({from: owner}); - expect(await helpers.methods.sponsoringEnabled(flipper.options.address).call()).to.be.true; - await transferBalanceToEth(api, alice, flipper.options.address); + await helper.eth.transferBalanceFromSubstrate(donor, flipper.options.address); - const originalFlipperBalance = await web3.eth.getBalance(flipper.options.address); + const originalFlipperBalance = await helper.balance.getEthereum(flipper.options.address); expect(originalFlipperBalance).to.be.not.equal('0'); await expect(flipper.methods.flip().send({from: caller})).to.be.rejectedWith(/InvalidTransaction::Payment/); expect(await flipper.methods.getValue().call()).to.be.false; // Balance should be taken from flipper instead of caller - const balanceAfter = await web3.eth.getBalance(flipper.options.address); - expect(+balanceAfter).to.be.equals(+originalFlipperBalance); + // The transaction is rejected, so the contract balance must stay unchanged. + const balanceAfter = await helper.balance.getEthereum(flipper.options.address); + expect(balanceAfter).to.be.equal(originalFlipperBalance); }); - itWeb3('Sponsoring is set, an address that has UNQ can send a transaction and it works. User balance should not change', async ({api, web3, privateKeyWrapper}) => { - const alice = privateKeyWrapper('//Alice'); - - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const caller = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const originalCallerBalance = await web3.eth.getBalance(caller); + itEth('Sponsoring is set, an address that has UNQ can send a transaction and it works. 
User balance should not change', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const sponsor = await helper.eth.createAccountWithBalance(donor); + const caller = await helper.eth.createAccountWithBalance(donor); + const helpers = helper.ethNativeContract.contractHelpers(owner); + const flipper = await helper.eth.deployFlipper(owner); - const flipper = await deployFlipper(web3, owner); - - const helpers = contractHelpers(web3, owner); await helpers.methods.toggleAllowlist(flipper.options.address, true).send({from: owner}); await helpers.methods.toggleAllowed(flipper.options.address, caller, true).send({from: owner}); - expect(await helpers.methods.sponsoringEnabled(flipper.options.address).call()).to.be.false; await helpers.methods.setSponsoringMode(flipper.options.address, SponsoringMode.Allowlisted).send({from: owner}); await helpers.methods.setSponsoringRateLimit(flipper.options.address, 0).send({from: owner}); - expect(await helpers.methods.sponsoringEnabled(flipper.options.address).call()).to.be.true; - await transferBalanceToEth(api, alice, flipper.options.address); + await helpers.methods.setSponsor(flipper.options.address, sponsor).send(); + await helpers.methods.confirmSponsorship(flipper.options.address).send({from: sponsor}); - const originalFlipperBalance = await web3.eth.getBalance(flipper.options.address); - expect(originalFlipperBalance).to.be.not.equal('0'); + const sponsorBalanceBefore = await helper.balance.getSubstrate(await helper.address.ethToSubstrate(sponsor)); + const callerBalanceBefore = await helper.balance.getSubstrate(await helper.address.ethToSubstrate(caller)); await flipper.methods.flip().send({from: caller}); expect(await flipper.methods.getValue().call()).to.be.true; - expect(await web3.eth.getBalance(caller)).to.be.equals(originalCallerBalance); + const sponsorBalanceAfter = await helper.balance.getSubstrate(await helper.address.ethToSubstrate(sponsor)); + const callerBalanceAfter = await helper.balance.getSubstrate(await helper.address.ethToSubstrate(caller)); + expect(sponsorBalanceAfter < sponsorBalanceBefore).to.be.true; + expect(callerBalanceAfter).to.be.equal(callerBalanceBefore); }); - itWeb3('Sponsoring is limited, with setContractRateLimit. The limitation is working if transactions are sent more often, the sender pays the commission.', async ({api, web3, privateKeyWrapper}) => { - const alice = privateKeyWrapper('//Alice'); - - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const caller = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const originalCallerBalance = await web3.eth.getBalance(caller); - - const flipper = await deployFlipper(web3, owner); - - const helpers = contractHelpers(web3, owner); + itEth('Sponsoring is limited, with setContractRateLimit. 
The limitation is working if transactions are sent more often, the sender pays the commission.', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const sponsor = await helper.eth.createAccountWithBalance(donor); + const caller = await helper.eth.createAccountWithBalance(donor); + const helpers = helper.ethNativeContract.contractHelpers(owner); + const flipper = await helper.eth.deployFlipper(owner); + + const originalCallerBalance = await helper.balance.getEthereum(caller); await helpers.methods.toggleAllowlist(flipper.options.address, true).send({from: owner}); await helpers.methods.toggleAllowed(flipper.options.address, caller, true).send({from: owner}); - expect(await helpers.methods.sponsoringEnabled(flipper.options.address).call()).to.be.false; await helpers.methods.setSponsoringMode(flipper.options.address, SponsoringMode.Allowlisted).send({from: owner}); await helpers.methods.setSponsoringRateLimit(flipper.options.address, 10).send({from: owner}); - expect(await helpers.methods.sponsoringEnabled(flipper.options.address).call()).to.be.true; - await transferBalanceToEth(api, alice, flipper.options.address); + await helpers.methods.setSponsor(flipper.options.address, sponsor).send(); + await helpers.methods.confirmSponsorship(flipper.options.address).send({from: sponsor}); - const originalFlipperBalance = await web3.eth.getBalance(flipper.options.address); + const originalFlipperBalance = await helper.balance.getEthereum(sponsor); expect(originalFlipperBalance).to.be.not.equal('0'); await flipper.methods.flip().send({from: caller}); expect(await flipper.methods.getValue().call()).to.be.true; - expect(await web3.eth.getBalance(caller)).to.be.equals(originalCallerBalance); + expect(await helper.balance.getEthereum(caller)).to.be.equal(originalCallerBalance); - const newFlipperBalance = await web3.eth.getBalance(flipper.options.address); - expect(newFlipperBalance).to.be.not.equals(originalFlipperBalance); + const newFlipperBalance = await helper.balance.getEthereum(sponsor); + expect(newFlipperBalance).to.be.not.equal(originalFlipperBalance); await flipper.methods.flip().send({from: caller}); - expect(await web3.eth.getBalance(flipper.options.address)).to.be.equal(newFlipperBalance); - expect(await web3.eth.getBalance(caller)).to.be.not.equals(originalCallerBalance); + // todo:playgrounds fails rarely (expected 99893341659775672580n to equal 99912598679356033129n) (again, 99893341659775672580n) + expect(await helper.balance.getEthereum(sponsor)).to.be.equal(newFlipperBalance); + expect(await helper.balance.getEthereum(caller)).to.be.not.equal(originalCallerBalance); }); // TODO: Find a way to calculate default rate limit - itWeb3('Default rate limit equals 7200', async ({api, web3, privateKeyWrapper}) => { - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const flipper = await deployFlipper(web3, owner); - const helpers = contractHelpers(web3, owner); - expect(await helpers.methods.getSponsoringRateLimit(flipper.options.address).call()).to.be.equals('7200'); + itEth('Default rate limit equal 7200', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const helpers = helper.ethNativeContract.contractHelpers(owner); + const flipper = await helper.eth.deployFlipper(owner); + + expect(await helpers.methods.sponsoringRateLimit(flipper.options.address).call()).to.be.equal('7200'); }); +}); - itWeb3('Sponsoring collection from evm address via access list', async ({api, web3, 
privateKeyWrapper}) => { - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const collectionHelpers = evmCollectionHelpers(web3, owner); - let result = await collectionHelpers.methods.createNonfungibleCollection('Sponsor collection', '1', '1').send(); - const {collectionIdAddress, collectionId} = await getCollectionAddressFromResult(api, result); - const sponsor = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const collectionEvm = evmCollection(web3, owner, collectionIdAddress); - result = await collectionEvm.methods.setCollectionSponsor(sponsor).send({from: owner}); - let collectionSub = (await getDetailedCollectionInfo(api, collectionId))!; - const ss58Format = (api.registry.getChainProperties())!.toJSON().ss58Format; - expect(collectionSub.sponsorship.isUnconfirmed).to.be.true; - expect(collectionSub.sponsorship.asUnconfirmed.toHuman()).to.be.eq(evmToAddress(sponsor, Number(ss58Format))); - await expect(collectionEvm.methods.confirmCollectionSponsorship().call()).to.be.rejectedWith('caller is not set as sponsor'); - - await collectionEvm.methods.confirmCollectionSponsorship().send({from: sponsor}); - collectionSub = (await getDetailedCollectionInfo(api, collectionId))!; - expect(collectionSub.sponsorship.isConfirmed).to.be.true; - expect(collectionSub.sponsorship.asConfirmed.toHuman()).to.be.eq(evmToAddress(sponsor, Number(ss58Format))); - - const user = createEthAccount(web3); - const nextTokenId = await collectionEvm.methods.nextTokenId().call(); - expect(nextTokenId).to.be.equal('1'); - - const oldPermissions = (await getDetailedCollectionInfo(api, collectionId))!.permissions.toHuman(); - expect(oldPermissions.mintMode).to.be.false; - expect(oldPermissions.access).to.be.equal('Normal'); - - await collectionEvm.methods.setCollectionAccess(1 /*'AllowList'*/).send({from: owner}); - await collectionEvm.methods.addToCollectionAllowList(user).send({from: owner}); - await collectionEvm.methods.setCollectionMintMode(true).send({from: owner}); - - const newPermissions = (await getDetailedCollectionInfo(api, collectionId))!.permissions.toHuman(); - expect(newPermissions.mintMode).to.be.true; - expect(newPermissions.access).to.be.equal('AllowList'); - - const ownerBalanceBefore = await ethBalanceViaSub(api, owner); - const sponsorBalanceBefore = await ethBalanceViaSub(api, sponsor); - - { - const nextTokenId = await collectionEvm.methods.nextTokenId().call(); - expect(nextTokenId).to.be.equal('1'); - const result = await collectionEvm.methods.mintWithTokenURI( - user, - nextTokenId, - 'Test URI', - ).send({from: user}); - const events = normalizeEvents(result.events); - - expect(events).to.be.deep.equal([ - { - address: collectionIdAddress, - event: 'Transfer', - args: { - from: '0x0000000000000000000000000000000000000000', - to: user, - tokenId: nextTokenId, - }, - }, - ]); +describe('Sponsoring Fee Limit', () => { + let donor: IKeyringPair; + let alice: IKeyringPair; + let testContract: CompiledContract; + + async function compileTestContract(helper: EthUniqueHelper) { + if (!testContract) { + testContract = await helper.ethContract.compile( + 'TestContract', + ` + // SPDX-License-Identifier: MIT + pragma solidity ^0.8.0; + + contract TestContract { + event Result(bool); + + function test(uint32 cycles) public { + uint256 counter = 0; + while(true) { + counter ++; + if (counter > cycles){ + break; + } + } + emit Result(true); + } + } + `, + ); + } + return testContract; + } + + async function deployTestContract(helper: EthUniqueHelper, 
owner: string) { + const compiled = await compileTestContract(helper); + return await helper.ethContract.deployByAbi(owner, compiled.abi, compiled.object); + } + + before(async () => { + await usingEthPlaygrounds(async (helper, privateKey) => { + donor = await privateKey({filename: __filename}); + [alice] = await helper.arrange.createAccounts([100n], donor); + }); + }); - const ownerBalanceAfter = await ethBalanceViaSub(api, owner); - const sponsorBalanceAfter = await ethBalanceViaSub(api, sponsor); + itEth('Default fee limit', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const helpers = helper.ethNativeContract.contractHelpers(owner); + const flipper = await helper.eth.deployFlipper(owner); - expect(await collectionEvm.methods.tokenURI(nextTokenId).call()).to.be.equal('Test URI'); - expect(ownerBalanceBefore).to.be.eq(ownerBalanceAfter); - expect(sponsorBalanceBefore > sponsorBalanceAfter).to.be.true; - } + expect(await helpers.methods.sponsoringFeeLimit(flipper.options.address).call()).to.be.equal('115792089237316195423570985008687907853269984665640564039457584007913129639935'); }); - itWeb3('Check that transaction via EVM spend money from sponsor address', async ({api, web3, privateKeyWrapper}) => { - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const collectionHelpers = evmCollectionHelpers(web3, owner); - let result = await collectionHelpers.methods.createNonfungibleCollection('Sponsor collection', '1', '1').send(); - const {collectionIdAddress, collectionId} = await getCollectionAddressFromResult(api, result); - const sponsor = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const collectionEvm = evmCollection(web3, owner, collectionIdAddress); - result = await collectionEvm.methods.setCollectionSponsor(sponsor).send(); - let collectionSub = (await getDetailedCollectionInfo(api, collectionId))!; - const ss58Format = (api.registry.getChainProperties())!.toJSON().ss58Format; - expect(collectionSub.sponsorship.isUnconfirmed).to.be.true; - expect(collectionSub.sponsorship.asUnconfirmed.toHuman()).to.be.eq(evmToAddress(sponsor, Number(ss58Format))); - await expect(collectionEvm.methods.confirmCollectionSponsorship().call()).to.be.rejectedWith('caller is not set as sponsor'); - const sponsorCollection = evmCollection(web3, sponsor, collectionIdAddress); - await sponsorCollection.methods.confirmCollectionSponsorship().send(); - collectionSub = (await getDetailedCollectionInfo(api, collectionId))!; - expect(collectionSub.sponsorship.isConfirmed).to.be.true; - expect(collectionSub.sponsorship.asConfirmed.toHuman()).to.be.eq(evmToAddress(sponsor, Number(ss58Format))); - - const user = createEthAccount(web3); - await collectionEvm.methods.addCollectionAdmin(user).send(); + itEth('Set fee limit', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const helpers = helper.ethNativeContract.contractHelpers(owner); + const flipper = await helper.eth.deployFlipper(owner); + + await helpers.methods.setSponsoringFeeLimit(flipper.options.address, 100).send(); + expect(await helpers.methods.sponsoringFeeLimit(flipper.options.address).call()).to.be.equal('100'); + }); + + itEth('Negative test - set fee limit by non-owner', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const stranger = await helper.eth.createAccountWithBalance(donor); + const helpers = helper.ethNativeContract.contractHelpers(owner); + const flipper = await 
helper.eth.deployFlipper(owner); + + await expect(helpers.methods.setSponsoringFeeLimit(flipper.options.address, 100).send({from: stranger})).to.be.rejected; + }); + + itEth('Negative test - check that eth transactions exceeding fee limit are not executed', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const sponsor = await helper.eth.createAccountWithBalance(donor); + const user = await helper.eth.createAccountWithBalance(donor); + const helpers = helper.ethNativeContract.contractHelpers(owner); + + const testContract = await deployTestContract(helper, owner); - const ownerBalanceBefore = await ethBalanceViaSub(api, owner); - const sponsorBalanceBefore = await ethBalanceViaSub(api, sponsor); + await helpers.methods.setSponsoringMode(testContract.options.address, SponsoringMode.Generous).send({from: owner}); + await helpers.methods.setSponsoringRateLimit(testContract.options.address, 0).send({from: owner}); - - const userCollectionEvm = evmCollection(web3, user, collectionIdAddress); - const nextTokenId = await userCollectionEvm.methods.nextTokenId().call(); - expect(nextTokenId).to.be.equal('1'); - result = await userCollectionEvm.methods.mintWithTokenURI( - user, - nextTokenId, - 'Test URI', - ).send(); - - const events = normalizeEvents(result.events); - const address = collectionIdToAddress(collectionId); + await helpers.methods.setSponsor(testContract.options.address, sponsor).send(); + await helpers.methods.confirmSponsorship(testContract.options.address).send({from: sponsor}); - expect(events).to.be.deep.equal([ - { - address, - event: 'Transfer', - args: { - from: '0x0000000000000000000000000000000000000000', - to: user, - tokenId: nextTokenId, - }, - }, - ]); - expect(await userCollectionEvm.methods.tokenURI(nextTokenId).call()).to.be.equal('Test URI'); - - const ownerBalanceAfter = await ethBalanceViaSub(api, owner); - expect(ownerBalanceAfter).to.be.eq(ownerBalanceBefore); - const sponsorBalanceAfter = await ethBalanceViaSub(api, sponsor); - expect(sponsorBalanceAfter < sponsorBalanceBefore).to.be.true; + const gasPrice = BigInt(await helper.getWeb3().eth.getGasPrice()); + + await helpers.methods.setSponsoringFeeLimit(testContract.options.address, 2_000_000n * gasPrice).send(); + + const originalUserBalance = await helper.balance.getEthereum(user); + await testContract.methods.test(100).send({from: user, gas: 2_000_000}); + expect(await helper.balance.getEthereum(user)).to.be.equal(originalUserBalance); + + await testContract.methods.test(100).send({from: user, gas: 2_100_000}); + expect(await helper.balance.getEthereum(user)).to.not.be.equal(originalUserBalance); + }); + + itEth('Negative test - check that evm.call transactions exceeding fee limit are not executed', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const sponsor = await helper.eth.createAccountWithBalance(donor); + const helpers = helper.ethNativeContract.contractHelpers(owner); + + const testContract = await deployTestContract(helper, owner); + + await helpers.methods.setSponsoringMode(testContract.options.address, SponsoringMode.Generous).send({from: owner}); + await helpers.methods.setSponsoringRateLimit(testContract.options.address, 0).send({from: owner}); + + await helpers.methods.setSponsor(testContract.options.address, sponsor).send(); + await helpers.methods.confirmSponsorship(testContract.options.address).send({from: sponsor}); + + const gasPrice = BigInt(await helper.getWeb3().eth.getGasPrice()); + + await 
helpers.methods.setSponsoringFeeLimit(testContract.options.address, 2_000_000n * gasPrice).send(); + + const originalAliceBalance = await helper.balance.getSubstrate(alice.address); + + await helper.eth.sendEVM( + alice, + testContract.options.address, + testContract.methods.test(100).encodeABI(), + '0', + 2_000_000, + ); + // expect((await api.query.system.account(alice.address)).data.free.toBigInt()).to.be.equal(originalAliceBalance); + expect(await helper.balance.getSubstrate(alice.address)).to.be.equal(originalAliceBalance); + + await helper.eth.sendEVM( + alice, + testContract.options.address, + testContract.methods.test(100).encodeABI(), + '0', + 2_100_000, + ); + expect(await helper.balance.getSubstrate(alice.address)).to.not.be.equal(originalAliceBalance); }); }); diff --git a/tests/src/eth/createCollection.test.ts b/tests/src/eth/createCollection.test.ts deleted file mode 100644 index 0dfae58537..0000000000 --- a/tests/src/eth/createCollection.test.ts +++ /dev/null @@ -1,230 +0,0 @@ -// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. -// This file is part of Unique Network. - -// Unique Network is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. -// -// Unique Network is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Unique Network. If not, see . - -import {evmToAddress} from '@polkadot/util-crypto'; -import {expect} from 'chai'; -import {getCreatedCollectionCount, getDetailedCollectionInfo} from '../util/helpers'; -import { - evmCollectionHelpers, - collectionIdToAddress, - createEthAccount, - createEthAccountWithBalance, - evmCollection, - itWeb3, - getCollectionAddressFromResult, -} from './util/helpers'; - -describe('Create collection from EVM', () => { - // itWeb3('Create collection', async ({api, web3, privateKeyWrapper}) => { - // const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - // const collectionHelper = evmCollectionHelpers(web3, owner); - // const collectionName = 'CollectionEVM'; - // const description = 'Some description'; - // const tokenPrefix = 'token prefix'; - - // const collectionCountBefore = await getCreatedCollectionCount(api); - // const result = await collectionHelper.methods - // .createNonfungibleCollection(collectionName, description, tokenPrefix) - // .send(); - // const collectionCountAfter = await getCreatedCollectionCount(api); - - // const {collectionId, collection} = await getCollectionAddressFromResult(api, result); - // expect(collectionCountAfter - collectionCountBefore).to.be.eq(1); - // expect(collectionId).to.be.eq(collectionCountAfter); - // expect(collection.name.map(v => String.fromCharCode(v.toNumber())).join('')).to.be.eq(collectionName); - // expect(collection.description.map(v => String.fromCharCode(v.toNumber())).join('')).to.be.eq(description); - // expect(collection.tokenPrefix.toHuman()).to.be.eq(tokenPrefix); - // }); - - // itWeb3('Check collection address exist', async ({api, web3, privateKeyWrapper}) => { - // const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - // const collectionHelpers = evmCollectionHelpers(web3, 
owner); - - // const expectedCollectionId = await getCreatedCollectionCount(api) + 1; - // const expectedCollectionAddress = collectionIdToAddress(expectedCollectionId); - // expect(await collectionHelpers.methods - // .isCollectionExist(expectedCollectionAddress) - // .call()).to.be.false; - - // await collectionHelpers.methods - // .createNonfungibleCollection('A', 'A', 'A') - // .send(); - - // expect(await collectionHelpers.methods - // .isCollectionExist(expectedCollectionAddress) - // .call()).to.be.true; - // }); - - itWeb3('Set sponsorship', async ({api, web3, privateKeyWrapper}) => { - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const collectionHelpers = evmCollectionHelpers(web3, owner); - let result = await collectionHelpers.methods.createNonfungibleCollection('Sponsor collection', '1', '1').send(); - const {collectionIdAddress, collectionId} = await getCollectionAddressFromResult(api, result); - const sponsor = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const collectionEvm = evmCollection(web3, owner, collectionIdAddress); - result = await collectionEvm.methods.setCollectionSponsor(sponsor).send(); - let collectionSub = (await getDetailedCollectionInfo(api, collectionId))!; - expect(collectionSub.sponsorship.isUnconfirmed).to.be.true; - const ss58Format = (api.registry.getChainProperties())!.toJSON().ss58Format; - expect(collectionSub.sponsorship.asUnconfirmed.toHuman()).to.be.eq(evmToAddress(sponsor, Number(ss58Format))); - await expect(collectionEvm.methods.confirmCollectionSponsorship().call()).to.be.rejectedWith('caller is not set as sponsor'); - const sponsorCollection = evmCollection(web3, sponsor, collectionIdAddress); - await sponsorCollection.methods.confirmCollectionSponsorship().send(); - collectionSub = (await getDetailedCollectionInfo(api, collectionId))!; - expect(collectionSub.sponsorship.isConfirmed).to.be.true; - expect(collectionSub.sponsorship.asConfirmed.toHuman()).to.be.eq(evmToAddress(sponsor, Number(ss58Format))); - }); - - itWeb3('Set limits', async ({api, web3, privateKeyWrapper}) => { - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const collectionHelpers = evmCollectionHelpers(web3, owner); - const result = await collectionHelpers.methods.createNonfungibleCollection('Const collection', '5', '5').send(); - const {collectionIdAddress, collectionId} = await getCollectionAddressFromResult(api, result); - const limits = { - accountTokenOwnershipLimit: 1000, - sponsoredDataSize: 1024, - sponsoredDataRateLimit: 30, - tokenLimit: 1000000, - sponsorTransferTimeout: 6, - sponsorApproveTimeout: 6, - ownerCanTransfer: false, - ownerCanDestroy: false, - transfersEnabled: false, - }; - - const collectionEvm = evmCollection(web3, owner, collectionIdAddress); - await collectionEvm.methods['setCollectionLimit(string,uint32)']('accountTokenOwnershipLimit', limits.accountTokenOwnershipLimit).send(); - await collectionEvm.methods['setCollectionLimit(string,uint32)']('sponsoredDataSize', limits.sponsoredDataSize).send(); - await collectionEvm.methods['setCollectionLimit(string,uint32)']('sponsoredDataRateLimit', limits.sponsoredDataRateLimit).send(); - await collectionEvm.methods['setCollectionLimit(string,uint32)']('tokenLimit', limits.tokenLimit).send(); - await collectionEvm.methods['setCollectionLimit(string,uint32)']('sponsorTransferTimeout', limits.sponsorTransferTimeout).send(); - await collectionEvm.methods['setCollectionLimit(string,uint32)']('sponsorApproveTimeout', 
limits.sponsorApproveTimeout).send(); - await collectionEvm.methods['setCollectionLimit(string,bool)']('ownerCanTransfer', limits.ownerCanTransfer).send(); - await collectionEvm.methods['setCollectionLimit(string,bool)']('ownerCanDestroy', limits.ownerCanDestroy).send(); - await collectionEvm.methods['setCollectionLimit(string,bool)']('transfersEnabled', limits.transfersEnabled).send(); - - const collectionSub = (await getDetailedCollectionInfo(api, collectionId))!; - expect(collectionSub.limits.accountTokenOwnershipLimit.unwrap().toNumber()).to.be.eq(limits.accountTokenOwnershipLimit); - expect(collectionSub.limits.sponsoredDataSize.unwrap().toNumber()).to.be.eq(limits.sponsoredDataSize); - expect(collectionSub.limits.sponsoredDataRateLimit.unwrap().asBlocks.toNumber()).to.be.eq(limits.sponsoredDataRateLimit); - expect(collectionSub.limits.tokenLimit.unwrap().toNumber()).to.be.eq(limits.tokenLimit); - expect(collectionSub.limits.sponsorTransferTimeout.unwrap().toNumber()).to.be.eq(limits.sponsorTransferTimeout); - expect(collectionSub.limits.sponsorApproveTimeout.unwrap().toNumber()).to.be.eq(limits.sponsorApproveTimeout); - expect(collectionSub.limits.ownerCanTransfer.toHuman()).to.be.eq(limits.ownerCanTransfer); - expect(collectionSub.limits.ownerCanDestroy.toHuman()).to.be.eq(limits.ownerCanDestroy); - expect(collectionSub.limits.transfersEnabled.toHuman()).to.be.eq(limits.transfersEnabled); - }); - - itWeb3('Collection address exist', async ({api, web3, privateKeyWrapper}) => { - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const collectionAddressForNonexistentCollection = '0x17C4E6453CC49AAAAEACA894E6D9683E00112233'; - const collectionHelpers = evmCollectionHelpers(web3, owner); - expect(await collectionHelpers.methods - .isCollectionExist(collectionAddressForNonexistentCollection).call()) - .to.be.false; - - const result = await collectionHelpers.methods.createNonfungibleCollection('Collection address exist', '7', '7').send(); - const {collectionIdAddress} = await getCollectionAddressFromResult(api, result); - expect(await collectionHelpers.methods - .isCollectionExist(collectionIdAddress).call()) - .to.be.true; - }); -}); - -describe('(!negative tests!) Create collection from EVM', () => { - itWeb3('(!negative test!) Create collection (bad lengths)', async ({api, web3, privateKeyWrapper}) => { - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const helper = evmCollectionHelpers(web3, owner); - { - const MAX_NAME_LENGHT = 64; - const collectionName = 'A'.repeat(MAX_NAME_LENGHT + 1); - const description = 'A'; - const tokenPrefix = 'A'; - - await expect(helper.methods - .createNonfungibleCollection(collectionName, description, tokenPrefix) - .call()).to.be.rejectedWith('name is too long. Max length is ' + MAX_NAME_LENGHT); - - } - { - const MAX_DESCRIPTION_LENGHT = 256; - const collectionName = 'A'; - const description = 'A'.repeat(MAX_DESCRIPTION_LENGHT + 1); - const tokenPrefix = 'A'; - await expect(helper.methods - .createNonfungibleCollection(collectionName, description, tokenPrefix) - .call()).to.be.rejectedWith('description is too long. Max length is ' + MAX_DESCRIPTION_LENGHT); - } - { - const MAX_TOKEN_PREFIX_LENGHT = 16; - const collectionName = 'A'; - const description = 'A'; - const tokenPrefix = 'A'.repeat(MAX_TOKEN_PREFIX_LENGHT + 1); - await expect(helper.methods - .createNonfungibleCollection(collectionName, description, tokenPrefix) - .call()).to.be.rejectedWith('token_prefix is too long. 
Max length is ' + MAX_TOKEN_PREFIX_LENGHT); - } - }); - - itWeb3('(!negative test!) Create collection (no funds)', async ({web3}) => { - const owner = await createEthAccount(web3); - const helper = evmCollectionHelpers(web3, owner); - const collectionName = 'A'; - const description = 'A'; - const tokenPrefix = 'A'; - - await expect(helper.methods - .createNonfungibleCollection(collectionName, description, tokenPrefix) - .call()).to.be.rejectedWith('NotSufficientFounds'); - }); - - itWeb3('(!negative test!) Check owner', async ({api, web3, privateKeyWrapper}) => { - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const notOwner = await createEthAccount(web3); - const collectionHelpers = evmCollectionHelpers(web3, owner); - const result = await collectionHelpers.methods.createNonfungibleCollection('A', 'A', 'A').send(); - const {collectionIdAddress} = await getCollectionAddressFromResult(api, result); - const contractEvmFromNotOwner = evmCollection(web3, notOwner, collectionIdAddress); - const EXPECTED_ERROR = 'NoPermission'; - { - const sponsor = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - await expect(contractEvmFromNotOwner.methods - .setCollectionSponsor(sponsor) - .call()).to.be.rejectedWith(EXPECTED_ERROR); - - const sponsorCollection = evmCollection(web3, sponsor, collectionIdAddress); - await expect(sponsorCollection.methods - .confirmCollectionSponsorship() - .call()).to.be.rejectedWith('caller is not set as sponsor'); - } - { - await expect(contractEvmFromNotOwner.methods - .setCollectionLimit('account_token_ownership_limit', '1000') - .call()).to.be.rejectedWith(EXPECTED_ERROR); - } - }); - - itWeb3('(!negative test!) Set limits', async ({api, web3, privateKeyWrapper}) => { - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const collectionHelpers = evmCollectionHelpers(web3, owner); - const result = await collectionHelpers.methods.createNonfungibleCollection('Schema collection', 'A', 'A').send(); - const {collectionIdAddress} = await getCollectionAddressFromResult(api, result); - const collectionEvm = evmCollection(web3, owner, collectionIdAddress); - await expect(collectionEvm.methods - .setCollectionLimit('badLimit', 'true') - .call()).to.be.rejectedWith('unknown boolean limit "badLimit"'); - }); -}); \ No newline at end of file diff --git a/tests/src/eth/createNFTCollection.test.ts b/tests/src/eth/createNFTCollection.test.ts new file mode 100644 index 0000000000..c3df62101f --- /dev/null +++ b/tests/src/eth/createNFTCollection.test.ts @@ -0,0 +1,262 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . 
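Note on the 'Sponsoring Fee Limit' tests earlier in this patch: the value passed to setSponsoringFeeLimit is a raw fee in wei, and the assertions suggest a call is sponsored only while its worst-case fee (gas limit times gas price) stays within that limit, which is why the 2,000,000-gas call is paid by the sponsor while the 2,100,000-gas call falls back on the user. A minimal sketch of that rule; the wouldBeSponsored helper and the example gas price are illustrative and not part of the patch:

// Illustrative sketch of the rule the fee-limit tests appear to exercise.
// feeLimit corresponds to the value given to setSponsoringFeeLimit (in wei);
// gasLimit and gasPrice are the transaction parameters.
function wouldBeSponsored(gasLimit: bigint, gasPrice: bigint, feeLimit: bigint): boolean {
  const maxFee = gasLimit * gasPrice; // worst-case fee the sponsor could be charged
  return maxFee <= feeLimit;
}

// Mirrors the assertions in the tests: with a limit of 2_000_000n * gasPrice,
// a 2_000_000-gas call is sponsored and a 2_100_000-gas call is not.
const exampleGasPrice = 1_000_000_000n; // example only; the tests read the real value from web3
const exampleLimit = 2_000_000n * exampleGasPrice;
console.log(wouldBeSponsored(2_000_000n, exampleGasPrice, exampleLimit)); // true  -> sponsor pays
console.log(wouldBeSponsored(2_100_000n, exampleGasPrice, exampleLimit)); // false -> caller pays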
+ +import {evmToAddress} from '@polkadot/util-crypto'; +import {IKeyringPair} from '@polkadot/types/types'; +import {expect, itEth, usingEthPlaygrounds} from './util'; + + +describe('Create NFT collection from EVM', () => { + let donor: IKeyringPair; + + before(async function() { + await usingEthPlaygrounds(async (_helper, privateKey) => { + donor = await privateKey({filename: __filename}); + }); + }); + + itEth('Create collection', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + + const name = 'CollectionEVM'; + const description = 'Some description'; + const prefix = 'token prefix'; + + const {collectionId} = await helper.eth.createNFTCollection(owner, name, description, prefix); + const data = (await helper.rft.getData(collectionId))!; + const collection = helper.nft.getCollectionObject(collectionId); + + expect(data.name).to.be.eq(name); + expect(data.description).to.be.eq(description); + expect(data.raw.tokenPrefix).to.be.eq(prefix); + expect(data.raw.mode).to.be.eq('NFT'); + + const options = await collection.getOptions(); + + expect(options.tokenPropertyPermissions).to.be.empty; + }); + + itEth('Create collection with properties', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + + const name = 'CollectionEVM'; + const description = 'Some description'; + const prefix = 'token prefix'; + const baseUri = 'BaseURI'; + + const {collectionId} = await helper.eth.createERC721MetadataCompatibleNFTCollection(owner, name, description, prefix, baseUri); + + const collection = helper.nft.getCollectionObject(collectionId); + const data = (await collection.getData())!; + + expect(data.name).to.be.eq(name); + expect(data.description).to.be.eq(description); + expect(data.raw.tokenPrefix).to.be.eq(prefix); + expect(data.raw.mode).to.be.eq('NFT'); + + const options = await collection.getOptions(); + expect(options.tokenPropertyPermissions).to.be.deep.equal([ + { + key: 'URI', + permission: {mutable: true, collectionAdmin: true, tokenOwner: false}, + }, + { + key: 'URISuffix', + permission: {mutable: true, collectionAdmin: true, tokenOwner: false}, + }, + ]); + }); + + // this test will occasionally fail when in async environment. 
+ itEth.skip('Check collection address exist', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + + const expectedCollectionId = +(await helper.callRpc('api.rpc.unique.collectionStats')).created + 1; + const expectedCollectionAddress = helper.ethAddress.fromCollectionId(expectedCollectionId); + const collectionHelpers = helper.ethNativeContract.collectionHelpers(owner); + + expect(await collectionHelpers.methods + .isCollectionExist(expectedCollectionAddress) + .call()).to.be.false; + + await collectionHelpers.methods + .createNFTCollection('A', 'A', 'A') + .send({value: Number(2n * helper.balance.getOneTokenNominal())}); + + expect(await collectionHelpers.methods + .isCollectionExist(expectedCollectionAddress) + .call()).to.be.true; + }); + + itEth('Set sponsorship', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const sponsor = await helper.eth.createAccountWithBalance(donor); + const ss58Format = helper.chain.getChainProperties().ss58Format; + const {collectionId, collectionAddress} = await helper.eth.createNFTCollection(owner, 'Sponsor', 'absolutely anything', 'ROC'); + + const collection = helper.ethNativeContract.collection(collectionAddress, 'nft', owner); + await collection.methods.setCollectionSponsor(sponsor).send(); + + let data = (await helper.nft.getData(collectionId))!; + expect(data.raw.sponsorship.Unconfirmed).to.be.equal(evmToAddress(sponsor, Number(ss58Format))); + + await expect(collection.methods.confirmCollectionSponsorship().call()).to.be.rejectedWith('caller is not set as sponsor'); + + const sponsorCollection = helper.ethNativeContract.collection(collectionAddress, 'nft', sponsor); + await sponsorCollection.methods.confirmCollectionSponsorship().send(); + + data = (await helper.nft.getData(collectionId))!; + expect(data.raw.sponsorship.Confirmed).to.be.equal(evmToAddress(sponsor, Number(ss58Format))); + }); + + itEth('Set limits', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const {collectionId, collectionAddress} = await helper.eth.createNFTCollection(owner, 'Limits', 'absolutely anything', 'FLO'); + const limits = { + accountTokenOwnershipLimit: 1000, + sponsoredDataSize: 1024, + sponsoredDataRateLimit: 30, + tokenLimit: 1000000, + sponsorTransferTimeout: 6, + sponsorApproveTimeout: 6, + ownerCanTransfer: false, + ownerCanDestroy: false, + transfersEnabled: false, + }; + + const collection = helper.ethNativeContract.collection(collectionAddress, 'nft', owner); + await collection.methods['setCollectionLimit(string,uint32)']('accountTokenOwnershipLimit', limits.accountTokenOwnershipLimit).send(); + await collection.methods['setCollectionLimit(string,uint32)']('sponsoredDataSize', limits.sponsoredDataSize).send(); + await collection.methods['setCollectionLimit(string,uint32)']('sponsoredDataRateLimit', limits.sponsoredDataRateLimit).send(); + await collection.methods['setCollectionLimit(string,uint32)']('tokenLimit', limits.tokenLimit).send(); + await collection.methods['setCollectionLimit(string,uint32)']('sponsorTransferTimeout', limits.sponsorTransferTimeout).send(); + await collection.methods['setCollectionLimit(string,uint32)']('sponsorApproveTimeout', limits.sponsorApproveTimeout).send(); + await collection.methods['setCollectionLimit(string,bool)']('ownerCanTransfer', limits.ownerCanTransfer).send(); + await collection.methods['setCollectionLimit(string,bool)']('ownerCanDestroy', limits.ownerCanDestroy).send(); + await 
collection.methods['setCollectionLimit(string,bool)']('transfersEnabled', limits.transfersEnabled).send(); + + const data = (await helper.nft.getData(collectionId))!; + expect(data.raw.limits.accountTokenOwnershipLimit).to.be.eq(limits.accountTokenOwnershipLimit); + expect(data.raw.limits.sponsoredDataSize).to.be.eq(limits.sponsoredDataSize); + expect(data.raw.limits.sponsoredDataRateLimit.blocks).to.be.eq(limits.sponsoredDataRateLimit); + expect(data.raw.limits.tokenLimit).to.be.eq(limits.tokenLimit); + expect(data.raw.limits.sponsorTransferTimeout).to.be.eq(limits.sponsorTransferTimeout); + expect(data.raw.limits.sponsorApproveTimeout).to.be.eq(limits.sponsorApproveTimeout); + expect(data.raw.limits.ownerCanTransfer).to.be.eq(limits.ownerCanTransfer); + expect(data.raw.limits.ownerCanDestroy).to.be.eq(limits.ownerCanDestroy); + expect(data.raw.limits.transfersEnabled).to.be.eq(limits.transfersEnabled); + }); + + itEth('Collection address exist', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const collectionAddressForNonexistentCollection = '0x17C4E6453CC49AAAAEACA894E6D9683E00112233'; + expect(await helper.ethNativeContract.collectionHelpers(collectionAddressForNonexistentCollection) + .methods.isCollectionExist(collectionAddressForNonexistentCollection).call()) + .to.be.false; + + const {collectionAddress} = await helper.eth.createNFTCollection(owner, 'Exister', 'absolutely anything', 'EVC'); + expect(await helper.ethNativeContract.collectionHelpers(collectionAddress) + .methods.isCollectionExist(collectionAddress).call()) + .to.be.true; + }); +}); + +describe('(!negative tests!) Create NFT collection from EVM', () => { + let donor: IKeyringPair; + let nominal: bigint; + + before(async function() { + await usingEthPlaygrounds(async (helper, privateKey) => { + donor = await privateKey({filename: __filename}); + nominal = helper.balance.getOneTokenNominal(); + }); + }); + + itEth('(!negative test!) Create collection (bad lengths)', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const collectionHelper = helper.ethNativeContract.collectionHelpers(owner); + { + const MAX_NAME_LENGTH = 64; + const collectionName = 'A'.repeat(MAX_NAME_LENGTH + 1); + const description = 'A'; + const tokenPrefix = 'A'; + + await expect(collectionHelper.methods + .createNFTCollection(collectionName, description, tokenPrefix) + .call({value: Number(2n * nominal)})).to.be.rejectedWith('name is too long. Max length is ' + MAX_NAME_LENGTH); + + } + { + const MAX_DESCRIPTION_LENGTH = 256; + const collectionName = 'A'; + const description = 'A'.repeat(MAX_DESCRIPTION_LENGTH + 1); + const tokenPrefix = 'A'; + await expect(collectionHelper.methods + .createNFTCollection(collectionName, description, tokenPrefix) + .call({value: Number(2n * nominal)})).to.be.rejectedWith('description is too long. Max length is ' + MAX_DESCRIPTION_LENGTH); + } + { + const MAX_TOKEN_PREFIX_LENGTH = 16; + const collectionName = 'A'; + const description = 'A'; + const tokenPrefix = 'A'.repeat(MAX_TOKEN_PREFIX_LENGTH + 1); + await expect(collectionHelper.methods + .createNFTCollection(collectionName, description, tokenPrefix) + .call({value: Number(2n * nominal)})).to.be.rejectedWith('token_prefix is too long. Max length is ' + MAX_TOKEN_PREFIX_LENGTH); + } + }); + + itEth('(!negative test!) 
Create collection (no funds)', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const collectionHelper = helper.ethNativeContract.collectionHelpers(owner); + await expect(collectionHelper.methods + .createNFTCollection('Peasantry', 'absolutely anything', 'CVE') + .call({value: Number(1n * nominal)})).to.be.rejectedWith('Sent amount not equals to collection creation price (2000000000000000000)'); + }); + + itEth('(!negative test!) Check owner', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const malfeasant = helper.eth.createAccount(); + const {collectionAddress} = await helper.eth.createNFTCollection(owner, 'Transgressed', 'absolutely anything', 'COR'); + const malfeasantCollection = helper.ethNativeContract.collection(collectionAddress, 'nft', malfeasant); + const EXPECTED_ERROR = 'NoPermission'; + { + const sponsor = await helper.eth.createAccountWithBalance(donor); + await expect(malfeasantCollection.methods + .setCollectionSponsor(sponsor) + .call()).to.be.rejectedWith(EXPECTED_ERROR); + + const sponsorCollection = helper.ethNativeContract.collection(collectionAddress, 'nft', sponsor); + await expect(sponsorCollection.methods + .confirmCollectionSponsorship() + .call()).to.be.rejectedWith('caller is not set as sponsor'); + } + { + await expect(malfeasantCollection.methods + .setCollectionLimit('account_token_ownership_limit', '1000') + .call()).to.be.rejectedWith(EXPECTED_ERROR); + } + }); + + itEth('(!negative test!) Set limits', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const {collectionAddress} = await helper.eth.createNFTCollection(owner, 'Limits', 'absolutely anything', 'OLF'); + const collectionEvm = helper.ethNativeContract.collection(collectionAddress, 'nft', owner); + await expect(collectionEvm.methods + .setCollectionLimit('badLimit', 'true') + .call()).to.be.rejectedWith('unknown boolean limit "badLimit"'); + }); +}); diff --git a/tests/src/eth/createRFTCollection.test.ts b/tests/src/eth/createRFTCollection.test.ts new file mode 100644 index 0000000000..a319d31bad --- /dev/null +++ b/tests/src/eth/createRFTCollection.test.ts @@ -0,0 +1,266 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . 
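The 'Collection address exist' tests just above, and the RFT variants that follow, rely on a deterministic mapping between a collection id and its EVM address (note the hard-coded 0x17C4E6453CC49AAAAEACA894E6D9683E00112233 used as the address of a nonexistent collection, and helper.ethAddress.fromCollectionId replacing the removed collectionIdToAddress helper). A sketch of the scheme as inferred from those constants; treat it as an assumption about the helper, not a documented API:

// Inferred scheme: a fixed 16-byte prefix followed by the big-endian u32 collection id.
// The real helpers may additionally apply EIP-55 checksum casing.
function collectionIdToEthAddress(collectionId: number): string {
  const prefix = '17c4e6453cc49aaaaeaca894e6d9683e'; // prefix visible in the tests above
  const idHex = collectionId.toString(16).padStart(8, '0');
  return '0x' + prefix + idHex;
}

// collectionIdToEthAddress(0x00112233) === '0x17c4e6453cc49aaaaeaca894e6d9683e00112233',
// matching (up to casing) the nonexistent-collection address used in the tests.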
+ +import {evmToAddress} from '@polkadot/util-crypto'; +import {IKeyringPair} from '@polkadot/types/types'; +import {Pallets, requirePalletsOrSkip} from '../util'; +import {expect, itEth, usingEthPlaygrounds} from './util'; + + +describe('Create RFT collection from EVM', () => { + let donor: IKeyringPair; + + before(async function() { + await usingEthPlaygrounds(async (helper, privateKey) => { + requirePalletsOrSkip(this, helper, [Pallets.ReFungible]); + donor = await privateKey({filename: __filename}); + }); + }); + + itEth('Create collection', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + + const name = 'CollectionEVM'; + const description = 'Some description'; + const prefix = 'token prefix'; + + const {collectionId} = await helper.eth.createRFTCollection(owner, name, description, prefix); + const data = (await helper.rft.getData(collectionId))!; + const collection = helper.rft.getCollectionObject(collectionId); + + expect(data.name).to.be.eq(name); + expect(data.description).to.be.eq(description); + expect(data.raw.tokenPrefix).to.be.eq(prefix); + expect(data.raw.mode).to.be.eq('ReFungible'); + + const options = await collection.getOptions(); + + expect(options.tokenPropertyPermissions).to.be.empty; + }); + + + + itEth('Create collection with properties', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + + const name = 'CollectionEVM'; + const description = 'Some description'; + const prefix = 'token prefix'; + const baseUri = 'BaseURI'; + + const {collectionId} = await helper.eth.createERC721MetadataCompatibleRFTCollection(owner, name, description, prefix, baseUri); + + const collection = helper.rft.getCollectionObject(collectionId); + const data = (await collection.getData())!; + + expect(data.name).to.be.eq(name); + expect(data.description).to.be.eq(description); + expect(data.raw.tokenPrefix).to.be.eq(prefix); + expect(data.raw.mode).to.be.eq('ReFungible'); + + const options = await collection.getOptions(); + expect(options.tokenPropertyPermissions).to.be.deep.equal([ + { + key: 'URI', + permission: {mutable: true, collectionAdmin: true, tokenOwner: false}, + }, + { + key: 'URISuffix', + permission: {mutable: true, collectionAdmin: true, tokenOwner: false}, + }, + ]); + }); + + // this test will occasionally fail when in async environment. 
+ itEth.skip('Check collection address exist', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + + const expectedCollectionId = +(await helper.callRpc('api.rpc.unique.collectionStats')).created + 1; + const expectedCollectionAddress = helper.ethAddress.fromCollectionId(expectedCollectionId); + const collectionHelpers = helper.ethNativeContract.collectionHelpers(owner); + + expect(await collectionHelpers.methods + .isCollectionExist(expectedCollectionAddress) + .call()).to.be.false; + + await collectionHelpers.methods + .createRFTCollection('A', 'A', 'A') + .send({value: Number(2n * helper.balance.getOneTokenNominal())}); + + expect(await collectionHelpers.methods + .isCollectionExist(expectedCollectionAddress) + .call()).to.be.true; + }); + + itEth('Set sponsorship', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const sponsor = await helper.eth.createAccountWithBalance(donor); + const ss58Format = helper.chain.getChainProperties().ss58Format; + const {collectionId, collectionAddress} = await helper.eth.createRFTCollection(owner, 'Sponsor', 'absolutely anything', 'ENVY'); + + const collection = helper.ethNativeContract.collection(collectionAddress, 'rft', owner); + await collection.methods.setCollectionSponsor(sponsor).send(); + + let data = (await helper.rft.getData(collectionId))!; + expect(data.raw.sponsorship.Unconfirmed).to.be.equal(evmToAddress(sponsor, Number(ss58Format))); + + await expect(collection.methods.confirmCollectionSponsorship().call()).to.be.rejectedWith('caller is not set as sponsor'); + + const sponsorCollection = helper.ethNativeContract.collection(collectionAddress, 'rft', sponsor); + await sponsorCollection.methods.confirmCollectionSponsorship().send(); + + data = (await helper.rft.getData(collectionId))!; + expect(data.raw.sponsorship.Confirmed).to.be.equal(evmToAddress(sponsor, Number(ss58Format))); + }); + + itEth('Set limits', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const {collectionId, collectionAddress} = await helper.eth.createRFTCollection(owner, 'Limits', 'absolutely anything', 'INSI'); + const limits = { + accountTokenOwnershipLimit: 1000, + sponsoredDataSize: 1024, + sponsoredDataRateLimit: 30, + tokenLimit: 1000000, + sponsorTransferTimeout: 6, + sponsorApproveTimeout: 6, + ownerCanTransfer: false, + ownerCanDestroy: false, + transfersEnabled: false, + }; + + const collection = helper.ethNativeContract.collection(collectionAddress, 'rft', owner); + await collection.methods['setCollectionLimit(string,uint32)']('accountTokenOwnershipLimit', limits.accountTokenOwnershipLimit).send(); + await collection.methods['setCollectionLimit(string,uint32)']('sponsoredDataSize', limits.sponsoredDataSize).send(); + await collection.methods['setCollectionLimit(string,uint32)']('sponsoredDataRateLimit', limits.sponsoredDataRateLimit).send(); + await collection.methods['setCollectionLimit(string,uint32)']('tokenLimit', limits.tokenLimit).send(); + await collection.methods['setCollectionLimit(string,uint32)']('sponsorTransferTimeout', limits.sponsorTransferTimeout).send(); + await collection.methods['setCollectionLimit(string,uint32)']('sponsorApproveTimeout', limits.sponsorApproveTimeout).send(); + await collection.methods['setCollectionLimit(string,bool)']('ownerCanTransfer', limits.ownerCanTransfer).send(); + await collection.methods['setCollectionLimit(string,bool)']('ownerCanDestroy', limits.ownerCanDestroy).send(); + await 
collection.methods['setCollectionLimit(string,bool)']('transfersEnabled', limits.transfersEnabled).send(); + + const data = (await helper.rft.getData(collectionId))!; + expect(data.raw.limits.accountTokenOwnershipLimit).to.be.eq(limits.accountTokenOwnershipLimit); + expect(data.raw.limits.sponsoredDataSize).to.be.eq(limits.sponsoredDataSize); + expect(data.raw.limits.sponsoredDataRateLimit.blocks).to.be.eq(limits.sponsoredDataRateLimit); + expect(data.raw.limits.tokenLimit).to.be.eq(limits.tokenLimit); + expect(data.raw.limits.sponsorTransferTimeout).to.be.eq(limits.sponsorTransferTimeout); + expect(data.raw.limits.sponsorApproveTimeout).to.be.eq(limits.sponsorApproveTimeout); + expect(data.raw.limits.ownerCanTransfer).to.be.eq(limits.ownerCanTransfer); + expect(data.raw.limits.ownerCanDestroy).to.be.eq(limits.ownerCanDestroy); + expect(data.raw.limits.transfersEnabled).to.be.eq(limits.transfersEnabled); + }); + + itEth('Collection address exist', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const collectionAddressForNonexistentCollection = '0x17C4E6453CC49AAAAEACA894E6D9683E00112233'; + expect(await helper.ethNativeContract.collectionHelpers(collectionAddressForNonexistentCollection) + .methods.isCollectionExist(collectionAddressForNonexistentCollection).call()) + .to.be.false; + + const {collectionAddress} = await helper.eth.createRFTCollection(owner, 'Exister', 'absolutely anything', 'WIWT'); + expect(await helper.ethNativeContract.collectionHelpers(collectionAddress) + .methods.isCollectionExist(collectionAddress).call()) + .to.be.true; + }); +}); + +describe('(!negative tests!) Create RFT collection from EVM', () => { + let donor: IKeyringPair; + let nominal: bigint; + + before(async function() { + await usingEthPlaygrounds(async (helper, privateKey) => { + requirePalletsOrSkip(this, helper, [Pallets.ReFungible]); + donor = await privateKey({filename: __filename}); + nominal = helper.balance.getOneTokenNominal(); + }); + }); + + itEth('(!negative test!) Create collection (bad lengths)', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const collectionHelper = helper.ethNativeContract.collectionHelpers(owner); + { + const MAX_NAME_LENGTH = 64; + const collectionName = 'A'.repeat(MAX_NAME_LENGTH + 1); + const description = 'A'; + const tokenPrefix = 'A'; + + await expect(collectionHelper.methods + .createRFTCollection(collectionName, description, tokenPrefix) + .call({value: Number(2n * nominal)})).to.be.rejectedWith('name is too long. Max length is ' + MAX_NAME_LENGTH); + } + { + const MAX_DESCRIPTION_LENGTH = 256; + const collectionName = 'A'; + const description = 'A'.repeat(MAX_DESCRIPTION_LENGTH + 1); + const tokenPrefix = 'A'; + await expect(collectionHelper.methods + .createRFTCollection(collectionName, description, tokenPrefix) + .call({value: Number(2n * nominal)})).to.be.rejectedWith('description is too long. Max length is ' + MAX_DESCRIPTION_LENGTH); + } + { + const MAX_TOKEN_PREFIX_LENGTH = 16; + const collectionName = 'A'; + const description = 'A'; + const tokenPrefix = 'A'.repeat(MAX_TOKEN_PREFIX_LENGTH + 1); + await expect(collectionHelper.methods + .createRFTCollection(collectionName, description, tokenPrefix) + .call({value: Number(2n * nominal)})).to.be.rejectedWith('token_prefix is too long. Max length is ' + MAX_TOKEN_PREFIX_LENGTH); + } + }); + + itEth('(!negative test!) 
Create collection (no funds)', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const collectionHelper = helper.ethNativeContract.collectionHelpers(owner); + await expect(collectionHelper.methods + .createRFTCollection('Peasantry', 'absolutely anything', 'TWIW') + .call({value: Number(1n * nominal)})).to.be.rejectedWith('Sent amount not equals to collection creation price (2000000000000000000)'); + }); + + itEth('(!negative test!) Check owner', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const peasant = helper.eth.createAccount(); + const {collectionAddress} = await helper.eth.createRFTCollection(owner, 'Transgressed', 'absolutely anything', 'YVNE'); + const peasantCollection = helper.ethNativeContract.collection(collectionAddress, 'rft', peasant); + const EXPECTED_ERROR = 'NoPermission'; + { + const sponsor = await helper.eth.createAccountWithBalance(donor); + await expect(peasantCollection.methods + .setCollectionSponsor(sponsor) + .call()).to.be.rejectedWith(EXPECTED_ERROR); + + const sponsorCollection = helper.ethNativeContract.collection(collectionAddress, 'rft', sponsor); + await expect(sponsorCollection.methods + .confirmCollectionSponsorship() + .call()).to.be.rejectedWith('caller is not set as sponsor'); + } + { + await expect(peasantCollection.methods + .setCollectionLimit('account_token_ownership_limit', '1000') + .call()).to.be.rejectedWith(EXPECTED_ERROR); + } + }); + + itEth('(!negative test!) Set limits', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const {collectionAddress} = await helper.eth.createRFTCollection(owner, 'Limits', 'absolutely anything', 'ISNI'); + const collectionEvm = helper.ethNativeContract.collection(collectionAddress, 'rft', owner); + await expect(collectionEvm.methods + .setCollectionLimit('badLimit', 'true') + .call()).to.be.rejectedWith('unknown boolean limit "badLimit"'); + }); +}); diff --git a/tests/src/eth/crossTransfer.test.ts b/tests/src/eth/crossTransfer.test.ts index d60c8f635b..e515db5752 100644 --- a/tests/src/eth/crossTransfer.test.ts +++ b/tests/src/eth/crossTransfer.test.ts @@ -14,90 +14,91 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . -import {createCollectionExpectSuccess, - createFungibleItemExpectSuccess, - transferExpectSuccess, - transferFromExpectSuccess, - setCollectionLimitsExpectSuccess, - createItemExpectSuccess} from '../util/helpers'; -import {collectionIdToAddress, - createEthAccountWithBalance, - subToEth, - GAS_ARGS, itWeb3} from './util/helpers'; -import fungibleAbi from './fungibleAbi.json'; -import nonFungibleAbi from './nonFungibleAbi.json'; +import {itEth, usingEthPlaygrounds} from './util'; +import {CrossAccountId} from '../util/playgrounds/unique'; +import {IKeyringPair} from '@polkadot/types/types'; describe('Token transfer between substrate address and EVM address. Fungible', () => { - itWeb3('The private key X create a substrate address. 
Alice sends a token to the corresponding EVM address, and X can send it to Bob in the substrate', async ({privateKeyWrapper}) => { - const collection = await createCollectionExpectSuccess({ - name: 'token name', - mode: {type: 'Fungible', decimalPoints: 0}, + let donor: IKeyringPair; + let alice: IKeyringPair; + let bob: IKeyringPair; + let charlie: IKeyringPair; + + before(async function() { + await usingEthPlaygrounds(async (helper, privateKey) => { + donor = await privateKey({filename: __filename}); + [alice, bob, charlie] = await helper.arrange.createAccounts([10n, 10n, 10n], donor); }); - const alice = privateKeyWrapper('//Alice'); - const bob = privateKeyWrapper('//Bob'); - const charlie = privateKeyWrapper('//Charlie'); - await setCollectionLimitsExpectSuccess(alice, collection, {ownerCanTransfer: true}); - await createFungibleItemExpectSuccess(alice, collection, {Value: 200n}, {Substrate: alice.address}); - await transferExpectSuccess(collection, 0, alice, {Ethereum: subToEth(charlie.address)} , 200, 'Fungible'); - await transferFromExpectSuccess(collection, 0, alice, {Ethereum: subToEth(charlie.address)}, charlie, 50, 'Fungible'); - await transferExpectSuccess(collection, 0, charlie, bob, 50, 'Fungible'); }); + + itEth('The private key X create a substrate address. Alice sends a token to the corresponding EVM address, and X can send it to Bob in the substrate', async ({helper}) => { + const bobCA = CrossAccountId.fromKeyring(bob); + const charlieCA = CrossAccountId.fromKeyring(charlie); - itWeb3('The private key X create a EVM address. Alice sends a token to the substrate address corresponding to this EVM address, and X can send it to Bob in the EVM', async ({api, web3, privateKeyWrapper}) => { - const collection = await createCollectionExpectSuccess({ - name: 'token name', - mode: {type: 'Fungible', decimalPoints: 0}, - }); - const alice = privateKeyWrapper('//Alice'); - const bob = privateKeyWrapper('//Bob'); - await setCollectionLimitsExpectSuccess(alice, collection, {ownerCanTransfer: true}); - const bobProxy = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const aliceProxy = await createEthAccountWithBalance(api, web3, privateKeyWrapper); + const collection = await helper.ft.mintCollection(alice); + await collection.setLimits(alice, {ownerCanTransfer: true}); + + await collection.mint(alice, 200n); + await collection.transfer(alice, charlieCA.toEthereum(), 200n); + await collection.transferFrom(alice, charlieCA.toEthereum(), charlieCA, 50n); + await collection.transfer(charlie, bobCA, 50n); + }); + + itEth('The private key X create a EVM address. 
Alice sends a token to the substrate address corresponding to this EVM address, and X can send it to Bob in the EVM', async ({helper}) => { + const aliceProxy = await helper.eth.createAccountWithBalance(donor); + const bobProxy = await helper.eth.createAccountWithBalance(donor); + + const collection = await helper.ft.mintCollection(alice); + await collection.setLimits(alice, {ownerCanTransfer: true}); - await createFungibleItemExpectSuccess(alice, collection, {Value: 200n}, alice.address); - await transferExpectSuccess(collection, 0, alice, {Ethereum: aliceProxy} , 200, 'Fungible'); - const address = collectionIdToAddress(collection); - const contract = new web3.eth.Contract(fungibleAbi as any, address, {from: aliceProxy, ...GAS_ARGS}); + const address = helper.ethAddress.fromCollectionId(collection.collectionId); + const contract = helper.ethNativeContract.collection(address, 'ft', aliceProxy); + await collection.mint(alice, 200n, {Ethereum: aliceProxy}); await contract.methods.transfer(bobProxy, 50).send({from: aliceProxy}); - await transferFromExpectSuccess(collection, 0, alice, {Ethereum: bobProxy}, bob, 50, 'Fungible'); - await transferExpectSuccess(collection, 0, bob, alice, 50, 'Fungible'); + await collection.transferFrom(alice, {Ethereum: bobProxy}, CrossAccountId.fromKeyring(bob), 50n); + await collection.transfer(bob, CrossAccountId.fromKeyring(alice), 50n); }); }); describe('Token transfer between substrate address and EVM address. NFT', () => { - itWeb3('The private key X create a substrate address. Alice sends a token to the corresponding EVM address, and X can send it to Bob in the substrate', async ({privateKeyWrapper}) => { - const collection = await createCollectionExpectSuccess({ - name: 'token name', - mode: {type: 'NFT'}, + let donor: IKeyringPair; + let alice: IKeyringPair; + let bob: IKeyringPair; + let charlie: IKeyringPair; + + before(async function() { + await usingEthPlaygrounds(async (helper, privateKey) => { + donor = await privateKey({filename: __filename}); + [alice, bob, charlie] = await helper.arrange.createAccounts([10n, 10n, 10n], donor); }); - const alice = privateKeyWrapper('//Alice'); - const bob = privateKeyWrapper('//Bob'); - const charlie = privateKeyWrapper('//Charlie'); - await setCollectionLimitsExpectSuccess(alice, collection, {ownerCanTransfer: true}); - const tokenId = await createItemExpectSuccess(alice, collection, 'NFT', {Substrate: alice.address}); - await transferExpectSuccess(collection, tokenId, alice, {Ethereum: subToEth(charlie.address)}, 1, 'NFT'); - await transferFromExpectSuccess(collection, tokenId, alice, {Ethereum: subToEth(charlie.address)}, charlie, 1, 'NFT'); - await transferExpectSuccess(collection, tokenId, charlie, bob, 1, 'NFT'); + }); + + itEth('The private key X create a substrate address. Alice sends a token to the corresponding EVM address, and X can send it to Bob in the substrate', async ({helper}) => { + const charlieEth = CrossAccountId.fromKeyring(charlie, 'Ethereum'); + + const collection = await helper.nft.mintCollection(alice); + await collection.setLimits(alice, {ownerCanTransfer: true}); + const token = await collection.mintToken(alice); + await token.transfer(alice, charlieEth); + await token.transferFrom(alice, charlieEth, CrossAccountId.fromKeyring(charlie)); + await token.transfer(charlie, CrossAccountId.fromKeyring(bob)); }); - itWeb3('The private key X create a EVM address. 
Alice sends a token to the substrate address corresponding to this EVM address, and X can send it to Bob in the EVM', async ({api, web3, privateKeyWrapper}) => { - const collection = await createCollectionExpectSuccess({ - name: 'token name', - mode: {type: 'NFT'}, - }); - const alice = privateKeyWrapper('//Alice'); - const bob = privateKeyWrapper('//Bob'); - const charlie = privateKeyWrapper('//Charlie'); - await setCollectionLimitsExpectSuccess(alice, collection, {ownerCanTransfer: true}); - const bobProxy = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const aliceProxy = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const tokenId = await createItemExpectSuccess(alice, collection, 'NFT', {Substrate: alice.address}); - await transferExpectSuccess(collection, tokenId, alice, {Ethereum: aliceProxy} , 1, 'NFT'); - const address = collectionIdToAddress(collection); - const contract = new web3.eth.Contract(nonFungibleAbi as any, address, {from: aliceProxy, ...GAS_ARGS}); + itEth('The private key X create a EVM address. Alice sends a token to the substrate address corresponding to this EVM address, and X can send it to Bob in the EVM', async ({helper}) => { + const aliceProxy = await helper.eth.createAccountWithBalance(donor); + const bobProxy = await helper.eth.createAccountWithBalance(donor); + + const collection = await helper.nft.mintCollection(alice); + await collection.setLimits(alice, {ownerCanTransfer: true}); + + const address = helper.ethAddress.fromCollectionId(collection.collectionId); + const contract = helper.ethNativeContract.collection(address, 'nft', aliceProxy); + + const token = await collection.mintToken(alice); + await token.transfer(alice, {Ethereum: aliceProxy}); await contract.methods.transfer(bobProxy, 1).send({from: aliceProxy}); - await transferFromExpectSuccess(collection, tokenId, alice, {Ethereum: bobProxy}, bob, 1, 'NFT'); - await transferExpectSuccess(collection, tokenId, bob, charlie, 1, 'NFT'); + await token.transferFrom(alice, {Ethereum: bobProxy}, {Substrate: bob.address}); + await token.transfer(bob, {Substrate: charlie.address}); }); }); diff --git a/tests/src/eth/evmCoder.test.ts b/tests/src/eth/evmCoder.test.ts new file mode 100644 index 0000000000..24cdf6baf7 --- /dev/null +++ b/tests/src/eth/evmCoder.test.ts @@ -0,0 +1,90 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . 
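The cross-transfer tests above move tokens between a Substrate account and its Ethereum mirror and back, using CrossAccountId.fromKeyring(pair, 'Ethereum') on one side and evmToAddress / helper.address.ethToSubstrate on the other. A small sketch of how the sponsoring tests use the second mapping to observe an Ethereum account's funds from the Substrate side; the structural type is a stand-in for the playgrounds helper, not its real declaration:

// Sketch using only helper names that appear in this patch.
interface BalanceHelperLike {
  address: {ethToSubstrate(eth: string): Promise<string> | string};
  balance: {getSubstrate(sub: string): Promise<bigint>};
}

async function ethBalanceViaSubstrate(helper: BalanceHelperLike, ethAddress: string): Promise<bigint> {
  const substrateMirror = await helper.address.ethToSubstrate(ethAddress); // EVM address -> Substrate mirror
  return helper.balance.getSubstrate(substrateMirror);                     // balance as seen from Substrate
}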
+ +import {IKeyringPair} from '@polkadot/types/types'; +import {itEth, expect, usingEthPlaygrounds} from './util'; + +const getContractSource = (collectionAddress: string, contractAddress: string): string => { + return ` + // SPDX-License-Identifier: MIT + pragma solidity ^0.8.0; + interface ITest { + function ztestzzzzzzz() external returns (uint256 n); + } + contract Test { + event Result(bool, uint256); + function test1() public { + try + ITest(${collectionAddress}).ztestzzzzzzz() + returns (uint256 n) { + // enters + emit Result(true, n); // => [true, BigNumber { value: "43648854190028290368124427828690944273759144372138548774646036134290060795932" }] + } catch { + emit Result(false, 0); + } + } + function test2() public { + try + ITest(${contractAddress}).ztestzzzzzzz() + returns (uint256 n) { + emit Result(true, n); + } catch { + // enters + emit Result(false, 0); // => [ false, BigNumber { value: "0" } ] + } + } + function test3() public { + ITest(${collectionAddress}).ztestzzzzzzz(); + } + } + `; +}; + + +describe('Evm Coder tests', () => { + let donor: IKeyringPair; + + before(async function() { + await usingEthPlaygrounds(async (_helper, privateKey) => { + donor = await privateKey({filename: __filename}); + }); + }); + + itEth('Call non-existing function', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const collection = await helper.eth.createNFTCollection(owner, 'EVMCODER', '', 'TEST'); + const contract = await helper.ethContract.deployByCode(owner, 'Test', getContractSource(collection.collectionAddress, '0x1bfed5D614b886b9Ab2eA4CBAc22A96B7EC29c9c')); + const testContract = await helper.ethContract.deployByCode(owner, 'Test', getContractSource(collection.collectionAddress, contract.options.address)); + { + const result = await testContract.methods.test1().send(); + expect(result.events.Result.returnValues).to.deep.equal({ + '0': false, + '1': '0', + }); + } + { + const result = await testContract.methods.test2().send(); + expect(result.events.Result.returnValues).to.deep.equal({ + '0': false, + '1': '0', + }); + } + { + await expect(testContract.methods.test3().call()) + .to.be.rejectedWith(/unrecognized selector: 0xd9f02b36$/g); + } + }); +}); diff --git a/tests/src/eth/fractionalizer/Fractionalizer.sol b/tests/src/eth/fractionalizer/Fractionalizer.sol new file mode 100644 index 0000000000..bc40e4219a --- /dev/null +++ b/tests/src/eth/fractionalizer/Fractionalizer.sol @@ -0,0 +1,168 @@ +// SPDX-License-Identifier: Apache License +pragma solidity >=0.8.0; +import {CollectionHelpers} from "../api/CollectionHelpers.sol"; +import {ContractHelpers} from "../api/ContractHelpers.sol"; +import {UniqueRefungibleToken} from "../api/UniqueRefungibleToken.sol"; +import {UniqueRefungible} from "../api/UniqueRefungible.sol"; +import {UniqueNFT} from "../api/UniqueNFT.sol"; + +/// @dev Fractionalization contract. It stores mappings between NFT and RFT tokens, +/// stores allowlist of NFT tokens available for fractionalization, has methods +/// for fractionalization and defractionalization of NFT tokens. 
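+///
+/// A typical flow, as implemented by the functions below:
+///  1. the contract owner calls `createAndSetRFTCollection` (or `setRFTCollection`
+///     with an existing RFT collection that lists this contract as owner or admin);
+///  2. the owner allows an NFT collection via `setNftCollectionIsAllowed`;
+///  3. an NFT holder approves this contract for the token and calls `nft2rft`
+///     to receive the requested number of RFT pieces;
+///  4. a holder of all pieces approves the RFT token and calls `rft2nft`
+///     to get the original NFT back.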
+contract Fractionalizer { + struct Token { + address _collection; + uint256 _tokenId; + } + address rftCollection; + mapping(address => bool) nftCollectionAllowList; + mapping(address => mapping(uint256 => uint256)) public nft2rftMapping; + mapping(address => Token) public rft2nftMapping; + //use constant to reduce gas cost + bytes32 constant refungibleCollectionType = keccak256(bytes("ReFungible")); + + receive() external payable onlyOwner {} + + /// @dev Method modifier to only allow contract owner to call it. + modifier onlyOwner() { + address contracthelpersAddress = 0x842899ECF380553E8a4de75bF534cdf6fBF64049; + ContractHelpers contractHelpers = ContractHelpers(contracthelpersAddress); + address contractOwner = contractHelpers.contractOwner(address(this)); + require(msg.sender == contractOwner, "Only owner can"); + _; + } + + /// @dev This emits when RFT collection setting is changed. + event RFTCollectionSet(address _collection); + + /// @dev This emits when NFT collection is allowed or disallowed. + event AllowListSet(address _collection, bool _status); + + /// @dev This emits when NFT token is fractionalized by contract. + event Fractionalized(address _collection, uint256 _tokenId, address _rftToken, uint128 _amount); + + /// @dev This emits when NFT token is defractionalized by contract. + event Defractionalized(address _rftToken, address _nftCollection, uint256 _nftTokenId); + + /// Set RFT collection that contract will work with. RFT tokens for fractionalized NFT tokens + /// would be created in this collection. + /// @dev Throws if RFT collection is already configured for this contract. + /// Throws if collection of wrong type (NFT, Fungible) is provided instead + /// of RFT collection. + /// Throws if `msg.sender` is not owner or admin of provided RFT collection. + /// Can only be called by contract owner. + /// @param _collection address of RFT collection. + function setRFTCollection(address _collection) external onlyOwner { + require(rftCollection == address(0), "RFT collection is already set"); + UniqueRefungible refungibleContract = UniqueRefungible(_collection); + string memory collectionType = refungibleContract.uniqueCollectionType(); + + // compare hashed to reduce gas cost + require( + keccak256(bytes(collectionType)) == refungibleCollectionType, + "Wrong collection type. Collection is not refungible." + ); + require( + refungibleContract.isOwnerOrAdmin(address(this)), + "Fractionalizer contract should be an admin of the collection" + ); + rftCollection = _collection; + emit RFTCollectionSet(rftCollection); + } + + /// Creates and sets RFT collection that contract will work with. RFT tokens for fractionalized NFT tokens + /// would be created in this collection. + /// @dev Throws if RFT collection is already configured for this contract. + /// Can only be called by contract owner. + /// @param _name name for created RFT collection. + /// @param _description description for created RFT collection. + /// @param _tokenPrefix token prefix for created RFT collection. 
+	function createAndSetRFTCollection(
+		string calldata _name,
+		string calldata _description,
+		string calldata _tokenPrefix
+	) external payable onlyOwner {
+		require(rftCollection == address(0), "RFT collection is already set");
+		address collectionHelpers = 0x6C4E9fE1AE37a41E93CEE429e8E1881aBdcbb54F;
+		rftCollection = CollectionHelpers(collectionHelpers).createRFTCollection{value: msg.value}(_name, _description, _tokenPrefix);
+		emit RFTCollectionSet(rftCollection);
+	}
+
+	/// Allow or disallow NFT collection tokens from being fractionalized by this contract.
+	/// @dev Can only be called by contract owner.
+	/// @param collection NFT token address.
+	/// @param status `true` to allow and `false` to disallow NFT token.
+	function setNftCollectionIsAllowed(address collection, bool status) external onlyOwner {
+		nftCollectionAllowList[collection] = status;
+		emit AllowListSet(collection, status);
+	}
+
+	/// Fractionalize NFT token.
+	/// @dev Takes NFT token from `msg.sender` and transfers RFT token to `msg.sender`
+	/// instead. Creates new RFT token if provided NFT token never was fractionalized
+	/// by this contract or existing RFT token if it was.
+	/// Throws if RFT collection isn't configured for this contract.
+	/// Throws if fractionalization of provided NFT token is not allowed.
+	/// Throws if `msg.sender` is not owner of provided NFT token.
+	/// @param _collection NFT collection address
+	/// @param _token id of NFT token to be fractionalized
+	/// @param _pieces number of pieces new RFT token would have
+	function nft2rft(
+		address _collection,
+		uint256 _token,
+		uint128 _pieces
+	) external {
+		require(rftCollection != address(0), "RFT collection is not set");
+		UniqueRefungible rftCollectionContract = UniqueRefungible(rftCollection);
+		require(
+			nftCollectionAllowList[_collection] == true,
+			"Fractionalization of this collection is not allowed by admin"
+		);
+		require(UniqueNFT(_collection).ownerOf(_token) == msg.sender, "Only token owner could fractionalize it");
+		UniqueNFT(_collection).transferFrom(msg.sender, address(this), _token);
+		uint256 rftTokenId;
+		address rftTokenAddress;
+		UniqueRefungibleToken rftTokenContract;
+		if (nft2rftMapping[_collection][_token] == 0) {
+			rftTokenId = rftCollectionContract.mint(address(this));
+			rftTokenAddress = rftCollectionContract.tokenContractAddress(rftTokenId);
+			nft2rftMapping[_collection][_token] = rftTokenId;
+			rft2nftMapping[rftTokenAddress] = Token(_collection, _token);
+
+			rftTokenContract = UniqueRefungibleToken(rftTokenAddress);
+		} else {
+			rftTokenId = nft2rftMapping[_collection][_token];
+			rftTokenAddress = rftCollectionContract.tokenContractAddress(rftTokenId);
+			rftTokenContract = UniqueRefungibleToken(rftTokenAddress);
+		}
+		rftTokenContract.repartition(_pieces);
+		rftTokenContract.transfer(msg.sender, _pieces);
+		emit Fractionalized(_collection, _token, rftTokenAddress, _pieces);
+	}
+
+	/// Defractionalize NFT token.
+	/// @dev Takes RFT token from `msg.sender` and transfers corresponding NFT token
+	/// to `msg.sender` instead.
+	/// Throws if RFT collection isn't configured for this contract.
+	/// Throws if provided RFT token is not from configured RFT collection.
+	/// Throws if RFT token was not created by this contract.
+	/// Throws if `msg.sender` isn't owner of all RFT token pieces.
+ /// @param _collection RFT collection address + /// @param _token id of RFT token + function rft2nft(address _collection, uint256 _token) external { + require(rftCollection != address(0), "RFT collection is not set"); + require(rftCollection == _collection, "Wrong RFT collection"); + UniqueRefungible rftCollectionContract = UniqueRefungible(rftCollection); + address rftTokenAddress = rftCollectionContract.tokenContractAddress(_token); + Token memory nftToken = rft2nftMapping[rftTokenAddress]; + require(nftToken._collection != address(0), "No corresponding NFT token found"); + UniqueRefungibleToken rftTokenContract = UniqueRefungibleToken(rftTokenAddress); + require( + rftTokenContract.balanceOf(msg.sender) == rftTokenContract.totalSupply(), + "Not all pieces are owned by the caller" + ); + rftCollectionContract.transferFrom(msg.sender, address(this), _token); + UniqueNFT(nftToken._collection).transferFrom(address(this), msg.sender, nftToken._tokenId); + emit Defractionalized(rftTokenAddress, nftToken._collection, nftToken._tokenId); + } +} diff --git a/tests/src/eth/fractionalizer/fractionalizer.test.ts b/tests/src/eth/fractionalizer/fractionalizer.test.ts new file mode 100644 index 0000000000..13186564cf --- /dev/null +++ b/tests/src/eth/fractionalizer/fractionalizer.test.ts @@ -0,0 +1,446 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . 
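+
+// The helpers below compile `Fractionalizer.sol` once per run (`compileContract`),
+// deploy it with `deployByAbi` (`deployContract`), fund the deployed contract and
+// create/set its RFT collection (`initContract`), and mint and fractionalize an NFT
+// (`mintRFTToken`); the two suites then cover the emitted events and the expected
+// revert messages.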
+ + +import {readFile} from 'fs/promises'; + +import {IKeyringPair} from '@polkadot/types/types'; +import {evmToAddress} from '@polkadot/util-crypto'; + +import {Contract} from 'web3-eth-contract'; + +import {usingEthPlaygrounds, expect, itEth, EthUniqueHelper} from '../util'; +import {CompiledContract} from '../util/playgrounds/types'; +import {requirePalletsOrSkip, Pallets} from '../../util'; + + +let compiledFractionalizer: CompiledContract; + +const compileContract = async (helper: EthUniqueHelper): Promise => { + if(!compiledFractionalizer) { + compiledFractionalizer = await helper.ethContract.compile('Fractionalizer', (await readFile(`${__dirname}/Fractionalizer.sol`)).toString(), [ + {solPath: 'api/CollectionHelpers.sol', fsPath: `${__dirname}/../api/CollectionHelpers.sol`}, + {solPath: 'api/ContractHelpers.sol', fsPath: `${__dirname}/../api/ContractHelpers.sol`}, + {solPath: 'api/UniqueRefungibleToken.sol', fsPath: `${__dirname}/../api/UniqueRefungibleToken.sol`}, + {solPath: 'api/UniqueRefungible.sol', fsPath: `${__dirname}/../api/UniqueRefungible.sol`}, + {solPath: 'api/UniqueNFT.sol', fsPath: `${__dirname}/../api/UniqueNFT.sol`}, + ]); + } + return compiledFractionalizer; +}; + + +const deployContract = async (helper: EthUniqueHelper, owner: string): Promise => { + const compiled = await compileContract(helper); + return await helper.ethContract.deployByAbi(owner, compiled.abi, compiled.object); +}; + + +const initContract = async (helper: EthUniqueHelper, owner: string): Promise<{contract: Contract, rftCollectionAddress: string}> => { + const fractionalizer = await deployContract(helper, owner); + const amount = 10n * helper.balance.getOneTokenNominal(); + const web3 = helper.getWeb3(); + await web3.eth.sendTransaction({from: owner, to: fractionalizer.options.address, value: `${amount}`, gas: helper.eth.DEFAULT_GAS}); + const result = await fractionalizer.methods.createAndSetRFTCollection('A', 'B', 'C').send({value: Number(2n * helper.balance.getOneTokenNominal())}); + const rftCollectionAddress = result.events.RFTCollectionSet.returnValues._collection; + return {contract: fractionalizer, rftCollectionAddress}; +}; + +const mintRFTToken = async (helper: EthUniqueHelper, owner: string, fractionalizer: Contract, amount: bigint): Promise<{ + nftCollectionAddress: string, nftTokenId: number, rftTokenAddress: string +}> => { + const nftCollection = await helper.eth.createNFTCollection(owner, 'nft', 'NFT collection', 'NFT'); + const nftContract = helper.ethNativeContract.collection(nftCollection.collectionAddress, 'nft', owner); + const mintResult = await nftContract.methods.mint(owner).send({from: owner}); + const nftTokenId = mintResult.events.Transfer.returnValues.tokenId; + + await fractionalizer.methods.setNftCollectionIsAllowed(nftCollection.collectionAddress, true).send({from: owner}); + await nftContract.methods.approve(fractionalizer.options.address, nftTokenId).send({from: owner}); + const result = await fractionalizer.methods.nft2rft(nftCollection.collectionAddress, nftTokenId, amount).send({from: owner}); + const {_collection, _tokenId, _rftToken} = result.events.Fractionalized.returnValues; + return { + nftCollectionAddress: _collection, + nftTokenId: _tokenId, + rftTokenAddress: _rftToken, + }; +}; + + +describe('Fractionalizer contract usage', () => { + let donor: IKeyringPair; + + before(async function() { + await usingEthPlaygrounds(async (helper: EthUniqueHelper, privateKey) => { + requirePalletsOrSkip(this, helper, [Pallets.ReFungible]); + donor = await 
privateKey({filename: __filename}); + }); + }); + + itEth('Set RFT collection', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor, 10n); + const fractionalizer = await deployContract(helper, owner); + const rftCollection = await helper.eth.createRFTCollection(owner, 'rft', 'RFT collection', 'RFT'); + const rftContract = helper.ethNativeContract.collection(rftCollection.collectionAddress, 'rft', owner); + + await rftContract.methods.addCollectionAdmin(fractionalizer.options.address).send({from: owner}); + const result = await fractionalizer.methods.setRFTCollection(rftCollection.collectionAddress).send({from: owner}); + expect(result.events).to.be.like({ + RFTCollectionSet: { + returnValues: { + _collection: rftCollection.collectionAddress, + }, + }, + }); + }); + + itEth('Mint RFT collection', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor, 10n); + const fractionalizer = await deployContract(helper, owner); + await helper.balance.transferToSubstrate(donor, evmToAddress(fractionalizer.options.address), 10n * helper.balance.getOneTokenNominal()); + + const result = await fractionalizer.methods.createAndSetRFTCollection('A', 'B', 'C').send({from: owner, value: Number(2n * helper.balance.getOneTokenNominal())}); + expect(result.events).to.be.like({ + RFTCollectionSet: {}, + }); + expect(result.events.RFTCollectionSet.returnValues._collection).to.be.ok; + }); + + itEth('Set Allowlist', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor, 20n); + const {contract: fractionalizer} = await initContract(helper, owner); + const nftCollection = await helper.eth.createNFTCollection(owner, 'nft', 'NFT collection', 'NFT'); + + const result1 = await fractionalizer.methods.setNftCollectionIsAllowed(nftCollection.collectionAddress, true).send({from: owner}); + expect(result1.events).to.be.like({ + AllowListSet: { + returnValues: { + _collection: nftCollection.collectionAddress, + _status: true, + }, + }, + }); + const result2 = await fractionalizer.methods.setNftCollectionIsAllowed(nftCollection.collectionAddress, false).send({from: owner}); + expect(result2.events).to.be.like({ + AllowListSet: { + returnValues: { + _collection: nftCollection.collectionAddress, + _status: false, + }, + }, + }); + }); + + itEth('NFT to RFT', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor, 20n); + + const nftCollection = await helper.eth.createNFTCollection(owner, 'nft', 'NFT collection', 'NFT'); + const nftContract = helper.ethNativeContract.collection(nftCollection.collectionAddress, 'nft', owner); + const mintResult = await nftContract.methods.mint(owner).send({from: owner}); + const nftTokenId = mintResult.events.Transfer.returnValues.tokenId; + + const {contract: fractionalizer} = await initContract(helper, owner); + + await fractionalizer.methods.setNftCollectionIsAllowed(nftCollection.collectionAddress, true).send({from: owner}); + await nftContract.methods.approve(fractionalizer.options.address, nftTokenId).send({from: owner}); + const result = await fractionalizer.methods.nft2rft(nftCollection.collectionAddress, nftTokenId, 100).send({from: owner}); + expect(result.events).to.be.like({ + Fractionalized: { + returnValues: { + _collection: nftCollection.collectionAddress, + _tokenId: nftTokenId, + _amount: '100', + }, + }, + }); + const rftTokenAddress = result.events.Fractionalized.returnValues._rftToken; + + const rftTokenContract = 
helper.ethNativeContract.rftToken(rftTokenAddress); + expect(await rftTokenContract.methods.balanceOf(owner).call()).to.equal('100'); + }); + + itEth('RFT to NFT', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor, 20n); + + const {contract: fractionalizer, rftCollectionAddress} = await initContract(helper, owner); + const {rftTokenAddress, nftCollectionAddress, nftTokenId} = await mintRFTToken(helper, owner, fractionalizer, 100n); + + const {collectionId, tokenId} = helper.ethAddress.extractTokenId(rftTokenAddress); + const refungibleAddress = helper.ethAddress.fromCollectionId(collectionId); + expect(rftCollectionAddress).to.be.equal(refungibleAddress); + const refungibleTokenContract = helper.ethNativeContract.rftToken(rftTokenAddress, owner); + await refungibleTokenContract.methods.approve(fractionalizer.options.address, 100).send({from: owner}); + const result = await fractionalizer.methods.rft2nft(refungibleAddress, tokenId).send({from: owner}); + expect(result.events).to.be.like({ + Defractionalized: { + returnValues: { + _rftToken: rftTokenAddress, + _nftCollection: nftCollectionAddress, + _nftTokenId: nftTokenId, + }, + }, + }); + }); + + itEth('Test fractionalizer NFT <-> RFT mapping ', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor, 20n); + + const {contract: fractionalizer, rftCollectionAddress} = await initContract(helper, owner); + const {rftTokenAddress, nftCollectionAddress, nftTokenId} = await mintRFTToken(helper, owner, fractionalizer, 100n); + + const {collectionId, tokenId} = helper.ethAddress.extractTokenId(rftTokenAddress); + const refungibleAddress = helper.ethAddress.fromCollectionId(collectionId); + expect(rftCollectionAddress).to.be.equal(refungibleAddress); + const refungibleTokenContract = helper.ethNativeContract.rftToken(rftTokenAddress, owner); + await refungibleTokenContract.methods.approve(fractionalizer.options.address, 100).send({from: owner}); + + const rft2nft = await fractionalizer.methods.rft2nftMapping(rftTokenAddress).call(); + expect(rft2nft).to.be.like({ + _collection: nftCollectionAddress, + _tokenId: nftTokenId, + }); + + const nft2rft = await fractionalizer.methods.nft2rftMapping(nftCollectionAddress, nftTokenId).call(); + expect(nft2rft).to.be.eq(tokenId.toString()); + }); +}); + + + +describe('Negative Integration Tests for fractionalizer', () => { + let donor: IKeyringPair; + + before(async function() { + await usingEthPlaygrounds(async (helper: EthUniqueHelper, privateKey) => { + requirePalletsOrSkip(this, helper, [Pallets.ReFungible]); + donor = await privateKey({filename: __filename}); + }); + }); + + itEth('call setRFTCollection twice', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor, 20n); + const rftCollection = await helper.eth.createRFTCollection(owner, 'rft', 'RFT collection', 'RFT'); + const refungibleContract = helper.ethNativeContract.collection(rftCollection.collectionAddress, 'rft', owner); + + const fractionalizer = await deployContract(helper, owner); + await refungibleContract.methods.addCollectionAdmin(fractionalizer.options.address).send({from: owner}); + await fractionalizer.methods.setRFTCollection(rftCollection.collectionAddress).send({from: owner}); + + await expect(fractionalizer.methods.setRFTCollection(rftCollection.collectionAddress).call()) + .to.be.rejectedWith(/RFT collection is already set$/g); + }); + + itEth('call setRFTCollection with NFT collection', async ({helper}) => { + const owner = await 
helper.eth.createAccountWithBalance(donor, 20n); + const nftCollection = await helper.eth.createNFTCollection(owner, 'nft', 'NFT collection', 'NFT'); + const nftContract = helper.ethNativeContract.collection(nftCollection.collectionAddress, 'nft', owner); + + const fractionalizer = await deployContract(helper, owner); + await nftContract.methods.addCollectionAdmin(fractionalizer.options.address).send({from: owner}); + + await expect(fractionalizer.methods.setRFTCollection(nftCollection.collectionAddress).call()) + .to.be.rejectedWith(/Wrong collection type. Collection is not refungible.$/g); + }); + + itEth('call setRFTCollection while not collection admin', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor, 20n); + const fractionalizer = await deployContract(helper, owner); + const rftCollection = await helper.eth.createRFTCollection(owner, 'rft', 'RFT collection', 'RFT'); + + await expect(fractionalizer.methods.setRFTCollection(rftCollection.collectionAddress).call()) + .to.be.rejectedWith(/Fractionalizer contract should be an admin of the collection$/g); + }); + + itEth('call setRFTCollection after createAndSetRFTCollection', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor, 20n); + const fractionalizer = await deployContract(helper, owner); + await helper.balance.transferToSubstrate(donor, evmToAddress(fractionalizer.options.address), 10n * helper.balance.getOneTokenNominal()); + + const result = await fractionalizer.methods.createAndSetRFTCollection('A', 'B', 'C').send({from: owner, value: Number(2n * helper.balance.getOneTokenNominal())}); + const collectionIdAddress = result.events.RFTCollectionSet.returnValues._collection; + + await expect(fractionalizer.methods.setRFTCollection(collectionIdAddress).call()) + .to.be.rejectedWith(/RFT collection is already set$/g); + }); + + itEth('call nft2rft without setting RFT collection for contract', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor, 20n); + + const nftCollection = await helper.eth.createNFTCollection(owner, 'nft', 'NFT collection', 'NFT'); + const nftContract = helper.ethNativeContract.collection(nftCollection.collectionAddress, 'nft', owner); + const mintResult = await nftContract.methods.mint(owner).send({from: owner}); + const nftTokenId = mintResult.events.Transfer.returnValues.tokenId; + + const fractionalizer = await deployContract(helper, owner); + + await expect(fractionalizer.methods.nft2rft(nftCollection.collectionAddress, nftTokenId, 100).call()) + .to.be.rejectedWith(/RFT collection is not set$/g); + }); + + itEth('call nft2rft while not owner of NFT token', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor, 20n); + const nftOwner = await helper.eth.createAccountWithBalance(donor, 10n); + + const nftCollection = await helper.eth.createNFTCollection(owner, 'nft', 'NFT collection', 'NFT'); + const nftContract = helper.ethNativeContract.collection(nftCollection.collectionAddress, 'nft', owner); + const mintResult = await nftContract.methods.mint(owner).send({from: owner}); + const nftTokenId = mintResult.events.Transfer.returnValues.tokenId; + await nftContract.methods.transfer(nftOwner, 1).send({from: owner}); + + + const {contract: fractionalizer} = await initContract(helper, owner); + await fractionalizer.methods.setNftCollectionIsAllowed(nftCollection.collectionAddress, true).send({from: owner}); + + await 
expect(fractionalizer.methods.nft2rft(nftCollection.collectionAddress, nftTokenId, 100).call({from: owner})) + .to.be.rejectedWith(/Only token owner could fractionalize it$/g); + }); + + itEth('call nft2rft while not in list of allowed accounts', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor, 20n); + + const nftCollection = await helper.eth.createNFTCollection(owner, 'nft', 'NFT collection', 'NFT'); + const nftContract = helper.ethNativeContract.collection(nftCollection.collectionAddress, 'nft', owner); + const mintResult = await nftContract.methods.mint(owner).send({from: owner}); + const nftTokenId = mintResult.events.Transfer.returnValues.tokenId; + + const {contract: fractionalizer} = await initContract(helper, owner); + + await nftContract.methods.approve(fractionalizer.options.address, nftTokenId).send({from: owner}); + await expect(fractionalizer.methods.nft2rft(nftCollection.collectionAddress, nftTokenId, 100).call()) + .to.be.rejectedWith(/Fractionalization of this collection is not allowed by admin$/g); + }); + + itEth('call nft2rft while fractionalizer doesnt have approval for nft token', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor, 20n); + + const nftCollection = await helper.eth.createNFTCollection(owner, 'nft', 'NFT collection', 'NFT'); + const nftContract = helper.ethNativeContract.collection(nftCollection.collectionAddress, 'nft', owner); + const mintResult = await nftContract.methods.mint(owner).send({from: owner}); + const nftTokenId = mintResult.events.Transfer.returnValues.tokenId; + + const {contract: fractionalizer} = await initContract(helper, owner); + + await fractionalizer.methods.setNftCollectionIsAllowed(nftCollection.collectionAddress, true).send({from: owner}); + await expect(fractionalizer.methods.nft2rft(nftCollection.collectionAddress, nftTokenId, 100).call()) + .to.be.rejectedWith(/ApprovedValueTooLow$/g); + }); + + itEth('call rft2nft without setting RFT collection for contract', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor, 20n); + + const fractionalizer = await deployContract(helper, owner); + const rftCollection = await helper.eth.createRFTCollection(owner, 'rft', 'RFT collection', 'RFT'); + const refungibleContract = helper.ethNativeContract.collection(rftCollection.collectionAddress, 'rft', owner); + const mintResult = await refungibleContract.methods.mint(owner).send({from: owner}); + const rftTokenId = mintResult.events.Transfer.returnValues.tokenId; + + await expect(fractionalizer.methods.rft2nft(rftCollection.collectionAddress, rftTokenId).call({from: owner})) + .to.be.rejectedWith(/RFT collection is not set$/g); + }); + + itEth('call rft2nft for RFT token that is not from configured RFT collection', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor, 20n); + + const {contract: fractionalizer} = await initContract(helper, owner); + const rftCollection = await helper.eth.createRFTCollection(owner, 'rft', 'RFT collection', 'RFT'); + const refungibleContract = helper.ethNativeContract.collection(rftCollection.collectionAddress, 'rft', owner); + const mintResult = await refungibleContract.methods.mint(owner).send({from: owner}); + const rftTokenId = mintResult.events.Transfer.returnValues.tokenId; + + await expect(fractionalizer.methods.rft2nft(rftCollection.collectionAddress, rftTokenId).call()) + .to.be.rejectedWith(/Wrong RFT collection$/g); + }); + + itEth('call rft2nft for RFT token that 
was not minted by fractionalizer contract', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor, 20n); + const rftCollection = await helper.eth.createRFTCollection(owner, 'rft', 'RFT collection', 'RFT'); + const refungibleContract = helper.ethNativeContract.collection(rftCollection.collectionAddress, 'rft', owner); + + const fractionalizer = await deployContract(helper, owner); + + await refungibleContract.methods.addCollectionAdmin(fractionalizer.options.address).send({from: owner}); + await fractionalizer.methods.setRFTCollection(rftCollection.collectionAddress).send({from: owner}); + + const mintResult = await refungibleContract.methods.mint(owner).send({from: owner}); + const rftTokenId = mintResult.events.Transfer.returnValues.tokenId; + + await expect(fractionalizer.methods.rft2nft(rftCollection.collectionAddress, rftTokenId).call()) + .to.be.rejectedWith(/No corresponding NFT token found$/g); + }); + + itEth('call rft2nft without owning all RFT pieces', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor, 20n); + const receiver = await helper.eth.createAccountWithBalance(donor, 10n); + + const {contract: fractionalizer, rftCollectionAddress} = await initContract(helper, owner); + const {rftTokenAddress} = await mintRFTToken(helper, owner, fractionalizer, 100n); + + const {tokenId} = helper.ethAddress.extractTokenId(rftTokenAddress); + const refungibleTokenContract = helper.ethNativeContract.rftToken(rftTokenAddress, owner); + await refungibleTokenContract.methods.transfer(receiver, 50).send({from: owner}); + await refungibleTokenContract.methods.approve(fractionalizer.options.address, 50).send({from: receiver}); + await expect(fractionalizer.methods.rft2nft(rftCollectionAddress, tokenId).call({from: receiver})) + .to.be.rejectedWith(/Not all pieces are owned by the caller$/g); + }); + + itEth('send QTZ/UNQ to contract from non owner', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor, 20n); + const payer = await helper.eth.createAccountWithBalance(donor, 10n); + + const fractionalizer = await deployContract(helper, owner); + const amount = 10n * helper.balance.getOneTokenNominal(); + const web3 = helper.getWeb3(); + await expect(web3.eth.sendTransaction({from: payer, to: fractionalizer.options.address, value: `${amount}`, gas: helper.eth.DEFAULT_GAS})).to.be.rejected; + }); + + itEth('fractionalize NFT with NFT transfers disallowed', async ({helper}) => { + const nftCollection = await helper.nft.mintCollection(donor, {name: 'A', description: 'B', tokenPrefix: 'C'}); + + const owner = await helper.eth.createAccountWithBalance(donor, 20n); + const nftToken = await nftCollection.mintToken(donor, {Ethereum: owner}); + await helper.executeExtrinsic(donor, 'api.tx.unique.setTransfersEnabledFlag', [nftCollection.collectionId, false], true); + const nftCollectionAddress = helper.ethAddress.fromCollectionId(nftCollection.collectionId); + const {contract: fractionalizer} = await initContract(helper, owner); + await fractionalizer.methods.setNftCollectionIsAllowed(nftCollectionAddress, true).send({from: owner}); + + const nftContract = helper.ethNativeContract.collection(nftCollectionAddress, 'nft', owner); + await nftContract.methods.approve(fractionalizer.options.address, nftToken.tokenId).send({from: owner}); + await expect(fractionalizer.methods.nft2rft(nftCollectionAddress, nftToken.tokenId, 100).call()) + .to.be.rejectedWith(/TransferNotAllowed$/g); + }); + + itEth('fractionalize NFT 
with RFT transfers disallowed', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor, 20n); + + const rftCollection = await helper.rft.mintCollection(donor, {name: 'A', description: 'B', tokenPrefix: 'C'}); + const rftCollectionAddress = helper.ethAddress.fromCollectionId(rftCollection.collectionId); + const fractionalizer = await deployContract(helper, owner); + await rftCollection.addAdmin(donor, {Ethereum: fractionalizer.options.address}); + + await fractionalizer.methods.setRFTCollection(rftCollectionAddress).send({from: owner}); + await helper.executeExtrinsic(donor, 'api.tx.unique.setTransfersEnabledFlag', [rftCollection.collectionId, false], true); + + const nftCollection = await helper.eth.createNFTCollection(owner, 'nft', 'NFT collection', 'NFT'); + const nftContract = helper.ethNativeContract.collection(nftCollection.collectionAddress, 'nft', owner); + const mintResult = await nftContract.methods.mint(owner).send({from: owner}); + const nftTokenId = mintResult.events.Transfer.returnValues.tokenId; + + await fractionalizer.methods.setNftCollectionIsAllowed(nftCollection.collectionAddress, true).send({from: owner}); + await nftContract.methods.approve(fractionalizer.options.address, nftTokenId).send({from: owner}); + + await expect(fractionalizer.methods.nft2rft(nftCollection.collectionAddress, nftTokenId, 100n).call()) + .to.be.rejectedWith(/TransferNotAllowed$/g); + }); +}); diff --git a/tests/src/eth/fungible.test.ts b/tests/src/eth/fungible.test.ts index 7acd39cd10..ce67e3bf03 100644 --- a/tests/src/eth/fungible.test.ts +++ b/tests/src/eth/fungible.test.ts @@ -14,81 +14,132 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . -import {approveExpectSuccess, createCollectionExpectSuccess, createFungibleItemExpectSuccess, transferExpectSuccess, transferFromExpectSuccess, UNIQUE} from '../util/helpers'; -import {collectionIdToAddress, createEthAccount, createEthAccountWithBalance, GAS_ARGS, itWeb3, normalizeEvents, recordEthFee, recordEvents, subToEth, transferBalanceToEth} from './util/helpers'; -import fungibleAbi from './fungibleAbi.json'; -import {expect} from 'chai'; +import {expect, itEth, usingEthPlaygrounds} from './util'; +import {IKeyringPair} from '@polkadot/types/types'; describe('Fungible: Information getting', () => { - itWeb3('totalSupply', async ({api, web3, privateKeyWrapper}) => { - const collection = await createCollectionExpectSuccess({ - name: 'token name', - mode: {type: 'Fungible', decimalPoints: 0}, - }); - const alice = privateKeyWrapper('//Alice'); + let donor: IKeyringPair; + let alice: IKeyringPair; - const caller = await createEthAccountWithBalance(api, web3, privateKeyWrapper); + before(async function() { + await usingEthPlaygrounds(async (helper, privateKey) => { + donor = await privateKey({filename: __filename}); + [alice] = await helper.arrange.createAccounts([20n], donor); + }); + }); - await createFungibleItemExpectSuccess(alice, collection, {Value: 200n}, {Substrate: alice.address}); + itEth('totalSupply', async ({helper}) => { + const caller = await helper.eth.createAccountWithBalance(donor); + const collection = await helper.ft.mintCollection(alice); + await collection.mint(alice, 200n); - const address = collectionIdToAddress(collection); - const contract = new web3.eth.Contract(fungibleAbi as any, address, {from: caller, ...GAS_ARGS}); + const contract = helper.ethNativeContract.collectionById(collection.collectionId, 'ft', caller); const 
totalSupply = await contract.methods.totalSupply().call(); - expect(totalSupply).to.equal('200'); }); - itWeb3('balanceOf', async ({api, web3, privateKeyWrapper}) => { - const collection = await createCollectionExpectSuccess({ - name: 'token name', - mode: {type: 'Fungible', decimalPoints: 0}, - }); - const alice = privateKeyWrapper('//Alice'); - - const caller = await createEthAccountWithBalance(api, web3, privateKeyWrapper); + itEth('balanceOf', async ({helper}) => { + const caller = await helper.eth.createAccountWithBalance(donor); + const collection = await helper.ft.mintCollection(alice); + await collection.mint(alice, 200n, {Ethereum: caller}); - await createFungibleItemExpectSuccess(alice, collection, {Value: 200n}, {Ethereum: caller}); - - const address = collectionIdToAddress(collection); - const contract = new web3.eth.Contract(fungibleAbi as any, address, {from: caller, ...GAS_ARGS}); + const contract = helper.ethNativeContract.collectionById(collection.collectionId, 'ft', caller); const balance = await contract.methods.balanceOf(caller).call(); - expect(balance).to.equal('200'); }); }); describe('Fungible: Plain calls', () => { - itWeb3('Can perform approve()', async ({web3, api, privateKeyWrapper}) => { - const collection = await createCollectionExpectSuccess({ - name: 'token name', - mode: {type: 'Fungible', decimalPoints: 0}, + let donor: IKeyringPair; + let alice: IKeyringPair; + + before(async function() { + await usingEthPlaygrounds(async (helper, privateKey) => { + donor = await privateKey({filename: __filename}); + [alice] = await helper.arrange.createAccounts([20n], donor); }); - const alice = privateKeyWrapper('//Alice'); + }); + + itEth('Can perform mint()', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const receiver = helper.eth.createAccount(); + const collection = await helper.ft.mintCollection(alice); + await collection.addAdmin(alice, {Ethereum: owner}); + + const collectionAddress = helper.ethAddress.fromCollectionId(collection.collectionId); + const contract = helper.ethNativeContract.collection(collectionAddress, 'ft', owner); + + const result = await contract.methods.mint(receiver, 100).send(); + + const event = result.events.Transfer; + expect(event.address).to.equal(collectionAddress); + expect(event.returnValues.from).to.equal('0x0000000000000000000000000000000000000000'); + expect(event.returnValues.to).to.equal(receiver); + expect(event.returnValues.value).to.equal('100'); + }); - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); + itEth('Can perform mintBulk()', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const bulkSize = 3; + const receivers = [...new Array(bulkSize)].map(() => helper.eth.createAccount()); + const collection = await helper.ft.mintCollection(alice); + await collection.addAdmin(alice, {Ethereum: owner}); + + const collectionAddress = helper.ethAddress.fromCollectionId(collection.collectionId); + const contract = helper.ethNativeContract.collection(collectionAddress, 'ft', owner); + + const result = await contract.methods.mintBulk(Array.from({length: bulkSize}, (_, i) => ( + [receivers[i], (i + 1) * 10] + ))).send(); + const events = result.events.Transfer.sort((a: any, b: any) => +a.returnValues.value - b.returnValues.value); + for (let i = 0; i < bulkSize; i++) { + const event = events[i]; + expect(event.address).to.equal(collectionAddress); + 
expect(event.returnValues.from).to.equal('0x0000000000000000000000000000000000000000'); + expect(event.returnValues.to).to.equal(receivers[i]); + expect(event.returnValues.value).to.equal(String(10 * (i + 1))); + } + }); - await createFungibleItemExpectSuccess(alice, collection, {Value: 200n}, {Ethereum: owner}); + itEth('Can perform burn()', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const receiver = await helper.eth.createAccountWithBalance(donor); + const collection = await helper.ft.mintCollection(alice); + await collection.addAdmin(alice, {Ethereum: owner}); + + const collectionAddress = helper.ethAddress.fromCollectionId(collection.collectionId); + const contract = helper.ethNativeContract.collection(collectionAddress, 'ft', owner); + await contract.methods.mint(receiver, 100).send(); + + const result = await contract.methods.burnFrom(receiver, 49).send({from: receiver}); + + const event = result.events.Transfer; + expect(event.address).to.equal(collectionAddress); + expect(event.returnValues.from).to.equal(receiver); + expect(event.returnValues.to).to.equal('0x0000000000000000000000000000000000000000'); + expect(event.returnValues.value).to.equal('49'); + + const balance = await contract.methods.balanceOf(receiver).call(); + expect(balance).to.equal('51'); + }); - const spender = createEthAccount(web3); + itEth('Can perform approve()', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const spender = helper.eth.createAccount(); + const collection = await helper.ft.mintCollection(alice); + await collection.mint(alice, 200n, {Ethereum: owner}); - const address = collectionIdToAddress(collection); - const contract = new web3.eth.Contract(fungibleAbi as any, address, {from: owner, ...GAS_ARGS}); + const collectionAddress = helper.ethAddress.fromCollectionId(collection.collectionId); + const contract = helper.ethNativeContract.collection(collectionAddress, 'ft', owner); { const result = await contract.methods.approve(spender, 100).send({from: owner}); - const events = normalizeEvents(result.events); - - expect(events).to.be.deep.equal([ - { - address, - event: 'Approval', - args: { - owner, - spender, - value: '100', - }, - }, - ]); + + const event = result.events.Approval; + expect(event.address).to.be.equal(collectionAddress); + expect(event.returnValues.owner).to.be.equal(owner); + expect(event.returnValues.spender).to.be.equal(spender); + expect(event.returnValues.value).to.be.equal('100'); } { @@ -97,51 +148,32 @@ describe('Fungible: Plain calls', () => { } }); - itWeb3('Can perform transferFrom()', async ({web3, api, privateKeyWrapper}) => { - const collection = await createCollectionExpectSuccess({ - name: 'token name', - mode: {type: 'Fungible', decimalPoints: 0}, - }); - const alice = privateKeyWrapper('//Alice'); - - const owner = createEthAccount(web3); - await transferBalanceToEth(api, alice, owner); + itEth('Can perform transferFrom()', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const spender = await helper.eth.createAccountWithBalance(donor); + const receiver = helper.eth.createAccount(); + const collection = await helper.ft.mintCollection(alice); + await collection.mint(alice, 200n, {Ethereum: owner}); - await createFungibleItemExpectSuccess(alice, collection, {Value: 200n}, {Ethereum: owner}); - - const spender = createEthAccount(web3); - await transferBalanceToEth(api, alice, spender); - - const receiver = createEthAccount(web3); - - const 
address = collectionIdToAddress(collection); - const contract = new web3.eth.Contract(fungibleAbi as any, address, {from: owner, ...GAS_ARGS}); + const collectionAddress = helper.ethAddress.fromCollectionId(collection.collectionId); + const contract = helper.ethNativeContract.collection(collectionAddress, 'ft', owner); await contract.methods.approve(spender, 100).send(); { const result = await contract.methods.transferFrom(owner, receiver, 49).send({from: spender}); - const events = normalizeEvents(result.events); - expect(events).to.be.deep.equal([ - { - address, - event: 'Transfer', - args: { - from: owner, - to: receiver, - value: '49', - }, - }, - { - address, - event: 'Approval', - args: { - owner, - spender, - value: '51', - }, - }, - ]); + + let event = result.events.Transfer; + expect(event.address).to.be.equal(collectionAddress); + expect(event.returnValues.from).to.be.equal(owner); + expect(event.returnValues.to).to.be.equal(receiver); + expect(event.returnValues.value).to.be.equal('49'); + + event = result.events.Approval; + expect(event.address).to.be.equal(collectionAddress); + expect(event.returnValues.owner).to.be.equal(owner); + expect(event.returnValues.spender).to.be.equal(spender); + expect(event.returnValues.value).to.be.equal('51'); } { @@ -155,38 +187,23 @@ describe('Fungible: Plain calls', () => { } }); - itWeb3('Can perform transfer()', async ({web3, api, privateKeyWrapper}) => { - const collection = await createCollectionExpectSuccess({ - name: 'token name', - mode: {type: 'Fungible', decimalPoints: 0}, - }); - const alice = privateKeyWrapper('//Alice'); - - const owner = createEthAccount(web3); - await transferBalanceToEth(api, alice, owner); - - await createFungibleItemExpectSuccess(alice, collection, {Value: 200n}, {Ethereum: owner}); + itEth('Can perform transfer()', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const receiver = await helper.eth.createAccountWithBalance(donor); + const collection = await helper.ft.mintCollection(alice); + await collection.mint(alice, 200n, {Ethereum: owner}); - const receiver = createEthAccount(web3); - await transferBalanceToEth(api, alice, receiver); - - const address = collectionIdToAddress(collection); - const contract = new web3.eth.Contract(fungibleAbi as any, address, {from: owner, ...GAS_ARGS}); + const collectionAddress = helper.ethAddress.fromCollectionId(collection.collectionId); + const contract = helper.ethNativeContract.collection(collectionAddress, 'ft', owner); { const result = await contract.methods.transfer(receiver, 50).send({from: owner}); - const events = normalizeEvents(result.events); - expect(events).to.be.deep.equal([ - { - address, - event: 'Transfer', - args: { - from: owner, - to: receiver, - value: '50', - }, - }, - ]); + + const event = result.events.Transfer; + expect(event.address).to.be.equal(collectionAddress); + expect(event.returnValues.from).to.be.equal(owner); + expect(event.returnValues.to).to.be.equal(receiver); + expect(event.returnValues.value).to.be.equal('50'); } { @@ -202,162 +219,147 @@ describe('Fungible: Plain calls', () => { }); describe('Fungible: Fees', () => { - itWeb3('approve() call fee is less than 0.2UNQ', async ({web3, api, privateKeyWrapper}) => { - const collection = await createCollectionExpectSuccess({ - mode: {type: 'Fungible', decimalPoints: 0}, - }); - const alice = privateKeyWrapper('//Alice'); - - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const spender = createEthAccount(web3); - - 
await createFungibleItemExpectSuccess(alice, collection, {Value: 200n}, {Ethereum: owner}); - - const address = collectionIdToAddress(collection); - const contract = new web3.eth.Contract(fungibleAbi as any, address, {from: owner, ...GAS_ARGS}); - - const cost = await recordEthFee(api, owner, () => contract.methods.approve(spender, 100).send({from: owner})); - expect(cost < BigInt(0.2 * Number(UNIQUE))); - }); + let donor: IKeyringPair; + let alice: IKeyringPair; - itWeb3('transferFrom() call fee is less than 0.2UNQ', async ({web3, api, privateKeyWrapper}) => { - const collection = await createCollectionExpectSuccess({ - mode: {type: 'Fungible', decimalPoints: 0}, + before(async function() { + await usingEthPlaygrounds(async (helper, privateKey) => { + donor = await privateKey({filename: __filename}); + [alice] = await helper.arrange.createAccounts([20n], donor); }); - const alice = privateKeyWrapper('//Alice'); - - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const spender = await createEthAccountWithBalance(api, web3, privateKeyWrapper); + }); + + itEth('approve() call fee is less than 0.2UNQ', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const spender = helper.eth.createAccount(); + const collection = await helper.ft.mintCollection(alice); + await collection.mint(alice, 200n, {Ethereum: owner}); + + const collectionAddress = helper.ethAddress.fromCollectionId(collection.collectionId); + const contract = helper.ethNativeContract.collection(collectionAddress, 'ft', owner); + + const cost = await helper.eth.recordCallFee(owner, () => contract.methods.approve(spender, 100).send({from: owner})); + expect(cost < BigInt(0.2 * Number(helper.balance.getOneTokenNominal()))); + }); - await createFungibleItemExpectSuccess(alice, collection, {Value: 200n}, {Ethereum: owner}); + itEth('transferFrom() call fee is less than 0.2UNQ', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const spender = await helper.eth.createAccountWithBalance(donor); + const collection = await helper.ft.mintCollection(alice); + await collection.mint(alice, 200n, {Ethereum: owner}); - const address = collectionIdToAddress(collection); - const contract = new web3.eth.Contract(fungibleAbi as any, address, {from: owner, ...GAS_ARGS}); + const collectionAddress = helper.ethAddress.fromCollectionId(collection.collectionId); + const contract = helper.ethNativeContract.collection(collectionAddress, 'ft', owner); await contract.methods.approve(spender, 100).send({from: owner}); - const cost = await recordEthFee(api, spender, () => contract.methods.transferFrom(owner, spender, 100).send({from: spender})); - expect(cost < BigInt(0.2 * Number(UNIQUE))); + const cost = await helper.eth.recordCallFee(spender, () => contract.methods.transferFrom(owner, spender, 100).send({from: spender})); + expect(cost < BigInt(0.2 * Number(helper.balance.getOneTokenNominal()))); }); - itWeb3('transfer() call fee is less than 0.2UNQ', async ({web3, api, privateKeyWrapper}) => { - const collection = await createCollectionExpectSuccess({ - mode: {type: 'Fungible', decimalPoints: 0}, - }); - const alice = privateKeyWrapper('//Alice'); - - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const receiver = createEthAccount(web3); + itEth('transfer() call fee is less than 0.2UNQ', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const receiver = 
helper.eth.createAccount(); + const collection = await helper.ft.mintCollection(alice); + await collection.mint(alice, 200n, {Ethereum: owner}); - await createFungibleItemExpectSuccess(alice, collection, {Value: 200n}, {Ethereum: owner}); + const collectionAddress = helper.ethAddress.fromCollectionId(collection.collectionId); + const contract = helper.ethNativeContract.collection(collectionAddress, 'ft', owner); - const address = collectionIdToAddress(collection); - const contract = new web3.eth.Contract(fungibleAbi as any, address, {from: owner, ...GAS_ARGS}); - - const cost = await recordEthFee(api, owner, () => contract.methods.transfer(receiver, 100).send({from: owner})); - expect(cost < BigInt(0.2 * Number(UNIQUE))); + const cost = await helper.eth.recordCallFee(owner, () => contract.methods.transfer(receiver, 100).send({from: owner})); + expect(cost < BigInt(0.2 * Number(helper.balance.getOneTokenNominal()))); }); }); describe('Fungible: Substrate calls', () => { - itWeb3('Events emitted for approve()', async ({web3, privateKeyWrapper}) => { - const collection = await createCollectionExpectSuccess({ - mode: {type: 'Fungible', decimalPoints: 0}, - }); - const alice = privateKeyWrapper('//Alice'); + let donor: IKeyringPair; + let alice: IKeyringPair; - const receiver = createEthAccount(web3); + before(async function() { + await usingEthPlaygrounds(async (helper, privateKey) => { + donor = await privateKey({filename: __filename}); + [alice] = await helper.arrange.createAccounts([20n], donor); + }); + }); - await createFungibleItemExpectSuccess(alice, collection, {Value: 200n}); + itEth('Events emitted for approve()', async ({helper}) => { + const receiver = helper.eth.createAccount(); + const collection = await helper.ft.mintCollection(alice); + await collection.mint(alice, 200n); - const address = collectionIdToAddress(collection); - const contract = new web3.eth.Contract(fungibleAbi as any, address); + const collectionAddress = helper.ethAddress.fromCollectionId(collection.collectionId); + const contract = helper.ethNativeContract.collection(collectionAddress, 'ft'); - const events = await recordEvents(contract, async () => { - await approveExpectSuccess(collection, 0, alice, {Ethereum: receiver}, 100); + const events: any = []; + contract.events.allEvents((_: any, event: any) => { + events.push(event); }); - - expect(events).to.be.deep.equal([ - { - address, - event: 'Approval', - args: { - owner: subToEth(alice.address), - spender: receiver, - value: '100', - }, - }, - ]); + + await collection.approveTokens(alice, {Ethereum: receiver}, 100n); + if (events.length == 0) await helper.wait.newBlocks(1); + const event = events[0]; + + expect(event.event).to.be.equal('Approval'); + expect(event.address).to.be.equal(collectionAddress); + expect(event.returnValues.owner).to.be.equal(helper.address.substrateToEth(alice.address)); + expect(event.returnValues.spender).to.be.equal(receiver); + expect(event.returnValues.value).to.be.equal('100'); }); - itWeb3('Events emitted for transferFrom()', async ({web3, privateKeyWrapper}) => { - const collection = await createCollectionExpectSuccess({ - mode: {type: 'Fungible', decimalPoints: 0}, - }); - const alice = privateKeyWrapper('//Alice'); - const bob = privateKeyWrapper('//Bob'); - - const receiver = createEthAccount(web3); - - await createFungibleItemExpectSuccess(alice, collection, {Value: 200n}); - await approveExpectSuccess(collection, 0, alice, bob.address, 100); + itEth('Events emitted for transferFrom()', async ({helper}) => { + const 
[bob] = await helper.arrange.createAccounts([10n], donor); + const receiver = helper.eth.createAccount(); + const collection = await helper.ft.mintCollection(alice); + await collection.mint(alice, 200n); + await collection.approveTokens(alice, {Substrate: bob.address}, 100n); - const address = collectionIdToAddress(collection); - const contract = new web3.eth.Contract(fungibleAbi as any, address); + const collectionAddress = helper.ethAddress.fromCollectionId(collection.collectionId); + const contract = helper.ethNativeContract.collection(collectionAddress, 'ft'); - const events = await recordEvents(contract, async () => { - await transferFromExpectSuccess(collection, 0, bob, alice, {Ethereum: receiver}, 51, 'Fungible'); + const events: any = []; + contract.events.allEvents((_: any, event: any) => { + events.push(event); }); - expect(events).to.be.deep.equal([ - { - address, - event: 'Transfer', - args: { - from: subToEth(alice.address), - to: receiver, - value: '51', - }, - }, - { - address, - event: 'Approval', - args: { - owner: subToEth(alice.address), - spender: subToEth(bob.address), - value: '49', - }, - }, - ]); + await collection.transferFrom(bob, {Substrate: alice.address}, {Ethereum: receiver}, 51n); + if (events.length == 0) await helper.wait.newBlocks(1); + let event = events[0]; + + expect(event.event).to.be.equal('Transfer'); + expect(event.address).to.be.equal(collectionAddress); + expect(event.returnValues.from).to.be.equal(helper.address.substrateToEth(alice.address)); + expect(event.returnValues.to).to.be.equal(receiver); + expect(event.returnValues.value).to.be.equal('51'); + + event = events[1]; + expect(event.event).to.be.equal('Approval'); + expect(event.address).to.be.equal(collectionAddress); + expect(event.returnValues.owner).to.be.equal(helper.address.substrateToEth(alice.address)); + expect(event.returnValues.spender).to.be.equal(helper.address.substrateToEth(bob.address)); + expect(event.returnValues.value).to.be.equal('49'); }); - itWeb3('Events emitted for transfer()', async ({web3, privateKeyWrapper}) => { - const collection = await createCollectionExpectSuccess({ - mode: {type: 'Fungible', decimalPoints: 0}, - }); - const alice = privateKeyWrapper('//Alice'); - - const receiver = createEthAccount(web3); - - await createFungibleItemExpectSuccess(alice, collection, {Value: 200n}); + itEth('Events emitted for transfer()', async ({helper}) => { + const receiver = helper.eth.createAccount(); + const collection = await helper.ft.mintCollection(alice); + await collection.mint(alice, 200n); - const address = collectionIdToAddress(collection); - const contract = new web3.eth.Contract(fungibleAbi as any, address); + const collectionAddress = helper.ethAddress.fromCollectionId(collection.collectionId); + const contract = helper.ethNativeContract.collection(collectionAddress, 'ft'); - const events = await recordEvents(contract, async () => { - await transferExpectSuccess(collection, 0, alice, {Ethereum:receiver}, 51, 'Fungible'); + const events: any = []; + contract.events.allEvents((_: any, event: any) => { + events.push(event); }); - - expect(events).to.be.deep.equal([ - { - address, - event: 'Transfer', - args: { - from: subToEth(alice.address), - to: receiver, - value: '51', - }, - }, - ]); + + await collection.transfer(alice, {Ethereum:receiver}, 51n); + if (events.length == 0) await helper.wait.newBlocks(1); + const event = events[0]; + + expect(event.event).to.be.equal('Transfer'); + expect(event.address).to.be.equal(collectionAddress); + 
expect(event.returnValues.from).to.be.equal(helper.address.substrateToEth(alice.address)); + expect(event.returnValues.to).to.be.equal(receiver); + expect(event.returnValues.value).to.be.equal('51'); }); }); diff --git a/tests/src/eth/fungibleAbi.json b/tests/src/eth/fungibleAbi.json index 41b42a0f98..100e26fe49 100644 --- a/tests/src/eth/fungibleAbi.json +++ b/tests/src/eth/fungibleAbi.json @@ -49,6 +49,24 @@ "name": "Transfer", "type": "event" }, + { + "inputs": [ + { "internalType": "address", "name": "newAdmin", "type": "address" } + ], + "name": "addCollectionAdmin", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { "internalType": "address", "name": "user", "type": "address" } + ], + "name": "addToCollectionAllowList", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, { "inputs": [ { "internalType": "address", "name": "owner", "type": "address" }, @@ -59,6 +77,15 @@ "stateMutability": "view", "type": "function" }, + { + "inputs": [ + { "internalType": "address", "name": "user", "type": "address" } + ], + "name": "allowed", + "outputs": [{ "internalType": "bool", "name": "", "type": "bool" }], + "stateMutability": "view", + "type": "function" + }, { "inputs": [ { "internalType": "address", "name": "spender", "type": "address" }, @@ -88,6 +115,32 @@ "stateMutability": "nonpayable", "type": "function" }, + { + "inputs": [ + { "internalType": "address", "name": "newOwner", "type": "address" } + ], + "name": "changeCollectionOwner", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "collectionOwner", + "outputs": [ + { + "components": [ + { "internalType": "address", "name": "field_0", "type": "address" }, + { "internalType": "uint256", "name": "field_1", "type": "uint256" } + ], + "internalType": "struct Tuple6", + "name": "", + "type": "tuple" + } + ], + "stateMutability": "view", + "type": "function" + }, { "inputs": [{ "internalType": "string", "name": "key", "type": "string" }], "name": "collectionProperty", @@ -95,6 +148,30 @@ "stateMutability": "view", "type": "function" }, + { + "inputs": [], + "name": "collectionSponsor", + "outputs": [ + { + "components": [ + { "internalType": "address", "name": "field_0", "type": "address" }, + { "internalType": "uint256", "name": "field_1", "type": "uint256" } + ], + "internalType": "struct Tuple6", + "name": "", + "type": "tuple" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "confirmCollectionSponsorship", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, { "inputs": [], "name": "contractAddress", @@ -118,17 +195,44 @@ }, { "inputs": [], - "name": "ethConfirmSponsorship", - "outputs": [], + "name": "hasCollectionPendingSponsor", + "outputs": [{ "internalType": "bool", "name": "", "type": "bool" }], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { "internalType": "address", "name": "user", "type": "address" } + ], + "name": "isOwnerOrAdmin", + "outputs": [{ "internalType": "bool", "name": "", "type": "bool" }], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { "internalType": "address", "name": "to", "type": "address" }, + { "internalType": "uint256", "name": "amount", "type": "uint256" } + ], + "name": "mint", + "outputs": [{ "internalType": "bool", "name": "", "type": "bool" }], "stateMutability": "nonpayable", "type": "function" }, { "inputs": [ - { "internalType": "address", 
"name": "sponsor", "type": "address" } + { + "components": [ + { "internalType": "address", "name": "field_0", "type": "address" }, + { "internalType": "uint256", "name": "field_1", "type": "uint256" } + ], + "internalType": "struct Tuple6[]", + "name": "amounts", + "type": "tuple[]" + } ], - "name": "ethSetSponsor", - "outputs": [], + "name": "mintBulk", + "outputs": [{ "internalType": "bool", "name": "", "type": "bool" }], "stateMutability": "nonpayable", "type": "function" }, @@ -141,10 +245,32 @@ }, { "inputs": [ - { "internalType": "string", "name": "key", "type": "string" }, - { "internalType": "bytes", "name": "value", "type": "bytes" } + { "internalType": "address", "name": "admin", "type": "address" } ], - "name": "setCollectionProperty", + "name": "removeCollectionAdmin", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "removeCollectionSponsor", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { "internalType": "address", "name": "user", "type": "address" } + ], + "name": "removeFromCollectionAllowList", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [{ "internalType": "uint8", "name": "mode", "type": "uint8" }], + "name": "setCollectionAccess", "outputs": [], "stateMutability": "nonpayable", "type": "function" @@ -154,7 +280,7 @@ { "internalType": "string", "name": "limit", "type": "string" }, { "internalType": "uint32", "name": "value", "type": "uint32" } ], - "name": "setLimit", + "name": "setCollectionLimit", "outputs": [], "stateMutability": "nonpayable", "type": "function" @@ -164,7 +290,54 @@ { "internalType": "string", "name": "limit", "type": "string" }, { "internalType": "bool", "name": "value", "type": "bool" } ], - "name": "setLimit", + "name": "setCollectionLimit", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [{ "internalType": "bool", "name": "mode", "type": "bool" }], + "name": "setCollectionMintMode", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [{ "internalType": "bool", "name": "enable", "type": "bool" }], + "name": "setCollectionNesting", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { "internalType": "bool", "name": "enable", "type": "bool" }, + { + "internalType": "address[]", + "name": "collections", + "type": "address[]" + } + ], + "name": "setCollectionNesting", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { "internalType": "string", "name": "key", "type": "string" }, + { "internalType": "bytes", "name": "value", "type": "bytes" } + ], + "name": "setCollectionProperty", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { "internalType": "address", "name": "sponsor", "type": "address" } + ], + "name": "setCollectionSponsor", "outputs": [], "stateMutability": "nonpayable", "type": "function" @@ -212,5 +385,12 @@ "outputs": [{ "internalType": "bool", "name": "", "type": "bool" }], "stateMutability": "nonpayable", "type": "function" + }, + { + "inputs": [], + "name": "uniqueCollectionType", + "outputs": [{ "internalType": "string", "name": "", "type": "string" }], + "stateMutability": "view", + "type": "function" } ] diff --git a/tests/src/eth/helpersSmoke.test.ts b/tests/src/eth/helpersSmoke.test.ts index ed260ae64d..31140efa06 100644 --- 
a/tests/src/eth/helpersSmoke.test.ts
+++ b/tests/src/eth/helpersSmoke.test.ts
@@ -14,21 +14,30 @@
 // You should have received a copy of the GNU General Public License
 // along with Unique Network. If not, see .
-import {expect} from 'chai';
-import {createEthAccountWithBalance, deployFlipper, itWeb3, contractHelpers} from './util/helpers';
+import {expect, itEth, usingEthPlaygrounds} from './util';
+import {IKeyringPair} from '@polkadot/types/types';
 
 describe('Helpers sanity check', () => {
-  itWeb3('Contract owner is recorded', async ({api, web3, privateKeyWrapper}) => {
-    const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper);
+  let donor: IKeyringPair;
 
-    const flipper = await deployFlipper(web3, owner);
+  before(async function() {
+    await usingEthPlaygrounds(async (_helper, privateKey) => {
+      donor = await privateKey({filename: __filename});
+    });
+  });
+
+  itEth('Contract owner is recorded', async ({helper}) => {
+    const owner = await helper.eth.createAccountWithBalance(donor);
+
+    const flipper = await helper.eth.deployFlipper(owner);
 
-    expect(await contractHelpers(web3, owner).methods.contractOwner(flipper.options.address).call()).to.be.equal(owner);
+    expect(await helper.ethNativeContract.contractHelpers(owner).methods.contractOwner(flipper.options.address).call()).to.be.equal(owner);
   });
 
-  itWeb3('Flipper is working', async ({api, web3, privateKeyWrapper}) => {
-    const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper);
-    const flipper = await deployFlipper(web3, owner);
+  itEth('Flipper is working', async ({helper}) => {
+    const owner = await helper.eth.createAccountWithBalance(donor);
+
+    const flipper = await helper.eth.deployFlipper(owner);
 
     expect(await flipper.methods.getValue().call()).to.be.false;
     await flipper.methods.flip().send({from: owner});
diff --git a/tests/src/eth/marketplace/marketplace.test.ts b/tests/src/eth/marketplace/marketplace.test.ts
index 753939c830..90ce502dd3 100644
--- a/tests/src/eth/marketplace/marketplace.test.ts
+++ b/tests/src/eth/marketplace/marketplace.test.ts
@@ -14,209 +14,207 @@
 // You should have received a copy of the GNU General Public License
 // along with Unique Network. If not, see .
+import {IKeyringPair} from '@polkadot/types/types'; import {readFile} from 'fs/promises'; -import {getBalanceSingle} from '../../substrate/get-balance'; -import { - addToAllowListExpectSuccess, - confirmSponsorshipExpectSuccess, - createCollectionExpectSuccess, - createItemExpectSuccess, - getTokenOwner, - setCollectionLimitsExpectSuccess, - setCollectionSponsorExpectSuccess, - transferExpectSuccess, - transferFromExpectSuccess, - transferBalanceTo, -} from '../../util/helpers'; -import {collectionIdToAddress, contractHelpers, createEthAccountWithBalance, executeEthTxOnSub, GAS_ARGS, itWeb3, SponsoringMode, subToEth, subToEthLowercase, transferBalanceToEth} from '../util/helpers'; -import {evmToAddress} from '@polkadot/util-crypto'; -import nonFungibleAbi from '../nonFungibleAbi.json'; - -import {expect} from 'chai'; - -const PRICE = 2000n; +import {itEth, usingEthPlaygrounds, expect, SponsoringMode} from '../util'; describe('Matcher contract usage', () => { - itWeb3('With UNQ', async ({api, web3, privateKeyWrapper}) => { - const alice = privateKeyWrapper('//Alice'); - const matcherOwner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); + const PRICE = 2000n; + let donor: IKeyringPair; + let alice: IKeyringPair; + let aliceMirror: string; + let aliceDoubleMirror: string; + let seller: IKeyringPair; + let sellerMirror: string; + + before(async () => { + await usingEthPlaygrounds(async (_helper, privateKey) => { + donor = await privateKey({filename: __filename}); + }); + }); + + beforeEach(async () => { + await usingEthPlaygrounds(async (helper, privateKey) => { + [alice] = await helper.arrange.createAccounts([1000n], donor); + aliceMirror = helper.address.substrateToEth(alice.address).toLowerCase(); + aliceDoubleMirror = helper.address.ethToSubstrate(aliceMirror); + seller = await privateKey(`//Seller/${Date.now()}`); + sellerMirror = helper.address.substrateToEth(seller.address).toLowerCase(); + + await helper.balance.transferToSubstrate(donor, aliceDoubleMirror, 10_000_000_000_000_000_000n); + }); + }); + + itEth('With UNQ', async ({helper}) => { + const web3 = helper.getWeb3(); + const matcherOwner = await helper.eth.createAccountWithBalance(donor); const matcherContract = new web3.eth.Contract(JSON.parse((await readFile(`${__dirname}/MarketPlace.abi`)).toString()), undefined, { from: matcherOwner, - ...GAS_ARGS, + gas: helper.eth.DEFAULT_GAS, }); const matcher = await matcherContract.deploy({data: (await readFile(`${__dirname}/MarketPlace.bin`)).toString(), arguments:[matcherOwner]}).send({from: matcherOwner}); - const helpers = contractHelpers(web3, matcherOwner); + + const sponsor = await helper.eth.createAccountWithBalance(donor); + const helpers = helper.ethNativeContract.contractHelpers(matcherOwner); await helpers.methods.setSponsoringMode(matcher.options.address, SponsoringMode.Allowlisted).send({from: matcherOwner}); await helpers.methods.setSponsoringRateLimit(matcher.options.address, 1).send({from: matcherOwner}); - await transferBalanceToEth(api, alice, matcher.options.address); - - const collectionId = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await setCollectionLimitsExpectSuccess(alice, collectionId, {sponsorApproveTimeout: 1}); - const evmCollection = new web3.eth.Contract(nonFungibleAbi as any, collectionIdToAddress(collectionId), {from: matcherOwner}); - await setCollectionSponsorExpectSuccess(collectionId, alice.address); - await transferBalanceToEth(api, alice, subToEth(alice.address)); - await 
confirmSponsorshipExpectSuccess(collectionId); - - await helpers.methods.toggleAllowed(matcher.options.address, subToEth(alice.address), true).send({from: matcherOwner}); - await addToAllowListExpectSuccess(alice, collectionId, evmToAddress(subToEth(alice.address))); + + await helpers.methods.setSponsor(matcher.options.address, sponsor).send({from: matcherOwner}); + await helpers.methods.confirmSponsorship(matcher.options.address).send({from: sponsor}); - const seller = privateKeyWrapper(`//Seller/${Date.now()}`); - await helpers.methods.toggleAllowed(matcher.options.address, subToEth(seller.address), true).send({from: matcherOwner}); + const collection = await helper.nft.mintCollection(alice, {limits: {sponsorApproveTimeout: 1}, pendingSponsor: alice.address}); + await collection.confirmSponsorship(alice); + await collection.addToAllowList(alice, {Substrate: aliceDoubleMirror}); + const evmCollection = helper.ethNativeContract.collection(helper.ethAddress.fromCollectionId(collection.collectionId), 'nft'); + await helper.eth.transferBalanceFromSubstrate(donor, aliceMirror); - const tokenId = await createItemExpectSuccess(alice, collectionId, 'NFT', seller.address); + await helpers.methods.toggleAllowed(matcher.options.address, aliceMirror, true).send({from: matcherOwner}); + await helpers.methods.toggleAllowed(matcher.options.address, sellerMirror, true).send({from: matcherOwner}); - // To transfer item to matcher it first needs to be transfered to EVM account of bob - await transferExpectSuccess(collectionId, tokenId, seller, {Ethereum: subToEth(seller.address)}); + const token = await collection.mintToken(alice, {Ethereum: sellerMirror}); // Token is owned by seller initially - expect(await getTokenOwner(api, collectionId, tokenId)).to.be.deep.equal({Ethereum: subToEthLowercase(seller.address)}); + expect(await token.getOwner()).to.be.deep.equal({Ethereum: sellerMirror}); // Ask { - await executeEthTxOnSub(web3, api, seller, evmCollection, m => m.approve(matcher.options.address, tokenId)); - await executeEthTxOnSub(web3, api, seller, matcher, m => m.addAsk(PRICE, '0x0000000000000000000000000000000000000001', evmCollection.options.address, tokenId)); + await helper.eth.sendEVM(seller, evmCollection.options.address, evmCollection.methods.approve(matcher.options.address, token.tokenId).encodeABI(), '0'); + await helper.eth.sendEVM(seller, matcher.options.address, matcher.methods.addAsk(PRICE, '0x0000000000000000000000000000000000000001', evmCollection.options.address, token.tokenId).encodeABI(), '0'); } // Token is transferred to matcher - expect(await getTokenOwner(api, collectionId, tokenId)).to.be.deep.equal({Ethereum: matcher.options.address.toLowerCase()}); + expect(await token.getOwner()).to.be.deep.equal({Ethereum: matcher.options.address.toLowerCase()}); // Buy { - const sellerBalanceBeforePurchase = await getBalanceSingle(api, seller.address); - await executeEthTxOnSub(web3, api, alice, matcher, m => m.buy(evmCollection.options.address, tokenId), {value: PRICE}); - expect(await getBalanceSingle(api, seller.address) - sellerBalanceBeforePurchase === PRICE); + const sellerBalanceBeforePurchase = await helper.balance.getSubstrate(seller.address); + await helper.eth.sendEVM(alice, matcher.options.address, matcher.methods.buy(evmCollection.options.address, token.tokenId).encodeABI(), PRICE.toString()); + expect(await helper.balance.getSubstrate(seller.address) - sellerBalanceBeforePurchase === PRICE); } // Token is transferred to evm account of alice - expect(await getTokenOwner(api, 
collectionId, tokenId)).to.be.deep.equal({Ethereum: subToEthLowercase(alice.address)}); + expect(await token.getOwner()).to.be.deep.equal({Ethereum: aliceMirror}); // Transfer token to substrate side of alice - await transferFromExpectSuccess(collectionId, tokenId, alice, {Ethereum: subToEth(alice.address)}, {Substrate: alice.address}); + await token.transferFrom(alice, {Ethereum: aliceMirror}, {Substrate: alice.address}); // Token is transferred to substrate account of alice, seller received funds - expect(await getTokenOwner(api, collectionId, tokenId)).to.be.deep.equal({Substrate: alice.address}); + expect(await token.getOwner()).to.be.deep.equal({Substrate: alice.address}); }); - - itWeb3('With escrow', async ({api, web3, privateKeyWrapper}) => { - const alice = privateKeyWrapper('//Alice'); - const matcherOwner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const escrow = await createEthAccountWithBalance(api, web3, privateKeyWrapper); + itEth('With escrow', async ({helper}) => { + const web3 = helper.getWeb3(); + const matcherOwner = await helper.eth.createAccountWithBalance(donor); const matcherContract = new web3.eth.Contract(JSON.parse((await readFile(`${__dirname}/MarketPlace.abi`)).toString()), undefined, { from: matcherOwner, - ...GAS_ARGS, + gas: helper.eth.DEFAULT_GAS, }); const matcher = await matcherContract.deploy({data: (await readFile(`${__dirname}/MarketPlace.bin`)).toString(), arguments: [matcherOwner]}).send({from: matcherOwner, gas: 10000000}); + + const sponsor = await helper.eth.createAccountWithBalance(donor); + const escrow = await helper.eth.createAccountWithBalance(donor); await matcher.methods.setEscrow(escrow).send({from: matcherOwner}); - const helpers = contractHelpers(web3, matcherOwner); + const helpers = helper.ethNativeContract.contractHelpers(matcherOwner); await helpers.methods.setSponsoringMode(matcher.options.address, SponsoringMode.Allowlisted).send({from: matcherOwner}); await helpers.methods.setSponsoringRateLimit(matcher.options.address, 1).send({from: matcherOwner}); - await transferBalanceToEth(api, alice, matcher.options.address); + + await helpers.methods.setSponsor(matcher.options.address, sponsor).send({from: matcherOwner}); + await helpers.methods.confirmSponsorship(matcher.options.address).send({from: sponsor}); - const collectionId = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await setCollectionLimitsExpectSuccess(alice, collectionId, {sponsorApproveTimeout: 1}); - const evmCollection = new web3.eth.Contract(nonFungibleAbi as any, collectionIdToAddress(collectionId), {from: matcherOwner}); - await setCollectionSponsorExpectSuccess(collectionId, alice.address); - await transferBalanceToEth(api, alice, subToEth(alice.address)); - await confirmSponsorshipExpectSuccess(collectionId); + const collection = await helper.nft.mintCollection(alice, {limits: {sponsorApproveTimeout: 1}, pendingSponsor: alice.address}); + await collection.confirmSponsorship(alice); + await collection.addToAllowList(alice, {Substrate: aliceDoubleMirror}); + const evmCollection = helper.ethNativeContract.collection(helper.ethAddress.fromCollectionId(collection.collectionId), 'nft'); + await helper.eth.transferBalanceFromSubstrate(donor, aliceMirror); - await helpers.methods.toggleAllowed(matcher.options.address, subToEth(alice.address), true).send({from: matcherOwner}); - await addToAllowListExpectSuccess(alice, collectionId, evmToAddress(subToEth(alice.address))); - const seller = privateKeyWrapper(`//Seller/${Date.now()}`); - 
await helpers.methods.toggleAllowed(matcher.options.address, subToEth(seller.address), true).send({from: matcherOwner}); + await helpers.methods.toggleAllowed(matcher.options.address, aliceMirror, true).send({from: matcherOwner}); - const tokenId = await createItemExpectSuccess(alice, collectionId, 'NFT', seller.address); + await helpers.methods.toggleAllowed(matcher.options.address, sellerMirror, true).send({from: matcherOwner}); - // To transfer item to matcher it first needs to be transfered to EVM account of bob - await transferExpectSuccess(collectionId, tokenId, seller, {Ethereum: subToEth(seller.address)}); + const token = await collection.mintToken(alice, {Ethereum: sellerMirror}); // Token is owned by seller initially - expect(await getTokenOwner(api, collectionId, tokenId)).to.be.deep.equal({Ethereum: subToEthLowercase(seller.address)}); + expect(await token.getOwner()).to.be.deep.equal({Ethereum: sellerMirror}); // Ask { - await executeEthTxOnSub(web3, api, seller, evmCollection, m => m.approve(matcher.options.address, tokenId)); - await executeEthTxOnSub(web3, api, seller, matcher, m => m.addAsk(PRICE, '0x0000000000000000000000000000000000000001', evmCollection.options.address, tokenId)); + await helper.eth.sendEVM(seller, evmCollection.options.address, evmCollection.methods.approve(matcher.options.address, token.tokenId).encodeABI(), '0'); + await helper.eth.sendEVM(seller, matcher.options.address, matcher.methods.addAsk(PRICE, '0x0000000000000000000000000000000000000001', evmCollection.options.address, token.tokenId).encodeABI(), '0'); } // Token is transferred to matcher - expect(await getTokenOwner(api, collectionId, tokenId)).to.be.deep.equal({Ethereum: matcher.options.address.toLowerCase()}); + expect(await token.getOwner()).to.be.deep.equal({Ethereum: matcher.options.address.toLowerCase()}); // Give buyer KSM - await matcher.methods.depositKSM(PRICE, subToEth(alice.address)).send({from: escrow}); + await matcher.methods.depositKSM(PRICE, aliceMirror).send({from: escrow}); // Buy { - expect(await matcher.methods.balanceKSM(subToEth(seller.address)).call()).to.be.equal('0'); - expect(await matcher.methods.balanceKSM(subToEth(alice.address)).call()).to.be.equal(PRICE.toString()); + expect(await matcher.methods.balanceKSM(sellerMirror).call()).to.be.equal('0'); + expect(await matcher.methods.balanceKSM(aliceMirror).call()).to.be.equal(PRICE.toString()); - await executeEthTxOnSub(web3, api, alice, matcher, m => m.buyKSM(evmCollection.options.address, tokenId, subToEth(alice.address), subToEth(alice.address))); + await helper.eth.sendEVM(alice, matcher.options.address, matcher.methods.buyKSM(evmCollection.options.address, token.tokenId, aliceMirror, aliceMirror).encodeABI(), '0'); // Price is removed from buyer balance, and added to seller - expect(await matcher.methods.balanceKSM(subToEth(alice.address)).call()).to.be.equal('0'); - expect(await matcher.methods.balanceKSM(subToEth(seller.address)).call()).to.be.equal(PRICE.toString()); + expect(await matcher.methods.balanceKSM(aliceMirror).call()).to.be.equal('0'); + expect(await matcher.methods.balanceKSM(sellerMirror).call()).to.be.equal(PRICE.toString()); } // Token is transferred to evm account of alice - expect(await getTokenOwner(api, collectionId, tokenId)).to.be.deep.equal({Ethereum: subToEthLowercase(alice.address)}); + expect(await token.getOwner()).to.be.deep.equal({Ethereum: aliceMirror}); // Transfer token to substrate side of alice - await transferFromExpectSuccess(collectionId, tokenId, alice, {Ethereum: 
subToEth(alice.address)}, {Substrate: alice.address}); + await token.transferFrom(alice, {Ethereum: aliceMirror}, {Substrate: alice.address}); // Token is transferred to substrate account of alice, seller received funds - expect(await getTokenOwner(api, collectionId, tokenId)).to.be.deep.equal({Substrate: alice.address}); + expect(await token.getOwner()).to.be.deep.equal({Substrate: alice.address}); }); - - itWeb3('Sell tokens from substrate user via EVM contract', async ({api, web3, privateKeyWrapper}) => { - const alice = privateKeyWrapper('//Alice'); - const matcherOwner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); + itEth('Sell tokens from substrate user via EVM contract', async ({helper}) => { + const web3 = helper.getWeb3(); + const matcherOwner = await helper.eth.createAccountWithBalance(donor); const matcherContract = new web3.eth.Contract(JSON.parse((await readFile(`${__dirname}/MarketPlace.abi`)).toString()), undefined, { from: matcherOwner, - ...GAS_ARGS, + gas: helper.eth.DEFAULT_GAS, }); const matcher = await matcherContract.deploy({data: (await readFile(`${__dirname}/MarketPlace.bin`)).toString(), arguments:[matcherOwner]}).send({from: matcherOwner}); - await transferBalanceToEth(api, alice, matcher.options.address); - const collectionId = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await setCollectionLimitsExpectSuccess(alice, collectionId, {sponsorApproveTimeout: 1}); - const evmCollection = new web3.eth.Contract(nonFungibleAbi as any, collectionIdToAddress(collectionId), {from: matcherOwner}); + await helper.eth.transferBalanceFromSubstrate(donor, matcher.options.address); - const seller = privateKeyWrapper(`//Seller/${Date.now()}`); - await transferBalanceTo(api, alice, seller.address); - - const tokenId = await createItemExpectSuccess(alice, collectionId, 'NFT', seller.address); + const collection = await helper.nft.mintCollection(alice, {limits: {sponsorApproveTimeout: 1}}); + const evmCollection = helper.ethNativeContract.collection(helper.ethAddress.fromCollectionId(collection.collectionId), 'nft'); - // To transfer item to matcher it first needs to be transfered to EVM account of bob - await transferExpectSuccess(collectionId, tokenId, seller, {Ethereum: subToEth(seller.address)}); + await helper.balance.transferToSubstrate(donor, seller.address, 100_000_000_000_000_000_000n); + + const token = await collection.mintToken(alice, {Ethereum: sellerMirror}); // Token is owned by seller initially - expect(await getTokenOwner(api, collectionId, tokenId)).to.be.deep.equal({Ethereum: subToEthLowercase(seller.address)}); + expect(await token.getOwner()).to.be.deep.equal({Ethereum: sellerMirror}); // Ask { - await executeEthTxOnSub(web3, api, seller, evmCollection, m => m.approve(matcher.options.address, tokenId)); - await executeEthTxOnSub(web3, api, seller, matcher, m => m.addAsk(PRICE, '0x0000000000000000000000000000000000000001', evmCollection.options.address, tokenId)); + await helper.eth.sendEVM(seller, evmCollection.options.address, evmCollection.methods.approve(matcher.options.address, token.tokenId).encodeABI(), '0'); + await helper.eth.sendEVM(seller, matcher.options.address, matcher.methods.addAsk(PRICE, '0x0000000000000000000000000000000000000001', evmCollection.options.address, token.tokenId).encodeABI(), '0'); } // Token is transferred to matcher - expect(await getTokenOwner(api, collectionId, tokenId)).to.be.deep.equal({Ethereum: matcher.options.address.toLowerCase()}); + expect(await 
token.getOwner()).to.be.deep.equal({Ethereum: matcher.options.address.toLowerCase()});
 
     // Buy
     {
-      const sellerBalanceBeforePurchase = await getBalanceSingle(api, seller.address);
-      await executeEthTxOnSub(web3, api, alice, matcher, m => m.buy(evmCollection.options.address, tokenId), {value: PRICE});
-      expect(await getBalanceSingle(api, seller.address) - sellerBalanceBeforePurchase === PRICE);
+      const sellerBalanceBeforePurchase = await helper.balance.getSubstrate(seller.address);
+      await helper.eth.sendEVM(alice, matcher.options.address, matcher.methods.buy(evmCollection.options.address, token.tokenId).encodeABI(), PRICE.toString());
+      expect(await helper.balance.getSubstrate(seller.address) - sellerBalanceBeforePurchase === PRICE);
     }
 
     // Token is transferred to evm account of alice
-    expect(await getTokenOwner(api, collectionId, tokenId)).to.be.deep.equal({Ethereum: subToEthLowercase(alice.address)});
+    expect(await token.getOwner()).to.be.deep.equal({Ethereum: aliceMirror});
 
     // Transfer token to substrate side of alice
-    await transferFromExpectSuccess(collectionId, tokenId, alice, {Ethereum: subToEth(alice.address)}, {Substrate: alice.address});
+    await token.transferFrom(alice, {Ethereum: aliceMirror}, {Substrate: alice.address});
 
     // Token is transferred to substrate account of alice, seller received funds
-    expect(await getTokenOwner(api, collectionId, tokenId)).to.be.deep.equal({Substrate: alice.address});
+    expect(await token.getOwner()).to.be.deep.equal({Substrate: alice.address});
   });
 });
diff --git a/tests/src/eth/migration.test.ts b/tests/src/eth/migration.test.ts
index 151d51d742..0cc8456c1f 100644
--- a/tests/src/eth/migration.test.ts
+++ b/tests/src/eth/migration.test.ts
@@ -14,12 +14,20 @@
 // You should have received a copy of the GNU General Public License
 // along with Unique Network. If not, see .
-import {expect} from 'chai'; -import {submitTransactionAsync} from '../substrate/substrate-api'; -import {createEthAccountWithBalance, GAS_ARGS, itWeb3} from './util/helpers'; +import {expect, itEth, usingEthPlaygrounds} from './util'; +import {IKeyringPair} from '@polkadot/types/types'; describe('EVM Migrations', () => { - itWeb3('Deploy contract saved state', async ({web3, api, privateKeyWrapper}) => { + let superuser: IKeyringPair; + + before(async function() { + await usingEthPlaygrounds(async (_helper, privateKey) => { + superuser = await privateKey('//Alice'); + }); + }); + + // todo:playgrounds requires sudo, look into later + itEth('Deploy contract saved state', async ({helper}) => { /* contract StatefulContract { uint counter; @@ -53,13 +61,16 @@ describe('EVM Migrations', () => { ['0xedc95719e9a3b28dd8e80877cb5880a9be7de1a13fc8b05e7999683b6b567643', '0x0000000000000000000000000000000000000000000000000000000000000004'], ]; - const alice = privateKeyWrapper('//Alice'); - const caller = await createEthAccountWithBalance(api, web3, privateKeyWrapper); + const caller = await helper.eth.createAccountWithBalance(superuser); - await submitTransactionAsync(alice, api.tx.sudo.sudo(api.tx.evmMigration.begin(ADDRESS) as any)); - await submitTransactionAsync(alice, api.tx.sudo.sudo(api.tx.evmMigration.setData(ADDRESS, DATA as any) as any)); - await submitTransactionAsync(alice, api.tx.sudo.sudo(api.tx.evmMigration.finish(ADDRESS, CODE) as any)); + const txBegin = helper.constructApiCall('api.tx.evmMigration.begin', [ADDRESS]); + const txSetData = helper.constructApiCall('api.tx.evmMigration.setData', [ADDRESS, DATA]); + const txFinish = helper.constructApiCall('api.tx.evmMigration.finish', [ADDRESS, CODE]); + await expect(helper.executeExtrinsic(superuser, 'api.tx.sudo.sudo', [txBegin])).to.be.fulfilled; + await expect(helper.executeExtrinsic(superuser, 'api.tx.sudo.sudo', [txSetData])).to.be.fulfilled; + await expect(helper.executeExtrinsic(superuser, 'api.tx.sudo.sudo', [txFinish])).to.be.fulfilled; + const web3 = helper.getWeb3(); const contract = new web3.eth.Contract([ { inputs: [], @@ -87,7 +98,7 @@ describe('EVM Migrations', () => { stateMutability: 'view', type: 'function', }, - ], ADDRESS, {from: caller, ...GAS_ARGS}); + ], ADDRESS, {from: caller, gas: helper.eth.DEFAULT_GAS}); expect(await contract.methods.counterValue().call()).to.be.equal('10'); for (let i = 1; i <= 4; i++) { diff --git a/tests/src/eth/nesting/nest.test.ts b/tests/src/eth/nesting/nest.test.ts index b4d249ac17..dfa5d9f6b3 100644 --- a/tests/src/eth/nesting/nest.test.ts +++ b/tests/src/eth/nesting/nest.test.ts @@ -1,217 +1,175 @@ -import {ApiPromise} from '@polkadot/api'; +import {IKeyringPair} from '@polkadot/types/types'; import {Contract} from 'web3-eth-contract'; -import {expect} from 'chai'; -import Web3 from 'web3'; -import {createEthAccountWithBalance, evmCollectionHelpers, GAS_ARGS, getCollectionAddressFromResult, itWeb3, tokenIdToAddress} from '../../eth/util/helpers'; -import nonFungibleAbi from '../nonFungibleAbi.json'; + +import {itEth, EthUniqueHelper, usingEthPlaygrounds, expect} from '../util'; const createNestingCollection = async ( - api: ApiPromise, - web3: Web3, + helper: EthUniqueHelper, owner: string, ): Promise<{ collectionId: number, collectionAddress: string, contract: Contract }> => { - const collectionHelper = evmCollectionHelpers(web3, owner); - - const result = await collectionHelper.methods - .createNonfungibleCollection('A', 'B', 'C') - .send(); - const {collectionIdAddress: 
collectionAddress, collectionId} = await getCollectionAddressFromResult(api, result); - - const contract = new web3.eth.Contract(nonFungibleAbi as any, collectionAddress, {from: owner, ...GAS_ARGS}); + const {collectionAddress, collectionId} = await helper.eth.createNFTCollection(owner, 'A', 'B', 'C'); + + const contract = helper.ethNativeContract.collection(collectionAddress, 'nft', owner); await contract.methods.setCollectionNesting(true).send({from: owner}); return {collectionId, collectionAddress, contract}; }; -describe('Integration Test: EVM Nesting', () => { - itWeb3('NFT: allows an Owner to nest/unnest their token', async ({api, web3, privateKeyWrapper}) => { - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const {collectionId, contract} = await createNestingCollection(api, web3, owner); - // Create a token to be nested - const targetNFTTokenId = await contract.methods.nextTokenId().call(); - await contract.methods.mint( - owner, - targetNFTTokenId, - ).send({from: owner}); +describe('EVM nesting tests group', () => { + let donor: IKeyringPair; - const targetNftTokenAddress = tokenIdToAddress(collectionId, targetNFTTokenId); + before(async function() { + await usingEthPlaygrounds(async (_, privateKey) => { + donor = await privateKey({filename: __filename}); + }); + }); - // Create a nested token - const firstTokenId = await contract.methods.nextTokenId().call(); - await contract.methods.mint( - targetNftTokenAddress, - firstTokenId, - ).send({from: owner}); + describe('Integration Test: EVM Nesting', () => { + itEth('NFT: allows an Owner to nest/unnest their token', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const {collectionId, contract} = await createNestingCollection(helper, owner); - expect(await contract.methods.ownerOf(firstTokenId).call()).to.be.equal(targetNftTokenAddress); + // Create a token to be nested to + const mintingTargetNFTTokenIdResult = await contract.methods.mint(owner).send({from: owner}); + const targetNFTTokenId = mintingTargetNFTTokenIdResult.events.Transfer.returnValues.tokenId; + const targetNftTokenAddress = helper.ethAddress.fromTokenId(collectionId, targetNFTTokenId); - // Create a token to be nested and nest - const secondTokenId = await contract.methods.nextTokenId().call(); - await contract.methods.mint( - owner, - secondTokenId, - ).send({from: owner}); + // Create a nested token + const mintingFirstTokenIdResult = await contract.methods.mint(targetNftTokenAddress).send({from: owner}); + const firstTokenId = mintingFirstTokenIdResult.events.Transfer.returnValues.tokenId; + expect(await contract.methods.ownerOf(firstTokenId).call()).to.be.equal(targetNftTokenAddress); - await contract.methods.transfer(targetNftTokenAddress, secondTokenId).send({from: owner}); + // Create a token to be nested and nest + const mintingSecondTokenIdResult = await contract.methods.mint(owner).send({from: owner}); + const secondTokenId = mintingSecondTokenIdResult.events.Transfer.returnValues.tokenId; - expect(await contract.methods.ownerOf(secondTokenId).call()).to.be.equal(targetNftTokenAddress); + await contract.methods.transfer(targetNftTokenAddress, secondTokenId).send({from: owner}); + expect(await contract.methods.ownerOf(secondTokenId).call()).to.be.equal(targetNftTokenAddress); - // Unnest token back - await contract.methods.transferFrom(targetNftTokenAddress, owner, secondTokenId).send({from: owner}); - expect(await contract.methods.ownerOf(secondTokenId).call()).to.be.equal(owner); 
- }); + // Unnest token back + await contract.methods.transferFrom(targetNftTokenAddress, owner, secondTokenId).send({from: owner}); + expect(await contract.methods.ownerOf(secondTokenId).call()).to.be.equal(owner); + }); - itWeb3('NFT: allows an Owner to nest/unnest their token (Restricted nesting)', async ({api, web3, privateKeyWrapper}) => { - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - - const {collectionId: collectionIdA, collectionAddress: collectionAddressA, contract: contractA} = await createNestingCollection(api, web3, owner); - const {collectionAddress: collectionAddressB, contract: contractB} = await createNestingCollection(api, web3, owner); - await contractA.methods.setCollectionNesting(true, [collectionAddressA, collectionAddressB]).send({from: owner}); - - // Create a token to nest into - const targetNftTokenId = await contractA.methods.nextTokenId().call(); - await contractA.methods.mint( - owner, - targetNftTokenId, - ).send({from: owner}); - const nftTokenAddressA1 = tokenIdToAddress(collectionIdA, targetNftTokenId); - - // Create a token for nesting in the same collection as the target - const nftTokenIdA = await contractA.methods.nextTokenId().call(); - await contractA.methods.mint( - owner, - nftTokenIdA, - ).send({from: owner}); - - // Create a token for nesting in a different collection - const nftTokenIdB = await contractB.methods.nextTokenId().call(); - await contractB.methods.mint( - owner, - nftTokenIdB, - ).send({from: owner}); - - // Nest - await contractA.methods.transfer(nftTokenAddressA1, nftTokenIdA).send({from: owner}); - expect(await contractA.methods.ownerOf(nftTokenIdA).call()).to.be.equal(nftTokenAddressA1); - - await contractB.methods.transfer(nftTokenAddressA1, nftTokenIdB).send({from: owner}); - expect(await contractB.methods.ownerOf(nftTokenIdB).call()).to.be.equal(nftTokenAddressA1); - }); -}); + itEth('NFT: allows an Owner to nest/unnest their token (Restricted nesting)', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); -describe('Negative Test: EVM Nesting', async() => { - itWeb3('NFT: disallows to nest token if nesting is disabled', async ({api, web3, privateKeyWrapper}) => { - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - - const {collectionId, contract} = await createNestingCollection(api, web3, owner); - await contract.methods.setCollectionNesting(false).send({from: owner}); - - // Create a token to nest into - const targetNftTokenId = await contract.methods.nextTokenId().call(); - await contract.methods.mint( - owner, - targetNftTokenId, - ).send({from: owner}); - - const targetNftTokenAddress = tokenIdToAddress(collectionId, targetNftTokenId); - - // Create a token to nest - const nftTokenId = await contract.methods.nextTokenId().call(); - await contract.methods.mint( - owner, - nftTokenId, - ).send({from: owner}); - - // Try to nest - await expect(contract.methods - .transfer(targetNftTokenAddress, nftTokenId) - .call({from: owner})).to.be.rejectedWith('UserIsNotAllowedToNest'); - }); + const {collectionId: collectionIdA, collectionAddress: collectionAddressA, contract: contractA} = await createNestingCollection(helper, owner); + const {collectionAddress: collectionAddressB, contract: contractB} = await createNestingCollection(helper, owner); + await contractA.methods.setCollectionNesting(true, [collectionAddressA, collectionAddressB]).send({from: owner}); - itWeb3('NFT: disallows a non-Owner to nest someone else\'s token', async 
({api, web3, privateKeyWrapper}) => { - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const malignant = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - - const {collectionId, contract} = await createNestingCollection(api, web3, owner); - - // Mint a token - const targetTokenId = await contract.methods.nextTokenId().call(); - await contract.methods.mint( - owner, - targetTokenId, - ).send({from: owner}); - const targetTokenAddress = tokenIdToAddress(collectionId, targetTokenId); - - // Mint a token belonging to a different account - const tokenId = await contract.methods.nextTokenId().call(); - await contract.methods.mint( - malignant, - tokenId, - ).send({from: owner}); - - // Try to nest one token in another as a non-owner account - await expect(contract.methods - .transfer(targetTokenAddress, tokenId) - .call({from: malignant})).to.be.rejectedWith('UserIsNotAllowedToNest'); - }); + // Create a token to nest into + const mintingtargetNftTokenIdResult = await contractA.methods.mint(owner).send({from: owner}); + const targetNftTokenId = mintingtargetNftTokenIdResult.events.Transfer.returnValues.tokenId; + const nftTokenAddressA1 = helper.ethAddress.fromTokenId(collectionIdA, targetNftTokenId); + + // Create a token for nesting in the same collection as the target + const mintingTokenIdAResult = await contractA.methods.mint(owner).send({from: owner}); + const nftTokenIdA = mintingTokenIdAResult.events.Transfer.returnValues.tokenId; - itWeb3('NFT: disallows a non-Owner to nest someone else\'s token (Restricted nesting)', async ({api, web3, privateKeyWrapper}) => { - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const malignant = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - - const {collectionId: collectionIdA, collectionAddress: collectionAddressA, contract: contractA} = await createNestingCollection(api, web3, owner); - const {collectionAddress: collectionAddressB, contract: contractB} = await createNestingCollection(api, web3, owner); - - await contractA.methods.setCollectionNesting(true, [collectionAddressA, collectionAddressB]).send({from: owner}); - - // Create a token in one collection - const nftTokenIdA = await contractA.methods.nextTokenId().call(); - await contractA.methods.mint( - owner, - nftTokenIdA, - ).send({from: owner}); - const nftTokenAddressA = tokenIdToAddress(collectionIdA, nftTokenIdA); - - // Create a token in another collection belonging to someone else - const nftTokenIdB = await contractB.methods.nextTokenId().call(); - await contractB.methods.mint( - malignant, - nftTokenIdB, - ).send({from: owner}); - - // Try to drag someone else's token into the other collection and nest - await expect(contractB.methods - .transfer(nftTokenAddressA, nftTokenIdB) - .call({from: malignant})).to.be.rejectedWith('UserIsNotAllowedToNest'); + // Create a token for nesting in a different collection + const mintingTokenIdBResult = await contractB.methods.mint(owner).send({from: owner}); + const nftTokenIdB = mintingTokenIdBResult.events.Transfer.returnValues.tokenId; + + // Nest + await contractA.methods.transfer(nftTokenAddressA1, nftTokenIdA).send({from: owner}); + expect(await contractA.methods.ownerOf(nftTokenIdA).call()).to.be.equal(nftTokenAddressA1); + + await contractB.methods.transfer(nftTokenAddressA1, nftTokenIdB).send({from: owner}); + expect(await contractB.methods.ownerOf(nftTokenIdB).call()).to.be.equal(nftTokenAddressA1); + }); }); - itWeb3('NFT: disallows to 
nest token in an unlisted collection', async ({api, web3, privateKeyWrapper}) => { - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - - const {collectionId: collectionIdA, collectionAddress: collectionAddressA, contract: contractA} = await createNestingCollection(api, web3, owner); - const {contract: contractB} = await createNestingCollection(api, web3, owner); - - await contractA.methods.setCollectionNesting(true, [collectionAddressA]).send({from: owner}); - - // Create a token in one collection - const nftTokenIdA = await contractA.methods.nextTokenId().call(); - await contractA.methods.mint( - owner, - nftTokenIdA, - ).send({from: owner}); - const nftTokenAddressA = tokenIdToAddress(collectionIdA, nftTokenIdA); - - // Create a token in another collection - const nftTokenIdB = await contractB.methods.nextTokenId().call(); - await contractB.methods.mint( - owner, - nftTokenIdB, - ).send({from: owner}); - - // Try to nest into a token in the other collection, disallowed in the first - await expect(contractB.methods - .transfer(nftTokenAddressA, nftTokenIdB) - .call()).to.be.rejectedWith('SourceCollectionIsNotAllowedToNest'); + describe('Negative Test: EVM Nesting', async() => { + itEth('NFT: disallows to nest token if nesting is disabled', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + + const {collectionId, contract} = await createNestingCollection(helper, owner); + await contract.methods.setCollectionNesting(false).send({from: owner}); + + // Create a token to nest into + const mintingTargetTokenIdResult = await contract.methods.mint(owner).send({from: owner}); + const targetTokenId = mintingTargetTokenIdResult.events.Transfer.returnValues.tokenId; + const targetNftTokenAddress = helper.ethAddress.fromTokenId(collectionId, targetTokenId); + + // Create a token to nest + const mintingNftTokenIdResult = await contract.methods.mint(owner).send({from: owner}); + const nftTokenId = mintingNftTokenIdResult.events.Transfer.returnValues.tokenId; + + // Try to nest + await expect(contract.methods + .transfer(targetNftTokenAddress, nftTokenId) + .call({from: owner})).to.be.rejectedWith('UserIsNotAllowedToNest'); + }); + + itEth('NFT: disallows a non-Owner to nest someone else\'s token', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const malignant = await helper.eth.createAccountWithBalance(donor); + + const {collectionId, contract} = await createNestingCollection(helper, owner); + + // Mint a token + const mintingTargetTokenIdResult = await contract.methods.mint(owner).send({from: owner}); + const targetTokenId = mintingTargetTokenIdResult.events.Transfer.returnValues.tokenId; + const targetTokenAddress = helper.ethAddress.fromTokenId(collectionId, targetTokenId); + + // Mint a token belonging to a different account + const mintingTokenIdResult = await contract.methods.mint(malignant).send({from: owner}); + const tokenId = mintingTokenIdResult.events.Transfer.returnValues.tokenId; + + // Try to nest one token in another as a non-owner account + await expect(contract.methods + .transfer(targetTokenAddress, tokenId) + .call({from: malignant})).to.be.rejectedWith('UserIsNotAllowedToNest'); + }); + + itEth('NFT: disallows a non-Owner to nest someone else\'s token (Restricted nesting)', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const malignant = await helper.eth.createAccountWithBalance(donor); + + const {collectionId: collectionIdA, 
collectionAddress: collectionAddressA, contract: contractA} = await createNestingCollection(helper, owner); + const {collectionAddress: collectionAddressB, contract: contractB} = await createNestingCollection(helper, owner); + + await contractA.methods.setCollectionNesting(true, [collectionAddressA, collectionAddressB]).send({from: owner}); + + // Create a token in one collection + const mintingTokenIdAResult = await contractA.methods.mint(owner).send({from: owner}); + const nftTokenIdA = mintingTokenIdAResult.events.Transfer.returnValues.tokenId; + const nftTokenAddressA = helper.ethAddress.fromTokenId(collectionIdA, nftTokenIdA); + + // Create a token in another collection + const mintingTokenIdBResult = await contractB.methods.mint(malignant).send({from: owner}); + const nftTokenIdB = mintingTokenIdBResult.events.Transfer.returnValues.tokenId; + + // Try to drag someone else's token into the other collection and nest + await expect(contractB.methods + .transfer(nftTokenAddressA, nftTokenIdB) + .call({from: malignant})).to.be.rejectedWith('UserIsNotAllowedToNest'); + }); + + itEth('NFT: disallows to nest token in an unlisted collection', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + + const {collectionId: collectionIdA, collectionAddress: collectionAddressA, contract: contractA} = await createNestingCollection(helper, owner); + const {contract: contractB} = await createNestingCollection(helper, owner); + + await contractA.methods.setCollectionNesting(true, [collectionAddressA]).send({from: owner}); + + // Create a token in one collection + const mintingTokenIdAResult = await contractA.methods.mint(owner).send({from: owner}); + const nftTokenIdA = mintingTokenIdAResult.events.Transfer.returnValues.tokenId; + const nftTokenAddressA = helper.ethAddress.fromTokenId(collectionIdA, nftTokenIdA); + + // Create a token in another collection + const mintingTokenIdBResult = await contractB.methods.mint(owner).send({from: owner}); + const nftTokenIdB = mintingTokenIdBResult.events.Transfer.returnValues.tokenId; + + + // Try to nest into a token in the other collection, disallowed in the first + await expect(contractB.methods + .transfer(nftTokenAddressA, nftTokenIdB) + .call()).to.be.rejectedWith('SourceCollectionIsNotAllowedToNest'); + }); }); }); diff --git a/tests/src/eth/nonFungible.test.ts b/tests/src/eth/nonFungible.test.ts index 1eda79af1f..5b74d7b448 100644 --- a/tests/src/eth/nonFungible.test.ts +++ b/tests/src/eth/nonFungible.test.ts @@ -14,97 +14,156 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . 
-import {approveExpectSuccess, burnItemExpectSuccess, createCollectionExpectSuccess, createItemExpectSuccess, transferExpectSuccess, transferFromExpectSuccess, UNIQUE} from '../util/helpers'; -import {collectionIdToAddress, createEthAccount, createEthAccountWithBalance, evmCollection, evmCollectionHelpers, GAS_ARGS, getCollectionAddressFromResult, itWeb3, normalizeEvents, recordEthFee, recordEvents, subToEth, transferBalanceToEth} from './util/helpers'; -import nonFungibleAbi from './nonFungibleAbi.json'; -import {expect} from 'chai'; -import {submitTransactionAsync} from '../substrate/substrate-api'; +import {itEth, usingEthPlaygrounds, expect, EthUniqueHelper} from './util'; +import {IKeyringPair} from '@polkadot/types/types'; +import {Contract} from 'web3-eth-contract'; + describe('NFT: Information getting', () => { - itWeb3('totalSupply', async ({api, web3, privateKeyWrapper}) => { - const collection = await createCollectionExpectSuccess({ - mode: {type: 'NFT'}, + let donor: IKeyringPair; + let alice: IKeyringPair; + + before(async function() { + await usingEthPlaygrounds(async (helper, privateKey) => { + donor = await privateKey({filename: __filename}); + [alice] = await helper.arrange.createAccounts([10n], donor); }); - const alice = privateKeyWrapper('//Alice'); - const caller = await createEthAccountWithBalance(api, web3, privateKeyWrapper); + }); + + itEth('totalSupply', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {}); + await collection.mintToken(alice); - await createItemExpectSuccess(alice, collection, 'NFT', {Substrate: alice.address}); + const caller = await helper.eth.createAccountWithBalance(donor); - const address = collectionIdToAddress(collection); - const contract = new web3.eth.Contract(nonFungibleAbi as any, address, {from: caller, ...GAS_ARGS}); + const contract = helper.ethNativeContract.collectionById(collection.collectionId, 'nft', caller); const totalSupply = await contract.methods.totalSupply().call(); expect(totalSupply).to.equal('1'); }); - itWeb3('balanceOf', async ({api, web3, privateKeyWrapper}) => { - const collection = await createCollectionExpectSuccess({ - mode: {type: 'NFT'}, - }); - const alice = privateKeyWrapper('//Alice'); + itEth('balanceOf', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {}); + const caller = await helper.eth.createAccountWithBalance(donor); - const caller = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - await createItemExpectSuccess(alice, collection, 'NFT', {Ethereum:caller}); - await createItemExpectSuccess(alice, collection, 'NFT', {Ethereum: caller}); - await createItemExpectSuccess(alice, collection, 'NFT', {Ethereum: caller}); + await collection.mintToken(alice, {Ethereum: caller}); + await collection.mintToken(alice, {Ethereum: caller}); + await collection.mintToken(alice, {Ethereum: caller}); - const address = collectionIdToAddress(collection); - const contract = new web3.eth.Contract(nonFungibleAbi as any, address, {from: caller, ...GAS_ARGS}); + const contract = helper.ethNativeContract.collectionById(collection.collectionId, 'nft', caller); const balance = await contract.methods.balanceOf(caller).call(); expect(balance).to.equal('3'); }); - itWeb3('ownerOf', async ({api, web3, privateKeyWrapper}) => { - const collection = await createCollectionExpectSuccess({ - mode: {type: 'NFT'}, - }); - const alice = privateKeyWrapper('//Alice'); + itEth('ownerOf', async ({helper}) => { + const collection = await 
helper.nft.mintCollection(alice, {}); + const caller = await helper.eth.createAccountWithBalance(donor); + + const token = await collection.mintToken(alice, {Ethereum: caller}); - const caller = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const tokenId = await createItemExpectSuccess(alice, collection, 'NFT', {Ethereum: caller}); + const contract = helper.ethNativeContract.collectionById(collection.collectionId, 'nft', caller); - const address = collectionIdToAddress(collection); - const contract = new web3.eth.Contract(nonFungibleAbi as any, address, {from: caller, ...GAS_ARGS}); - const owner = await contract.methods.ownerOf(tokenId).call(); + const owner = await contract.methods.ownerOf(token.tokenId).call(); expect(owner).to.equal(caller); }); + + itEth('name/symbol is available regardless of ERC721Metadata support', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'test', tokenPrefix: 'TEST'}); + const caller = helper.eth.createAccount(); + + const contract = helper.ethNativeContract.collectionById(collection.collectionId, 'nft', caller); + + expect(await contract.methods.name().call()).to.equal('test'); + expect(await contract.methods.symbol().call()).to.equal('TEST'); + }); +}); + +describe('Check ERC721 token URI for NFT', () => { + let donor: IKeyringPair; + + before(async function() { + await usingEthPlaygrounds(async (_helper, privateKey) => { + donor = await privateKey({filename: __filename}); + }); + }); + + async function setup(helper: EthUniqueHelper, baseUri: string, propertyKey?: string, propertyValue?: string): Promise<{contract: Contract, nextTokenId: string}> { + const owner = await helper.eth.createAccountWithBalance(donor); + const receiver = helper.eth.createAccount(); + + const {collectionAddress} = await helper.eth.createERC721MetadataCompatibleNFTCollection(owner, 'Mint collection', 'a', 'b', baseUri); + const contract = helper.ethNativeContract.collection(collectionAddress, 'nft', owner); + + const result = await contract.methods.mint(receiver).send(); + const tokenId = result.events.Transfer.returnValues.tokenId; + expect(tokenId).to.be.equal('1'); + + if (propertyKey && propertyValue) { + // Set URL or suffix + await contract.methods.setProperty(tokenId, propertyKey, Buffer.from(propertyValue)).send(); + } + + const event = result.events.Transfer; + expect(event.address).to.be.equal(collectionAddress); + expect(event.returnValues.from).to.be.equal('0x0000000000000000000000000000000000000000'); + expect(event.returnValues.to).to.be.equal(receiver); + expect(event.returnValues.tokenId).to.be.equal(tokenId); + + return {contract, nextTokenId: tokenId}; + } + + itEth('Empty tokenURI', async ({helper}) => { + const {contract, nextTokenId} = await setup(helper, ''); + expect(await contract.methods.tokenURI(nextTokenId).call()).to.be.equal(''); + }); + + itEth('TokenURI from url', async ({helper}) => { + const {contract, nextTokenId} = await setup(helper, 'BaseURI_', 'URI', 'Token URI'); + expect(await contract.methods.tokenURI(nextTokenId).call()).to.be.equal('Token URI'); + }); + + itEth('TokenURI from baseURI', async ({helper}) => { + const {contract, nextTokenId} = await setup(helper, 'BaseURI_'); + expect(await contract.methods.tokenURI(nextTokenId).call()).to.be.equal('BaseURI_'); + }); + + itEth('TokenURI from baseURI + suffix', async ({helper}) => { + const suffix = '/some/suffix'; + const {contract, nextTokenId} = await setup(helper, 'BaseURI_', 'URISuffix', suffix); + expect(await 
contract.methods.tokenURI(nextTokenId).call()).to.be.equal('BaseURI_' + suffix); + }); }); describe('NFT: Plain calls', () => { - itWeb3('Can perform mint()', async ({web3, api, privateKeyWrapper}) => { - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const helper = evmCollectionHelpers(web3, owner); - let result = await helper.methods.createNonfungibleCollection('Mint collection', '6', '6').send(); - const {collectionIdAddress, collectionId} = await getCollectionAddressFromResult(api, result); - const receiver = createEthAccount(web3); - const contract = evmCollection(web3, owner, collectionIdAddress); - const nextTokenId = await contract.methods.nextTokenId().call(); - - expect(nextTokenId).to.be.equal('1'); - result = await contract.methods.mintWithTokenURI( - receiver, - nextTokenId, - 'Test URI', - ).send(); - - const events = normalizeEvents(result.events); - const address = collectionIdToAddress(collectionId); - - expect(events).to.be.deep.equal([ - { - address, - event: 'Transfer', - args: { - from: '0x0000000000000000000000000000000000000000', - to: receiver, - tokenId: nextTokenId, - }, - }, - ]); + let donor: IKeyringPair; + let alice: IKeyringPair; + + before(async function() { + await usingEthPlaygrounds(async (helper, privateKey) => { + donor = await privateKey({filename: __filename}); + [alice] = await helper.arrange.createAccounts([10n], donor); + }); + }); + + itEth('Can perform mint()', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const receiver = helper.eth.createAccount(); + + const {collectionAddress} = await helper.eth.createERC721MetadataCompatibleNFTCollection(owner, 'Mint collection', '6', '6', ''); + const contract = helper.ethNativeContract.collection(collectionAddress, 'nft', owner); + + const result = await contract.methods.mintWithTokenURI(receiver, 'Test URI').send(); + const tokenId = result.events.Transfer.returnValues.tokenId; + expect(tokenId).to.be.equal('1'); + + const event = result.events.Transfer; + expect(event.address).to.be.equal(collectionAddress); + expect(event.returnValues.from).to.be.equal('0x0000000000000000000000000000000000000000'); + expect(event.returnValues.to).to.be.equal(receiver); - expect(await contract.methods.tokenURI(nextTokenId).call()).to.be.equal('Test URI'); + expect(await contract.methods.tokenURI(tokenId).call()).to.be.equal('Test URI'); // TODO: this wont work right now, need release 919000 first // await helper.methods.setOffchainSchema(collectionIdAddress, 'https://offchain-service.local/token-info/{id}').send(); @@ -113,169 +172,101 @@ describe('NFT: Plain calls', () => { }); //TODO: CORE-302 add eth methods - itWeb3.skip('Can perform mintBulk()', async ({web3, api, privateKeyWrapper}) => { - const collection = await createCollectionExpectSuccess({ - mode: {type: 'NFT'}, - }); - const alice = privateKeyWrapper('//Alice'); - - const caller = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const changeAdminTx = api.tx.unique.addCollectionAdmin(collection, {Ethereum: caller}); - await submitTransactionAsync(alice, changeAdminTx); - const receiver = createEthAccount(web3); + itEth.skip('Can perform mintBulk()', async ({helper}) => { + const caller = await helper.eth.createAccountWithBalance(donor); + const receiver = helper.eth.createAccount(); - const address = collectionIdToAddress(collection); - const contract = new web3.eth.Contract(nonFungibleAbi as any, address, {from: caller, ...GAS_ARGS}); + const collection = await 
helper.nft.mintCollection(alice); + await collection.addAdmin(alice, {Ethereum: caller}); + const collectionAddress = helper.ethAddress.fromCollectionId(collection.collectionId); + const contract = helper.ethNativeContract.collection(collectionAddress, 'nft', caller); { + const bulkSize = 3; const nextTokenId = await contract.methods.nextTokenId().call(); expect(nextTokenId).to.be.equal('1'); const result = await contract.methods.mintBulkWithTokenURI( receiver, - [ - [nextTokenId, 'Test URI 0'], - [+nextTokenId + 1, 'Test URI 1'], - [+nextTokenId + 2, 'Test URI 2'], - ], + Array.from({length: bulkSize}, (_, i) => ( + [+nextTokenId + i, `Test URI ${i}`] + )), ).send({from: caller}); - const events = normalizeEvents(result.events); - - expect(events).to.be.deep.equal([ - { - address, - event: 'Transfer', - args: { - from: '0x0000000000000000000000000000000000000000', - to: receiver, - tokenId: nextTokenId, - }, - }, - { - address, - event: 'Transfer', - args: { - from: '0x0000000000000000000000000000000000000000', - to: receiver, - tokenId: String(+nextTokenId + 1), - }, - }, - { - address, - event: 'Transfer', - args: { - from: '0x0000000000000000000000000000000000000000', - to: receiver, - tokenId: String(+nextTokenId + 2), - }, - }, - ]); - - expect(await contract.methods.tokenURI(nextTokenId).call()).to.be.equal('Test URI 0'); - expect(await contract.methods.tokenURI(+nextTokenId + 1).call()).to.be.equal('Test URI 1'); - expect(await contract.methods.tokenURI(+nextTokenId + 2).call()).to.be.equal('Test URI 2'); + + const events = result.events.Transfer.sort((a: any, b: any) => +a.returnValues.tokenId - b.returnValues.tokenId); + for (let i = 0; i < bulkSize; i++) { + const event = events[i]; + expect(event.address).to.equal(collectionAddress); + expect(event.returnValues.from).to.equal('0x0000000000000000000000000000000000000000'); + expect(event.returnValues.to).to.equal(receiver); + expect(event.returnValues.tokenId).to.equal(`${+nextTokenId + i}`); + + expect(await contract.methods.tokenURI(+nextTokenId + i).call()).to.be.equal(`Test URI ${i}`); + } } }); - itWeb3('Can perform burn()', async ({web3, api, privateKeyWrapper}) => { - const collection = await createCollectionExpectSuccess({ - mode: {type: 'NFT'}, - }); - const alice = privateKeyWrapper('//Alice'); - - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); + itEth('Can perform burn()', async ({helper}) => { + const caller = await helper.eth.createAccountWithBalance(donor); - const tokenId = await createItemExpectSuccess(alice, collection, 'NFT', {Ethereum: owner}); + const collection = await helper.nft.mintCollection(alice, {}); + const {tokenId} = await collection.mintToken(alice, {Ethereum: caller}); - const address = collectionIdToAddress(collection); - const contract = new web3.eth.Contract(nonFungibleAbi as any, address, {from: owner, ...GAS_ARGS}); + const collectionAddress = helper.ethAddress.fromCollectionId(collection.collectionId); + const contract = helper.ethNativeContract.collection(collectionAddress, 'nft', caller); { - const result = await contract.methods.burn(tokenId).send({from: owner}); - const events = normalizeEvents(result.events); - - expect(events).to.be.deep.equal([ - { - address, - event: 'Transfer', - args: { - from: owner, - to: '0x0000000000000000000000000000000000000000', - tokenId: tokenId.toString(), - }, - }, - ]); + const result = await contract.methods.burn(tokenId).send({from: caller}); + + const event = result.events.Transfer; + 
expect(event.address).to.be.equal(collectionAddress); + expect(event.returnValues.from).to.be.equal(caller); + expect(event.returnValues.to).to.be.equal('0x0000000000000000000000000000000000000000'); + expect(event.returnValues.tokenId).to.be.equal(`${tokenId}`); } }); - itWeb3('Can perform approve()', async ({web3, api, privateKeyWrapper}) => { - const collection = await createCollectionExpectSuccess({ - mode: {type: 'NFT'}, - }); - const alice = privateKeyWrapper('//Alice'); - - const owner = createEthAccount(web3); - await transferBalanceToEth(api, alice, owner); - - const tokenId = await createItemExpectSuccess(alice, collection, 'NFT', {Ethereum: owner}); + itEth('Can perform approve()', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const spender = helper.eth.createAccount(); - const spender = createEthAccount(web3); + const collection = await helper.nft.mintCollection(alice, {}); + const {tokenId} = await collection.mintToken(alice, {Ethereum: owner}); - const address = collectionIdToAddress(collection); - const contract = new web3.eth.Contract(nonFungibleAbi as any, address); + const collectionAddress = helper.ethAddress.fromCollectionId(collection.collectionId); + const contract = helper.ethNativeContract.collection(collectionAddress, 'nft', owner); { - const result = await contract.methods.approve(spender, tokenId).send({from: owner, ...GAS_ARGS}); - const events = normalizeEvents(result.events); - - expect(events).to.be.deep.equal([ - { - address, - event: 'Approval', - args: { - owner, - approved: spender, - tokenId: tokenId.toString(), - }, - }, - ]); + const result = await contract.methods.approve(spender, tokenId).send({from: owner}); + + const event = result.events.Approval; + expect(event.address).to.be.equal(collectionAddress); + expect(event.returnValues.owner).to.be.equal(owner); + expect(event.returnValues.approved).to.be.equal(spender); + expect(event.returnValues.tokenId).to.be.equal(`${tokenId}`); } }); - itWeb3('Can perform transferFrom()', async ({web3, api, privateKeyWrapper}) => { - const collection = await createCollectionExpectSuccess({ - mode: {type: 'NFT'}, - }); - const alice = privateKeyWrapper('//Alice'); - - const owner = createEthAccount(web3); - await transferBalanceToEth(api, alice, owner); + itEth('Can perform transferFrom()', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const spender = await helper.eth.createAccountWithBalance(donor); + const receiver = helper.eth.createAccount(); - const tokenId = await createItemExpectSuccess(alice, collection, 'NFT', {Ethereum: owner}); + const collection = await helper.nft.mintCollection(alice, {}); + const {tokenId} = await collection.mintToken(alice, {Ethereum: owner}); - const spender = createEthAccount(web3); - await transferBalanceToEth(api, alice, spender); - - const receiver = createEthAccount(web3); - - const address = collectionIdToAddress(collection); - const contract = new web3.eth.Contract(nonFungibleAbi as any, address, {from: owner, ...GAS_ARGS}); + const collectionAddress = helper.ethAddress.fromCollectionId(collection.collectionId); + const contract = helper.ethNativeContract.collection(collectionAddress, 'nft', owner); await contract.methods.approve(spender, tokenId).send({from: owner}); { const result = await contract.methods.transferFrom(owner, receiver, tokenId).send({from: spender}); - const events = normalizeEvents(result.events); - expect(events).to.be.deep.equal([ - { - address, - event: 'Transfer', - 
args: { - from: owner, - to: receiver, - tokenId: tokenId.toString(), - }, - }, - ]); + + const event = result.events.Transfer; + expect(event.address).to.be.equal(collectionAddress); + expect(event.returnValues.from).to.be.equal(owner); + expect(event.returnValues.to).to.be.equal(receiver); + expect(event.returnValues.tokenId).to.be.equal(`${tokenId}`); } { @@ -289,37 +280,24 @@ describe('NFT: Plain calls', () => { } }); - itWeb3('Can perform transfer()', async ({web3, api, privateKeyWrapper}) => { - const collection = await createCollectionExpectSuccess({ - mode: {type: 'NFT'}, - }); - const alice = privateKeyWrapper('//Alice'); + itEth('Can perform transfer()', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {}); + const owner = await helper.eth.createAccountWithBalance(donor); + const receiver = helper.eth.createAccount(); - const owner = createEthAccount(web3); - await transferBalanceToEth(api, alice, owner); + const {tokenId} = await collection.mintToken(alice, {Ethereum: owner}); - const tokenId = await createItemExpectSuccess(alice, collection, 'NFT', {Ethereum: owner}); - - const receiver = createEthAccount(web3); - await transferBalanceToEth(api, alice, receiver); - - const address = collectionIdToAddress(collection); - const contract = new web3.eth.Contract(nonFungibleAbi as any, address, {from: owner, ...GAS_ARGS}); + const collectionAddress = helper.ethAddress.fromCollectionId(collection.collectionId); + const contract = helper.ethNativeContract.collection(collectionAddress, 'nft', owner); { const result = await contract.methods.transfer(receiver, tokenId).send({from: owner}); - const events = normalizeEvents(result.events); - expect(events).to.be.deep.equal([ - { - address, - event: 'Transfer', - args: { - from: owner, - to: receiver, - tokenId: tokenId.toString(), - }, - }, - ]); + + const event = result.events.Transfer; + expect(event.address).to.be.equal(collectionAddress); + expect(event.returnValues.from).to.be.equal(owner); + expect(event.returnValues.to).to.be.equal(receiver); + expect(event.returnValues.tokenId).to.be.equal(`${tokenId}`); } { @@ -335,237 +313,249 @@ describe('NFT: Plain calls', () => { }); describe('NFT: Fees', () => { - itWeb3('approve() call fee is less than 0.2UNQ', async ({web3, api, privateKeyWrapper}) => { - const collection = await createCollectionExpectSuccess({ - mode: {type: 'NFT'}, + let donor: IKeyringPair; + let alice: IKeyringPair; + + before(async function() { + await usingEthPlaygrounds(async (helper, privateKey) => { + donor = await privateKey({filename: __filename}); + [alice] = await helper.arrange.createAccounts([10n], donor); }); - const alice = privateKeyWrapper('//Alice'); + }); - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const spender = createEthAccount(web3); + itEth('approve() call fee is less than 0.2UNQ', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const spender = helper.eth.createAccount(); - const tokenId = await createItemExpectSuccess(alice, collection, 'NFT', {Ethereum: owner}); + const collection = await helper.nft.mintCollection(alice, {}); + const {tokenId} = await collection.mintToken(alice, {Ethereum: owner}); - const address = collectionIdToAddress(collection); - const contract = new web3.eth.Contract(nonFungibleAbi as any, address, {from: owner, ...GAS_ARGS}); + const contract = helper.ethNativeContract.collectionById(collection.collectionId, 'nft', owner); - const cost = await recordEthFee(api, 
owner, () => contract.methods.approve(spender, tokenId).send({from: owner})); - expect(cost < BigInt(0.2 * Number(UNIQUE))); + const cost = await helper.eth.recordCallFee(owner, () => contract.methods.approve(spender, tokenId).send({from: owner})); + expect(cost < BigInt(0.2 * Number(helper.balance.getOneTokenNominal()))); }); - itWeb3('transferFrom() call fee is less than 0.2UNQ', async ({web3, api, privateKeyWrapper}) => { - const collection = await createCollectionExpectSuccess({ - mode: {type: 'NFT'}, - }); - const alice = privateKeyWrapper('//Alice'); - - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const spender = await createEthAccountWithBalance(api, web3, privateKeyWrapper); + itEth('transferFrom() call fee is less than 0.2UNQ', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const spender = await helper.eth.createAccountWithBalance(donor); - const tokenId = await createItemExpectSuccess(alice, collection, 'NFT', {Ethereum: owner}); + const collection = await helper.nft.mintCollection(alice, {}); + const {tokenId} = await collection.mintToken(alice, {Ethereum: owner}); - const address = collectionIdToAddress(collection); - const contract = new web3.eth.Contract(nonFungibleAbi as any, address, {from: owner, ...GAS_ARGS}); + const contract = helper.ethNativeContract.collectionById(collection.collectionId, 'nft', owner); await contract.methods.approve(spender, tokenId).send({from: owner}); - const cost = await recordEthFee(api, spender, () => contract.methods.transferFrom(owner, spender, tokenId).send({from: spender})); - expect(cost < BigInt(0.2 * Number(UNIQUE))); + const cost = await helper.eth.recordCallFee(spender, () => contract.methods.transferFrom(owner, spender, tokenId).send({from: spender})); + expect(cost < BigInt(0.2 * Number(helper.balance.getOneTokenNominal()))); }); - itWeb3('transfer() call fee is less than 0.2UNQ', async ({web3, api, privateKeyWrapper}) => { - const collection = await createCollectionExpectSuccess({ - mode: {type: 'NFT'}, - }); - const alice = privateKeyWrapper('//Alice'); - - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const receiver = createEthAccount(web3); + itEth('transfer() call fee is less than 0.2UNQ', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const receiver = helper.eth.createAccount(); - const tokenId = await createItemExpectSuccess(alice, collection, 'NFT', {Ethereum: owner}); + const collection = await helper.nft.mintCollection(alice, {}); + const {tokenId} = await collection.mintToken(alice, {Ethereum: owner}); - const address = collectionIdToAddress(collection); - const contract = new web3.eth.Contract(nonFungibleAbi as any, address, {from: owner, ...GAS_ARGS}); + const contract = helper.ethNativeContract.collectionById(collection.collectionId, 'nft', owner); - const cost = await recordEthFee(api, owner, () => contract.methods.transfer(receiver, tokenId).send({from: owner})); - expect(cost < BigInt(0.2 * Number(UNIQUE))); + const cost = await helper.eth.recordCallFee(owner, () => contract.methods.transfer(receiver, tokenId).send({from: owner})); + expect(cost < BigInt(0.2 * Number(helper.balance.getOneTokenNominal()))); }); }); describe('NFT: Substrate calls', () => { - itWeb3('Events emitted for mint()', async ({web3, privateKeyWrapper}) => { - const collection = await createCollectionExpectSuccess({ - mode: {type: 'NFT'}, + let donor: IKeyringPair; + let alice: 
IKeyringPair; + + before(async function() { + await usingEthPlaygrounds(async (helper, privateKey) => { + donor = await privateKey({filename: __filename}); + [alice] = await helper.arrange.createAccounts([20n], donor); }); - const alice = privateKeyWrapper('//Alice'); + }); - const address = collectionIdToAddress(collection); - const contract = new web3.eth.Contract(nonFungibleAbi as any, address); + itEth('Events emitted for mint()', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {}); + const collectionAddress = helper.ethAddress.fromCollectionId(collection.collectionId); + const contract = helper.ethNativeContract.collection(collectionAddress, 'nft'); - let tokenId: number; - const events = await recordEvents(contract, async () => { - tokenId = await createItemExpectSuccess(alice, collection, 'NFT'); + const events: any = []; + contract.events.allEvents((_: any, event: any) => { + events.push(event); }); - expect(events).to.be.deep.equal([ - { - address, - event: 'Transfer', - args: { - from: '0x0000000000000000000000000000000000000000', - to: subToEth(alice.address), - tokenId: tokenId!.toString(), - }, - }, - ]); + const {tokenId} = await collection.mintToken(alice); + if (events.length == 0) await helper.wait.newBlocks(1); + const event = events[0]; + + expect(event.event).to.be.equal('Transfer'); + expect(event.address).to.be.equal(collectionAddress); + expect(event.returnValues.from).to.be.equal('0x0000000000000000000000000000000000000000'); + expect(event.returnValues.to).to.be.equal(helper.address.substrateToEth(alice.address)); + expect(event.returnValues.tokenId).to.be.equal(tokenId.toString()); }); - itWeb3('Events emitted for burn()', async ({web3, privateKeyWrapper}) => { - const collection = await createCollectionExpectSuccess({ - mode: {type: 'NFT'}, - }); - const alice = privateKeyWrapper('//Alice'); + itEth('Events emitted for burn()', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {}); + const token = await collection.mintToken(alice); - const address = collectionIdToAddress(collection); - const contract = new web3.eth.Contract(nonFungibleAbi as any, address); + const collectionAddress = helper.ethAddress.fromCollectionId(collection.collectionId); + const contract = helper.ethNativeContract.collection(collectionAddress, 'nft'); - const tokenId = await createItemExpectSuccess(alice, collection, 'NFT'); - const events = await recordEvents(contract, async () => { - await burnItemExpectSuccess(alice, collection, tokenId); + const events: any = []; + contract.events.allEvents((_: any, event: any) => { + events.push(event); }); - expect(events).to.be.deep.equal([ - { - address, - event: 'Transfer', - args: { - from: subToEth(alice.address), - to: '0x0000000000000000000000000000000000000000', - tokenId: tokenId.toString(), - }, - }, - ]); - }); + await token.burn(alice); + if (events.length == 0) await helper.wait.newBlocks(1); + const event = events[0]; - itWeb3('Events emitted for approve()', async ({web3, privateKeyWrapper}) => { - const collection = await createCollectionExpectSuccess({ - mode: {type: 'NFT'}, - }); - const alice = privateKeyWrapper('//Alice'); + expect(event.event).to.be.equal('Transfer'); + expect(event.address).to.be.equal(collectionAddress); + expect(event.returnValues.from).to.be.equal(helper.address.substrateToEth(alice.address)); + expect(event.returnValues.to).to.be.equal('0x0000000000000000000000000000000000000000'); + 
expect(event.returnValues.tokenId).to.be.equal(token.tokenId.toString()); + }); - const receiver = createEthAccount(web3); + itEth('Events emitted for approve()', async ({helper}) => { + const receiver = helper.eth.createAccount(); - const tokenId = await createItemExpectSuccess(alice, collection, 'NFT'); + const collection = await helper.nft.mintCollection(alice, {}); + const token = await collection.mintToken(alice); - const address = collectionIdToAddress(collection); - const contract = new web3.eth.Contract(nonFungibleAbi as any, address); + const collectionAddress = helper.ethAddress.fromCollectionId(collection.collectionId); + const contract = helper.ethNativeContract.collection(collectionAddress, 'nft'); - const events = await recordEvents(contract, async () => { - await approveExpectSuccess(collection, tokenId, alice, {Ethereum: receiver}, 1); + const events: any = []; + contract.events.allEvents((_: any, event: any) => { + events.push(event); }); - expect(events).to.be.deep.equal([ - { - address, - event: 'Approval', - args: { - owner: subToEth(alice.address), - approved: receiver, - tokenId: tokenId.toString(), - }, - }, - ]); - }); + await token.approve(alice, {Ethereum: receiver}); + if (events.length == 0) await helper.wait.newBlocks(1); + const event = events[0]; - itWeb3('Events emitted for transferFrom()', async ({web3, privateKeyWrapper}) => { - const collection = await createCollectionExpectSuccess({ - mode: {type: 'NFT'}, - }); - const alice = privateKeyWrapper('//Alice'); - const bob = privateKeyWrapper('//Bob'); + expect(event.event).to.be.equal('Approval'); + expect(event.address).to.be.equal(collectionAddress); + expect(event.returnValues.owner).to.be.equal(helper.address.substrateToEth(alice.address)); + expect(event.returnValues.approved).to.be.equal(receiver); + expect(event.returnValues.tokenId).to.be.equal(token.tokenId.toString()); + }); - const receiver = createEthAccount(web3); + itEth('Events emitted for transferFrom()', async ({helper}) => { + const [bob] = await helper.arrange.createAccounts([10n], donor); + const receiver = helper.eth.createAccount(); - const tokenId = await createItemExpectSuccess(alice, collection, 'NFT'); - await approveExpectSuccess(collection, tokenId, alice, bob.address, 1); + const collection = await helper.nft.mintCollection(alice, {}); + const token = await collection.mintToken(alice); + await token.approve(alice, {Substrate: bob.address}); - const address = collectionIdToAddress(collection); - const contract = new web3.eth.Contract(nonFungibleAbi as any, address); + const collectionAddress = helper.ethAddress.fromCollectionId(collection.collectionId); + const contract = helper.ethNativeContract.collection(collectionAddress, 'nft'); - const events = await recordEvents(contract, async () => { - await transferFromExpectSuccess(collection, tokenId, bob, alice, {Ethereum: receiver}, 1, 'NFT'); + const events: any = []; + contract.events.allEvents((_: any, event: any) => { + events.push(event); }); - expect(events).to.be.deep.equal([ - { - address, - event: 'Transfer', - args: { - from: subToEth(alice.address), - to: receiver, - tokenId: tokenId.toString(), - }, - }, - ]); - }); + await token.transferFrom(bob, {Substrate: alice.address}, {Ethereum: receiver}); - itWeb3('Events emitted for transfer()', async ({web3, privateKeyWrapper}) => { - const collection = await createCollectionExpectSuccess({ - mode: {type: 'NFT'}, - }); - const alice = privateKeyWrapper('//Alice'); + if (events.length == 0) await helper.wait.newBlocks(1); + 
const event = events[0]; - const receiver = createEthAccount(web3); + expect(event.address).to.be.equal(collectionAddress); + expect(event.returnValues.from).to.be.equal(helper.address.substrateToEth(alice.address)); + expect(event.returnValues.to).to.be.equal(receiver); + expect(event.returnValues.tokenId).to.be.equal(`${token.tokenId}`); + }); + + itEth('Events emitted for transfer()', async ({helper}) => { + const receiver = helper.eth.createAccount(); - const tokenId = await createItemExpectSuccess(alice, collection, 'NFT'); + const collection = await helper.nft.mintCollection(alice, {}); + const token = await collection.mintToken(alice); - const address = collectionIdToAddress(collection); - const contract = new web3.eth.Contract(nonFungibleAbi as any, address); + const collectionAddress = helper.ethAddress.fromCollectionId(collection.collectionId); + const contract = helper.ethNativeContract.collection(collectionAddress, 'nft'); - const events = await recordEvents(contract, async () => { - await transferExpectSuccess(collection, tokenId, alice, {Ethereum: receiver}, 1, 'NFT'); + const events: any = []; + contract.events.allEvents((_: any, event: any) => { + events.push(event); }); - expect(events).to.be.deep.equal([ - { - address, - event: 'Transfer', - args: { - from: subToEth(alice.address), - to: receiver, - tokenId: tokenId.toString(), - }, - }, - ]); + await token.transfer(alice, {Ethereum: receiver}); + + if (events.length == 0) await helper.wait.newBlocks(1); + const event = events[0]; + + expect(event.address).to.be.equal(collectionAddress); + expect(event.returnValues.from).to.be.equal(helper.address.substrateToEth(alice.address)); + expect(event.returnValues.to).to.be.equal(receiver); + expect(event.returnValues.tokenId).to.be.equal(`${token.tokenId}`); }); }); describe('Common metadata', () => { - itWeb3('Returns collection name', async ({api, web3, privateKeyWrapper}) => { - const collection = await createCollectionExpectSuccess({ - name: 'token name', - mode: {type: 'NFT'}, + let donor: IKeyringPair; + let alice: IKeyringPair; + + before(async function() { + await usingEthPlaygrounds(async (helper, privateKey) => { + donor = await privateKey({filename: __filename}); + [alice] = await helper.arrange.createAccounts([20n], donor); }); - const caller = await createEthAccountWithBalance(api, web3, privateKeyWrapper); + }); - const address = collectionIdToAddress(collection); - const contract = new web3.eth.Contract(nonFungibleAbi as any, address, {from: caller, ...GAS_ARGS}); - const name = await contract.methods.name().call(); + itEth('Returns collection name', async ({helper}) => { + const caller = await helper.eth.createAccountWithBalance(donor); + const tokenPropertyPermissions = [{ + key: 'URI', + permission: { + mutable: true, + collectionAdmin: true, + tokenOwner: false, + }, + }]; + const collection = await helper.nft.mintCollection( + alice, + { + name: 'oh River', + tokenPrefix: 'CHANGE', + properties: [{key: 'ERC721Metadata', value: '1'}], + tokenPropertyPermissions, + }, + ); - expect(name).to.equal('token name'); + const contract = helper.ethNativeContract.collectionById(collection.collectionId, 'nft', caller); + const name = await contract.methods.name().call(); + expect(name).to.equal('oh River'); }); - itWeb3('Returns symbol name', async ({api, web3, privateKeyWrapper}) => { - const collection = await createCollectionExpectSuccess({ - tokenPrefix: 'TOK', - mode: {type: 'NFT'}, - }); - const caller = await createEthAccountWithBalance(api, web3, 
privateKeyWrapper); + itEth('Returns symbol name', async ({helper}) => { + const caller = await helper.eth.createAccountWithBalance(donor); + const tokenPropertyPermissions = [{ + key: 'URI', + permission: { + mutable: true, + collectionAdmin: true, + tokenOwner: false, + }, + }]; + const collection = await helper.nft.mintCollection( + alice, + { + name: 'oh River', + tokenPrefix: 'CHANGE', + properties: [{key: 'ERC721Metadata', value: '1'}], + tokenPropertyPermissions, + }, + ); - const address = collectionIdToAddress(collection); - const contract = new web3.eth.Contract(nonFungibleAbi as any, address, {from: caller, ...GAS_ARGS}); + const contract = helper.ethNativeContract.collectionById(collection.collectionId, 'nft', caller); const symbol = await contract.methods.symbol().call(); - - expect(symbol).to.equal('TOK'); + expect(symbol).to.equal('CHANGE'); }); -}); \ No newline at end of file +}); diff --git a/tests/src/eth/nonFungibleAbi.json b/tests/src/eth/nonFungibleAbi.json index e41e92cc0c..bebf71ba9e 100644 --- a/tests/src/eth/nonFungibleAbi.json +++ b/tests/src/eth/nonFungibleAbi.json @@ -86,24 +86,24 @@ ], "name": "addCollectionAdmin", "outputs": [], - "stateMutability": "view", + "stateMutability": "nonpayable", "type": "function" }, { "inputs": [ - { "internalType": "uint256", "name": "newAdmin", "type": "uint256" } + { "internalType": "address", "name": "user", "type": "address" } ], - "name": "addCollectionAdminSubstrate", + "name": "addToCollectionAllowList", "outputs": [], - "stateMutability": "view", + "stateMutability": "nonpayable", "type": "function" }, { "inputs": [ { "internalType": "address", "name": "user", "type": "address" } ], - "name": "addToCollectionAllowList", - "outputs": [], + "name": "allowed", + "outputs": [{ "internalType": "bool", "name": "", "type": "bool" }], "stateMutability": "view", "type": "function" }, @@ -145,6 +145,32 @@ "stateMutability": "nonpayable", "type": "function" }, + { + "inputs": [ + { "internalType": "address", "name": "newOwner", "type": "address" } + ], + "name": "changeCollectionOwner", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "collectionOwner", + "outputs": [ + { + "components": [ + { "internalType": "address", "name": "field_0", "type": "address" }, + { "internalType": "uint256", "name": "field_1", "type": "uint256" } + ], + "internalType": "struct Tuple17", + "name": "", + "type": "tuple" + } + ], + "stateMutability": "view", + "type": "function" + }, { "inputs": [{ "internalType": "string", "name": "key", "type": "string" }], "name": "collectionProperty", @@ -152,6 +178,23 @@ "stateMutability": "view", "type": "function" }, + { + "inputs": [], + "name": "collectionSponsor", + "outputs": [ + { + "components": [ + { "internalType": "address", "name": "field_0", "type": "address" }, + { "internalType": "uint256", "name": "field_1", "type": "uint256" } + ], + "internalType": "struct Tuple17", + "name": "", + "type": "tuple" + } + ], + "stateMutability": "view", + "type": "function" + }, { "inputs": [], "name": "confirmCollectionSponsorship", @@ -199,6 +242,13 @@ "stateMutability": "view", "type": "function" }, + { + "inputs": [], + "name": "hasCollectionPendingSponsor", + "outputs": [{ "internalType": "bool", "name": "", "type": "bool" }], + "stateMutability": "view", + "type": "function" + }, { "inputs": [ { "internalType": "address", "name": "owner", "type": "address" }, @@ -211,50 +261,27 @@ }, { "inputs": [ - { "internalType": "address", "name": "to", "type": 
"address" }, - { "internalType": "uint256", "name": "tokenId", "type": "uint256" } - ], - "name": "mint", - "outputs": [{ "internalType": "bool", "name": "", "type": "bool" }], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { "internalType": "address", "name": "to", "type": "address" }, - { "internalType": "uint256[]", "name": "tokenIds", "type": "uint256[]" } + { "internalType": "address", "name": "user", "type": "address" } ], - "name": "mintBulk", + "name": "isOwnerOrAdmin", "outputs": [{ "internalType": "bool", "name": "", "type": "bool" }], - "stateMutability": "nonpayable", + "stateMutability": "view", "type": "function" }, { - "inputs": [ - { "internalType": "address", "name": "to", "type": "address" }, - { - "components": [ - { "internalType": "uint256", "name": "field_0", "type": "uint256" }, - { "internalType": "string", "name": "field_1", "type": "string" } - ], - "internalType": "struct Tuple0[]", - "name": "tokens", - "type": "tuple[]" - } - ], - "name": "mintBulkWithTokenURI", - "outputs": [{ "internalType": "bool", "name": "", "type": "bool" }], + "inputs": [{ "internalType": "address", "name": "to", "type": "address" }], + "name": "mint", + "outputs": [{ "internalType": "uint256", "name": "", "type": "uint256" }], "stateMutability": "nonpayable", "type": "function" }, { "inputs": [ { "internalType": "address", "name": "to", "type": "address" }, - { "internalType": "uint256", "name": "tokenId", "type": "uint256" }, { "internalType": "string", "name": "tokenUri", "type": "string" } ], "name": "mintWithTokenURI", - "outputs": [{ "internalType": "bool", "name": "", "type": "bool" }], + "outputs": [{ "internalType": "uint256", "name": "", "type": "uint256" }], "stateMutability": "nonpayable", "type": "function" }, @@ -304,16 +331,14 @@ ], "name": "removeCollectionAdmin", "outputs": [], - "stateMutability": "view", + "stateMutability": "nonpayable", "type": "function" }, { - "inputs": [ - { "internalType": "uint256", "name": "newAdmin", "type": "uint256" } - ], - "name": "removeCollectionAdminSubstrate", + "inputs": [], + "name": "removeCollectionSponsor", "outputs": [], - "stateMutability": "view", + "stateMutability": "nonpayable", "type": "function" }, { @@ -322,7 +347,7 @@ ], "name": "removeFromCollectionAllowList", "outputs": [], - "stateMutability": "view", + "stateMutability": "nonpayable", "type": "function" }, { @@ -343,7 +368,7 @@ { "internalType": "uint256", "name": "tokenId", "type": "uint256" }, { "internalType": "bytes", "name": "data", "type": "bytes" } ], - "name": "safeTransferFromWithData", + "name": "safeTransferFrom", "outputs": [], "stateMutability": "nonpayable", "type": "function" @@ -526,5 +551,12 @@ "outputs": [], "stateMutability": "nonpayable", "type": "function" + }, + { + "inputs": [], + "name": "uniqueCollectionType", + "outputs": [{ "internalType": "string", "name": "", "type": "string" }], + "stateMutability": "view", + "type": "function" } ] diff --git a/tests/src/eth/payable.test.ts b/tests/src/eth/payable.test.ts index 61364dfc8d..5b0c014515 100644 --- a/tests/src/eth/payable.test.ts +++ b/tests/src/eth/payable.test.ts @@ -14,99 +14,256 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . 
-import {expect} from 'chai'; -import {submitTransactionAsync} from '../substrate/substrate-api'; -import {createEthAccountWithBalance, deployCollector, GAS_ARGS, itWeb3, subToEth, transferBalanceToEth} from './util/helpers'; -import {evmToAddress} from '@polkadot/util-crypto'; -import {getGenericResult, UNIQUE} from '../util/helpers'; -import {getBalanceSingle, transferBalanceExpectSuccess} from '../substrate/get-balance'; +import {IKeyringPair} from '@polkadot/types/types'; + +import {itEth, expect, usingEthPlaygrounds, EthUniqueHelper} from './util'; describe('EVM payable contracts', () => { - itWeb3('Evm contract can receive wei from eth account', async ({api, web3, privateKeyWrapper}) => { - const deployer = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const contract = await deployCollector(web3, deployer); + let donor: IKeyringPair; + + before(async function() { + await usingEthPlaygrounds(async (_, privateKey) => { + donor = await privateKey({filename: __filename}); + }); + }); + + itEth('Evm contract can receive wei from eth account', async ({helper}) => { + const deployer = await helper.eth.createAccountWithBalance(donor); + const contract = await helper.eth.deployCollectorContract(deployer); - await web3.eth.sendTransaction({from: deployer, to: contract.options.address, value: '10000', ...GAS_ARGS}); + const web3 = helper.getWeb3(); + + await web3.eth.sendTransaction({from: deployer, to: contract.options.address, value: '10000', gas: helper.eth.DEFAULT_GAS}); expect(await contract.methods.getCollected().call()).to.be.equal('10000'); }); - itWeb3('Evm contract can receive wei from substrate account', async ({api, web3, privateKeyWrapper}) => { - const deployer = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const contract = await deployCollector(web3, deployer); - const alice = privateKeyWrapper('//Alice'); + itEth('Evm contract can receive wei from substrate account', async ({helper}) => { + const deployer = await helper.eth.createAccountWithBalance(donor); + const contract = await helper.eth.deployCollectorContract(deployer); + const [alice] = await helper.arrange.createAccounts([10n], donor); + + const weiCount = '10000'; // Transaction fee/value will be payed from subToEth(sender) evm balance, // which is backed by evmToAddress(subToEth(sender)) substrate balance - await transferBalanceToEth(api, alice, subToEth(alice.address)); + await helper.eth.transferBalanceFromSubstrate(alice, helper.address.substrateToEth(alice.address), 5n); - { - const tx = api.tx.evm.call( - subToEth(alice.address), - contract.options.address, - contract.methods.giveMoney().encodeABI(), - '10000', - GAS_ARGS.gas, - await web3.eth.getGasPrice(), - null, - null, - [], - ); - const events = await submitTransactionAsync(alice, tx); - const result = getGenericResult(events); - expect(result.success).to.be.true; - } - expect(await contract.methods.getCollected().call()).to.be.equal('10000'); + await helper.eth.sendEVM(alice, contract.options.address, contract.methods.giveMoney().encodeABI(), weiCount); + + expect(await contract.methods.getCollected().call()).to.be.equal(weiCount); }); // We can't handle sending balance to backing storage of evm balance, because evmToAddress operation is irreversible - itWeb3('Wei sent directly to backing storage of evm contract balance is unaccounted', async({api, web3, privateKeyWrapper}) => { - const deployer = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const contract = await deployCollector(web3, 
deployer); - const alice = privateKeyWrapper('//Alice'); + itEth('Wei sent directly to backing storage of evm contract balance is unaccounted', async({helper}) => { + const deployer = await helper.eth.createAccountWithBalance(donor); + const contract = await helper.eth.deployCollectorContract(deployer); + const [alice] = await helper.arrange.createAccounts([10n], donor); - await transferBalanceExpectSuccess(api, alice, evmToAddress(contract.options.address), '10000'); + const weiCount = 10_000n; - expect(await contract.methods.getUnaccounted().call()).to.be.equal('10000'); + await helper.eth.transferBalanceFromSubstrate(alice, contract.options.address, weiCount, false); + + expect(await contract.methods.getUnaccounted().call()).to.be.equal(weiCount.toString()); }); - itWeb3('Balance can be retrieved from evm contract', async({api, web3, privateKeyWrapper}) => { - const FEE_BALANCE = 1000n * UNIQUE; - const CONTRACT_BALANCE = 1n * UNIQUE; + itEth('Balance can be retrieved from evm contract', async({helper}) => { + const FEE_BALANCE = 10n * helper.balance.getOneTokenNominal(); + const CONTRACT_BALANCE = 1n * helper.balance.getOneTokenNominal(); + + const deployer = await helper.eth.createAccountWithBalance(donor); + const contract = await helper.eth.deployCollectorContract(deployer); + const [alice] = await helper.arrange.createAccounts([20n], donor); - const deployer = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const contract = await deployCollector(web3, deployer); - const alice = privateKeyWrapper('//Alice'); + const web3 = helper.getWeb3(); - await web3.eth.sendTransaction({from: deployer, to: contract.options.address, value: CONTRACT_BALANCE.toString(), ...GAS_ARGS}); + await web3.eth.sendTransaction({from: deployer, to: contract.options.address, value: CONTRACT_BALANCE.toString(), gas: helper.eth.DEFAULT_GAS}); - const receiver = privateKeyWrapper(`//Receiver${Date.now()}`); + const [receiver] = await helper.arrange.createAccounts([0n], donor); // First receive balance on eth balance of bob { - const ethReceiver = subToEth(receiver.address); + const ethReceiver = helper.address.substrateToEth(receiver.address); expect(await web3.eth.getBalance(ethReceiver)).to.be.equal('0'); await contract.methods.withdraw(ethReceiver).send({from: deployer}); expect(await web3.eth.getBalance(ethReceiver)).to.be.equal(CONTRACT_BALANCE.toString()); } // Some balance is required to pay fee for evm.withdraw call - await transferBalanceExpectSuccess(api, alice, receiver.address, FEE_BALANCE.toString()); + await helper.balance.transferToSubstrate(alice, receiver.address, FEE_BALANCE); + // await transferBalanceExpectSuccess(api, alice, receiver.address, FEE_BALANCE.toString()); // Withdraw balance from eth to substrate { - const initialReceiverBalance = await getBalanceSingle(api, receiver.address); - const tx = api.tx.evm.withdraw( - subToEth(receiver.address), - CONTRACT_BALANCE.toString(), - ); - const events = await submitTransactionAsync(receiver, tx); - const result = getGenericResult(events); - expect(result.success).to.be.true; - const finalReceiverBalance = await getBalanceSingle(api, receiver.address); + const initialReceiverBalance = await helper.balance.getSubstrate(receiver.address); + await helper.executeExtrinsic(receiver, 'api.tx.evm.withdraw', [helper.address.substrateToEth(receiver.address), CONTRACT_BALANCE.toString()], true); + const finalReceiverBalance = await helper.balance.getSubstrate(receiver.address); expect(finalReceiverBalance > 
initialReceiverBalance).to.be.true; } }); }); + +describe('EVM transaction fees', () => { + let donor: IKeyringPair; + + before(async function() { + await usingEthPlaygrounds(async (_, privateKey) => { + donor = await privateKey({filename: __filename}); + }); + }); + + itEth('Fee is withdrawn from the user', async({helper}) => { + const deployer = await helper.eth.createAccountWithBalance(donor); + const caller = await helper.eth.createAccountWithBalance(donor); + const contract = await helper.eth.deployFlipper(deployer); + + const initialCallerBalance = await helper.balance.getEthereum(caller); + await contract.methods.flip().send({from: caller}); + const finalCallerBalance = await helper.balance.getEthereum(caller); + expect(finalCallerBalance < initialCallerBalance).to.be.true; + }); + + itEth('Fee for nested calls is withdrawn from the user', async({helper}) => { + const deployer = await helper.eth.createAccountWithBalance(donor); + const caller = await helper.eth.createAccountWithBalance(donor); + const contract = await deployProxyContract(helper, deployer); + + const initialCallerBalance = await helper.balance.getEthereum(caller); + const initialContractBalance = await helper.balance.getEthereum(contract.options.address); + await contract.methods.flip().send({from: caller}); + const finalCallerBalance = await helper.balance.getEthereum(caller); + const finalContractBalance = await helper.balance.getEthereum(contract.options.address); + expect(finalCallerBalance < initialCallerBalance).to.be.true; + expect(finalContractBalance == initialContractBalance).to.be.true; + }); + + itEth('Fee for nested calls to native methods is withdrawn from the user', async({helper}) => { + const CONTRACT_BALANCE = 2n * helper.balance.getOneTokenNominal(); + + const deployer = await helper.eth.createAccountWithBalance(donor); + const caller = await helper.eth.createAccountWithBalance(donor); + const contract = await deployProxyContract(helper, deployer); + + const collectionAddress = (await contract.methods.createNFTCollection().send({from: caller, value: Number(CONTRACT_BALANCE)})).events.CollectionCreated.returnValues.collection; + const initialCallerBalance = await helper.balance.getEthereum(caller); + const initialContractBalance = await helper.balance.getEthereum(contract.options.address); + await contract.methods.mintNftToken(collectionAddress).send({from: caller}); + const finalCallerBalance = await helper.balance.getEthereum(caller); + const finalContractBalance = await helper.balance.getEthereum(contract.options.address); + expect(finalCallerBalance < initialCallerBalance).to.be.true; + expect(finalContractBalance == initialContractBalance).to.be.true; + }); + + itEth('Fee for nested calls to create*Collection methods is withdrawn from the user and from the contract', async({helper}) => { + const CONTRACT_BALANCE = 2n * helper.balance.getOneTokenNominal(); + const deployer = await helper.eth.createAccountWithBalance(donor); + const caller = await helper.eth.createAccountWithBalance(donor); + const contract = await deployProxyContract(helper, deployer); + + const initialCallerBalance = await helper.balance.getEthereum(caller); + const initialContractBalance = await helper.balance.getEthereum(contract.options.address); + await contract.methods.createNFTCollection().send({from: caller, value: Number(CONTRACT_BALANCE)}); + const finalCallerBalance = await helper.balance.getEthereum(caller); + const finalContractBalance = await helper.balance.getEthereum(contract.options.address); + 
expect(finalCallerBalance < initialCallerBalance).to.be.true; + expect(finalContractBalance == initialContractBalance).to.be.true; + }); + + itEth('Negative test: call createNFTCollection with wrong fee', async({helper}) => { + const SMALL_FEE = 1n * helper.balance.getOneTokenNominal(); + const BIG_FEE = 3n * helper.balance.getOneTokenNominal(); + const caller = await helper.eth.createAccountWithBalance(donor); + const collectionHelper = helper.ethNativeContract.collectionHelpers(caller); + + await expect(collectionHelper.methods.createNFTCollection('A', 'B', 'C').call({value: Number(SMALL_FEE)})).to.be.rejectedWith('Sent amount not equals to collection creation price (2000000000000000000)'); + await expect(collectionHelper.methods.createNFTCollection('A', 'B', 'C').call({value: Number(BIG_FEE)})).to.be.rejectedWith('Sent amount not equals to collection creation price (2000000000000000000)'); + }); + + itEth('Negative test: call createRFTCollection with wrong fee', async({helper}) => { + const SMALL_FEE = 1n * helper.balance.getOneTokenNominal(); + const BIG_FEE = 3n * helper.balance.getOneTokenNominal(); + const caller = await helper.eth.createAccountWithBalance(donor); + const collectionHelper = helper.ethNativeContract.collectionHelpers(caller); + + await expect(collectionHelper.methods.createRFTCollection('A', 'B', 'C').call({value: Number(SMALL_FEE)})).to.be.rejectedWith('Sent amount not equals to collection creation price (2000000000000000000)'); + await expect(collectionHelper.methods.createRFTCollection('A', 'B', 'C').call({value: Number(BIG_FEE)})).to.be.rejectedWith('Sent amount not equals to collection creation price (2000000000000000000)'); + }); + + itEth('Get collection creation fee', async({helper}) => { + const deployer = await helper.eth.createAccountWithBalance(donor); + expect(await helper.eth.getCollectionCreationFee(deployer)).to.be.equal(String(2n * helper.balance.getOneTokenNominal())); + }); + + async function deployProxyContract(helper: EthUniqueHelper, deployer: string) { + return await helper.ethContract.deployByCode( + deployer, + 'ProxyContract', + ` + // SPDX-License-Identifier: UNLICENSED + pragma solidity ^0.8.6; + + import {CollectionHelpers} from "../api/CollectionHelpers.sol"; + import {UniqueNFT} from "../api/UniqueNFT.sol"; + + error Value(uint256 value); + + contract ProxyContract { + bool value = false; + address innerContract; + + event CollectionCreated(address collection); + event TokenMinted(uint256 tokenId); + + receive() external payable {} + + constructor() { + innerContract = address(new InnerContract()); + } + + function flip() public { + value = !value; + InnerContract(innerContract).flip(); + } + + function createNFTCollection() external payable { + address collectionHelpers = 0x6C4E9fE1AE37a41E93CEE429e8E1881aBdcbb54F; + address nftCollection = CollectionHelpers(collectionHelpers).createNFTCollection{value: msg.value}("A", "B", "C"); + emit CollectionCreated(nftCollection); + } + + function mintNftToken(address collectionAddress) external { + UniqueNFT collection = UniqueNFT(collectionAddress); + uint256 tokenId = collection.mint(msg.sender); + emit TokenMinted(tokenId); + } + + function getValue() external view returns (bool) { + return InnerContract(innerContract).getValue(); + } + } + + contract InnerContract { + bool value = false; + function flip() external { + value = !value; + } + function getValue() external view returns (bool) { + return value; + } + } + `, + [ + { + solPath: 'api/CollectionHelpers.sol', + fsPath: 
`${__dirname}/api/CollectionHelpers.sol`, + }, + { + solPath: 'api/UniqueNFT.sol', + fsPath: `${__dirname}/api/UniqueNFT.sol`, + }, + ], + ); + } +}); diff --git a/tests/src/eth/proxy/UniqueNFTProxy.sol b/tests/src/eth/proxy/UniqueNFTProxy.sol index 3ba29fd98e..aaddf07d61 100644 --- a/tests/src/eth/proxy/UniqueNFTProxy.sol +++ b/tests/src/eth/proxy/UniqueNFTProxy.sol @@ -120,20 +120,19 @@ contract UniqueNFTProxy is UniqueNFT { return proxied.mintingFinished(); } - function mint(address to, uint256 tokenId) + function mint(address to) external override - returns (bool) + returns (uint256) { - return proxied.mint(to, tokenId); + return proxied.mint(to); } function mintWithTokenURI( address to, - uint256 tokenId, string memory tokenUri - ) external override returns (bool) { - return proxied.mintWithTokenURI(to, tokenId, tokenUri); + ) external override returns (uint256) { + return proxied.mintWithTokenURI(to, tokenUri); } function finishMinting() external override returns (bool) { @@ -169,7 +168,7 @@ contract UniqueNFTProxy is UniqueNFT { return proxied.mintBulk(to, tokenIds); } - function mintBulkWithTokenURI(address to, Tuple0[] memory tokens) + function mintBulkWithTokenURI(address to, Tuple6[] memory tokens) external override returns (bool) diff --git a/tests/src/eth/proxy/fungibleProxy.test.ts b/tests/src/eth/proxy/fungibleProxy.test.ts index 9c2e2baef3..47475cacfb 100644 --- a/tests/src/eth/proxy/fungibleProxy.test.ts +++ b/tests/src/eth/proxy/fungibleProxy.test.ts @@ -14,56 +14,56 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . -import {createCollectionExpectSuccess, createFungibleItemExpectSuccess} from '../../util/helpers'; -import {collectionIdToAddress, createEthAccount, createEthAccountWithBalance, GAS_ARGS, itWeb3, normalizeEvents} from '../util/helpers'; -import fungibleAbi from '../fungibleAbi.json'; import {expect} from 'chai'; -import {ApiPromise} from '@polkadot/api'; -import Web3 from 'web3'; import {readFile} from 'fs/promises'; import {IKeyringPair} from '@polkadot/types/types'; +import {EthUniqueHelper, itEth, usingEthPlaygrounds} from '../util'; -async function proxyWrap(api: ApiPromise, web3: Web3, wrapped: any, privateKeyWrapper: (account: string) => IKeyringPair) { +async function proxyWrap(helper: EthUniqueHelper, wrapped: any, donor: IKeyringPair) { // Proxy owner has no special privilegies, we don't need to reuse them - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); + const owner = await helper.eth.createAccountWithBalance(donor); + const web3 = helper.getWeb3(); const proxyContract = new web3.eth.Contract(JSON.parse((await readFile(`${__dirname}/UniqueFungibleProxy.abi`)).toString()), undefined, { from: owner, - ...GAS_ARGS, + gas: helper.eth.DEFAULT_GAS, }); const proxy = await proxyContract.deploy({data: (await readFile(`${__dirname}/UniqueFungibleProxy.bin`)).toString(), arguments: [wrapped.options.address]}).send({from: owner}); return proxy; } describe('Fungible (Via EVM proxy): Information getting', () => { - itWeb3('totalSupply', async ({api, web3, privateKeyWrapper}) => { - const collection = await createCollectionExpectSuccess({ - name: 'token name', - mode: {type: 'Fungible', decimalPoints: 0}, + let alice: IKeyringPair; + let donor: IKeyringPair; + + before(async function() { + await usingEthPlaygrounds(async (helper, privateKey) => { + donor = await privateKey({filename: __filename}); + [alice] = await helper.arrange.createAccounts([10n], donor); }); - 
const alice = privateKeyWrapper('//Alice'); - const caller = await createEthAccountWithBalance(api, web3, privateKeyWrapper); + }); - await createFungibleItemExpectSuccess(alice, collection, {Value: 200n}, {Substrate: alice.address}); + itEth('totalSupply', async ({helper}) => { + const collection = await helper.ft.mintCollection(alice, {name: 'test', description: 'test', tokenPrefix: 'test'}, 0); + const caller = await helper.eth.createAccountWithBalance(donor); + await collection.mint(alice, 200n, {Substrate: alice.address}); - const address = collectionIdToAddress(collection); - const contract = await proxyWrap(api, web3, new web3.eth.Contract(fungibleAbi as any, address, {from: caller, ...GAS_ARGS}), privateKeyWrapper); + const address = helper.ethAddress.fromCollectionId(collection.collectionId); + const evmCollection = helper.ethNativeContract.collection(address, 'ft', caller); + const contract = await proxyWrap(helper, evmCollection, donor); const totalSupply = await contract.methods.totalSupply().call(); expect(totalSupply).to.equal('200'); }); - itWeb3('balanceOf', async ({api, web3, privateKeyWrapper}) => { - const collection = await createCollectionExpectSuccess({ - name: 'token name', - mode: {type: 'Fungible', decimalPoints: 0}, - }); - const alice = privateKeyWrapper('//Alice'); - const caller = await createEthAccountWithBalance(api, web3, privateKeyWrapper); + itEth('balanceOf', async ({helper}) => { + const collection = await helper.ft.mintCollection(alice, {name: 'test', description: 'test', tokenPrefix: 'test'}, 0); + const caller = await helper.eth.createAccountWithBalance(donor); - await createFungibleItemExpectSuccess(alice, collection, {Value: 200n}, {Ethereum: caller}); + await collection.mint(alice, 200n, {Ethereum: caller}); - const address = collectionIdToAddress(collection); - const contract = await proxyWrap(api, web3, new web3.eth.Contract(fungibleAbi as any, address, {from: caller, ...GAS_ARGS}), privateKeyWrapper); + const address = helper.ethAddress.fromCollectionId(collection.collectionId); + const evmCollection = helper.ethNativeContract.collection(address, 'ft', caller); + const contract = await proxyWrap(helper, evmCollection, donor); const balance = await contract.methods.balanceOf(caller).call(); expect(balance).to.equal('200'); @@ -71,22 +71,29 @@ describe('Fungible (Via EVM proxy): Information getting', () => { }); describe('Fungible (Via EVM proxy): Plain calls', () => { - itWeb3('Can perform approve()', async ({web3, api, privateKeyWrapper}) => { - const collection = await createCollectionExpectSuccess({ - name: 'token name', - mode: {type: 'Fungible', decimalPoints: 0}, + let alice: IKeyringPair; + let donor: IKeyringPair; + + before(async function() { + await usingEthPlaygrounds(async (helper, privateKey) => { + donor = await privateKey({filename: __filename}); + [alice] = await helper.arrange.createAccounts([10n], donor); }); - const alice = privateKeyWrapper('//Alice'); - const caller = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const spender = createEthAccount(web3); + }); - const address = collectionIdToAddress(collection); - const contract = await proxyWrap(api, web3, new web3.eth.Contract(fungibleAbi as any, address, {from: caller, ...GAS_ARGS}), privateKeyWrapper); - await createFungibleItemExpectSuccess(alice, collection, {Value: 200n}, {Ethereum: contract.options.address}); + itEth('Can perform approve()', async ({helper}) => { + const collection = await helper.ft.mintCollection(alice, {name: 'test', description: 
'test', tokenPrefix: 'test'}, 0); + const caller = await helper.eth.createAccountWithBalance(donor); + const spender = helper.eth.createAccount(); + + const address = helper.ethAddress.fromCollectionId(collection.collectionId); + const evmCollection = helper.ethNativeContract.collection(address, 'ft', caller); + const contract = await proxyWrap(helper, evmCollection, donor); + await collection.mint(alice, 200n, {Ethereum: contract.options.address}); { const result = await contract.methods.approve(spender, 100).send({from: caller}); - const events = normalizeEvents(result.events); + const events = helper.eth.normalizeEvents(result.events); expect(events).to.be.deep.equal([ { @@ -107,28 +114,23 @@ describe('Fungible (Via EVM proxy): Plain calls', () => { } }); - itWeb3('Can perform transferFrom()', async ({web3, api, privateKeyWrapper}) => { - const collection = await createCollectionExpectSuccess({ - name: 'token name', - mode: {type: 'Fungible', decimalPoints: 0}, - }); - const alice = privateKeyWrapper('//Alice'); - const caller = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - - await createFungibleItemExpectSuccess(alice, collection, {Value: 200n}, {Ethereum: owner}); + itEth('Can perform transferFrom()', async ({helper}) => { + const collection = await helper.ft.mintCollection(alice, {name: 'test', description: 'test', tokenPrefix: 'test'}, 0); + const caller = await helper.eth.createAccountWithBalance(donor); + const owner = await helper.eth.createAccountWithBalance(donor); - const receiver = createEthAccount(web3); + await collection.mint(alice, 200n, {Ethereum: owner}); + const receiver = helper.eth.createAccount(); - const address = collectionIdToAddress(collection); - const evmCollection = new web3.eth.Contract(fungibleAbi as any, address, {from: caller, ...GAS_ARGS}); - const contract = await proxyWrap(api, web3, evmCollection, privateKeyWrapper); + const address = helper.ethAddress.fromCollectionId(collection.collectionId); + const evmCollection = helper.ethNativeContract.collection(address, 'ft', caller); + const contract = await proxyWrap(helper, evmCollection, donor); await evmCollection.methods.approve(contract.options.address, 100).send({from: owner}); { const result = await contract.methods.transferFrom(owner, receiver, 49).send({from: caller}); - const events = normalizeEvents(result.events); + const events = helper.eth.normalizeEvents(result.events); expect(events).to.be.deep.equal([ { address, @@ -162,22 +164,19 @@ describe('Fungible (Via EVM proxy): Plain calls', () => { } }); - itWeb3('Can perform transfer()', async ({web3, api, privateKeyWrapper}) => { - const collection = await createCollectionExpectSuccess({ - name: 'token name', - mode: {type: 'Fungible', decimalPoints: 0}, - }); - const alice = privateKeyWrapper('//Alice'); - const caller = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const receiver = await createEthAccountWithBalance(api, web3, privateKeyWrapper); + itEth('Can perform transfer()', async ({helper}) => { + const collection = await helper.ft.mintCollection(alice, {name: 'test', description: 'test', tokenPrefix: 'test'}, 0); + const caller = await helper.eth.createAccountWithBalance(donor); + const receiver = await helper.eth.createAccountWithBalance(donor); - const address = collectionIdToAddress(collection); - const contract = await proxyWrap(api, web3, new web3.eth.Contract(fungibleAbi as any, address, {from: caller, 
...GAS_ARGS}), privateKeyWrapper); - await createFungibleItemExpectSuccess(alice, collection, {Value: 200n}, {Ethereum: contract.options.address}); + const address = helper.ethAddress.fromCollectionId(collection.collectionId); + const evmCollection = helper.ethNativeContract.collection(address, 'ft', caller); + const contract = await proxyWrap(helper, evmCollection, donor); + await collection.mint(alice, 200n, {Ethereum: contract.options.address}); { const result = await contract.methods.transfer(receiver, 50).send({from: caller}); - const events = normalizeEvents(result.events); + const events = helper.eth.normalizeEvents(result.events); expect(events).to.be.deep.equal([ { address, diff --git a/tests/src/eth/proxy/nonFungibleProxy.test.ts b/tests/src/eth/proxy/nonFungibleProxy.test.ts index 428e9a76de..dc05394088 100644 --- a/tests/src/eth/proxy/nonFungibleProxy.test.ts +++ b/tests/src/eth/proxy/nonFungibleProxy.test.ts @@ -14,73 +14,74 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . -import {createCollectionExpectSuccess, createItemExpectSuccess} from '../../util/helpers'; -import {collectionIdToAddress, createEthAccount, createEthAccountWithBalance, evmCollection, evmCollectionHelpers, GAS_ARGS, getCollectionAddressFromResult, itWeb3, normalizeEvents} from '../util/helpers'; -import nonFungibleAbi from '../nonFungibleAbi.json'; -import {expect} from 'chai'; -import {submitTransactionAsync} from '../../substrate/substrate-api'; -import Web3 from 'web3'; import {readFile} from 'fs/promises'; -import {ApiPromise} from '@polkadot/api'; import {IKeyringPair} from '@polkadot/types/types'; +import {EthUniqueHelper, itEth, usingEthPlaygrounds, expect} from '../util'; -async function proxyWrap(api: ApiPromise, web3: Web3, wrapped: any, privateKeyWrapper: (account: string) => IKeyringPair) { + +async function proxyWrap(helper: EthUniqueHelper, wrapped: any, donor: IKeyringPair) { // Proxy owner has no special privilegies, we don't need to reuse them - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); + const owner = await helper.eth.createAccountWithBalance(donor); + const web3 = helper.getWeb3(); const proxyContract = new web3.eth.Contract(JSON.parse((await readFile(`${__dirname}/UniqueNFTProxy.abi`)).toString()), undefined, { from: owner, - ...GAS_ARGS, + gas: helper.eth.DEFAULT_GAS, }); const proxy = await proxyContract.deploy({data: (await readFile(`${__dirname}/UniqueNFTProxy.bin`)).toString(), arguments: [wrapped.options.address]}).send({from: owner}); return proxy; } describe('NFT (Via EVM proxy): Information getting', () => { - itWeb3('totalSupply', async ({api, web3, privateKeyWrapper}) => { - const collection = await createCollectionExpectSuccess({ - mode: {type: 'NFT'}, + let alice: IKeyringPair; + let donor: IKeyringPair; + + before(async function() { + await usingEthPlaygrounds(async (helper, privateKey) => { + donor = await privateKey({filename: __filename}); + [alice] = await helper.arrange.createAccounts([10n], donor); }); - const alice = privateKeyWrapper('//Alice'); - const caller = await createEthAccountWithBalance(api, web3, privateKeyWrapper); + }); - await createItemExpectSuccess(alice, collection, 'NFT', {Substrate: alice.address}); + itEth('totalSupply', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'test', description: 'test', tokenPrefix: 'test'}); + const caller = await helper.eth.createAccountWithBalance(donor); + await 
collection.mintToken(alice, {Substrate: alice.address}); - const address = collectionIdToAddress(collection); - const contract = await proxyWrap(api, web3, new web3.eth.Contract(nonFungibleAbi as any, address, {from: caller, ...GAS_ARGS}), privateKeyWrapper); + const address = helper.ethAddress.fromCollectionId(collection.collectionId); + const evmCollection = helper.ethNativeContract.collection(address, 'nft', caller); + const contract = await proxyWrap(helper, evmCollection, donor); const totalSupply = await contract.methods.totalSupply().call(); expect(totalSupply).to.equal('1'); }); - itWeb3('balanceOf', async ({api, web3, privateKeyWrapper}) => { - const collection = await createCollectionExpectSuccess({ - mode: {type: 'NFT'}, - }); - const alice = privateKeyWrapper('//Alice'); + itEth('balanceOf', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'test', description: 'test', tokenPrefix: 'test'}); - const caller = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - await createItemExpectSuccess(alice, collection, 'NFT', {Ethereum: caller}); - await createItemExpectSuccess(alice, collection, 'NFT', {Ethereum: caller}); - await createItemExpectSuccess(alice, collection, 'NFT', {Ethereum: caller}); + const caller = await helper.eth.createAccountWithBalance(donor); + await collection.mintMultipleTokens(alice, [ + {owner: {Ethereum: caller}}, + {owner: {Ethereum: caller}}, + {owner: {Ethereum: caller}}, + ]); - const address = collectionIdToAddress(collection); - const contract = await proxyWrap(api, web3, new web3.eth.Contract(nonFungibleAbi as any, address, {from: caller, ...GAS_ARGS}), privateKeyWrapper); + const address = helper.ethAddress.fromCollectionId(collection.collectionId); + const evmCollection = helper.ethNativeContract.collection(address, 'nft', caller); + const contract = await proxyWrap(helper, evmCollection, donor); const balance = await contract.methods.balanceOf(caller).call(); expect(balance).to.equal('3'); }); - itWeb3('ownerOf', async ({api, web3, privateKeyWrapper}) => { - const collection = await createCollectionExpectSuccess({ - mode: {type: 'NFT'}, - }); - const alice = privateKeyWrapper('//Alice'); + itEth('ownerOf', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'test', description: 'test', tokenPrefix: 'test'}); - const caller = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const tokenId = await createItemExpectSuccess(alice, collection, 'NFT', {Ethereum: caller}); + const caller = await helper.eth.createAccountWithBalance(donor); + const {tokenId} = await collection.mintToken(alice, {Ethereum: caller}); - const address = collectionIdToAddress(collection); - const contract = await proxyWrap(api, web3, new web3.eth.Contract(nonFungibleAbi as any, address, {from: caller, ...GAS_ARGS}), privateKeyWrapper); + const address = helper.ethAddress.fromCollectionId(collection.collectionId); + const evmCollection = helper.ethNativeContract.collection(address, 'nft', caller); + const contract = await proxyWrap(helper, evmCollection, donor); const owner = await contract.methods.ownerOf(tokenId).call(); expect(owner).to.equal(caller); @@ -88,61 +89,63 @@ describe('NFT (Via EVM proxy): Information getting', () => { }); describe('NFT (Via EVM proxy): Plain calls', () => { - itWeb3('Can perform mint()', async ({web3, api, privateKeyWrapper}) => { - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const collectionHelper = 
evmCollectionHelpers(web3, owner); - const result = await collectionHelper.methods - .createNonfungibleCollection('A', 'A', 'A') - .send(); - const {collectionIdAddress} = await getCollectionAddressFromResult(api, result); - const caller = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const receiver = createEthAccount(web3); - const collectionEvmOwned = evmCollection(web3, owner, collectionIdAddress); - const collectionEvm = evmCollection(web3, caller, collectionIdAddress); - const contract = await proxyWrap(api, web3, collectionEvm, privateKeyWrapper); + let alice: IKeyringPair; + let donor: IKeyringPair; + + before(async function() { + await usingEthPlaygrounds(async (helper, privateKey) => { + donor = await privateKey({filename: __filename}); + [alice] = await helper.arrange.createAccounts([10n], donor); + }); + }); + + itEth('Can perform mint()', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const {collectionAddress} = await helper.eth.createERC721MetadataCompatibleNFTCollection(owner, 'A', 'A', 'A', ''); + const caller = await helper.eth.createAccountWithBalance(donor); + const receiver = helper.eth.createAccount(); + + const collectionEvmOwned = helper.ethNativeContract.collection(collectionAddress, 'nft', owner); + const collectionEvm = helper.ethNativeContract.collection(collectionAddress, 'nft', caller); + const contract = await proxyWrap(helper, collectionEvm, donor); await collectionEvmOwned.methods.addCollectionAdmin(contract.options.address).send(); { const nextTokenId = await contract.methods.nextTokenId().call(); - expect(nextTokenId).to.be.equal('1'); - const result = await contract.methods.mintWithTokenURI( - receiver, - nextTokenId, - 'Test URI', - ).send({from: caller}); - const events = normalizeEvents(result.events); + const result = await contract.methods.mintWithTokenURI(receiver, nextTokenId, 'Test URI').send({from: caller}); + const tokenId = result.events.Transfer.returnValues.tokenId; + expect(tokenId).to.be.equal('1'); + + const events = helper.eth.normalizeEvents(result.events); events[0].address = events[0].address.toLocaleLowerCase(); expect(events).to.be.deep.equal([ { - address: collectionIdAddress.toLocaleLowerCase(), + address: collectionAddress.toLocaleLowerCase(), event: 'Transfer', args: { from: '0x0000000000000000000000000000000000000000', to: receiver, - tokenId: nextTokenId, + tokenId, }, }, ]); - expect(await contract.methods.tokenURI(nextTokenId).call()).to.be.equal('Test URI'); + expect(await contract.methods.tokenURI(tokenId).call()).to.be.equal('Test URI'); } }); - + //TODO: CORE-302 add eth methods - itWeb3.skip('Can perform mintBulk()', async ({web3, api, privateKeyWrapper}) => { - const collection = await createCollectionExpectSuccess({ - mode: {type: 'NFT'}, - }); - const alice = privateKeyWrapper('//Alice'); + itEth.skip('Can perform mintBulk()', async ({helper}) => { + const collection = await helper.nft.mintCollection(donor, {name: 'New', description: 'New collection', tokenPrefix: 'NEW'}); - const caller = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const receiver = createEthAccount(web3); + const caller = await helper.eth.createAccountWithBalance(donor, 30n); + const receiver = helper.eth.createAccount(); - const address = collectionIdToAddress(collection); - const contract = await proxyWrap(api, web3, new web3.eth.Contract(nonFungibleAbi as any, address, {from: caller, ...GAS_ARGS}), privateKeyWrapper); - const changeAdminTx = 
api.tx.unique.addCollectionAdmin(collection, {Ethereum: contract.options.address}); - await submitTransactionAsync(alice, changeAdminTx); + const address = helper.ethAddress.fromCollectionId(collection.collectionId); + const evmCollection = helper.ethNativeContract.collection(address, 'nft', caller); + const contract = await proxyWrap(helper, evmCollection, donor); + await collection.addAdmin(donor, {Ethereum: contract.options.address}); { const nextTokenId = await contract.methods.nextTokenId().call(); @@ -155,7 +158,7 @@ describe('NFT (Via EVM proxy): Plain calls', () => { [+nextTokenId + 2, 'Test URI 2'], ], ).send({from: caller}); - const events = normalizeEvents(result.events); + const events = helper.eth.normalizeEvents(result.events); expect(events).to.be.deep.equal([ { @@ -193,23 +196,19 @@ describe('NFT (Via EVM proxy): Plain calls', () => { } }); - itWeb3('Can perform burn()', async ({web3, api, privateKeyWrapper}) => { - const collection = await createCollectionExpectSuccess({ - mode: {type: 'NFT'}, - }); - const alice = privateKeyWrapper('//Alice'); - const caller = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - - const address = collectionIdToAddress(collection); - const contract = await proxyWrap(api, web3, new web3.eth.Contract(nonFungibleAbi as any, address, {from: caller, ...GAS_ARGS}), privateKeyWrapper); - const tokenId = await createItemExpectSuccess(alice, collection, 'NFT', {Ethereum: contract.options.address}); + itEth('Can perform burn()', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'test', description: 'test', tokenPrefix: 'test'}); + const caller = await helper.eth.createAccountWithBalance(donor); - const changeAdminTx = api.tx.unique.addCollectionAdmin(collection, {Ethereum: contract.options.address}); - await submitTransactionAsync(alice, changeAdminTx); + const address = helper.ethAddress.fromCollectionId(collection.collectionId); + const evmCollection = helper.ethNativeContract.collection(address, 'nft', caller); + const contract = await proxyWrap(helper, evmCollection, donor); + const {tokenId} = await collection.mintToken(alice, {Ethereum: contract.options.address}); + await collection.addAdmin(alice, {Ethereum: contract.options.address}); { const result = await contract.methods.burn(tokenId).send({from: caller}); - const events = normalizeEvents(result.events); + const events = helper.eth.normalizeEvents(result.events); expect(events).to.be.deep.equal([ { @@ -225,21 +224,19 @@ describe('NFT (Via EVM proxy): Plain calls', () => { } }); - itWeb3('Can perform approve()', async ({web3, api, privateKeyWrapper}) => { - const collection = await createCollectionExpectSuccess({ - mode: {type: 'NFT'}, - }); - const alice = privateKeyWrapper('//Alice'); - const caller = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const spender = createEthAccount(web3); + itEth('Can perform approve()', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'test', description: 'test', tokenPrefix: 'test'}); + const caller = await helper.eth.createAccountWithBalance(donor); + const spender = helper.eth.createAccount(); - const address = collectionIdToAddress(collection); - const contract = await proxyWrap(api, web3, new web3.eth.Contract(nonFungibleAbi as any, address), privateKeyWrapper); - const tokenId = await createItemExpectSuccess(alice, collection, 'NFT', {Ethereum: contract.options.address}); + const address = 
helper.ethAddress.fromCollectionId(collection.collectionId); + const evmCollection = helper.ethNativeContract.collection(address, 'nft', caller); + const contract = await proxyWrap(helper, evmCollection, donor); + const {tokenId} = await collection.mintToken(alice, {Ethereum: contract.options.address}); { - const result = await contract.methods.approve(spender, tokenId).send({from: caller, ...GAS_ARGS}); - const events = normalizeEvents(result.events); + const result = await contract.methods.approve(spender, tokenId).send({from: caller, gas: helper.eth.DEFAULT_GAS}); + const events = helper.eth.normalizeEvents(result.events); expect(events).to.be.deep.equal([ { @@ -255,26 +252,23 @@ describe('NFT (Via EVM proxy): Plain calls', () => { } }); - itWeb3('Can perform transferFrom()', async ({web3, api, privateKeyWrapper}) => { - const collection = await createCollectionExpectSuccess({ - mode: {type: 'NFT'}, - }); - const alice = privateKeyWrapper('//Alice'); - const caller = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); + itEth('Can perform transferFrom()', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'test', description: 'test', tokenPrefix: 'test'}); + const caller = await helper.eth.createAccountWithBalance(donor); + const owner = await helper.eth.createAccountWithBalance(donor); - const receiver = createEthAccount(web3); + const receiver = helper.eth.createAccount(); - const address = collectionIdToAddress(collection); - const evmCollection = new web3.eth.Contract(nonFungibleAbi as any, address, {from: caller, ...GAS_ARGS}); - const contract = await proxyWrap(api, web3, evmCollection, privateKeyWrapper); - const tokenId = await createItemExpectSuccess(alice, collection, 'NFT', {Ethereum: owner}); + const address = helper.ethAddress.fromCollectionId(collection.collectionId); + const evmCollection = helper.ethNativeContract.collection(address, 'nft', caller); + const contract = await proxyWrap(helper, evmCollection, donor); + const {tokenId} = await collection.mintToken(alice, {Ethereum: owner}); await evmCollection.methods.approve(contract.options.address, tokenId).send({from: owner}); { const result = await contract.methods.transferFrom(owner, receiver, tokenId).send({from: caller}); - const events = normalizeEvents(result.events); + const events = helper.eth.normalizeEvents(result.events); expect(events).to.be.deep.equal([ { address, @@ -299,21 +293,19 @@ describe('NFT (Via EVM proxy): Plain calls', () => { } }); - itWeb3('Can perform transfer()', async ({web3, api, privateKeyWrapper}) => { - const collection = await createCollectionExpectSuccess({ - mode: {type: 'NFT'}, - }); - const alice = privateKeyWrapper('//Alice'); - const caller = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const receiver = createEthAccount(web3); + itEth('Can perform transfer()', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'test', description: 'test', tokenPrefix: 'test'}); + const caller = await helper.eth.createAccountWithBalance(donor); + const receiver = helper.eth.createAccount(); - const address = collectionIdToAddress(collection); - const contract = await proxyWrap(api, web3, new web3.eth.Contract(nonFungibleAbi as any, address, {from: caller, ...GAS_ARGS}), privateKeyWrapper); - const tokenId = await createItemExpectSuccess(alice, collection, 'NFT', {Ethereum: contract.options.address}); + const 
address = helper.ethAddress.fromCollectionId(collection.collectionId); + const evmCollection = helper.ethNativeContract.collection(address, 'nft', caller); + const contract = await proxyWrap(helper, evmCollection, donor); + const {tokenId} = await collection.mintToken(alice, {Ethereum: contract.options.address}); { const result = await contract.methods.transfer(receiver, tokenId).send({from: caller}); - const events = normalizeEvents(result.events); + const events = helper.eth.normalizeEvents(result.events); expect(events).to.be.deep.equal([ { address, diff --git a/tests/src/eth/proxyContract.test.ts b/tests/src/eth/proxyContract.test.ts new file mode 100644 index 0000000000..06ae24c0e7 --- /dev/null +++ b/tests/src/eth/proxyContract.test.ts @@ -0,0 +1,152 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +import {IKeyringPair} from '@polkadot/types/types'; + +import {itEth, expect, usingEthPlaygrounds, EthUniqueHelper} from './util'; + +describe('EVM payable contracts', () => { + let donor: IKeyringPair; + + before(async function() { + await usingEthPlaygrounds(async (_, privateKey) => { + donor = await privateKey({filename: __filename}); + }); + }); + + itEth('Update proxy contract', async({helper}) => { + const deployer = await helper.eth.createAccountWithBalance(donor); + const caller = await helper.eth.createAccountWithBalance(donor); + const proxyContract = await deployProxyContract(helper, deployer); + + const realContractV1 = await deployRealContractV1(helper, deployer); + const realContractV1proxy = new helper.web3!.eth.Contract(realContractV1.options.jsonInterface, proxyContract.options.address, {from: caller, gas: helper.eth.DEFAULT_GAS}); + await proxyContract.methods.updateVersion(realContractV1.options.address).send(); + + await realContractV1proxy.methods.flip().send(); + await realContractV1proxy.methods.flip().send(); + await realContractV1proxy.methods.flip().send(); + const value1 = await realContractV1proxy.methods.getValue().call(); + const flipCount1 = await realContractV1proxy.methods.getFlipCount().call(); + expect(flipCount1).to.be.equal('3'); + expect(value1).to.be.equal(true); + + const realContractV2 = await deployRealContractV2(helper, deployer); + const realContractV2proxy = new helper.web3!.eth.Contract(realContractV2.options.jsonInterface, proxyContract.options.address, {from: caller, gas: helper.eth.DEFAULT_GAS}); + await proxyContract.methods.updateVersion(realContractV2.options.address).send(); + + await realContractV2proxy.methods.flip().send(); + await realContractV2proxy.methods.flip().send(); + await realContractV2proxy.methods.setStep(5).send(); + await realContractV2proxy.methods.increaseFlipCount().send(); + const value2 = await realContractV2proxy.methods.getValue().call(); + const flipCount2 = await realContractV2proxy.methods.getFlipCount().call(); + 
expect(value2).to.be.equal(true); + expect(flipCount2).to.be.equal('6'); + }); + + async function deployProxyContract(helper: EthUniqueHelper, deployer: string) { + return await helper.ethContract.deployByCode(deployer, 'ProxyContract', ` + // SPDX-License-Identifier: UNLICENSED + pragma solidity ^0.8.6; + + contract ProxyContract { + event NewEvent(uint data); + receive() external payable {} + bytes32 private constant implementationSlot = bytes32(uint256(keccak256('eip1967.proxy.implementation')) - 1); + constructor() {} + function updateVersion(address newContractAddress) external { + bytes32 slot = implementationSlot; + assembly { + sstore(slot, newContractAddress) + } + } + fallback() external { + bytes32 slot = implementationSlot; + assembly { + let ptr := mload(0x40) + let contractAddress := sload(slot) + + calldatacopy(ptr, 0, calldatasize()) + + let result := delegatecall(gas(), contractAddress, ptr, calldatasize(), 0, 0) + let size := returndatasize() + + returndatacopy(ptr, 0, size) + + switch result + case 0 { revert(ptr, size) } + default { return(ptr, size) } + } + } + } + + interface RealContract { + function flip() external; + function getValue() external view returns (bool); + function getFlipCount() external view returns (uint); + }`); + } + + async function deployRealContractV1(helper: EthUniqueHelper, deployer: string) { + return await helper.ethContract.deployByCode(deployer, 'RealContractV1', ` + // SPDX-License-Identifier: UNLICENSED + pragma solidity ^0.8.6; + + contract RealContractV1 { + bool value = false; + uint flipCount = 0; + function flip() external { + value = !value; + flipCount++; + } + function getValue() external view returns (bool) { + return value; + } + function getFlipCount() external view returns (uint) { + return flipCount; + } + }`); + } + + async function deployRealContractV2(helper: EthUniqueHelper, deployer: string) { + return await helper.ethContract.deployByCode(deployer, 'RealContractV2', ` + // SPDX-License-Identifier: UNLICENSED + pragma solidity ^0.8.6; + + contract RealContractV2 { + bool value = false; + uint flipCount = 10; + uint step = 1; + function flip() external { + value = !value; + flipCount--; + } + function setStep(uint value) external { + step = value; + } + function increaseFlipCount() external { + flipCount = flipCount + step; + } + function getValue() external view returns (bool) { + return value; + } + function getFlipCount() external view returns (uint) { + return flipCount; + } + }`); + } +}); \ No newline at end of file diff --git a/tests/src/eth/reFungible.test.ts b/tests/src/eth/reFungible.test.ts new file mode 100644 index 0000000000..0e6ad142e3 --- /dev/null +++ b/tests/src/eth/reFungible.test.ts @@ -0,0 +1,420 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . 
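Editor's note on the ProxyContract added in tests/src/eth/proxyContract.test.ts above: it keeps the implementation address in the EIP-1967 implementation slot (keccak256('eip1967.proxy.implementation') - 1) and forwards every other call through delegatecall, so the flip/flipCount state always lives in the proxy's own storage regardless of which RealContract version is active. A minimal TypeScript sketch, assuming only web3.js (already a dependency of these tests), of deriving the same slot off-chain, for example to double-check what updateVersion() wrote using web3.eth.getStorageAt; the names below are illustrative and not part of the patch:

import Web3 from 'web3';

// Derive the EIP-1967 implementation slot that ProxyContract declares in Solidity.
const web3 = new Web3();
const implementationSlot = '0x' + (BigInt(web3.utils.keccak256('eip1967.proxy.implementation')!) - 1n)
  .toString(16)
  .padStart(64, '0');
// implementationSlot === '0x360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc'
// Reading the currently active implementation from a deployed proxy would then be:
//   const implementation = await web3.eth.getStorageAt(proxyContract.options.address, implementationSlot);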
+ +import {Pallets, requirePalletsOrSkip} from '../util'; +import {expect, itEth, usingEthPlaygrounds} from './util'; +import {IKeyringPair} from '@polkadot/types/types'; + +describe('Refungible: Information getting', () => { + let donor: IKeyringPair; + + before(async function() { + await usingEthPlaygrounds(async (helper, privateKey) => { + requirePalletsOrSkip(this, helper, [Pallets.ReFungible]); + + donor = await privateKey({filename: __filename}); + }); + }); + + itEth('totalSupply', async ({helper}) => { + const caller = await helper.eth.createAccountWithBalance(donor); + const {collectionAddress} = await helper.eth.createRFTCollection(caller, 'TotalSupply', '6', '6'); + const contract = helper.ethNativeContract.collection(collectionAddress, 'rft', caller); + + await contract.methods.mint(caller).send(); + + const totalSupply = await contract.methods.totalSupply().call(); + expect(totalSupply).to.equal('1'); + }); + + itEth('balanceOf', async ({helper}) => { + const caller = await helper.eth.createAccountWithBalance(donor); + const {collectionAddress} = await helper.eth.createRFTCollection(caller, 'BalanceOf', '6', '6'); + const contract = helper.ethNativeContract.collection(collectionAddress, 'rft', caller); + + await contract.methods.mint(caller).send(); + await contract.methods.mint(caller).send(); + await contract.methods.mint(caller).send(); + + const balance = await contract.methods.balanceOf(caller).call(); + expect(balance).to.equal('3'); + }); + + itEth('ownerOf', async ({helper}) => { + const caller = await helper.eth.createAccountWithBalance(donor); + const {collectionAddress} = await helper.eth.createRFTCollection(caller, 'OwnerOf', '6', '6'); + const contract = helper.ethNativeContract.collection(collectionAddress, 'rft', caller); + + const result = await contract.methods.mint(caller).send(); + const tokenId = result.events.Transfer.returnValues.tokenId; + + const owner = await contract.methods.ownerOf(tokenId).call(); + expect(owner).to.equal(caller); + }); + + itEth('ownerOf after burn', async ({helper}) => { + const caller = await helper.eth.createAccountWithBalance(donor); + const receiver = helper.eth.createAccount(); + const {collectionId, collectionAddress} = await helper.eth.createRFTCollection(caller, 'OwnerOf-AfterBurn', '6', '6'); + const contract = helper.ethNativeContract.collection(collectionAddress, 'rft', caller); + + const result = await contract.methods.mint(caller).send(); + const tokenId = result.events.Transfer.returnValues.tokenId; + const tokenContract = helper.ethNativeContract.rftTokenById(collectionId, tokenId, caller); + + await tokenContract.methods.repartition(2).send(); + await tokenContract.methods.transfer(receiver, 1).send(); + + await tokenContract.methods.burnFrom(caller, 1).send(); + + const owner = await contract.methods.ownerOf(tokenId).call(); + expect(owner).to.equal(receiver); + }); + + itEth('ownerOf for partial ownership', async ({helper}) => { + const caller = await helper.eth.createAccountWithBalance(donor); + const receiver = helper.eth.createAccount(); + const {collectionId, collectionAddress} = await helper.eth.createRFTCollection(caller, 'Partial-OwnerOf', '6', '6'); + const contract = helper.ethNativeContract.collection(collectionAddress, 'rft', caller); + + const result = await contract.methods.mint(caller).send(); + const tokenId = result.events.Transfer.returnValues.tokenId; + const tokenContract = helper.ethNativeContract.rftTokenById(collectionId, tokenId, caller); + + await 
tokenContract.methods.repartition(2).send(); + await tokenContract.methods.transfer(receiver, 1).send(); + + const owner = await contract.methods.ownerOf(tokenId).call(); + expect(owner).to.equal('0xFFfFfFffFFfffFFfFFfFFFFFffFFFffffFfFFFfF'); + }); +}); + +describe('Refungible: Plain calls', () => { + let donor: IKeyringPair; + + before(async function() { + await usingEthPlaygrounds(async (helper, privateKey) => { + requirePalletsOrSkip(this, helper, [Pallets.ReFungible]); + + donor = await privateKey({filename: __filename}); + }); + }); + + itEth('Can perform mint()', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const receiver = helper.eth.createAccount(); + const {collectionAddress} = await helper.eth.createERC721MetadataCompatibleRFTCollection(owner, 'Minty', '6', '6', ''); + const contract = helper.ethNativeContract.collection(collectionAddress, 'rft', owner); + + const result = await contract.methods.mintWithTokenURI(receiver, 'Test URI').send(); + + const event = result.events.Transfer; + expect(event.address).to.equal(collectionAddress); + expect(event.returnValues.from).to.equal('0x0000000000000000000000000000000000000000'); + expect(event.returnValues.to).to.equal(receiver); + const tokenId = event.returnValues.tokenId; + expect(tokenId).to.be.equal('1'); + + expect(await contract.methods.tokenURI(tokenId).call()).to.be.equal('Test URI'); + }); + + itEth.skip('Can perform mintBulk()', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const receiver = helper.eth.createAccount(); + const {collectionAddress} = await helper.eth.createERC721MetadataCompatibleRFTCollection(owner, 'MintBulky', '6', '6', ''); + const contract = helper.ethNativeContract.collection(collectionAddress, 'rft', owner); + + { + const nextTokenId = await contract.methods.nextTokenId().call(); + expect(nextTokenId).to.be.equal('1'); + const result = await contract.methods.mintBulkWithTokenURI( + receiver, + [ + [nextTokenId, 'Test URI 0'], + [+nextTokenId + 1, 'Test URI 1'], + [+nextTokenId + 2, 'Test URI 2'], + ], + ).send(); + + const events = result.events.Transfer; + for (let i = 0; i < 2; i++) { + const event = events[i]; + expect(event.address).to.equal(collectionAddress); + expect(event.returnValues.from).to.equal('0x0000000000000000000000000000000000000000'); + expect(event.returnValues.to).to.equal(receiver); + expect(event.returnValues.tokenId).to.equal(String(+nextTokenId + i)); + } + + expect(await contract.methods.tokenURI(nextTokenId).call()).to.be.equal('Test URI 0'); + expect(await contract.methods.tokenURI(+nextTokenId + 1).call()).to.be.equal('Test URI 1'); + expect(await contract.methods.tokenURI(+nextTokenId + 2).call()).to.be.equal('Test URI 2'); + } + }); + + itEth('Can perform burn()', async ({helper}) => { + const caller = await helper.eth.createAccountWithBalance(donor); + const {collectionAddress} = await helper.eth.createRFTCollection(caller, 'Burny', '6', '6'); + const contract = helper.ethNativeContract.collection(collectionAddress, 'rft', caller); + + const result = await contract.methods.mint(caller).send(); + const tokenId = result.events.Transfer.returnValues.tokenId; + { + const result = await contract.methods.burn(tokenId).send(); + const event = result.events.Transfer; + expect(event.address).to.equal(collectionAddress); + expect(event.returnValues.from).to.equal(caller); + expect(event.returnValues.to).to.equal('0x0000000000000000000000000000000000000000'); + 
expect(event.returnValues.tokenId).to.equal(tokenId.toString()); + } + }); + + itEth('Can perform transferFrom()', async ({helper}) => { + const caller = await helper.eth.createAccountWithBalance(donor); + const receiver = helper.eth.createAccount(); + const {collectionId, collectionAddress} = await helper.eth.createRFTCollection(caller, 'TransferFromy', '6', '6'); + const contract = helper.ethNativeContract.collection(collectionAddress, 'rft', caller); + + const result = await contract.methods.mint(caller).send(); + const tokenId = result.events.Transfer.returnValues.tokenId; + + const tokenAddress = helper.ethAddress.fromTokenId(collectionId, tokenId); + + const tokenContract = helper.ethNativeContract.rftToken(tokenAddress, caller); + await tokenContract.methods.repartition(15).send(); + + { + const tokenEvents: any = []; + tokenContract.events.allEvents((_: any, event: any) => { + tokenEvents.push(event); + }); + const result = await contract.methods.transferFrom(caller, receiver, tokenId).send(); + if (tokenEvents.length == 0) await helper.wait.newBlocks(1); + + let event = result.events.Transfer; + expect(event.address).to.equal(collectionAddress); + expect(event.returnValues.from).to.equal(caller); + expect(event.returnValues.to).to.equal(receiver); + expect(event.returnValues.tokenId).to.equal(tokenId.toString()); + + event = tokenEvents[0]; + expect(event.address).to.equal(tokenAddress); + expect(event.returnValues.from).to.equal(caller); + expect(event.returnValues.to).to.equal(receiver); + expect(event.returnValues.value).to.equal('15'); + } + + { + const balance = await contract.methods.balanceOf(receiver).call(); + expect(+balance).to.equal(1); + } + + { + const balance = await contract.methods.balanceOf(caller).call(); + expect(+balance).to.equal(0); + } + }); + + itEth('Can perform transfer()', async ({helper}) => { + const caller = await helper.eth.createAccountWithBalance(donor); + const receiver = helper.eth.createAccount(); + const {collectionAddress} = await helper.eth.createRFTCollection(caller, 'Transferry', '6', '6'); + const contract = helper.ethNativeContract.collection(collectionAddress, 'rft', caller); + + const result = await contract.methods.mint(caller).send(); + const tokenId = result.events.Transfer.returnValues.tokenId; + + { + const result = await contract.methods.transfer(receiver, tokenId).send(); + + const event = result.events.Transfer; + expect(event.address).to.equal(collectionAddress); + expect(event.returnValues.from).to.equal(caller); + expect(event.returnValues.to).to.equal(receiver); + expect(event.returnValues.tokenId).to.equal(tokenId.toString()); + } + + { + const balance = await contract.methods.balanceOf(caller).call(); + expect(+balance).to.equal(0); + } + + { + const balance = await contract.methods.balanceOf(receiver).call(); + expect(+balance).to.equal(1); + } + }); + + itEth('transfer event on transfer from partial ownership to full ownership', async ({helper}) => { + const caller = await helper.eth.createAccountWithBalance(donor); + const receiver = helper.eth.createAccount(); + const {collectionId, collectionAddress} = await helper.eth.createRFTCollection(caller, 'Transferry-Partial-to-Full', '6', '6'); + const contract = helper.ethNativeContract.collection(collectionAddress, 'rft', caller); + + const result = await contract.methods.mint(caller).send(); + const tokenId = result.events.Transfer.returnValues.tokenId; + + const tokenContract = helper.ethNativeContract.rftTokenById(collectionId, tokenId, caller); + + await 
tokenContract.methods.repartition(2).send(); + await tokenContract.methods.transfer(receiver, 1).send(); + + const events: any = []; + contract.events.allEvents((_: any, event: any) => { + events.push(event); + }); + + await tokenContract.methods.transfer(receiver, 1).send(); + if (events.length == 0) await helper.wait.newBlocks(1); + const event = events[0]; + + expect(event.address).to.equal(collectionAddress); + expect(event.returnValues.from).to.equal('0xFFfFfFffFFfffFFfFFfFFFFFffFFFffffFfFFFfF'); + expect(event.returnValues.to).to.equal(receiver); + expect(event.returnValues.tokenId).to.equal(tokenId.toString()); + }); + + itEth('transfer event on transfer from full ownership to partial ownership', async ({helper}) => { + const caller = await helper.eth.createAccountWithBalance(donor); + const receiver = helper.eth.createAccount(); + const {collectionId, collectionAddress} = await helper.eth.createRFTCollection(caller, 'Transferry-Full-to-Partial', '6', '6'); + const contract = helper.ethNativeContract.collection(collectionAddress, 'rft', caller); + + const result = await contract.methods.mint(caller).send(); + const tokenId = result.events.Transfer.returnValues.tokenId; + + const tokenContract = helper.ethNativeContract.rftTokenById(collectionId, tokenId, caller); + + await tokenContract.methods.repartition(2).send(); + + const events: any = []; + contract.events.allEvents((_: any, event: any) => { + events.push(event); + }); + + await tokenContract.methods.transfer(receiver, 1).send(); + if (events.length == 0) await helper.wait.newBlocks(1); + const event = events[0]; + + expect(event.address).to.equal(collectionAddress); + expect(event.returnValues.from).to.equal(caller); + expect(event.returnValues.to).to.equal('0xFFfFfFffFFfffFFfFFfFFFFFffFFFffffFfFFFfF'); + expect(event.returnValues.tokenId).to.equal(tokenId.toString()); + }); +}); + +describe('RFT: Fees', () => { + let donor: IKeyringPair; + + before(async function() { + await usingEthPlaygrounds(async (helper, privateKey) => { + requirePalletsOrSkip(this, helper, [Pallets.ReFungible]); + + donor = await privateKey({filename: __filename}); + }); + }); + + itEth('transferFrom() call fee is less than 0.2UNQ', async ({helper}) => { + const caller = await helper.eth.createAccountWithBalance(donor); + const receiver = helper.eth.createAccount(); + const {collectionAddress} = await helper.eth.createRFTCollection(caller, 'Feeful-Transfer-From', '6', '6'); + const contract = helper.ethNativeContract.collection(collectionAddress, 'rft', caller); + + const result = await contract.methods.mint(caller).send(); + const tokenId = result.events.Transfer.returnValues.tokenId; + + const cost = await helper.eth.recordCallFee(caller, () => contract.methods.transferFrom(caller, receiver, tokenId).send()); + expect(cost < BigInt(0.2 * Number(helper.balance.getOneTokenNominal()))); + expect(cost > 0n); + }); + + itEth('transfer() call fee is less than 0.2UNQ', async ({helper}) => { + const caller = await helper.eth.createAccountWithBalance(donor); + const receiver = helper.eth.createAccount(); + const {collectionAddress} = await helper.eth.createRFTCollection(caller, 'Feeful-Transfer', '6', '6'); + const contract = helper.ethNativeContract.collection(collectionAddress, 'rft', caller); + + const result = await contract.methods.mint(caller).send(); + const tokenId = result.events.Transfer.returnValues.tokenId; + + const cost = await helper.eth.recordCallFee(caller, () => contract.methods.transfer(receiver, tokenId).send()); + expect(cost < BigInt(0.2 
* Number(helper.balance.getOneTokenNominal()))); + expect(cost > 0n); + }); +}); + +describe('Common metadata', () => { + let donor: IKeyringPair; + let alice: IKeyringPair; + + before(async function() { + await usingEthPlaygrounds(async (helper, privateKey) => { + requirePalletsOrSkip(this, helper, [Pallets.ReFungible]); + + donor = await privateKey({filename: __filename}); + [alice] = await helper.arrange.createAccounts([20n], donor); + }); + }); + + itEth('Returns collection name', async ({helper}) => { + const caller = helper.eth.createAccount(); + const tokenPropertyPermissions = [{ + key: 'URI', + permission: { + mutable: true, + collectionAdmin: true, + tokenOwner: false, + }, + }]; + const collection = await helper.rft.mintCollection( + alice, + { + name: 'Leviathan', + tokenPrefix: '11', + properties: [{key: 'ERC721Metadata', value: '1'}], + tokenPropertyPermissions, + }, + ); + + const contract = helper.ethNativeContract.collectionById(collection.collectionId, 'rft', caller); + const name = await contract.methods.name().call(); + expect(name).to.equal('Leviathan'); + }); + + itEth('Returns symbol name', async ({helper}) => { + const caller = await helper.eth.createAccountWithBalance(donor); + const tokenPropertyPermissions = [{ + key: 'URI', + permission: { + mutable: true, + collectionAdmin: true, + tokenOwner: false, + }, + }]; + const {collectionId} = await helper.rft.mintCollection( + alice, + { + name: 'Leviathan', + tokenPrefix: '12', + properties: [{key: 'ERC721Metadata', value: '1'}], + tokenPropertyPermissions, + }, + ); + + const contract = helper.ethNativeContract.collectionById(collectionId, 'rft', caller); + const symbol = await contract.methods.symbol().call(); + expect(symbol).to.equal('12'); + }); +}); diff --git a/tests/src/eth/reFungibleAbi.json b/tests/src/eth/reFungibleAbi.json new file mode 100644 index 0000000000..a6417973d2 --- /dev/null +++ b/tests/src/eth/reFungibleAbi.json @@ -0,0 +1,571 @@ +[ + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "owner", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "approved", + "type": "address" + }, + { + "indexed": true, + "internalType": "uint256", + "name": "tokenId", + "type": "uint256" + } + ], + "name": "Approval", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "owner", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "operator", + "type": "address" + }, + { + "indexed": false, + "internalType": "bool", + "name": "approved", + "type": "bool" + } + ], + "name": "ApprovalForAll", + "type": "event" + }, + { + "anonymous": false, + "inputs": [], + "name": "MintingFinished", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "from", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "to", + "type": "address" + }, + { + "indexed": true, + "internalType": "uint256", + "name": "tokenId", + "type": "uint256" + } + ], + "name": "Transfer", + "type": "event" + }, + { + "inputs": [ + { "internalType": "address", "name": "newAdmin", "type": "address" } + ], + "name": "addCollectionAdmin", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { "internalType": "address", "name": "user", "type": "address" } + ], + "name": "addToCollectionAllowList", + "outputs": [], + 
"stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { "internalType": "address", "name": "user", "type": "address" } + ], + "name": "allowed", + "outputs": [{ "internalType": "bool", "name": "", "type": "bool" }], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { "internalType": "address", "name": "approved", "type": "address" }, + { "internalType": "uint256", "name": "tokenId", "type": "uint256" } + ], + "name": "approve", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { "internalType": "address", "name": "owner", "type": "address" } + ], + "name": "balanceOf", + "outputs": [{ "internalType": "uint256", "name": "", "type": "uint256" }], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { "internalType": "uint256", "name": "tokenId", "type": "uint256" } + ], + "name": "burn", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { "internalType": "address", "name": "from", "type": "address" }, + { "internalType": "uint256", "name": "tokenId", "type": "uint256" } + ], + "name": "burnFrom", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { "internalType": "address", "name": "newOwner", "type": "address" } + ], + "name": "changeCollectionOwner", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "collectionOwner", + "outputs": [ + { + "components": [ + { "internalType": "address", "name": "field_0", "type": "address" }, + { "internalType": "uint256", "name": "field_1", "type": "uint256" } + ], + "internalType": "struct Tuple17", + "name": "", + "type": "tuple" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [{ "internalType": "string", "name": "key", "type": "string" }], + "name": "collectionProperty", + "outputs": [{ "internalType": "bytes", "name": "", "type": "bytes" }], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "collectionSponsor", + "outputs": [ + { + "components": [ + { "internalType": "address", "name": "field_0", "type": "address" }, + { "internalType": "uint256", "name": "field_1", "type": "uint256" } + ], + "internalType": "struct Tuple17", + "name": "", + "type": "tuple" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "confirmCollectionSponsorship", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "contractAddress", + "outputs": [{ "internalType": "address", "name": "", "type": "address" }], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [{ "internalType": "string", "name": "key", "type": "string" }], + "name": "deleteCollectionProperty", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { "internalType": "uint256", "name": "tokenId", "type": "uint256" }, + { "internalType": "string", "name": "key", "type": "string" } + ], + "name": "deleteProperty", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "finishMinting", + "outputs": [{ "internalType": "bool", "name": "", "type": "bool" }], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { "internalType": "uint256", "name": "tokenId", "type": "uint256" } + ], + "name": "getApproved", + "outputs": [{ "internalType": 
"address", "name": "", "type": "address" }], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "hasCollectionPendingSponsor", + "outputs": [{ "internalType": "bool", "name": "", "type": "bool" }], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { "internalType": "address", "name": "owner", "type": "address" }, + { "internalType": "address", "name": "operator", "type": "address" } + ], + "name": "isApprovedForAll", + "outputs": [{ "internalType": "address", "name": "", "type": "address" }], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { "internalType": "address", "name": "user", "type": "address" } + ], + "name": "isOwnerOrAdmin", + "outputs": [{ "internalType": "bool", "name": "", "type": "bool" }], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [{ "internalType": "address", "name": "to", "type": "address" }], + "name": "mint", + "outputs": [{ "internalType": "uint256", "name": "", "type": "uint256" }], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { "internalType": "address", "name": "to", "type": "address" }, + { "internalType": "string", "name": "tokenUri", "type": "string" } + ], + "name": "mintWithTokenURI", + "outputs": [{ "internalType": "uint256", "name": "", "type": "uint256" }], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "mintingFinished", + "outputs": [{ "internalType": "bool", "name": "", "type": "bool" }], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "name", + "outputs": [{ "internalType": "string", "name": "", "type": "string" }], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "nextTokenId", + "outputs": [{ "internalType": "uint256", "name": "", "type": "uint256" }], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { "internalType": "uint256", "name": "tokenId", "type": "uint256" } + ], + "name": "ownerOf", + "outputs": [{ "internalType": "address", "name": "", "type": "address" }], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { "internalType": "uint256", "name": "tokenId", "type": "uint256" }, + { "internalType": "string", "name": "key", "type": "string" } + ], + "name": "property", + "outputs": [{ "internalType": "bytes", "name": "", "type": "bytes" }], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { "internalType": "address", "name": "admin", "type": "address" } + ], + "name": "removeCollectionAdmin", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "removeCollectionSponsor", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { "internalType": "address", "name": "user", "type": "address" } + ], + "name": "removeFromCollectionAllowList", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { "internalType": "address", "name": "from", "type": "address" }, + { "internalType": "address", "name": "to", "type": "address" }, + { "internalType": "uint256", "name": "tokenId", "type": "uint256" } + ], + "name": "safeTransferFrom", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { "internalType": "address", "name": "from", "type": "address" }, + { "internalType": "address", "name": "to", "type": "address" }, + { "internalType": 
"uint256", "name": "tokenId", "type": "uint256" }, + { "internalType": "bytes", "name": "data", "type": "bytes" } + ], + "name": "safeTransferFromWithData", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { "internalType": "address", "name": "operator", "type": "address" }, + { "internalType": "bool", "name": "approved", "type": "bool" } + ], + "name": "setApprovalForAll", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [{ "internalType": "uint8", "name": "mode", "type": "uint8" }], + "name": "setCollectionAccess", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { "internalType": "string", "name": "limit", "type": "string" }, + { "internalType": "uint32", "name": "value", "type": "uint32" } + ], + "name": "setCollectionLimit", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { "internalType": "string", "name": "limit", "type": "string" }, + { "internalType": "bool", "name": "value", "type": "bool" } + ], + "name": "setCollectionLimit", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [{ "internalType": "bool", "name": "mode", "type": "bool" }], + "name": "setCollectionMintMode", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [{ "internalType": "bool", "name": "enable", "type": "bool" }], + "name": "setCollectionNesting", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { "internalType": "bool", "name": "enable", "type": "bool" }, + { + "internalType": "address[]", + "name": "collections", + "type": "address[]" + } + ], + "name": "setCollectionNesting", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { "internalType": "string", "name": "key", "type": "string" }, + { "internalType": "bytes", "name": "value", "type": "bytes" } + ], + "name": "setCollectionProperty", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { "internalType": "address", "name": "sponsor", "type": "address" } + ], + "name": "setCollectionSponsor", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { "internalType": "uint256", "name": "tokenId", "type": "uint256" }, + { "internalType": "string", "name": "key", "type": "string" }, + { "internalType": "bytes", "name": "value", "type": "bytes" } + ], + "name": "setProperty", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { "internalType": "string", "name": "key", "type": "string" }, + { "internalType": "bool", "name": "isMutable", "type": "bool" }, + { "internalType": "bool", "name": "collectionAdmin", "type": "bool" }, + { "internalType": "bool", "name": "tokenOwner", "type": "bool" } + ], + "name": "setTokenPropertyPermission", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { "internalType": "bytes4", "name": "interfaceID", "type": "bytes4" } + ], + "name": "supportsInterface", + "outputs": [{ "internalType": "bool", "name": "", "type": "bool" }], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "symbol", + "outputs": [{ "internalType": "string", "name": "", "type": "string" }], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { 
"internalType": "uint256", "name": "index", "type": "uint256" } + ], + "name": "tokenByIndex", + "outputs": [{ "internalType": "uint256", "name": "", "type": "uint256" }], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { "internalType": "uint256", "name": "token", "type": "uint256" } + ], + "name": "tokenContractAddress", + "outputs": [{ "internalType": "address", "name": "", "type": "address" }], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { "internalType": "address", "name": "owner", "type": "address" }, + { "internalType": "uint256", "name": "index", "type": "uint256" } + ], + "name": "tokenOfOwnerByIndex", + "outputs": [{ "internalType": "uint256", "name": "", "type": "uint256" }], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { "internalType": "uint256", "name": "tokenId", "type": "uint256" } + ], + "name": "tokenURI", + "outputs": [{ "internalType": "string", "name": "", "type": "string" }], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "totalSupply", + "outputs": [{ "internalType": "uint256", "name": "", "type": "uint256" }], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { "internalType": "address", "name": "to", "type": "address" }, + { "internalType": "uint256", "name": "tokenId", "type": "uint256" } + ], + "name": "transfer", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { "internalType": "address", "name": "from", "type": "address" }, + { "internalType": "address", "name": "to", "type": "address" }, + { "internalType": "uint256", "name": "tokenId", "type": "uint256" } + ], + "name": "transferFrom", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "uniqueCollectionType", + "outputs": [{ "internalType": "string", "name": "", "type": "string" }], + "stateMutability": "view", + "type": "function" + } +] diff --git a/tests/src/eth/reFungibleToken.test.ts b/tests/src/eth/reFungibleToken.test.ts new file mode 100644 index 0000000000..c9f41c95a7 --- /dev/null +++ b/tests/src/eth/reFungibleToken.test.ts @@ -0,0 +1,493 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . 
+ +import {Pallets, requirePalletsOrSkip} from '../util'; +import {EthUniqueHelper, expect, itEth, usingEthPlaygrounds} from './util'; +import {IKeyringPair} from '@polkadot/types/types'; +import {Contract} from 'web3-eth-contract'; + + +describe('Refungible token: Information getting', () => { + let donor: IKeyringPair; + let alice: IKeyringPair; + + before(async function() { + await usingEthPlaygrounds(async (helper, privateKey) => { + requirePalletsOrSkip(this, helper, [Pallets.ReFungible]); + + donor = await privateKey({filename: __filename}); + [alice] = await helper.arrange.createAccounts([20n], donor); + }); + }); + + itEth('totalSupply', async ({helper}) => { + const caller = await helper.eth.createAccountWithBalance(donor); + const collection = await helper.rft.mintCollection(alice, {tokenPrefix: 'MUON'}); + const {tokenId} = await collection.mintToken(alice, 200n, {Ethereum: caller}); + + const contract = helper.ethNativeContract.rftTokenById(collection.collectionId, tokenId, caller); + const totalSupply = await contract.methods.totalSupply().call(); + expect(totalSupply).to.equal('200'); + }); + + itEth('balanceOf', async ({helper}) => { + const caller = await helper.eth.createAccountWithBalance(donor); + const collection = await helper.rft.mintCollection(alice, {tokenPrefix: 'MUON'}); + const {tokenId} = await collection.mintToken(alice, 200n, {Ethereum: caller}); + + const contract = helper.ethNativeContract.rftTokenById(collection.collectionId, tokenId, caller); + const balance = await contract.methods.balanceOf(caller).call(); + expect(balance).to.equal('200'); + }); + + itEth('decimals', async ({helper}) => { + const caller = await helper.eth.createAccountWithBalance(donor); + const collection = await helper.rft.mintCollection(alice, {tokenPrefix: 'MUON'}); + const {tokenId} = await collection.mintToken(alice, 200n, {Ethereum: caller}); + + const contract = helper.ethNativeContract.rftTokenById(collection.collectionId, tokenId, caller); + const decimals = await contract.methods.decimals().call(); + expect(decimals).to.equal('0'); + }); +}); + +// FIXME: Need erc721 for ReFungible.
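The 'Check ERC721 token URI for ReFungible' block below pins down how tokenURI() is resolved for ERC721Metadata-compatible RFT collections. Judging only from these four tests, the lookup order can be summarised with the following editorial sketch (a hypothetical helper, not the pallet implementation):

function resolveTokenUri(baseUri: string, props: {URI?: string, URISuffix?: string}): string {
  if (props.URI !== undefined) return props.URI;                       // an explicit per-token URI wins
  if (props.URISuffix !== undefined) return baseUri + props.URISuffix; // otherwise baseURI plus suffix
  return baseUri;                                                      // otherwise baseURI alone, possibly ''
}

// resolveTokenUri('', {})                                   === ''                     // 'Empty tokenURI'
// resolveTokenUri('BaseURI_', {URI: 'Token URI'})            === 'Token URI'            // 'TokenURI from url'
// resolveTokenUri('BaseURI_', {})                            === 'BaseURI_'             // 'TokenURI from baseURI'
// resolveTokenUri('BaseURI_', {URISuffix: '/some/suffix'})   === 'BaseURI_/some/suffix' // baseURI + suffix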
+describe('Check ERC721 token URI for ReFungible', () => { + let donor: IKeyringPair; + + before(async function() { + await usingEthPlaygrounds(async (helper, privateKey) => { + requirePalletsOrSkip(this, helper, [Pallets.ReFungible]); + + donor = await privateKey({filename: __filename}); + }); + }); + + async function setup(helper: EthUniqueHelper, baseUri: string, propertyKey?: string, propertyValue?: string): Promise<{contract: Contract, nextTokenId: string}> { + const owner = await helper.eth.createAccountWithBalance(donor); + const receiver = helper.eth.createAccount(); + + const {collectionAddress} = await helper.eth.createERC721MetadataCompatibleRFTCollection(owner, 'Mint collection', 'a', 'b', baseUri); + const contract = helper.ethNativeContract.collection(collectionAddress, 'rft', owner); + + const result = await contract.methods.mint(receiver).send(); + + const event = result.events.Transfer; + const tokenId = event.returnValues.tokenId; + expect(tokenId).to.be.equal('1'); + expect(event.address).to.be.equal(collectionAddress); + expect(event.returnValues.from).to.be.equal('0x0000000000000000000000000000000000000000'); + expect(event.returnValues.to).to.be.equal(receiver); + + if (propertyKey && propertyValue) { + // Set URL or suffix + await contract.methods.setProperty(tokenId, propertyKey, Buffer.from(propertyValue)).send(); + } + + return {contract, nextTokenId: tokenId}; + } + + itEth('Empty tokenURI', async ({helper}) => { + const {contract, nextTokenId} = await setup(helper, ''); + expect(await contract.methods.tokenURI(nextTokenId).call()).to.be.equal(''); + }); + + itEth('TokenURI from url', async ({helper}) => { + const {contract, nextTokenId} = await setup(helper, 'BaseURI_', 'URI', 'Token URI'); + expect(await contract.methods.tokenURI(nextTokenId).call()).to.be.equal('Token URI'); + }); + + itEth('TokenURI from baseURI', async ({helper}) => { + const {contract, nextTokenId} = await setup(helper, 'BaseURI_'); + expect(await contract.methods.tokenURI(nextTokenId).call()).to.be.equal('BaseURI_'); + }); + + itEth('TokenURI from baseURI + suffix', async ({helper}) => { + const suffix = '/some/suffix'; + const {contract, nextTokenId} = await setup(helper, 'BaseURI_', 'URISuffix', suffix); + expect(await contract.methods.tokenURI(nextTokenId).call()).to.be.equal('BaseURI_' + suffix); + }); +}); + +describe('Refungible: Plain calls', () => { + let donor: IKeyringPair; + let alice: IKeyringPair; + + before(async function() { + await usingEthPlaygrounds(async (helper, privateKey) => { + requirePalletsOrSkip(this, helper, [Pallets.ReFungible]); + + donor = await privateKey({filename: __filename}); + [alice] = await helper.arrange.createAccounts([50n], donor); + }); + }); + + itEth('Can perform approve()', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const spender = helper.eth.createAccount(); + const collection = await helper.rft.mintCollection(alice); + const {tokenId} = await collection.mintToken(alice, 200n, {Ethereum: owner}); + + const tokenAddress = helper.ethAddress.fromTokenId(collection.collectionId, tokenId); + const contract = helper.ethNativeContract.rftToken(tokenAddress, owner); + + { + const result = await contract.methods.approve(spender, 100).send({from: owner}); + const event = result.events.Approval; + expect(event.address).to.be.equal(tokenAddress); + expect(event.returnValues.owner).to.be.equal(owner); + expect(event.returnValues.spender).to.be.equal(spender); + 
expect(event.returnValues.value).to.be.equal('100'); + } + + { + const allowance = await contract.methods.allowance(owner, spender).call(); + expect(+allowance).to.equal(100); + } + }); + + itEth('Can perform transferFrom()', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const spender = await helper.eth.createAccountWithBalance(donor); + const receiver = helper.eth.createAccount(); + const collection = await helper.rft.mintCollection(alice); + const {tokenId} = await collection.mintToken(alice, 200n, {Ethereum: owner}); + + const tokenAddress = helper.ethAddress.fromTokenId(collection.collectionId, tokenId); + const contract = helper.ethNativeContract.rftToken(tokenAddress, owner); + + await contract.methods.approve(spender, 100).send(); + + { + const result = await contract.methods.transferFrom(owner, receiver, 49).send({from: spender}); + let event = result.events.Transfer; + expect(event.address).to.be.equal(tokenAddress); + expect(event.returnValues.from).to.be.equal(owner); + expect(event.returnValues.to).to.be.equal(receiver); + expect(event.returnValues.value).to.be.equal('49'); + + event = result.events.Approval; + expect(event.address).to.be.equal(tokenAddress); + expect(event.returnValues.owner).to.be.equal(owner); + expect(event.returnValues.spender).to.be.equal(spender); + expect(event.returnValues.value).to.be.equal('51'); + } + + { + const balance = await contract.methods.balanceOf(receiver).call(); + expect(+balance).to.equal(49); + } + + { + const balance = await contract.methods.balanceOf(owner).call(); + expect(+balance).to.equal(151); + } + }); + + itEth('Can perform transfer()', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const receiver = helper.eth.createAccount(); + const collection = await helper.rft.mintCollection(alice); + const {tokenId} = await collection.mintToken(alice, 200n, {Ethereum: owner}); + + const tokenAddress = helper.ethAddress.fromTokenId(collection.collectionId, tokenId); + const contract = helper.ethNativeContract.rftToken(tokenAddress, owner); + + { + const result = await contract.methods.transfer(receiver, 50).send({from: owner}); + const event = result.events.Transfer; + expect(event.address).to.be.equal(tokenAddress); + expect(event.returnValues.from).to.be.equal(owner); + expect(event.returnValues.to).to.be.equal(receiver); + expect(event.returnValues.value).to.be.equal('50'); + } + + { + const balance = await contract.methods.balanceOf(owner).call(); + expect(+balance).to.equal(150); + } + + { + const balance = await contract.methods.balanceOf(receiver).call(); + expect(+balance).to.equal(50); + } + }); + + itEth('Can perform repartition()', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const receiver = await helper.eth.createAccountWithBalance(donor); + const collection = await helper.rft.mintCollection(alice); + const {tokenId} = await collection.mintToken(alice, 100n, {Ethereum: owner}); + + const tokenAddress = helper.ethAddress.fromTokenId(collection.collectionId, tokenId); + const contract = helper.ethNativeContract.rftToken(tokenAddress, owner); + + await contract.methods.repartition(200).send({from: owner}); + expect(+await contract.methods.balanceOf(owner).call()).to.be.equal(200); + await contract.methods.transfer(receiver, 110).send({from: owner}); + expect(+await contract.methods.balanceOf(owner).call()).to.be.equal(90); + expect(+await contract.methods.balanceOf(receiver).call()).to.be.equal(110); 
+ + await expect(contract.methods.repartition(80).send({from: owner})).to.eventually.be.rejected; // Transaction is reverted + + await contract.methods.transfer(receiver, 90).send({from: owner}); + expect(+await contract.methods.balanceOf(owner).call()).to.be.equal(0); + expect(+await contract.methods.balanceOf(receiver).call()).to.be.equal(200); + + await contract.methods.repartition(150).send({from: receiver}); + await expect(contract.methods.transfer(owner, 160).send({from: receiver})).to.eventually.be.rejected; // Transaction is reverted + expect(+await contract.methods.balanceOf(receiver).call()).to.be.equal(150); + }); + + itEth('Can repartition with increased amount', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const collection = await helper.rft.mintCollection(alice); + const {tokenId} = await collection.mintToken(alice, 100n, {Ethereum: owner}); + + const tokenAddress = helper.ethAddress.fromTokenId(collection.collectionId, tokenId); + const contract = helper.ethNativeContract.rftToken(tokenAddress, owner); + + const result = await contract.methods.repartition(200).send(); + + const event = result.events.Transfer; + expect(event.address).to.be.equal(tokenAddress); + expect(event.returnValues.from).to.be.equal('0x0000000000000000000000000000000000000000'); + expect(event.returnValues.to).to.be.equal(owner); + expect(event.returnValues.value).to.be.equal('100'); + }); + + itEth('Can repartition with decreased amount', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const collection = await helper.rft.mintCollection(alice); + const {tokenId} = await collection.mintToken(alice, 100n, {Ethereum: owner}); + + const tokenAddress = helper.ethAddress.fromTokenId(collection.collectionId, tokenId); + const contract = helper.ethNativeContract.rftToken(tokenAddress, owner); + + const result = await contract.methods.repartition(50).send(); + const event = result.events.Transfer; + expect(event.address).to.be.equal(tokenAddress); + expect(event.returnValues.from).to.be.equal(owner); + expect(event.returnValues.to).to.be.equal('0x0000000000000000000000000000000000000000'); + expect(event.returnValues.value).to.be.equal('50'); + }); + + itEth('Receiving Transfer event on burning into full ownership', async ({helper}) => { + const caller = await helper.eth.createAccountWithBalance(donor); + const receiver = await helper.eth.createAccountWithBalance(donor); + const {collectionId, collectionAddress} = await helper.eth.createRFTCollection(caller, 'Devastation', '6', '6'); + const contract = helper.ethNativeContract.collection(collectionAddress, 'rft', caller); + + const result = await contract.methods.mint(caller).send(); + const tokenId = result.events.Transfer.returnValues.tokenId; + const tokenAddress = helper.ethAddress.fromTokenId(collectionId, tokenId); + const tokenContract = helper.ethNativeContract.rftToken(tokenAddress, caller); + + await tokenContract.methods.repartition(2).send(); + await tokenContract.methods.transfer(receiver, 1).send(); + + const events: any = []; + contract.events.allEvents((_: any, event: any) => { + events.push(event); + }); + await tokenContract.methods.burnFrom(caller, 1).send(); + + if (events.length == 0) await helper.wait.newBlocks(1); + const event = events[0]; + expect(event.address).to.be.equal(collectionAddress); + expect(event.returnValues.from).to.be.equal('0xFFfFfFffFFfffFFfFFfFFFFFffFFFffffFfFFFfF'); + expect(event.returnValues.to).to.be.equal(receiver); + 
expect(event.returnValues.tokenId).to.be.equal(tokenId); + }); +}); + +describe('Refungible: Fees', () => { + let donor: IKeyringPair; + let alice: IKeyringPair; + + before(async function() { + await usingEthPlaygrounds(async (helper, privateKey) => { + requirePalletsOrSkip(this, helper, [Pallets.ReFungible]); + + donor = await privateKey({filename: __filename}); + [alice] = await helper.arrange.createAccounts([50n], donor); + }); + }); + + itEth('approve() call fee is less than 0.2UNQ', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const spender = helper.eth.createAccount(); + const collection = await helper.rft.mintCollection(alice); + const {tokenId} = await collection.mintToken(alice, 100n, {Ethereum: owner}); + + const tokenAddress = helper.ethAddress.fromTokenId(collection.collectionId, tokenId); + const contract = helper.ethNativeContract.rftToken(tokenAddress, owner); + + const cost = await helper.eth.recordCallFee(owner, () => contract.methods.approve(spender, 100).send({from: owner})); + expect(cost < BigInt(0.2 * Number(helper.balance.getOneTokenNominal()))).to.be.true; + }); + + itEth('transferFrom() call fee is less than 0.2UNQ', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const spender = await helper.eth.createAccountWithBalance(donor); + const collection = await helper.rft.mintCollection(alice); + const {tokenId} = await collection.mintToken(alice, 200n, {Ethereum: owner}); + + const tokenAddress = helper.ethAddress.fromTokenId(collection.collectionId, tokenId); + const contract = helper.ethNativeContract.rftToken(tokenAddress, owner); + + await contract.methods.approve(spender, 100).send({from: owner}); + + const cost = await helper.eth.recordCallFee(spender, () => contract.methods.transferFrom(owner, spender, 100).send({from: spender})); + expect(cost < BigInt(0.2 * Number(helper.balance.getOneTokenNominal()))).to.be.true; + }); + + itEth('transfer() call fee is less than 0.2UNQ', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const receiver = helper.eth.createAccount(); + const collection = await helper.rft.mintCollection(alice); + const {tokenId} = await collection.mintToken(alice, 200n, {Ethereum: owner}); + + const tokenAddress = helper.ethAddress.fromTokenId(collection.collectionId, tokenId); + const contract = helper.ethNativeContract.rftToken(tokenAddress, owner); + + const cost = await helper.eth.recordCallFee(owner, () => contract.methods.transfer(receiver, 100).send({from: owner})); + expect(cost < BigInt(0.2 * Number(helper.balance.getOneTokenNominal()))).to.be.true; + }); +}); + +describe('Refungible: Substrate calls', () => { + let donor: IKeyringPair; + let alice: IKeyringPair; + + before(async function() { + await usingEthPlaygrounds(async (helper, privateKey) => { + requirePalletsOrSkip(this, helper, [Pallets.ReFungible]); + + donor = await privateKey({filename: __filename}); + [alice] = await helper.arrange.createAccounts([50n], donor); + }); + }); + + itEth('Events emitted for approve()', async ({helper}) => { + const receiver = helper.eth.createAccount(); + const collection = await helper.rft.mintCollection(alice); + const token = await collection.mintToken(alice, 200n); + + const tokenAddress = helper.ethAddress.fromTokenId(collection.collectionId, token.tokenId); + const contract = helper.ethNativeContract.rftToken(tokenAddress); + + const events: any = []; + contract.events.allEvents((_: any, event: any) => { + events.push(event); + }); + + expect(await 
token.approve(alice, {Ethereum: receiver}, 100n)).to.be.true; + if (events.length == 0) await helper.wait.newBlocks(1); + const event = events[0]; + + expect(event.event).to.be.equal('Approval'); + expect(event.address).to.be.equal(tokenAddress); + expect(event.returnValues.owner).to.be.equal(helper.address.substrateToEth(alice.address)); + expect(event.returnValues.spender).to.be.equal(receiver); + expect(event.returnValues.value).to.be.equal('100'); + }); + + itEth('Events emitted for transferFrom()', async ({helper}) => { + const [bob] = await helper.arrange.createAccounts([10n], donor); + const receiver = helper.eth.createAccount(); + const collection = await helper.rft.mintCollection(alice); + const token = await collection.mintToken(alice, 200n); + await token.approve(alice, {Substrate: bob.address}, 100n); + + const tokenAddress = helper.ethAddress.fromTokenId(collection.collectionId, token.tokenId); + const contract = helper.ethNativeContract.rftToken(tokenAddress); + + const events: any = []; + contract.events.allEvents((_: any, event: any) => { + events.push(event); + }); + + expect(await token.transferFrom(bob, {Substrate: alice.address}, {Ethereum: receiver}, 51n)).to.be.true; + if (events.length == 0) await helper.wait.newBlocks(1); + + let event = events[0]; + expect(event.event).to.be.equal('Transfer'); + expect(event.address).to.be.equal(tokenAddress); + expect(event.returnValues.from).to.be.equal(helper.address.substrateToEth(alice.address)); + expect(event.returnValues.to).to.be.equal(receiver); + expect(event.returnValues.value).to.be.equal('51'); + + event = events[1]; + expect(event.event).to.be.equal('Approval'); + expect(event.address).to.be.equal(tokenAddress); + expect(event.returnValues.owner).to.be.equal(helper.address.substrateToEth(alice.address)); + expect(event.returnValues.spender).to.be.equal(helper.address.substrateToEth(bob.address)); + expect(event.returnValues.value).to.be.equal('49'); + }); + + itEth('Events emitted for transfer()', async ({helper}) => { + const receiver = helper.eth.createAccount(); + const collection = await helper.rft.mintCollection(alice); + const token = await collection.mintToken(alice, 200n); + + const tokenAddress = helper.ethAddress.fromTokenId(collection.collectionId, token.tokenId); + const contract = helper.ethNativeContract.rftToken(tokenAddress); + + const events: any = []; + contract.events.allEvents((_: any, event: any) => { + events.push(event); + }); + + expect(await token.transfer(alice, {Ethereum: receiver}, 51n)).to.be.true; + if (events.length == 0) await helper.wait.newBlocks(1); + const event = events[0]; + + expect(event.event).to.be.equal('Transfer'); + expect(event.address).to.be.equal(tokenAddress); + expect(event.returnValues.from).to.be.equal(helper.address.substrateToEth(alice.address)); + expect(event.returnValues.to).to.be.equal(receiver); + expect(event.returnValues.value).to.be.equal('51'); + }); +}); + +describe('ERC 1633 implementation', () => { + let donor: IKeyringPair; + + before(async function() { + await usingEthPlaygrounds(async (helper, privateKey) => { + requirePalletsOrSkip(this, helper, [Pallets.ReFungible]); + + donor = await privateKey({filename: __filename}); + }); + }); + + itEth('Default parent token address and id', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + + const {collectionId, collectionAddress} = await helper.eth.createRFTCollection(owner, 'Sands', '', 'GRAIN'); + const collectionContract = 
helper.ethNativeContract.collection(collectionAddress, 'rft', owner); + + const result = await collectionContract.methods.mint(owner).send(); + const tokenId = result.events.Transfer.returnValues.tokenId; + + const tokenAddress = helper.ethAddress.fromTokenId(collectionId, tokenId); + const tokenContract = helper.ethNativeContract.rftToken(tokenAddress, owner); + + expect(await tokenContract.methods.parentToken().call()).to.be.equal(collectionAddress); + expect(await tokenContract.methods.parentTokenId().call()).to.be.equal(tokenId); + }); +}); diff --git a/tests/src/eth/reFungibleTokenAbi.json b/tests/src/eth/reFungibleTokenAbi.json new file mode 100644 index 0000000000..608f4feafc --- /dev/null +++ b/tests/src/eth/reFungibleTokenAbi.json @@ -0,0 +1,172 @@ +[ + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "owner", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "spender", + "type": "address" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "value", + "type": "uint256" + } + ], + "name": "Approval", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "from", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "to", + "type": "address" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "value", + "type": "uint256" + } + ], + "name": "Transfer", + "type": "event" + }, + { + "inputs": [ + { "internalType": "address", "name": "owner", "type": "address" }, + { "internalType": "address", "name": "spender", "type": "address" } + ], + "name": "allowance", + "outputs": [{ "internalType": "uint256", "name": "", "type": "uint256" }], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { "internalType": "address", "name": "spender", "type": "address" }, + { "internalType": "uint256", "name": "amount", "type": "uint256" } + ], + "name": "approve", + "outputs": [{ "internalType": "bool", "name": "", "type": "bool" }], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { "internalType": "address", "name": "owner", "type": "address" } + ], + "name": "balanceOf", + "outputs": [{ "internalType": "uint256", "name": "", "type": "uint256" }], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { "internalType": "address", "name": "from", "type": "address" }, + { "internalType": "uint256", "name": "amount", "type": "uint256" } + ], + "name": "burnFrom", + "outputs": [{ "internalType": "bool", "name": "", "type": "bool" }], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "decimals", + "outputs": [{ "internalType": "uint8", "name": "", "type": "uint8" }], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "name", + "outputs": [{ "internalType": "string", "name": "", "type": "string" }], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "parentToken", + "outputs": [{ "internalType": "address", "name": "", "type": "address" }], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "parentTokenId", + "outputs": [{ "internalType": "uint256", "name": "", "type": "uint256" }], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { "internalType": "uint256", "name": "amount", "type": "uint256" } + ], + "name": "repartition", + "outputs": [{ 
"internalType": "bool", "name": "", "type": "bool" }], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { "internalType": "bytes4", "name": "interfaceID", "type": "bytes4" } + ], + "name": "supportsInterface", + "outputs": [{ "internalType": "bool", "name": "", "type": "bool" }], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "symbol", + "outputs": [{ "internalType": "string", "name": "", "type": "string" }], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "totalSupply", + "outputs": [{ "internalType": "uint256", "name": "", "type": "uint256" }], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { "internalType": "address", "name": "to", "type": "address" }, + { "internalType": "uint256", "name": "amount", "type": "uint256" } + ], + "name": "transfer", + "outputs": [{ "internalType": "bool", "name": "", "type": "bool" }], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { "internalType": "address", "name": "from", "type": "address" }, + { "internalType": "address", "name": "to", "type": "address" }, + { "internalType": "uint256", "name": "amount", "type": "uint256" } + ], + "name": "transferFrom", + "outputs": [{ "internalType": "bool", "name": "", "type": "bool" }], + "stateMutability": "nonpayable", + "type": "function" + } +] diff --git a/tests/src/eth/sponsoring.test.ts b/tests/src/eth/sponsoring.test.ts index cca9293db9..52b748b69a 100644 --- a/tests/src/eth/sponsoring.test.ts +++ b/tests/src/eth/sponsoring.test.ts @@ -14,52 +14,65 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . -import {expect} from 'chai'; -import {contractHelpers, createEthAccount, createEthAccountWithBalance, deployCollector, deployFlipper, itWeb3, SponsoringMode, transferBalanceToEth} from './util/helpers'; +import {IKeyringPair} from '@polkadot/types/types'; +import {itEth, expect, SponsoringMode} from './util'; +import {usingPlaygrounds} from '../util/index'; describe('EVM sponsoring', () => { - itWeb3('Fee is deducted from contract if sponsoring is enabled', async ({api, web3, privateKeyWrapper}) => { - const alice = privateKeyWrapper('//Alice'); + let donor: IKeyringPair; - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const caller = createEthAccount(web3); - const originalCallerBalance = await web3.eth.getBalance(caller); - expect(originalCallerBalance).to.be.equal('0'); + before(async () => { + await usingPlaygrounds(async (_helper, privateKey) => { + donor = await privateKey({filename: __filename}); + }); + }); + + itEth('Fee is deducted from contract if sponsoring is enabled', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const sponsor = await helper.eth.createAccountWithBalance(donor); + const caller = helper.eth.createAccount(); + const originalCallerBalance = await helper.balance.getEthereum(caller); + + expect(originalCallerBalance).to.be.equal(0n); + + const flipper = await helper.eth.deployFlipper(owner); - const flipper = await deployFlipper(web3, owner); + const helpers = helper.ethNativeContract.contractHelpers(owner); - const helpers = contractHelpers(web3, owner); await helpers.methods.toggleAllowlist(flipper.options.address, true).send({from: owner}); await helpers.methods.toggleAllowed(flipper.options.address, caller, true).send({from: owner}); + + await 
helpers.methods.setSponsor(flipper.options.address, sponsor).send({from: owner}); + await helpers.methods.confirmSponsorship(flipper.options.address).send({from: sponsor}); expect(await helpers.methods.sponsoringEnabled(flipper.options.address).call()).to.be.false; await helpers.methods.setSponsoringMode(flipper.options.address, SponsoringMode.Allowlisted).send({from: owner}); await helpers.methods.setSponsoringRateLimit(flipper.options.address, 0).send({from: owner}); expect(await helpers.methods.sponsoringEnabled(flipper.options.address).call()).to.be.true; - await transferBalanceToEth(api, alice, flipper.options.address); - - const originalFlipperBalance = await web3.eth.getBalance(flipper.options.address); - expect(originalFlipperBalance).to.be.not.equal('0'); + const originalSponsorBalance = await helper.balance.getEthereum(sponsor); + expect(originalSponsorBalance).to.be.not.equal(0n); await flipper.methods.flip().send({from: caller}); expect(await flipper.methods.getValue().call()).to.be.true; // Balance should be taken from flipper instead of caller - expect(await web3.eth.getBalance(caller)).to.be.equals(originalCallerBalance); - expect(await web3.eth.getBalance(flipper.options.address)).to.be.not.equals(originalFlipperBalance); + expect(await helper.balance.getEthereum(caller)).to.be.equal(originalCallerBalance); + expect(await helper.balance.getEthereum(sponsor)).to.be.not.equal(originalSponsorBalance); }); - itWeb3('...but this doesn\'t applies to payable value', async ({api, web3, privateKeyWrapper}) => { - const alice = privateKeyWrapper('//Alice'); - const owner = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const caller = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const originalCallerBalance = await web3.eth.getBalance(caller); - expect(originalCallerBalance).to.be.not.equal('0'); + itEth('...but this doesn\'t applies to payable value', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const sponsor = await helper.eth.createAccountWithBalance(donor); + const caller = await helper.eth.createAccountWithBalance(donor); + const originalCallerBalance = await helper.balance.getEthereum(caller); + + expect(originalCallerBalance).to.be.not.equal(0n); + + const collector = await helper.eth.deployCollectorContract(owner); - const collector = await deployCollector(web3, owner); + const helpers = helper.ethNativeContract.contractHelpers(owner); - const helpers = contractHelpers(web3, owner); await helpers.methods.toggleAllowlist(collector.options.address, true).send({from: owner}); await helpers.methods.toggleAllowed(collector.options.address, caller, true).send({from: owner}); @@ -68,16 +81,17 @@ describe('EVM sponsoring', () => { await helpers.methods.setSponsoringRateLimit(collector.options.address, 0).send({from: owner}); expect(await helpers.methods.sponsoringEnabled(collector.options.address).call()).to.be.true; - await transferBalanceToEth(api, alice, collector.options.address); + await helpers.methods.setSponsor(collector.options.address, sponsor).send({from: owner}); + await helpers.methods.confirmSponsorship(collector.options.address).send({from: sponsor}); - const originalCollectorBalance = await web3.eth.getBalance(collector.options.address); - expect(originalCollectorBalance).to.be.not.equal('0'); + const originalSponsorBalance = await helper.balance.getEthereum(sponsor); + expect(originalSponsorBalance).to.be.not.equal(0n); await collector.methods.giveMoney().send({from: caller, value: 
'10000'}); // Balance will be taken from both caller (value) and from collector (fee) - expect(await web3.eth.getBalance(caller)).to.be.equals((BigInt(originalCallerBalance) - 10000n).toString()); - expect(await web3.eth.getBalance(collector.options.address)).to.be.not.equals(originalCollectorBalance); + expect(await helper.balance.getEthereum(caller)).to.be.equals((originalCallerBalance - 10000n)); + expect(await helper.balance.getEthereum(sponsor)).to.be.not.equals(originalSponsorBalance); expect(await collector.methods.getCollected().call()).to.be.equal('10000'); }); }); diff --git a/tests/src/eth/tokenProperties.test.ts b/tests/src/eth/tokenProperties.test.ts index c04814f16f..57c69c53a8 100644 --- a/tests/src/eth/tokenProperties.test.ts +++ b/tests/src/eth/tokenProperties.test.ts @@ -1,94 +1,130 @@ -import {addCollectionAdminExpectSuccess, createCollectionExpectSuccess, createItemExpectSuccess} from '../util/helpers'; -import {cartesian, collectionIdToAddress, createEthAccount, createEthAccountWithBalance, GAS_ARGS, itWeb3} from './util/helpers'; -import nonFungibleAbi from './nonFungibleAbi.json'; -import {expect} from 'chai'; -import {executeTransaction} from '../substrate/substrate-api'; +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . 
+ +import {itEth, usingEthPlaygrounds, expect} from './util'; +import {IKeyringPair} from '@polkadot/types/types'; describe('EVM token properties', () => { - itWeb3('Can be reconfigured', async({web3, api, privateKeyWrapper}) => { - const alice = privateKeyWrapper('//Alice'); - const caller = await createEthAccountWithBalance(api, web3, privateKeyWrapper); + let donor: IKeyringPair; + let alice: IKeyringPair; + + before(async function() { + await usingEthPlaygrounds(async (helper, privateKey) => { + donor = await privateKey({filename: __filename}); + [alice] = await helper.arrange.createAccounts([100n], donor); + }); + }); + + itEth('Can be reconfigured', async({helper}) => { + const caller = await helper.eth.createAccountWithBalance(donor); for(const [mutable,collectionAdmin, tokenOwner] of cartesian([], [false, true], [false, true], [false, true])) { - const collection = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await addCollectionAdminExpectSuccess(alice, collection, {Ethereum: caller}); + const collection = await helper.nft.mintCollection(alice); + await collection.addAdmin(alice, {Ethereum: caller}); - const address = collectionIdToAddress(collection); - const contract = new web3.eth.Contract(nonFungibleAbi as any, address, {from: caller, ...GAS_ARGS}); + const address = helper.ethAddress.fromCollectionId(collection.collectionId); + const contract = helper.ethNativeContract.collection(address, 'nft', caller); await contract.methods.setTokenPropertyPermission('testKey', mutable, collectionAdmin, tokenOwner).send({from: caller}); - const state = (await api.query.common.collectionPropertyPermissions(collection)).toJSON(); - expect(state).to.be.deep.equal({ - [web3.utils.toHex('testKey')]: {mutable, collectionAdmin, tokenOwner}, - }); + expect(await collection.getPropertyPermissions()).to.be.deep.equal([{ + key: 'testKey', + permission: {mutable, collectionAdmin, tokenOwner}, + }]); } }); - itWeb3('Can be set', async({web3, api, privateKeyWrapper}) => { - const alice = privateKeyWrapper('//Alice'); - const caller = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const collection = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - const token = await createItemExpectSuccess(alice, collection, 'NFT'); - await executeTransaction(api, alice, api.tx.unique.setTokenPropertyPermissions(collection, [{ - key: 'testKey', - permission: { - collectionAdmin: true, - }, - }])); + itEth('Can be set', async({helper}) => { + const caller = await helper.eth.createAccountWithBalance(donor); + const collection = await helper.nft.mintCollection(alice, { + tokenPropertyPermissions: [{ + key: 'testKey', + permission: { + collectionAdmin: true, + }, + }], + }); + const token = await collection.mintToken(alice); - await addCollectionAdminExpectSuccess(alice, collection, {Ethereum: caller}); + await collection.addAdmin(alice, {Ethereum: caller}); - const address = collectionIdToAddress(collection); - const contract = new web3.eth.Contract(nonFungibleAbi as any, address, {from: caller, ...GAS_ARGS}); + const address = helper.ethAddress.fromCollectionId(collection.collectionId); + const contract = helper.ethNativeContract.collection(address, 'nft', caller); - await contract.methods.setProperty(token, 'testKey', Buffer.from('testValue')).send({from: caller}); + await contract.methods.setProperty(token.tokenId, 'testKey', Buffer.from('testValue')).send({from: caller}); - const [{value}] = (await api.rpc.unique.tokenProperties(collection, token, 
['testKey'])).toHuman()! as any; + const [{value}] = await token.getProperties(['testKey']); expect(value).to.equal('testValue'); }); - itWeb3('Can be deleted', async({web3, api, privateKeyWrapper}) => { - const alice = privateKeyWrapper('//Alice'); - const caller = await createEthAccountWithBalance(api, web3, privateKeyWrapper); - const collection = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - const token = await createItemExpectSuccess(alice, collection, 'NFT'); - await executeTransaction(api, alice, api.tx.unique.setTokenPropertyPermissions(collection, [{ - key: 'testKey', - permission: { - mutable: true, - collectionAdmin: true, - }, - }])); - await executeTransaction(api, alice, api.tx.unique.setTokenProperties(collection, token, [{key: 'testKey', value: 'testValue'}])); + itEth('Can be deleted', async({helper}) => { + const caller = await helper.eth.createAccountWithBalance(donor); + const collection = await helper.nft.mintCollection(alice, { + tokenPropertyPermissions: [{ + key: 'testKey', + permission: { + mutable: true, + collectionAdmin: true, + }, + }], + }); + + const token = await collection.mintToken(alice); + await token.setProperties(alice, [{key: 'testKey', value: 'testValue'}]); - await addCollectionAdminExpectSuccess(alice, collection, {Ethereum: caller}); + await collection.addAdmin(alice, {Ethereum: caller}); - const address = collectionIdToAddress(collection); - const contract = new web3.eth.Contract(nonFungibleAbi as any, address, {from: caller, ...GAS_ARGS}); + const address = helper.ethAddress.fromCollectionId(collection.collectionId); + const contract = helper.ethNativeContract.collection(address, 'nft', caller); - await contract.methods.deleteProperty(token, 'testKey').send({from: caller}); + await contract.methods.deleteProperty(token.tokenId, 'testKey').send({from: caller}); - const result = (await api.rpc.unique.tokenProperties(collection, token, ['testKey'])).toJSON()! 
as any; + const result = await token.getProperties(['testKey']); expect(result.length).to.equal(0); }); - itWeb3('Can be read', async({web3, api, privateKeyWrapper}) => { - const alice = privateKeyWrapper('//Alice'); - const caller = createEthAccount(web3); - const collection = await createCollectionExpectSuccess({mode: {type:'NFT'}}); - const token = await createItemExpectSuccess(alice, collection, 'NFT'); - - await executeTransaction(api, alice, api.tx.unique.setTokenPropertyPermissions(collection, [{ - key: 'testKey', - permission: { - collectionAdmin: true, - }, - }])); - await executeTransaction(api, alice, api.tx.unique.setTokenProperties(collection, token, [{key: 'testKey', value: 'testValue'}])); - - const address = collectionIdToAddress(collection); - const contract = new web3.eth.Contract(nonFungibleAbi as any, address, {from: caller, ...GAS_ARGS}); - - const value = await contract.methods.property(token, 'testKey').call(); - expect(value).to.equal(web3.utils.toHex('testValue')); + + itEth('Can be read', async({helper}) => { + const caller = helper.eth.createAccount(); + const collection = await helper.nft.mintCollection(alice, { + tokenPropertyPermissions: [{ + key: 'testKey', + permission: { + collectionAdmin: true, + }, + }], + }); + + const token = await collection.mintToken(alice); + await token.setProperties(alice, [{key: 'testKey', value: 'testValue'}]); + + const address = helper.ethAddress.fromCollectionId(collection.collectionId); + const contract = helper.ethNativeContract.collection(address, 'nft', caller); + + const value = await contract.methods.property(token.tokenId, 'testKey').call(); + expect(value).to.equal(helper.getWeb3().utils.toHex('testValue')); }); }); + + +type ElementOf = A extends readonly (infer T)[] ? T : never; +function* cartesian>, R extends Array>(internalRest: [...R], ...args: [...T]): Generator<[...R, ...{[K in keyof T]: ElementOf}]> { + if(args.length === 0) { + yield internalRest as any; + return; + } + for(const value of args[0]) { + yield* cartesian([...internalRest, value], ...args.slice(1)) as any; + } +} diff --git a/tests/src/eth/util/contractHelpersAbi.json b/tests/src/eth/util/contractHelpersAbi.json index 158e35d96f..f10d1e5a83 100644 --- a/tests/src/eth/util/contractHelpersAbi.json +++ b/tests/src/eth/util/contractHelpersAbi.json @@ -1,4 +1,55 @@ [ + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "contractAddress", + "type": "address" + } + ], + "name": "ContractSponsorRemoved", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "contractAddress", + "type": "address" + }, + { + "indexed": false, + "internalType": "address", + "name": "sponsor", + "type": "address" + } + ], + "name": "ContractSponsorSet", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "contractAddress", + "type": "address" + }, + { + "indexed": false, + "internalType": "address", + "name": "sponsor", + "type": "address" + } + ], + "name": "ContractSponsorshipConfirmed", + "type": "event" + }, { "inputs": [ { @@ -26,6 +77,19 @@ "stateMutability": "view", "type": "function" }, + { + "inputs": [ + { + "internalType": "address", + "name": "contractAddress", + "type": "address" + } + ], + "name": "confirmSponsorship", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, { "inputs": [ { @@ -47,11 +111,78 @@ "type": "address" } ], - "name": 
"getSponsoringRateLimit", - "outputs": [{ "internalType": "uint32", "name": "", "type": "uint32" }], + "name": "hasPendingSponsor", + "outputs": [{ "internalType": "bool", "name": "", "type": "bool" }], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "contractAddress", + "type": "address" + } + ], + "name": "hasSponsor", + "outputs": [{ "internalType": "bool", "name": "", "type": "bool" }], "stateMutability": "view", "type": "function" }, + { + "inputs": [ + { + "internalType": "address", + "name": "contractAddress", + "type": "address" + } + ], + "name": "removeSponsor", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "contractAddress", + "type": "address" + } + ], + "name": "selfSponsoredEnable", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "contractAddress", + "type": "address" + }, + { "internalType": "address", "name": "sponsor", "type": "address" } + ], + "name": "setSponsor", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "contractAddress", + "type": "address" + }, + { "internalType": "uint256", "name": "feeLimit", "type": "uint256" } + ], + "name": "setSponsoringFeeLimit", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, { "inputs": [ { @@ -88,8 +219,18 @@ "type": "address" } ], - "name": "sponsoringEnabled", - "outputs": [{ "internalType": "bool", "name": "", "type": "bool" }], + "name": "sponsor", + "outputs": [ + { + "components": [ + { "internalType": "address", "name": "field_0", "type": "address" }, + { "internalType": "uint256", "name": "field_1", "type": "uint256" } + ], + "internalType": "struct Tuple0", + "name": "", + "type": "tuple" + } + ], "stateMutability": "view", "type": "function" }, @@ -101,17 +242,21 @@ "type": "address" } ], - "name": "sponsoringMode", - "outputs": [{ "internalType": "uint8", "name": "", "type": "uint8" }], + "name": "sponsoringEnabled", + "outputs": [{ "internalType": "bool", "name": "", "type": "bool" }], "stateMutability": "view", "type": "function" }, { "inputs": [ - { "internalType": "bytes4", "name": "interfaceID", "type": "bytes4" } + { + "internalType": "address", + "name": "contractAddress", + "type": "address" + } ], - "name": "supportsInterface", - "outputs": [{ "internalType": "bool", "name": "", "type": "bool" }], + "name": "sponsoringFeeLimit", + "outputs": [{ "internalType": "uint256", "name": "", "type": "uint256" }], "stateMutability": "view", "type": "function" }, @@ -121,13 +266,20 @@ "internalType": "address", "name": "contractAddress", "type": "address" - }, - { "internalType": "address", "name": "user", "type": "address" }, - { "internalType": "bool", "name": "allowed", "type": "bool" } + } ], - "name": "toggleAllowed", - "outputs": [], - "stateMutability": "nonpayable", + "name": "sponsoringRateLimit", + "outputs": [{ "internalType": "uint32", "name": "", "type": "uint32" }], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { "internalType": "bytes4", "name": "interfaceID", "type": "bytes4" } + ], + "name": "supportsInterface", + "outputs": [{ "internalType": "bool", "name": "", "type": "bool" }], + "stateMutability": "view", "type": "function" }, { @@ -137,9 +289,10 @@ "name": "contractAddress", "type": "address" }, - { 
"internalType": "bool", "name": "enabled", "type": "bool" } + { "internalType": "address", "name": "user", "type": "address" }, + { "internalType": "bool", "name": "isAllowed", "type": "bool" } ], - "name": "toggleAllowlist", + "name": "toggleAllowed", "outputs": [], "stateMutability": "nonpayable", "type": "function" @@ -153,7 +306,7 @@ }, { "internalType": "bool", "name": "enabled", "type": "bool" } ], - "name": "toggleSponsoring", + "name": "toggleAllowlist", "outputs": [], "stateMutability": "nonpayable", "type": "function" diff --git a/tests/src/eth/util/helpers.ts b/tests/src/eth/util/helpers.ts deleted file mode 100644 index ec2c22f529..0000000000 --- a/tests/src/eth/util/helpers.ts +++ /dev/null @@ -1,376 +0,0 @@ -// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. -// This file is part of Unique Network. - -// Unique Network is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Unique Network is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Unique Network. If not, see . - -// eslint-disable-next-line @typescript-eslint/triple-slash-reference -/// - -import {ApiPromise} from '@polkadot/api'; -import {IKeyringPair} from '@polkadot/types/types'; -import {addressToEvm, evmToAddress} from '@polkadot/util-crypto'; -import {expect} from 'chai'; -import * as solc from 'solc'; -import Web3 from 'web3'; -import config from '../../config'; -import getBalance from '../../substrate/get-balance'; -import privateKey from '../../substrate/privateKey'; -import usingApi, {submitTransactionAsync} from '../../substrate/substrate-api'; -import waitNewBlocks from '../../substrate/wait-new-blocks'; -import {CrossAccountId, getDetailedCollectionInfo, getGenericResult, UNIQUE} from '../../util/helpers'; -import collectionHelpersAbi from '../collectionHelpersAbi.json'; -import nonFungibleAbi from '../nonFungibleAbi.json'; -import contractHelpersAbi from './contractHelpersAbi.json'; - -export const GAS_ARGS = {gas: 2500000}; - -export enum SponsoringMode { - Disabled = 0, - Allowlisted = 1, - Generous = 2, -} - -let web3Connected = false; -export async function usingWeb3(cb: (web3: Web3) => Promise | T): Promise { - if (web3Connected) throw new Error('do not nest usingWeb3 calls'); - web3Connected = true; - - const provider = new Web3.providers.WebsocketProvider(config.substrateUrl); - const web3 = new Web3(provider); - - try { - return await cb(web3); - } finally { - // provider.disconnect(3000, 'normal disconnect'); - provider.connection.close(); - web3Connected = false; - } -} - -function encodeIntBE(v: number): number[] { - if (v >= 0xffffffff || v < 0) throw new Error('id overflow'); - return [ - v >> 24, - (v >> 16) & 0xff, - (v >> 8) & 0xff, - v & 0xff, - ]; -} - -export async function getCollectionAddressFromResult(api: ApiPromise, result: any) { - const collectionIdAddress = normalizeAddress(result.events.CollectionCreated.returnValues.collectionId); - const collectionId = collectionIdFromAddress(collectionIdAddress); - const collection = (await getDetailedCollectionInfo(api, collectionId))!; - return {collectionIdAddress, collectionId, 
collection}; -} - -export function collectionIdToAddress(collection: number): string { - const buf = Buffer.from([0x17, 0xc4, 0xe6, 0x45, 0x3c, 0xc4, 0x9a, 0xaa, 0xae, 0xac, 0xa8, 0x94, 0xe6, 0xd9, 0x68, 0x3e, - ...encodeIntBE(collection), - ]); - return Web3.utils.toChecksumAddress('0x' + buf.toString('hex')); -} -export function collectionIdFromAddress(address: string): number { - if (!address.startsWith('0x')) - throw 'address not starts with "0x"'; - if (address.length > 42) - throw 'address length is more than 20 bytes'; - return Number('0x' + address.substring(address.length - 8)); -} - -export function normalizeAddress(address: string): string { - return '0x' + address.substring(address.length - 40); -} - -export function tokenIdToAddress(collection: number, token: number): string { - const buf = Buffer.from([0xf8, 0x23, 0x8c, 0xcf, 0xff, 0x8e, 0xd8, 0x87, 0x46, 0x3f, 0xd5, 0xe0, - ...encodeIntBE(collection), - ...encodeIntBE(token), - ]); - return Web3.utils.toChecksumAddress('0x' + buf.toString('hex')); -} -export function tokenIdToCross(collection: number, token: number): CrossAccountId { - return { - Ethereum: tokenIdToAddress(collection, token), - }; -} - -export function createEthAccount(web3: Web3) { - const account = web3.eth.accounts.create(); - web3.eth.accounts.wallet.add(account.privateKey); - return account.address; -} - -export async function createEthAccountWithBalance(api: ApiPromise, web3: Web3, privateKeyWrapper: (account: string) => IKeyringPair) { - const alice = privateKeyWrapper('//Alice'); - const account = createEthAccount(web3); - await transferBalanceToEth(api, alice, account); - - return account; -} - -export async function transferBalanceToEth(api: ApiPromise, source: IKeyringPair, target: string, amount = 1000n * UNIQUE) { - const tx = api.tx.balances.transfer(evmToAddress(target), amount); - const events = await submitTransactionAsync(source, tx); - const result = getGenericResult(events); - expect(result.success).to.be.true; -} - -export async function itWeb3(name: string, cb: (apis: { web3: Web3, api: ApiPromise, privateKeyWrapper: (account: string) => IKeyringPair }) => any, opts: { only?: boolean, skip?: boolean } = {}) { - let i: any = it; - if (opts.only) i = i.only; - else if (opts.skip) i = i.skip; - i(name, async () => { - await usingApi(async (api, privateKeyWrapper) => { - await usingWeb3(async web3 => { - await cb({api, web3, privateKeyWrapper}); - }); - }); - }); -} -itWeb3.only = (name: string, cb: (apis: { web3: Web3, api: ApiPromise, privateKeyWrapper: (account: string) => IKeyringPair }) => any) => itWeb3(name, cb, {only: true}); -itWeb3.skip = (name: string, cb: (apis: { web3: Web3, api: ApiPromise, privateKeyWrapper: (account: string) => IKeyringPair }) => any) => itWeb3(name, cb, {skip: true}); - -export async function generateSubstrateEthPair(web3: Web3) { - const account = web3.eth.accounts.create(); - evmToAddress(account.address); -} - -type NormalizedEvent = { - address: string, - event: string, - args: { [key: string]: string } -}; - -export function normalizeEvents(events: any): NormalizedEvent[] { - const output = []; - for (const key of Object.keys(events)) { - if (key.match(/^[0-9]+$/)) { - output.push(events[key]); - } else if (Array.isArray(events[key])) { - output.push(...events[key]); - } else { - output.push(events[key]); - } - } - output.sort((a, b) => a.logIndex - b.logIndex); - return output.map(({address, event, returnValues}) => { - const args: { [key: string]: string } = {}; - for (const key of 
Object.keys(returnValues)) { - if (!key.match(/^[0-9]+$/)) { - args[key] = returnValues[key]; - } - } - return { - address, - event, - args, - }; - }); -} - -export async function recordEvents(contract: any, action: () => Promise): Promise { - const out: any = []; - contract.events.allEvents((_: any, event: any) => { - out.push(event); - }); - await action(); - return normalizeEvents(out); -} - -export function subToEthLowercase(eth: string): string { - const bytes = addressToEvm(eth); - return '0x' + Buffer.from(bytes).toString('hex'); -} - -export function subToEth(eth: string): string { - return Web3.utils.toChecksumAddress(subToEthLowercase(eth)); -} - -export function compileContract(name: string, src: string) { - const out = JSON.parse(solc.compile(JSON.stringify({ - language: 'Solidity', - sources: { - [`${name}.sol`]: { - content: ` - // SPDX-License-Identifier: UNLICENSED - pragma solidity ^0.8.6; - - ${src} - `, - }, - }, - settings: { - outputSelection: { - '*': { - '*': ['*'], - }, - }, - }, - }))).contracts[`${name}.sol`][name]; - - return { - abi: out.abi, - object: '0x' + out.evm.bytecode.object, - }; -} - -export async function deployFlipper(web3: Web3, deployer: string) { - const compiled = compileContract('Flipper', ` - contract Flipper { - bool value = false; - function flip() public { - value = !value; - } - function getValue() public view returns (bool) { - return value; - } - } - `); - const flipperContract = new web3.eth.Contract(compiled.abi, undefined, { - data: compiled.object, - from: deployer, - ...GAS_ARGS, - }); - const flipper = await flipperContract.deploy({data: compiled.object}).send({from: deployer}); - - return flipper; -} - -export async function deployCollector(web3: Web3, deployer: string) { - const compiled = compileContract('Collector', ` - contract Collector { - uint256 collected; - fallback() external payable { - giveMoney(); - } - function giveMoney() public payable { - collected += msg.value; - } - function getCollected() public view returns (uint256) { - return collected; - } - function getUnaccounted() public view returns (uint256) { - return address(this).balance - collected; - } - - function withdraw(address payable target) public { - target.transfer(collected); - collected = 0; - } - } - `); - const collectorContract = new web3.eth.Contract(compiled.abi, undefined, { - data: compiled.object, - from: deployer, - ...GAS_ARGS, - }); - const collector = await collectorContract.deploy({data: compiled.object}).send({from: deployer}); - - return collector; -} - -/** - * pallet evm_contract_helpers - * @param web3 - * @param caller - eth address - * @returns - */ -export function contractHelpers(web3: Web3, caller: string) { - return new web3.eth.Contract(contractHelpersAbi as any, '0x842899ECF380553E8a4de75bF534cdf6fBF64049', {from: caller, ...GAS_ARGS}); -} - -/** - * evm collection helper - * @param web3 - * @param caller - eth address - * @returns - */ -export function evmCollectionHelpers(web3: Web3, caller: string) { - return new web3.eth.Contract(collectionHelpersAbi as any, '0x6c4e9fe1ae37a41e93cee429e8e1881abdcbb54f', {from: caller, ...GAS_ARGS}); -} - -/** - * evm collection - * @param web3 - * @param caller - eth address - * @returns - */ -export function evmCollection(web3: Web3, caller: string, collection: string) { - return new web3.eth.Contract(nonFungibleAbi as any, collection, {from: caller, ...GAS_ARGS}); -} - -/** - * Execute ethereum method call using substrate account - * @param to target contract - * @param mkTx - closure, 
receiving `contract.methods`, and returning method call, - * to be used as following (assuming `to` = erc20 contract): - * `m => m.transfer(to, amount)` - * - * # Example - * ```ts - * executeEthTxOnSub(api, alice, erc20Contract, m => m.transfer(target, amount)); - * ``` - */ -export async function executeEthTxOnSub(web3: Web3, api: ApiPromise, from: IKeyringPair, to: any, mkTx: (methods: any) => any, {value = 0}: {value?: bigint | number} = { }) { - const tx = api.tx.evm.call( - subToEth(from.address), - to.options.address, - mkTx(to.methods).encodeABI(), - value, - GAS_ARGS.gas, - await web3.eth.getGasPrice(), - null, - null, - [], - ); - const events = await submitTransactionAsync(from, tx); - expect(events.some(({event: {section, method}}) => section == 'evm' && method == 'Executed')).to.be.true; -} - -export async function ethBalanceViaSub(api: ApiPromise, address: string): Promise { - return (await getBalance(api, [evmToAddress(address)]))[0]; -} - -/** - * Measure how much gas given closure consumes - * - * @param user which user balance will be checked - */ -export async function recordEthFee(api: ApiPromise, user: string, call: () => Promise): Promise { - const before = await ethBalanceViaSub(api, user); - - await call(); - - // In dev mode, the transaction might not finish processing in time - await waitNewBlocks(api, 1); - const after = await ethBalanceViaSub(api, user); - - // Can't use .to.be.less, because chai doesn't supports bigint - expect(after < before).to.be.true; - - return before - after; -} - -type ElementOf = A extends readonly (infer T)[] ? T : never; -// I want a fancier api, not a memory efficiency -export function* cartesian>, R extends Array>(internalRest: [...R], ...args: [...T]): Generator<[...R, ...{[K in keyof T]: ElementOf}]> { - if(args.length === 0) { - yield internalRest as any; - return; - } - for(const value of args[0]) { - yield* cartesian([...internalRest, value], ...args.slice(1)) as any; - } -} \ No newline at end of file diff --git a/tests/src/eth/util/index.ts b/tests/src/eth/util/index.ts new file mode 100644 index 0000000000..67e9b6c881 --- /dev/null +++ b/tests/src/eth/util/index.ts @@ -0,0 +1,83 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. 
+// SPDX-License-Identifier: Apache-2.0 + +import * as path from 'path'; +import {IKeyringPair} from '@polkadot/types/types'; + +import config from '../../config'; + +import {EthUniqueHelper} from './playgrounds/unique.dev'; +import {SilentLogger, SilentConsole} from '../../util/playgrounds/unique.dev'; + +export {EthUniqueHelper} from './playgrounds/unique.dev'; + +import chai from 'chai'; +import chaiAsPromised from 'chai-as-promised'; +import chaiLike from 'chai-like'; +import {getTestSeed, MINIMUM_DONOR_FUND, requirePalletsOrSkip} from '../../util'; + +chai.use(chaiAsPromised); +chai.use(chaiLike); +export const expect = chai.expect; + +export enum SponsoringMode { + Disabled = 0, + Allowlisted = 1, + Generous = 2, +} + +export const usingEthPlaygrounds = async (code: (helper: EthUniqueHelper, privateKey: (seed: string | {filename: string}) => Promise) => Promise) => { + const silentConsole = new SilentConsole(); + silentConsole.enable(); + + const helper = new EthUniqueHelper(new SilentLogger()); + + try { + await helper.connect(config.substrateUrl); + await helper.connectWeb3(config.substrateUrl); + const ss58Format = helper.chain.getChainProperties().ss58Format; + const privateKey = async (seed: string | {filename: string}) => { + if (typeof seed === 'string') { + return helper.util.fromSeed(seed, ss58Format); + } + else { + const actualSeed = getTestSeed(seed.filename); + let account = helper.util.fromSeed(actualSeed, ss58Format); + if (await helper.balance.getSubstrate(account.address) < MINIMUM_DONOR_FUND) { + console.warn(`${path.basename(seed.filename)}: Not enough funds present on the filename account. Using the default one as the donor instead.`); + account = helper.util.fromSeed('//Alice', ss58Format); + } + return account; + } + }; + await code(helper, privateKey); + } + finally { + await helper.disconnect(); + silentConsole.disable(); + } +}; + +export async function itEth(name: string, cb: (apis: { helper: EthUniqueHelper, privateKey: (seed: string | {filename: string}) => Promise }) => any, opts: { only?: boolean, skip?: boolean, requiredPallets?: string[] } = {}) { + (opts.only ? it.only : + opts.skip ? 
it.skip : it)(name, async function() { + await usingEthPlaygrounds(async (helper, privateKey) => { + if (opts.requiredPallets) { + requirePalletsOrSkip(this, helper, opts.requiredPallets); + } + + await cb({helper, privateKey}); + }); + }); +} + +export async function itEthIfWithPallet(name: string, required: string[], cb: (apis: { helper: EthUniqueHelper, privateKey: (seed: string | {filename: string}) => Promise }) => any, opts: { only?: boolean, skip?: boolean, requiredPallets?: string[] } = {}) { + return itEth(name, cb, {requiredPallets: required, ...opts}); +} + +itEth.only = (name: string, cb: (apis: { helper: EthUniqueHelper, privateKey: (seed: string | {filename: string}) => Promise }) => any) => itEth(name, cb, {only: true}); +itEth.skip = (name: string, cb: (apis: { helper: EthUniqueHelper, privateKey: (seed: string | {filename: string}) => Promise }) => any) => itEth(name, cb, {skip: true}); + +itEthIfWithPallet.only = (name: string, required: string[], cb: (apis: { helper: EthUniqueHelper, privateKey: (seed: string | {filename: string}) => Promise }) => any) => itEthIfWithPallet(name, required, cb, {only: true}); +itEthIfWithPallet.skip = (name: string, required: string[], cb: (apis: { helper: EthUniqueHelper, privateKey: (seed: string | {filename: string}) => Promise }) => any) => itEthIfWithPallet(name, required, cb, {skip: true}); +itEth.ifWithPallets = itEthIfWithPallet; diff --git a/tests/src/eth/util/playgrounds/types.ts b/tests/src/eth/util/playgrounds/types.ts new file mode 100644 index 0000000000..eda8a191ca --- /dev/null +++ b/tests/src/eth/util/playgrounds/types.ts @@ -0,0 +1,15 @@ +export interface ContractImports { + solPath: string; + fsPath: string; +} + +export interface CompiledContract { + abi: any; + object: string; +} + +export type NormalizedEvent = { + address: string, + event: string, + args: { [key: string]: string } +}; diff --git a/tests/src/eth/util/helpers.d.ts b/tests/src/eth/util/playgrounds/unique.dev.d.ts similarity index 100% rename from tests/src/eth/util/helpers.d.ts rename to tests/src/eth/util/playgrounds/unique.dev.d.ts diff --git a/tests/src/eth/util/playgrounds/unique.dev.ts b/tests/src/eth/util/playgrounds/unique.dev.ts new file mode 100644 index 0000000000..f3c070336f --- /dev/null +++ b/tests/src/eth/util/playgrounds/unique.dev.ts @@ -0,0 +1,395 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. 
+// SPDX-License-Identifier: Apache-2.0 + +/* eslint-disable function-call-argument-newline */ +// eslint-disable-next-line @typescript-eslint/triple-slash-reference +/// + +import {readFile} from 'fs/promises'; + +import Web3 from 'web3'; +import {WebsocketProvider} from 'web3-core'; +import {Contract} from 'web3-eth-contract'; + +import * as solc from 'solc'; + +import {evmToAddress} from '@polkadot/util-crypto'; +import {IKeyringPair} from '@polkadot/types/types'; + +import {DevUniqueHelper} from '../../../util/playgrounds/unique.dev'; + +import {ContractImports, CompiledContract, NormalizedEvent} from './types'; + +// Native contracts ABI +import collectionHelpersAbi from '../../collectionHelpersAbi.json'; +import fungibleAbi from '../../fungibleAbi.json'; +import nonFungibleAbi from '../../nonFungibleAbi.json'; +import refungibleAbi from '../../reFungibleAbi.json'; +import refungibleTokenAbi from '../../reFungibleTokenAbi.json'; +import contractHelpersAbi from './../contractHelpersAbi.json'; +import {ICrossAccountId, TEthereumAccount} from '../../../util/playgrounds/types'; + +class EthGroupBase { + helper: EthUniqueHelper; + + constructor(helper: EthUniqueHelper) { + this.helper = helper; + } +} + + +class ContractGroup extends EthGroupBase { + async findImports(imports?: ContractImports[]){ + if(!imports) return function(path: string) { + return {error: `File not found: ${path}`}; + }; + + const knownImports = {} as {[key: string]: string}; + for(const imp of imports) { + knownImports[imp.solPath] = (await readFile(imp.fsPath)).toString(); + } + + return function(path: string) { + if(path in knownImports) return {contents: knownImports[path]}; + return {error: `File not found: ${path}`}; + }; + } + + async compile(name: string, src: string, imports?: ContractImports[]): Promise { + const out = JSON.parse(solc.compile(JSON.stringify({ + language: 'Solidity', + sources: { + [`${name}.sol`]: { + content: src, + }, + }, + settings: { + outputSelection: { + '*': { + '*': ['*'], + }, + }, + }, + }), {import: await this.findImports(imports)})).contracts[`${name}.sol`][name]; + + return { + abi: out.abi, + object: '0x' + out.evm.bytecode.object, + }; + } + + async deployByCode(signer: string, name: string, src: string, imports?: ContractImports[]): Promise { + const compiledContract = await this.compile(name, src, imports); + return this.deployByAbi(signer, compiledContract.abi, compiledContract.object); + } + + async deployByAbi(signer: string, abi: any, object: string): Promise { + const web3 = this.helper.getWeb3(); + const contract = new web3.eth.Contract(abi, undefined, { + data: object, + from: signer, + gas: this.helper.eth.DEFAULT_GAS, + }); + return await contract.deploy({data: object}).send({from: signer}); + } + +} + +class NativeContractGroup extends EthGroupBase { + + contractHelpers(caller: string): Contract { + const web3 = this.helper.getWeb3(); + return new web3.eth.Contract(contractHelpersAbi as any, '0x842899ECF380553E8a4de75bF534cdf6fBF64049', {from: caller, gas: this.helper.eth.DEFAULT_GAS}); + } + + collectionHelpers(caller: string) { + const web3 = this.helper.getWeb3(); + return new web3.eth.Contract(collectionHelpersAbi as any, '0x6c4e9fe1ae37a41e93cee429e8e1881abdcbb54f', {from: caller, gas: this.helper.eth.DEFAULT_GAS}); + } + + collection(address: string, mode: 'nft' | 'rft' | 'ft', caller?: string): Contract { + const abi = { + 'nft': nonFungibleAbi, + 'rft': refungibleAbi, + 'ft': fungibleAbi, + }[mode]; + const web3 = this.helper.getWeb3(); + return new 
web3.eth.Contract(abi as any, address, {gas: this.helper.eth.DEFAULT_GAS, ...(caller ? {from: caller} : {})}); + } + + collectionById(collectionId: number, mode: 'nft' | 'rft' | 'ft', caller?: string): Contract { + return this.collection(this.helper.ethAddress.fromCollectionId(collectionId), mode, caller); + } + + rftToken(address: string, caller?: string): Contract { + const web3 = this.helper.getWeb3(); + return new web3.eth.Contract(refungibleTokenAbi as any, address, {gas: this.helper.eth.DEFAULT_GAS, ...(caller ? {from: caller} : {})}); + } + + rftTokenById(collectionId: number, tokenId: number, caller?: string): Contract { + return this.rftToken(this.helper.ethAddress.fromTokenId(collectionId, tokenId), caller); + } +} + + +class EthGroup extends EthGroupBase { + DEFAULT_GAS = 2_500_000; + + createAccount() { + const web3 = this.helper.getWeb3(); + const account = web3.eth.accounts.create(); + web3.eth.accounts.wallet.add(account.privateKey); + return account.address; + } + + async createAccountWithBalance(donor: IKeyringPair, amount=100n) { + const account = this.createAccount(); + await this.transferBalanceFromSubstrate(donor, account, amount); + + return account; + } + + async transferBalanceFromSubstrate(donor: IKeyringPair, recipient: string, amount=100n, inTokens=true) { + return await this.helper.balance.transferToSubstrate(donor, evmToAddress(recipient), amount * (inTokens ? this.helper.balance.getOneTokenNominal() : 1n)); + } + + async getCollectionCreationFee(signer: string) { + const collectionHelper = this.helper.ethNativeContract.collectionHelpers(signer); + return await collectionHelper.methods.collectionCreationFee().call(); + } + + async sendEVM(signer: IKeyringPair, contractAddress: string, abi: string, value: string, gasLimit?: number) { + if(!gasLimit) gasLimit = this.DEFAULT_GAS; + const web3 = this.helper.getWeb3(); + const gasPrice = await web3.eth.getGasPrice(); + // TODO: check execution status + await this.helper.executeExtrinsic( + signer, + 'api.tx.evm.call', [this.helper.address.substrateToEth(signer.address), contractAddress, abi, value, gasLimit, gasPrice, null, null, []], + true, + ); + } + + async callEVM(signer: TEthereumAccount, contractAddress: string, abi: string) { + return await this.helper.callRpc('api.rpc.eth.call', [{from: signer, to: contractAddress, data: abi}]); + } + + async createNFTCollection(signer: string, name: string, description: string, tokenPrefix: string): Promise<{collectionId: number, collectionAddress: string}> { + const collectionCreationPrice = this.helper.balance.getCollectionCreationPrice(); + const collectionHelper = this.helper.ethNativeContract.collectionHelpers(signer); + + const result = await collectionHelper.methods.createNFTCollection(name, description, tokenPrefix).send({value: Number(collectionCreationPrice)}); + + const collectionAddress = this.helper.ethAddress.normalizeAddress(result.events.CollectionCreated.returnValues.collectionId); + const collectionId = this.helper.ethAddress.extractCollectionId(collectionAddress); + + return {collectionId, collectionAddress}; + } + + async createERC721MetadataCompatibleNFTCollection(signer: string, name: string, description: string, tokenPrefix: string, baseUri: string): Promise<{collectionId: number, collectionAddress: string}> { + const collectionHelper = this.helper.ethNativeContract.collectionHelpers(signer); + + const {collectionId, collectionAddress} = await this.createNFTCollection(signer, name, description, tokenPrefix); + + await
collectionHelper.methods.makeCollectionERC721MetadataCompatible(collectionAddress, baseUri).send(); + + return {collectionId, collectionAddress}; + } + + async createRFTCollection(signer: string, name: string, description: string, tokenPrefix: string): Promise<{collectionId: number, collectionAddress: string}> { + const collectionCreationPrice = this.helper.balance.getCollectionCreationPrice(); + const collectionHelper = this.helper.ethNativeContract.collectionHelpers(signer); + + const result = await collectionHelper.methods.createRFTCollection(name, description, tokenPrefix).send({value: Number(collectionCreationPrice)}); + + const collectionAddress = this.helper.ethAddress.normalizeAddress(result.events.CollectionCreated.returnValues.collectionId); + const collectionId = this.helper.ethAddress.extractCollectionId(collectionAddress); + + return {collectionId, collectionAddress}; + } + + async createERC721MetadataCompatibleRFTCollection(signer: string, name: string, description: string, tokenPrefix: string, baseUri: string): Promise<{collectionId: number, collectionAddress: string}> { + const collectionHelper = this.helper.ethNativeContract.collectionHelpers(signer); + + const {collectionId, collectionAddress} = await this.createRFTCollection(signer, name, description, tokenPrefix); + + await collectionHelper.methods.makeCollectionERC721MetadataCompatible(collectionAddress, baseUri).send(); + + return {collectionId, collectionAddress}; + } + + async deployCollectorContract(signer: string): Promise { + return await this.helper.ethContract.deployByCode(signer, 'Collector', ` + // SPDX-License-Identifier: UNLICENSED + pragma solidity ^0.8.6; + + contract Collector { + uint256 collected; + fallback() external payable { + giveMoney(); + } + function giveMoney() public payable { + collected += msg.value; + } + function getCollected() public view returns (uint256) { + return collected; + } + function getUnaccounted() public view returns (uint256) { + return address(this).balance - collected; + } + + function withdraw(address payable target) public { + target.transfer(collected); + collected = 0; + } + } + `); + } + + async deployFlipper(signer: string): Promise { + return await this.helper.ethContract.deployByCode(signer, 'Flipper', ` + // SPDX-License-Identifier: UNLICENSED + pragma solidity ^0.8.6; + + contract Flipper { + bool value = false; + function flip() public { + value = !value; + } + function getValue() public view returns (bool) { + return value; + } + } + `); + } + + async recordCallFee(user: string, call: () => Promise): Promise { + const before = await this.helper.balance.getEthereum(user); + await call(); + // In dev mode, the transaction might not finish processing in time + await this.helper.wait.newBlocks(1); + const after = await this.helper.balance.getEthereum(user); + + return before - after; + } + + normalizeEvents(events: any): NormalizedEvent[] { + const output = []; + for (const key of Object.keys(events)) { + if (key.match(/^[0-9]+$/)) { + output.push(events[key]); + } else if (Array.isArray(events[key])) { + output.push(...events[key]); + } else { + output.push(events[key]); + } + } + output.sort((a, b) => a.logIndex - b.logIndex); + return output.map(({address, event, returnValues}) => { + const args: { [key: string]: string } = {}; + for (const key of Object.keys(returnValues)) { + if (!key.match(/^[0-9]+$/)) { + args[key] = returnValues[key]; + } + } + return { + address, + event, + args, + }; + }); + } + + async calculateFee(address: ICrossAccountId, code: () => 
Promise): Promise { + const wrappedCode = async () => { + await code(); + // In dev mode, the transaction might not finish processing in time + await this.helper.wait.newBlocks(1); + }; + return await this.helper.arrange.calculcateFee(address, wrappedCode); + } +} + +class EthAddressGroup extends EthGroupBase { + extractCollectionId(address: string): number { + if (!(address.length === 42 || address.length === 40)) throw new Error('address wrong format'); + return parseInt(address.substr(address.length - 8), 16); + } + + fromCollectionId(collectionId: number): string { + if (collectionId >= 0xffffffff || collectionId < 0) throw new Error('collectionId overflow'); + return Web3.utils.toChecksumAddress(`0x17c4e6453cc49aaaaeaca894e6d9683e${collectionId.toString(16).padStart(8, '0')}`); + } + + extractTokenId(address: string): {collectionId: number, tokenId: number} { + if (!address.startsWith('0x')) + throw 'address not starts with "0x"'; + if (address.length > 42) + throw 'address length is more than 20 bytes'; + return { + collectionId: Number('0x' + address.substring(address.length - 16, address.length - 8)), + tokenId: Number('0x' + address.substring(address.length - 8)), + }; + } + + fromTokenId(collectionId: number, tokenId: number): string { + return this.helper.util.getTokenAddress({collectionId, tokenId}); + } + + normalizeAddress(address: string): string { + return '0x' + address.substring(address.length - 40); + } +} + +export type EthUniqueHelperConstructor = new (...args: any[]) => EthUniqueHelper; + +export class EthUniqueHelper extends DevUniqueHelper { + web3: Web3 | null = null; + web3Provider: WebsocketProvider | null = null; + + eth: EthGroup; + ethAddress: EthAddressGroup; + ethNativeContract: NativeContractGroup; + ethContract: ContractGroup; + + constructor(logger: { log: (msg: any, level: any) => void, level: any }, options: {[key: string]: any} = {}) { + options.helperBase = options.helperBase ?? EthUniqueHelper; + + super(logger, options); + this.eth = new EthGroup(this); + this.ethAddress = new EthAddressGroup(this); + this.ethNativeContract = new NativeContractGroup(this); + this.ethContract = new ContractGroup(this); + } + + getWeb3(): Web3 { + if(this.web3 === null) throw Error('Web3 not connected'); + return this.web3; + } + + async connectWeb3(wsEndpoint: string) { + if(this.web3 !== null) return; + this.web3Provider = new Web3.providers.WebsocketProvider(wsEndpoint); + this.web3 = new Web3(this.web3Provider); + } + + async disconnect() { + if(this.web3 === null) return; + this.web3Provider?.connection.close(); + + await super.disconnect(); + } + + clearApi() { + super.clearApi(); + this.web3 = null; + } + + clone(helperCls: EthUniqueHelperConstructor, options?: { [key: string]: any; }): EthUniqueHelper { + const newHelper = super.clone(helperCls, options) as EthUniqueHelper; + newHelper.web3 = this.web3; + newHelper.web3Provider = this.web3Provider; + + return newHelper; + } +} diff --git a/tests/src/fungible.test.ts b/tests/src/fungible.test.ts new file mode 100644 index 0000000000..e461dab541 --- /dev/null +++ b/tests/src/fungible.test.ts @@ -0,0 +1,147 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. 
+ +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +import {IKeyringPair} from '@polkadot/types/types'; +import {itSub, usingPlaygrounds, expect} from './util'; + +const U128_MAX = (1n << 128n) - 1n; + +describe('integration test: Fungible functionality:', () => { + let donor: IKeyringPair; + let alice: IKeyringPair; + let bob: IKeyringPair; + + before(async () => { + await usingPlaygrounds(async (helper, privateKey) => { + donor = await privateKey({filename: __filename}); + [alice, bob] = await helper.arrange.createAccounts([100n, 10n], donor); + }); + }); + + itSub('Create fungible collection and token', async ({helper}) => { + const collection = await helper.ft.mintCollection(alice, {name: 'test', description: 'test', tokenPrefix: 'trest'}); + const defaultTokenId = await collection.getLastTokenId(); + expect(defaultTokenId).to.be.equal(0); + + await collection.mint(alice, U128_MAX); + const aliceBalance = await collection.getBalance({Substrate: alice.address}); + const itemCountAfter = await collection.getLastTokenId(); + + expect(itemCountAfter).to.be.equal(defaultTokenId); + expect(aliceBalance).to.be.equal(U128_MAX); + }); + + itSub('RPC method tokenOwners for fungible collection and token', async ({helper}) => { + const ethAcc = {Ethereum: '0x67fb3503a61b284dc83fa96dceec4192db47dc7c'}; + const facelessCrowd = (await helper.arrange.createAccounts(Array(7).fill(0n), donor)).map(keyring => {return {Substrate: keyring.address};}); + + const collection = await helper.ft.mintCollection(alice, {name: 'test', description: 'test', tokenPrefix: 'test'}); + + await collection.mint(alice, U128_MAX); + + await collection.transfer(alice, {Substrate: bob.address}, 1000n); + await collection.transfer(alice, ethAcc, 900n); + + for (let i = 0; i < 7; i++) { + await collection.transfer(alice, facelessCrowd[i], 1n); + } + + const owners = await collection.getTop10Owners(); + + // What to expect + expect(owners).to.deep.include.members([{Substrate: alice.address}, ethAcc, {Substrate: bob.address}, ...facelessCrowd]); + expect(owners.length).to.be.equal(10); + + const [eleven] = await helper.arrange.createAccounts([0n], donor); + expect(await collection.transfer(alice, {Substrate: eleven.address}, 10n)).to.be.true; + expect((await collection.getTop10Owners()).length).to.be.equal(10); + }); + + itSub('Transfer token', async ({helper}) => { + const ethAcc = {Ethereum: '0x67fb3503a61b284dc83fa96dceec4192db47dc7c'}; + const collection = await helper.ft.mintCollection(alice, {name: 'test', description: 'test', tokenPrefix: 'test'}); + await collection.mint(alice, 500n); + + expect(await collection.getBalance({Substrate: alice.address})).to.be.equal(500n); + expect(await collection.transfer(alice, {Substrate: bob.address}, 60n)).to.be.true; + expect(await collection.transfer(alice, ethAcc, 140n)).to.be.true; + + expect(await collection.getBalance({Substrate: alice.address})).to.be.equal(300n); + expect(await collection.getBalance({Substrate: bob.address})).to.be.equal(60n); + expect(await collection.getBalance(ethAcc)).to.be.equal(140n); + + await expect(collection.transfer(alice, {Substrate: bob.address}, 350n)).to.eventually.be.rejectedWith(/common\.TokenValueTooLow/); + }); + + itSub('Tokens
multiple creation', async ({helper}) => { + const collection = await helper.ft.mintCollection(alice, {name: 'test', description: 'test', tokenPrefix: 'test'}); + + await collection.mintWithOneOwner(alice, [ + {value: 500n}, + {value: 400n}, + {value: 300n}, + ]); + + expect(await collection.getBalance({Substrate: alice.address})).to.be.equal(1200n); + }); + + itSub('Burn some tokens ', async ({helper}) => { + const collection = await helper.ft.mintCollection(alice, {name: 'test', description: 'test', tokenPrefix: 'test'}); + await collection.mint(alice, 500n); + + expect(await collection.doesTokenExist(0)).to.be.true; + expect(await collection.getBalance({Substrate: alice.address})).to.be.equal(500n); + expect(await collection.burnTokens(alice, 499n)).to.be.true; + expect(await collection.doesTokenExist(0)).to.be.true; + expect(await collection.getBalance({Substrate: alice.address})).to.be.equal(1n); + }); + + itSub('Burn all tokens ', async ({helper}) => { + const collection = await helper.ft.mintCollection(alice, {name: 'test', description: 'test', tokenPrefix: 'test'}); + await collection.mint(alice, 500n); + + expect(await collection.doesTokenExist(0)).to.be.true; + expect(await collection.burnTokens(alice, 500n)).to.be.true; + expect(await collection.doesTokenExist(0)).to.be.true; + + expect(await collection.getBalance({Substrate: alice.address})).to.be.equal(0n); + expect(await collection.getTotalPieces()).to.be.equal(0n); + }); + + itSub('Set allowance for token', async ({helper}) => { + const collection = await helper.ft.mintCollection(alice, {name: 'test', description: 'test', tokenPrefix: 'test'}); + const ethAcc = {Ethereum: '0x67fb3503a61b284dc83fa96dceec4192db47dc7c'}; + await collection.mint(alice, 100n); + + expect(await collection.getBalance({Substrate: alice.address})).to.be.equal(100n); + + expect(await collection.approveTokens(alice, {Substrate: bob.address}, 60n)).to.be.true; + expect(await collection.getApprovedTokens({Substrate: alice.address}, {Substrate: bob.address})).to.be.equal(60n); + expect(await collection.getBalance({Substrate: bob.address})).to.be.equal(0n); + + expect(await collection.transferFrom(bob, {Substrate: alice.address}, {Substrate: bob.address}, 20n)).to.be.true; + expect(await collection.getBalance({Substrate: alice.address})).to.be.equal(80n); + expect(await collection.getBalance({Substrate: bob.address})).to.be.equal(20n); + expect(await collection.getApprovedTokens({Substrate: alice.address}, {Substrate: bob.address})).to.be.equal(40n); + + await collection.burnTokensFrom(bob, {Substrate: alice.address}, 10n); + + expect(await collection.getBalance({Substrate: alice.address})).to.be.equal(70n); + expect(await collection.getApprovedTokens({Substrate: alice.address}, {Substrate: bob.address})).to.be.equal(30n); + expect(await collection.transferFrom(bob, {Substrate: alice.address}, ethAcc, 10n)).to.be.true; + expect(await collection.getBalance(ethAcc)).to.be.equal(10n); + }); +}); diff --git a/tests/src/getPropertiesRpc.test.ts b/tests/src/getPropertiesRpc.test.ts new file mode 100644 index 0000000000..19fd9a7a06 --- /dev/null +++ b/tests/src/getPropertiesRpc.test.ts @@ -0,0 +1,126 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. 
+ +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +import {IKeyringPair} from '@polkadot/types/types'; +import {itSub, usingPlaygrounds, expect} from './util'; +import {UniqueHelper, UniqueNFTCollection} from './util/playgrounds/unique'; + +const collectionProps = [ + {key: 'col-0', value: 'col-0-value'}, + {key: 'col-1', value: 'col-1-value'}, +]; + +const tokenProps = [ + {key: 'tok-0', value: 'tok-0-value'}, + {key: 'tok-1', value: 'tok-1-value'}, +]; + +const tokPropPermission = { + mutable: false, + tokenOwner: true, + collectionAdmin: false, +}; + +const tokenPropPermissions = [ + { + key: 'tok-0', + permission: tokPropPermission, + }, + { + key: 'tok-1', + permission: tokPropPermission, + }, +]; + +describe('query properties RPC', () => { + let alice: IKeyringPair; + + const mintCollection = async (helper: UniqueHelper) => { + return await helper.nft.mintCollection(alice, { + tokenPrefix: 'prps', + properties: collectionProps, + tokenPropertyPermissions: tokenPropPermissions, + }); + }; + + const mintToken = async (collection: UniqueNFTCollection) => { + return await collection.mintToken(alice, {Substrate: alice.address}, tokenProps); + }; + + + before(async () => { + await usingPlaygrounds(async (_, privateKey) => { + alice = await privateKey({filename: __filename}); + }); + }); + + itSub('query empty collection key set', async ({helper}) => { + const collection = await mintCollection(helper); + const props = await collection.getProperties([]); + expect(props).to.be.empty; + }); + + itSub('query empty token key set', async ({helper}) => { + const collection = await mintCollection(helper); + const token = await mintToken(collection); + const props = await token.getProperties([]); + expect(props).to.be.empty; + }); + + itSub('query empty token key permissions set', async ({helper}) => { + const collection = await mintCollection(helper); + const propPermissions = await collection.getPropertyPermissions([]); + expect(propPermissions).to.be.empty; + }); + + itSub('query all collection props by null arg', async ({helper}) => { + const collection = await mintCollection(helper); + const props = await collection.getProperties(null); + expect(props).to.be.deep.equal(collectionProps); + }); + + itSub('query all token props by null arg', async ({helper}) => { + const collection = await mintCollection(helper); + const token = await mintToken(collection); + const props = await token.getProperties(null); + expect(props).to.be.deep.equal(tokenProps); + }); + + itSub('query empty token key permissions by null arg', async ({helper}) => { + const collection = await mintCollection(helper); + const propPermissions = await collection.getPropertyPermissions(null); + expect(propPermissions).to.be.deep.equal(tokenPropPermissions); + }); + + itSub('query all collection props by undefined arg', async ({helper}) => { + const collection = await mintCollection(helper); + const props = await collection.getProperties(); + expect(props).to.be.deep.equal(collectionProps); + }); + + itSub('query all token props by undefined arg', async ({helper}) => { + const collection = await mintCollection(helper); + const token = await mintToken(collection); + const props = await token.getProperties(); + 
expect(props).to.be.deep.equal(tokenProps); + }); + + itSub('query empty token key permissions by undefined arg', async ({helper}) => { + const collection = await mintCollection(helper); + const propPermissions = await collection.getPropertyPermissions(); + expect(propPermissions).to.be.deep.equal(tokenPropPermissions); + }); +}); diff --git a/tests/src/inflation.test.ts b/tests/src/inflation.test.ts index 9e8f4990e3..78e5131f18 100644 --- a/tests/src/inflation.test.ts +++ b/tests/src/inflation.test.ts @@ -14,42 +14,45 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . -import chai from 'chai'; -import chaiAsPromised from 'chai-as-promised'; -import {default as usingApi, submitTransactionAsync, submitTransactionExpectFailAsync} from './substrate/substrate-api'; - -chai.use(chaiAsPromised); -const expect = chai.expect; +import {IKeyringPair} from '@polkadot/types/types'; +import {expect, itSub, usingPlaygrounds} from './util'; +// todo:playgrounds requires sudo, look into on the later stage describe('integration test: Inflation', () => { - it('First year inflation is 10%', async () => { - await usingApi(async (api, privateKeyWrapper) => { + let superuser: IKeyringPair; - // Make sure non-sudo can't start inflation - const tx = api.tx.inflation.startInflation(1); - const bob = privateKeyWrapper('//Bob'); - await expect(submitTransactionExpectFailAsync(bob, tx)).to.be.rejected; + before(async () => { + await usingPlaygrounds(async (_, privateKey) => { + superuser = await privateKey('//Alice'); + }); + }); + + itSub('First year inflation is 10%', async ({helper}) => { + // Make sure non-sudo can't start inflation + const [bob] = await helper.arrange.createAccounts([10n], superuser); - // Start inflation on relay block 1 (Alice is sudo) - const alice = privateKeyWrapper('//Alice'); - const sudoTx = api.tx.sudo.sudo(tx as any); - await submitTransactionAsync(alice, sudoTx); + await expect(helper.executeExtrinsic(bob, 'api.tx.inflation.startInflation', [1])).to.be.rejectedWith(/BadOrigin/); - const blockInterval = (api.consts.inflation.inflationBlockInterval).toBigInt(); - const totalIssuanceStart = (await api.query.inflation.startingYearTotalIssuance()).toBigInt(); - const blockInflation = (await api.query.inflation.blockInflation()).toBigInt(); + // Make sure superuser can't start inflation without explicit sudo + await expect(helper.executeExtrinsic(superuser, 'api.tx.inflation.startInflation', [1])).to.be.rejectedWith(/BadOrigin/); - const YEAR = 5259600n; // 6-second block. Blocks in one year - // const YEAR = 2629800n; // 12-second block. 
Blocks in one year + // Start inflation on relay block 1 (Alice is sudo) + const tx = helper.constructApiCall('api.tx.inflation.startInflation', [1]); + await expect(helper.executeExtrinsic(superuser, 'api.tx.sudo.sudo', [tx])).to.not.be.rejected; - const totalExpectedInflation = totalIssuanceStart / 10n; - const totalActualInflation = blockInflation * YEAR / blockInterval; + const blockInterval = (helper.getApi().consts.inflation.inflationBlockInterval as any).toBigInt(); + const totalIssuanceStart = ((await helper.callRpc('api.query.inflation.startingYearTotalIssuance', [])) as any).toBigInt(); + const blockInflation = (await helper.callRpc('api.query.inflation.blockInflation', []) as any).toBigInt(); - const tolerance = 0.00001; // Relative difference per year between theoretical and actual inflation - const expectedInflation = totalExpectedInflation / totalActualInflation - 1n; + const YEAR = 5259600n; // 6-second block. Blocks in one year + // const YEAR = 2629800n; // 12-second block. Blocks in one year - expect(Math.abs(Number(expectedInflation))).to.be.lessThanOrEqual(tolerance); - }); - }); + const totalExpectedInflation = totalIssuanceStart / 10n; + const totalActualInflation = blockInflation * YEAR / blockInterval; + + const tolerance = 0.00001; // Relative difference per year between theoretical and actual inflation + const expectedInflation = totalExpectedInflation / totalActualInflation - 1n; + expect(Math.abs(Number(expectedInflation))).to.be.lessThanOrEqual(tolerance); + }); }); diff --git a/tests/src/interfaces/appPromotion/definitions.ts b/tests/src/interfaces/appPromotion/definitions.ts new file mode 100644 index 0000000000..87e1e6d8a0 --- /dev/null +++ b/tests/src/interfaces/appPromotion/definitions.ts @@ -0,0 +1,58 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . 
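+
+// These definitions describe the app-promotion pallet's custom RPC methods so that the
+// polkadot.js API can decorate them as typed `api.rpc.*` calls. A minimal usage sketch,
+// assuming this `rpc` section is registered under the `appPromotion` module when the
+// test API/helper is constructed (the variable names below are illustrative only):
+//
+//   const totalStaked = await api.rpc.appPromotion.totalStaked(stakerCrossAccountId);
+//   const unstakes = await api.rpc.appPromotion.pendingUnstakePerBlock(stakerCrossAccountId);
+//
+// Every method also accepts an optional trailing `at` block hash, appended via `atParam` below.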
+ +type RpcParam = { + name: string; + type: string; + isOptional?: true; +}; + +const CROSS_ACCOUNT_ID_TYPE = 'PalletEvmAccountBasicCrossAccountIdRepr'; + +const crossAccountParam = (name = 'account') => ({name, type: CROSS_ACCOUNT_ID_TYPE}); +const atParam = {name: 'at', type: 'Hash', isOptional: true}; + +const fun = (description: string, params: RpcParam[], type: string) => ({ + description, + params: [...params, atParam], + type, +}); + +export default { + types: {}, + rpc: { + totalStaked: fun( + 'Returns the total amount of staked tokens', + [{name: 'staker', type: CROSS_ACCOUNT_ID_TYPE, isOptional: true}], + 'u128', + ), + totalStakedPerBlock: fun( + 'Returns the total amount of staked tokens per block when staked', + [crossAccountParam('staker')], + 'Vec<(u32, u128)>', + ), + pendingUnstake: fun( + 'Returns the total amount of unstaked tokens', + [{name: 'staker', type: CROSS_ACCOUNT_ID_TYPE, isOptional: true}], + 'u128', + ), + pendingUnstakePerBlock: fun( + 'Returns the total amount of unstaked tokens per block', + [crossAccountParam('staker')], + 'Vec<(u32, u128)>', + ), + }, +}; diff --git a/tests/src/interfaces/appPromotion/index.ts b/tests/src/interfaces/appPromotion/index.ts new file mode 100644 index 0000000000..2d307291c3 --- /dev/null +++ b/tests/src/interfaces/appPromotion/index.ts @@ -0,0 +1,4 @@ +// Auto-generated via `yarn polkadot-types-from-defs`, do not edit +/* eslint-disable */ + +export * from './types'; diff --git a/tests/src/interfaces/appPromotion/types.ts b/tests/src/interfaces/appPromotion/types.ts new file mode 100644 index 0000000000..1aaf7f0000 --- /dev/null +++ b/tests/src/interfaces/appPromotion/types.ts @@ -0,0 +1,4 @@ +// Auto-generated via `yarn polkadot-types-from-defs`, do not edit +/* eslint-disable */ + +export type PHANTOM_APPPROMOTION = 'appPromotion'; diff --git a/tests/src/interfaces/augment-api-consts.ts b/tests/src/interfaces/augment-api-consts.ts index 902377851a..a8f8862d3b 100644 --- a/tests/src/interfaces/augment-api-consts.ts +++ b/tests/src/interfaces/augment-api-consts.ts @@ -1,14 +1,46 @@ // Auto-generated via `yarn polkadot-types-from-chain`, do not edit /* eslint-disable */ -import type { ApiTypes } from '@polkadot/api-base/types'; +// import type lookup before we augment - in some environments +// this is required to allow for ambient/previous definitions +import '@polkadot/api-base/types/consts'; + +import type { ApiTypes, AugmentedConst } from '@polkadot/api-base/types'; import type { Option, u128, u16, u32, u64, u8 } from '@polkadot/types-codec'; import type { Codec } from '@polkadot/types-codec/types'; -import type { Permill } from '@polkadot/types/interfaces/runtime'; -import type { FrameSupportPalletId, FrameSupportWeightsRuntimeDbWeight, FrameSystemLimitsBlockLength, FrameSystemLimitsBlockWeights, SpVersionRuntimeVersion } from '@polkadot/types/lookup'; +import type { Perbill, Permill, Weight } from '@polkadot/types/interfaces/runtime'; +import type { FrameSupportPalletId, FrameSystemLimitsBlockLength, FrameSystemLimitsBlockWeights, SpVersionRuntimeVersion, SpWeightsRuntimeDbWeight, XcmV1MultiLocation } from '@polkadot/types/lookup'; + +export type __AugmentedConst = AugmentedConst; declare module '@polkadot/api-base/types/consts' { - export interface AugmentedConsts { + interface AugmentedConsts { + appPromotion: { + /** + * Rate of return for interval in blocks defined in `RecalculationInterval`. + **/ + intervalIncome: Perbill & AugmentedConst; + /** + * Decimals for the `Currency`. 
+ **/ + nominal: u128 & AugmentedConst; + /** + * The app's pallet id, used for deriving its sovereign account address. + **/ + palletId: FrameSupportPalletId & AugmentedConst; + /** + * In parachain blocks. + **/ + pendingInterval: u32 & AugmentedConst; + /** + * In relay blocks. + **/ + recalculationInterval: u32 & AugmentedConst; + /** + * Generic const + **/ + [key: string]: Codec; + }; balances: { /** * The minimum amount required to keep an account open. @@ -29,13 +61,27 @@ declare module '@polkadot/api-base/types/consts' { [key: string]: Codec; }; common: { + /** + * Maximum admins per collection. + **/ collectionAdminsLimit: u32 & AugmentedConst; + /** + * Set price to create a collection. + **/ collectionCreationPrice: u128 & AugmentedConst; /** * Generic const **/ [key: string]: Codec; }; + configuration: { + defaultMinGasPrice: u64 & AugmentedConst; + defaultWeightToFeeCoefficient: u32 & AugmentedConst; + /** + * Generic const + **/ + [key: string]: Codec; + }; inflation: { /** * Number of blocks that pass between treasury balance updates due to inflation @@ -51,7 +97,7 @@ declare module '@polkadot/api-base/types/consts' { * The maximum weight that may be scheduled per block for any dispatchables of less * priority than `schedule::HARD_DEADLINE`. **/ - maximumWeight: u64 & AugmentedConst; + maximumWeight: Weight & AugmentedConst; /** * The maximum number of scheduled calls in the queue for a single block. * Not strictly enforced, but used for weight estimation. @@ -78,9 +124,9 @@ declare module '@polkadot/api-base/types/consts' { /** * The weight of runtime database operations the runtime can invoke. **/ - dbWeight: FrameSupportWeightsRuntimeDbWeight & AugmentedConst; + dbWeight: SpWeightsRuntimeDbWeight & AugmentedConst; /** - * The designated SS85 prefix of this chain. + * The designated SS58 prefix of this chain. * * This replaces the "ss58Format" property declared in the chain spec. Reason is * that the runtime should know about the prefix in order to make use of it as @@ -109,6 +155,17 @@ declare module '@polkadot/api-base/types/consts' { **/ [key: string]: Codec; }; + tokens: { + maxLocks: u32 & AugmentedConst; + /** + * The maximum number of named reserves that can exist on an account. + **/ + maxReserves: u32 & AugmentedConst; + /** + * Generic const + **/ + [key: string]: Codec; + }; transactionPayment: { /** * A fee mulitplier for `Operational` extrinsics to compute "virtual tip" to boost their @@ -186,5 +243,22 @@ declare module '@polkadot/api-base/types/consts' { **/ [key: string]: Codec; }; + xTokens: { + /** + * Base XCM weight. + * + * The actually weight for an XCM message is `T::BaseXcmWeight + + * T::Weigher::weight(&msg)`. + **/ + baseXcmWeight: u64 & AugmentedConst; + /** + * Self chain location. 
+ **/ + selfLocation: XcmV1MultiLocation & AugmentedConst; + /** + * Generic const + **/ + [key: string]: Codec; + }; } // AugmentedConsts } // declare module diff --git a/tests/src/interfaces/augment-api-errors.ts b/tests/src/interfaces/augment-api-errors.ts index d6b6b2cedf..2b43272d31 100644 --- a/tests/src/interfaces/augment-api-errors.ts +++ b/tests/src/interfaces/augment-api-errors.ts @@ -1,10 +1,46 @@ // Auto-generated via `yarn polkadot-types-from-chain`, do not edit /* eslint-disable */ -import type { ApiTypes } from '@polkadot/api-base/types'; +// import type lookup before we augment - in some environments +// this is required to allow for ambient/previous definitions +import '@polkadot/api-base/types/errors'; + +import type { ApiTypes, AugmentedError } from '@polkadot/api-base/types'; + +export type __AugmentedError = AugmentedError; declare module '@polkadot/api-base/types/errors' { - export interface AugmentedErrors { + interface AugmentedErrors { + appPromotion: { + /** + * Error due to action requiring admin to be set. + **/ + AdminNotSet: AugmentedError; + /** + * Errors caused by incorrect actions with a locked balance. + **/ + IncorrectLockedBalanceOperation: AugmentedError; + /** + * No permission to perform an action. + **/ + NoPermission: AugmentedError; + /** + * Insufficient funds to perform an action. + **/ + NotSufficientFunds: AugmentedError; + /** + * Occurs when a pending unstake cannot be added in this block. PENDING_LIMIT_PER_BLOCK` limits exceeded. + **/ + PendingForBlockOverflow: AugmentedError; + /** + * The error is due to the fact that the collection/contract must already be sponsored in order to perform the action. + **/ + SponsorNotSet: AugmentedError; + /** + * Generic error + **/ + [key: string]: AugmentedError; + }; balances: { /** * Beneficiary account must pre-exist @@ -57,7 +93,7 @@ declare module '@polkadot/api-base/types/errors' { **/ AddressNotInAllowlist: AugmentedError; /** - * Requested value more than approved. + * Requested value is more than the approved **/ ApprovedValueTooLow: AugmentedError; /** @@ -113,7 +149,7 @@ declare module '@polkadot/api-base/types/errors' { **/ EmptyPropertyKey: AugmentedError; /** - * Only ASCII letters, digits, and '_', '-' are allowed + * Only ASCII letters, digits, and symbols `_`, `-`, and `.` are allowed **/ InvalidCharacterInPropertyKey: AugmentedError; /** @@ -133,7 +169,7 @@ declare module '@polkadot/api-base/types/errors' { **/ NoSpaceForProperty: AugmentedError; /** - * Not sufficient funds to perform action + * Insufficient funds to perform an action **/ NotSufficientFounds: AugmentedError; /** @@ -153,15 +189,15 @@ declare module '@polkadot/api-base/types/errors' { **/ PublicMintingNotAllowed: AugmentedError; /** - * Only tokens from specific collections may nest tokens under this + * Only tokens from specific collections may nest tokens under this one **/ SourceCollectionIsNotAllowedToNest: AugmentedError; /** - * Item not exists. + * Item does not exist **/ TokenNotFound: AugmentedError; /** - * Item balance not enough. 
+ * Item is balance not enough **/ TokenValueTooLow: AugmentedError; /** @@ -173,11 +209,11 @@ declare module '@polkadot/api-base/types/errors' { **/ TransferNotAllowed: AugmentedError; /** - * Target collection doesn't supports this operation + * The operation is not supported **/ UnsupportedOperation: AugmentedError; /** - * User not passed nesting rule + * User does not satisfy the nesting rule **/ UserIsNotAllowedToNest: AugmentedError; /** @@ -259,33 +295,70 @@ declare module '@polkadot/api-base/types/errors' { }; evmContractHelpers: { /** - * This method is only executable by owner + * No pending sponsor for contract. + **/ + NoPendingSponsor: AugmentedError; + /** + * This method is only executable by contract owner **/ NoPermission: AugmentedError; + /** + * Number of methods that sponsored limit is defined for exceeds maximum. + **/ + TooManyMethodsHaveSponsoredLimit: AugmentedError; /** * Generic error **/ [key: string]: AugmentedError; }; evmMigration: { + /** + * Migration of this account is not yet started, or already finished. + **/ AccountIsNotMigrating: AugmentedError; + /** + * Can only migrate to empty address. + **/ AccountNotEmpty: AugmentedError; /** * Generic error **/ [key: string]: AugmentedError; }; + foreignAssets: { + /** + * AssetId exists + **/ + AssetIdExisted: AugmentedError; + /** + * AssetId not exists + **/ + AssetIdNotExists: AugmentedError; + /** + * The given location could not be used (e.g. because it cannot be expressed in the + * desired version of XCM). + **/ + BadLocation: AugmentedError; + /** + * MultiLocation existed + **/ + MultiLocationExisted: AugmentedError; + /** + * Generic error + **/ + [key: string]: AugmentedError; + }; fungible: { /** - * Fungible token does not support nested + * Fungible token does not support nesting. **/ FungibleDisallowsNesting: AugmentedError; /** - * Tried to set data for fungible item + * Tried to set data for fungible item. **/ FungibleItemsDontHaveData: AugmentedError; /** - * Not default id passed as TokenId argument + * Fungible tokens hold no ID, and the default value of TokenId for Fungible collection is 0. **/ FungibleItemsHaveNoId: AugmentedError; /** @@ -293,7 +366,7 @@ declare module '@polkadot/api-base/types/errors' { **/ NotFungibleDataUsedToMintFungibleCollectionToken: AugmentedError; /** - * Setting item properties is not allowed + * Setting item properties is not allowed. **/ SettingPropertiesNotAllowed: AugmentedError; /** @@ -431,19 +504,19 @@ declare module '@polkadot/api-base/types/errors' { **/ NotRefungibleDataUsedToMintFungibleCollectionToken: AugmentedError; /** - * Refungible token can't nest other tokens + * Refungible token can't nest other tokens. **/ RefungibleDisallowsNesting: AugmentedError; /** - * Refungible token can't be repartitioned by user who isn't owns all pieces + * Refungible token can't be repartitioned by user who isn't owns all pieces. **/ RepartitionWhileNotOwningAllPieces: AugmentedError; /** - * Setting item properties is not allowed + * Setting item properties is not allowed. **/ SettingPropertiesNotAllowed: AugmentedError; /** - * Maximum refungibility exceeded + * Maximum refungibility exceeded. **/ WrongRefungiblePieces: AugmentedError; /** @@ -452,25 +525,83 @@ declare module '@polkadot/api-base/types/errors' { [key: string]: AugmentedError; }; rmrkCore: { + /** + * Not the target owner of the sent NFT. + **/ CannotAcceptNonOwnedNft: AugmentedError; + /** + * Not the target owner of the sent NFT. 
+ **/ CannotRejectNonOwnedNft: AugmentedError; + /** + * NFT was not sent and is not pending. + **/ CannotRejectNonPendingNft: AugmentedError; + /** + * If an NFT is sent to a descendant, that would form a nesting loop, an ouroboros. + * Sending to self is redundant. + **/ CannotSendToDescendentOrSelf: AugmentedError; + /** + * Too many tokens created in the collection, no new ones are allowed. + **/ CollectionFullOrLocked: AugmentedError; + /** + * Only destroying collections without tokens is allowed. + **/ CollectionNotEmpty: AugmentedError; + /** + * Collection does not exist, has a wrong type, or does not map to a Unique ID. + **/ CollectionUnknown: AugmentedError; + /** + * Property of the type of RMRK collection could not be read successfully. + **/ CorruptedCollectionType: AugmentedError; - NftTypeEncodeError: AugmentedError; + /** + * Could not find an ID for a collection. It is likely there were too many collections created on the chain, causing an overflow. + **/ NoAvailableCollectionId: AugmentedError; + /** + * Token does not exist, or there is no suitable ID for it, likely too many tokens were created in a collection, causing an overflow. + **/ NoAvailableNftId: AugmentedError; + /** + * Could not find an ID for the resource. It is likely there were too many resources created on an NFT, causing an overflow. + **/ NoAvailableResourceId: AugmentedError; + /** + * Token is marked as non-transferable, and thus cannot be transferred. + **/ NonTransferable: AugmentedError; + /** + * No permission to perform action. + **/ NoPermission: AugmentedError; + /** + * No such resource found. + **/ ResourceDoesntExist: AugmentedError; + /** + * Resource is not pending for the operation. + **/ ResourceNotPending: AugmentedError; + /** + * Could not find a property by the supplied key. + **/ RmrkPropertyIsNotFound: AugmentedError; + /** + * Too many symbols supplied as the property key. The maximum is [256](up_data_structs::MAX_PROPERTY_KEY_LENGTH). + **/ RmrkPropertyKeyIsTooLong: AugmentedError; + /** + * Too many bytes supplied as the property value. The maximum is [32768](up_data_structs::MAX_PROPERTY_VALUE_LENGTH). + **/ RmrkPropertyValueIsTooLong: AugmentedError; + /** + * Something went wrong when decoding encoded data from the storage. + * Perhaps, there was a wrong key supplied for the type, or the data was improperly stored. + **/ UnableToDecodeRmrkData: AugmentedError; /** * Generic error @@ -478,12 +609,33 @@ declare module '@polkadot/api-base/types/errors' { [key: string]: AugmentedError; }; rmrkEquip: { + /** + * Base collection linked to this ID does not exist. + **/ BaseDoesntExist: AugmentedError; + /** + * No Theme named "default" is associated with the Base. + **/ NeedsDefaultThemeFirst: AugmentedError; + /** + * Could not find an ID for a Base collection. It is likely there were too many collections created on the chain, causing an overflow. + **/ NoAvailableBaseId: AugmentedError; + /** + * Could not find a suitable ID for a Part, likely too many Part tokens were created in the Base, causing an overflow + **/ NoAvailablePartId: AugmentedError; + /** + * Cannot assign equippables to a fixed Part. + **/ NoEquippableOnFixedPart: AugmentedError; + /** + * Part linked to this ID does not exist. + **/ PartDoesntExist: AugmentedError; + /** + * No permission to perform action. 
+ **/ PermissionError: AugmentedError; /** * Generic error @@ -514,19 +666,19 @@ declare module '@polkadot/api-base/types/errors' { }; structure: { /** - * While iterating over children, encountered breadth limit + * While nesting, reached the breadth limit of nesting, exceeding the provided budget. **/ BreadthLimit: AugmentedError; /** - * While searched for owner, encountered depth limit + * While nesting, reached the depth limit of nesting, exceeding the provided budget. **/ DepthLimit: AugmentedError; /** - * While searched for owner, got already checked account + * While nesting, encountered an already checked account, detecting a loop. **/ OuroborosDetected: AugmentedError; /** - * While searched for owner, found token owner by not-yet-existing token + * Couldn't find the token owner that is itself a token. **/ TokenNotFound: AugmentedError; /** @@ -578,7 +730,55 @@ declare module '@polkadot/api-base/types/errors' { **/ [key: string]: AugmentedError; }; + testUtils: { + TestPalletDisabled: AugmentedError; + TriggerRollback: AugmentedError; + /** + * Generic error + **/ + [key: string]: AugmentedError; + }; + tokens: { + /** + * Cannot convert Amount into Balance type + **/ + AmountIntoBalanceFailed: AugmentedError; + /** + * The balance is too low + **/ + BalanceTooLow: AugmentedError; + /** + * Beneficiary account must pre-exist + **/ + DeadAccount: AugmentedError; + /** + * Value too low to create account due to existential deposit + **/ + ExistentialDeposit: AugmentedError; + /** + * Transfer/payment would kill account + **/ + KeepAlive: AugmentedError; + /** + * Failed because liquidity restrictions due to locking + **/ + LiquidityRestrictions: AugmentedError; + /** + * Failed because the maximum locks was exceeded + **/ + MaxLocksExceeded: AugmentedError; + TooManyReserves: AugmentedError; + /** + * Generic error + **/ + [key: string]: AugmentedError; + }; treasury: { + /** + * The spend origin is valid but the amount it is allowed to spend is lower than the + * amount to be spent. + **/ + InsufficientPermission: AugmentedError; /** * Proposer's balance is too low. **/ @@ -602,7 +802,7 @@ declare module '@polkadot/api-base/types/errors' { }; unique: { /** - * Decimal_points parameter must be lower than MAX_DECIMAL_POINTS constant, currently it is 30. + * Decimal_points parameter must be lower than [`up_data_structs::MAX_DECIMAL_POINTS`]. **/ CollectionDecimalPointLimitExceeded: AugmentedError; /** @@ -614,7 +814,7 @@ declare module '@polkadot/api-base/types/errors' { **/ EmptyArgument: AugmentedError; /** - * Repertition is only supported by refungible collection + * Repertition is only supported by refungible collection. **/ RepartitionCalledOnNonRefungibleCollection: AugmentedError; /** @@ -678,5 +878,90 @@ declare module '@polkadot/api-base/types/errors' { **/ [key: string]: AugmentedError; }; + xTokens: { + /** + * Asset has no reserve location. + **/ + AssetHasNoReserve: AugmentedError; + /** + * The specified index does not exist in a MultiAssets struct. + **/ + AssetIndexNonExistent: AugmentedError; + /** + * The version of the `Versioned` value used is not able to be + * interpreted. + **/ + BadVersion: AugmentedError; + /** + * Could not re-anchor the assets to declare the fees for the + * destination chain. + **/ + CannotReanchor: AugmentedError; + /** + * The destination `MultiLocation` provided cannot be inverted. + **/ + DestinationNotInvertible: AugmentedError; + /** + * We tried sending distinct asset and fee but they have different + * reserve chains. 
+ **/ + DistinctReserveForAssetAndFee: AugmentedError; + /** + * Fee is not enough. + **/ + FeeNotEnough: AugmentedError; + /** + * Could not get ancestry of asset reserve location. + **/ + InvalidAncestry: AugmentedError; + /** + * The MultiAsset is invalid. + **/ + InvalidAsset: AugmentedError; + /** + * Invalid transfer destination. + **/ + InvalidDest: AugmentedError; + /** + * MinXcmFee not registered for certain reserve location + **/ + MinXcmFeeNotDefined: AugmentedError; + /** + * Not cross-chain transfer. + **/ + NotCrossChainTransfer: AugmentedError; + /** + * Currency is not cross-chain transferable. + **/ + NotCrossChainTransferableCurrency: AugmentedError; + /** + * Not supported MultiLocation + **/ + NotSupportedMultiLocation: AugmentedError; + /** + * The number of assets to be sent is over the maximum. + **/ + TooManyAssetsBeingSent: AugmentedError; + /** + * The message's weight could not be determined. + **/ + UnweighableMessage: AugmentedError; + /** + * XCM execution failed. + **/ + XcmExecutionFailed: AugmentedError; + /** + * The transfering asset amount is zero. + **/ + ZeroAmount: AugmentedError; + /** + * The fee is zero. + **/ + ZeroFee: AugmentedError; + /** + * Generic error + **/ + [key: string]: AugmentedError; + }; } // AugmentedErrors } // declare module diff --git a/tests/src/interfaces/augment-api-events.ts b/tests/src/interfaces/augment-api-events.ts index a3fe7f9a9b..5901edc289 100644 --- a/tests/src/interfaces/augment-api-events.ts +++ b/tests/src/interfaces/augment-api-events.ts @@ -1,14 +1,58 @@ // Auto-generated via `yarn polkadot-types-from-chain`, do not edit /* eslint-disable */ -import type { ApiTypes } from '@polkadot/api-base/types'; +// import type lookup before we augment - in some environments +// this is required to allow for ambient/previous definitions +import '@polkadot/api-base/types/events'; + +import type { ApiTypes, AugmentedEvent } from '@polkadot/api-base/types'; import type { Bytes, Null, Option, Result, U256, U8aFixed, bool, u128, u32, u64, u8 } from '@polkadot/types-codec'; import type { ITuple } from '@polkadot/types-codec/types'; -import type { AccountId32, H160, H256 } from '@polkadot/types/interfaces/runtime'; -import type { EthereumLog, EvmCoreErrorExitReason, FrameSupportScheduleLookupError, FrameSupportTokensMiscBalanceStatus, FrameSupportWeightsDispatchInfo, OrmlVestingVestingSchedule, PalletEvmAccountBasicCrossAccountIdRepr, RmrkTraitsNftAccountIdOrCollectionNftTuple, SpRuntimeDispatchError, XcmV1MultiLocation, XcmV2Response, XcmV2TraitsError, XcmV2TraitsOutcome, XcmV2Xcm, XcmVersionedMultiAssets, XcmVersionedMultiLocation } from '@polkadot/types/lookup'; +import type { AccountId32, H160, H256, Weight } from '@polkadot/types/interfaces/runtime'; +import type { EthereumLog, EvmCoreErrorExitReason, FrameSupportDispatchDispatchInfo, FrameSupportScheduleLookupError, FrameSupportTokensMiscBalanceStatus, OrmlVestingVestingSchedule, PalletEvmAccountBasicCrossAccountIdRepr, PalletForeignAssetsAssetIds, PalletForeignAssetsModuleAssetMetadata, RmrkTraitsNftAccountIdOrCollectionNftTuple, SpRuntimeDispatchError, XcmV1MultiAsset, XcmV1MultiLocation, XcmV1MultiassetMultiAssets, XcmV2Response, XcmV2TraitsError, XcmV2TraitsOutcome, XcmV2Xcm, XcmVersionedMultiAssets, XcmVersionedMultiLocation } from '@polkadot/types/lookup'; + +export type __AugmentedEvent = AugmentedEvent; declare module '@polkadot/api-base/types/events' { - export interface AugmentedEvents { + interface AugmentedEvents { + appPromotion: { + /** + * The admin was set + 
* + * # Arguments + * * AccountId: account address of the admin + **/ + SetAdmin: AugmentedEvent; + /** + * Staking was performed + * + * # Arguments + * * AccountId: account of the staker + * * Balance : staking amount + **/ + Stake: AugmentedEvent; + /** + * Staking recalculation was performed + * + * # Arguments + * * AccountId: account of the staker. + * * Balance : recalculation base + * * Balance : total income + **/ + StakingRecalculation: AugmentedEvent; + /** + * Unstaking was performed + * + * # Arguments + * * AccountId: account of the staker + * * Balance : unstaking amount + **/ + Unstake: AugmentedEvent; + /** + * Generic event + **/ + [key: string]: AugmentedEvent; + }; balances: { /** * A balance was set by root. @@ -59,82 +103,47 @@ declare module '@polkadot/api-base/types/events' { }; common: { /** - * * collection_id - * - * * item_id - * - * * sender - * - * * spender - * - * * amount + * Amount pieces of token owned by `sender` was approved for `spender`. **/ Approved: AugmentedEvent; /** * New collection was created - * - * # Arguments - * - * * collection_id: Globally unique identifier of newly created collection. - * - * * mode: [CollectionMode] converted into u8. - * - * * account_id: Collection owner. **/ CollectionCreated: AugmentedEvent; /** * New collection was destroyed - * - * # Arguments - * - * * collection_id: Globally unique identifier of collection. **/ CollectionDestroyed: AugmentedEvent; + /** + * The property has been deleted. + **/ CollectionPropertyDeleted: AugmentedEvent; + /** + * The colletion property has been added or edited. + **/ CollectionPropertySet: AugmentedEvent; /** * New item was created. - * - * # Arguments - * - * * collection_id: Id of the collection where item was created. - * - * * item_id: Id of an item. Unique within the collection. - * - * * recipient: Owner of newly created item - * - * * amount: Always 1 for NFT **/ ItemCreated: AugmentedEvent; /** * Collection item was burned. - * - * # Arguments - * - * * collection_id. - * - * * item_id: Identifier of burned NFT. - * - * * owner: which user has destroyed its tokens - * - * * amount: Always 1 for NFT **/ ItemDestroyed: AugmentedEvent; + /** + * The token property permission of a collection has been set. + **/ PropertyPermissionSet: AugmentedEvent; + /** + * The token property has been deleted. + **/ TokenPropertyDeleted: AugmentedEvent; + /** + * The token property has been added or edited. + **/ TokenPropertySet: AugmentedEvent; /** * Item was transferred - * - * * collection_id: Id of collection to which item is belong - * - * * item_id: Id of an item - * - * * sender: Original owner of item - * - * * recipient: New owner of item - * - * * amount: Always 1 for NFT **/ Transfer: AugmentedEvent; /** @@ -175,11 +184,11 @@ declare module '@polkadot/api-base/types/events' { /** * Downward message is overweight and was placed in the overweight queue. **/ - OverweightEnqueued: AugmentedEvent; + OverweightEnqueued: AugmentedEvent; /** * Downward message from the overweight queue was executed. **/ - OverweightServiced: AugmentedEvent; + OverweightServiced: AugmentedEvent; /** * Downward message is unsupported version of XCM. **/ @@ -187,7 +196,7 @@ declare module '@polkadot/api-base/types/events' { /** * The weight limit for handling downward messages was reached. 
**/ - WeightExhausted: AugmentedEvent; + WeightExhausted: AugmentedEvent; /** * Generic event **/ @@ -237,6 +246,46 @@ declare module '@polkadot/api-base/types/events' { **/ [key: string]: AugmentedEvent; }; + evmContractHelpers: { + /** + * Collection sponsor was removed. + **/ + ContractSponsorRemoved: AugmentedEvent; + /** + * Contract sponsor was set. + **/ + ContractSponsorSet: AugmentedEvent; + /** + * New sponsor was confirm. + **/ + ContractSponsorshipConfirmed: AugmentedEvent; + /** + * Generic event + **/ + [key: string]: AugmentedEvent; + }; + foreignAssets: { + /** + * The asset registered. + **/ + AssetRegistered: AugmentedEvent; + /** + * The asset updated. + **/ + AssetUpdated: AugmentedEvent; + /** + * The foreign asset registered. + **/ + ForeignAssetRegistered: AugmentedEvent; + /** + * The foreign asset updated. + **/ + ForeignAssetUpdated: AugmentedEvent; + /** + * Generic event + **/ + [key: string]: AugmentedEvent; + }; maintenance: { MaintenanceDisabled: AugmentedEvent; MaintenanceEnabled: AugmentedEvent; @@ -249,7 +298,7 @@ declare module '@polkadot/api-base/types/events' { /** * Downward messages were processed using the given weight. **/ - DownwardMessagesProcessed: AugmentedEvent; + DownwardMessagesProcessed: AugmentedEvent; /** * Some downward messages have been received and will be processed. **/ @@ -337,7 +386,7 @@ declare module '@polkadot/api-base/types/events' { * * \[ id, pallet index, call index, actual weight, max budgeted weight \] **/ - NotifyOverweight: AugmentedEvent; + NotifyOverweight: AugmentedEvent; /** * A given location which had a version change subscription was dropped owing to an error * migrating the location to our new XCM format. @@ -439,6 +488,10 @@ declare module '@polkadot/api-base/types/events' { * Dispatched some task. **/ Dispatched: AugmentedEvent, id: Option, result: Result], { task: ITuple<[u32, u32]>, id: Option, result: Result }>; + /** + * Scheduled task's priority has changed + **/ + PriorityChanged: AugmentedEvent; /** * Scheduled some task. **/ @@ -450,7 +503,7 @@ declare module '@polkadot/api-base/types/events' { }; structure: { /** - * Executed call on behalf of token + * Executed call on behalf of the token. **/ Executed: AugmentedEvent]>; /** @@ -484,11 +537,11 @@ declare module '@polkadot/api-base/types/events' { /** * An extrinsic failed. **/ - ExtrinsicFailed: AugmentedEvent; + ExtrinsicFailed: AugmentedEvent; /** * An extrinsic completed successfully. **/ - ExtrinsicSuccess: AugmentedEvent; + ExtrinsicSuccess: AugmentedEvent; /** * An account was reaped. **/ @@ -506,6 +559,85 @@ declare module '@polkadot/api-base/types/events' { **/ [key: string]: AugmentedEvent; }; + testUtils: { + ShouldRollback: AugmentedEvent; + ValueIsSet: AugmentedEvent; + /** + * Generic event + **/ + [key: string]: AugmentedEvent; + }; + tokens: { + /** + * A balance was set by root. + **/ + BalanceSet: AugmentedEvent; + /** + * Deposited some balance into an account + **/ + Deposited: AugmentedEvent; + /** + * An account was removed whose balance was non-zero but below + * ExistentialDeposit, resulting in an outright loss. + **/ + DustLost: AugmentedEvent; + /** + * An account was created with some free balance. + **/ + Endowed: AugmentedEvent; + /** + * Some locked funds were unlocked + **/ + LockRemoved: AugmentedEvent; + /** + * Some funds are locked + **/ + LockSet: AugmentedEvent; + /** + * Some balance was reserved (moved from free to reserved). 
+ **/ + Reserved: AugmentedEvent; + /** + * Some reserved balance was repatriated (moved from reserved to + * another account). + **/ + ReserveRepatriated: AugmentedEvent; + /** + * Some balances were slashed (e.g. due to mis-behavior) + **/ + Slashed: AugmentedEvent; + /** + * The total issuance of an currency has been set + **/ + TotalIssuanceSet: AugmentedEvent; + /** + * Transfer succeeded. + **/ + Transfer: AugmentedEvent; + /** + * Some balance was unreserved (moved from reserved to free). + **/ + Unreserved: AugmentedEvent; + /** + * Some balances were withdrawn (e.g. pay for transaction fee) + **/ + Withdrawn: AugmentedEvent; + /** + * Generic event + **/ + [key: string]: AugmentedEvent; + }; + transactionPayment: { + /** + * A transaction fee `actual_fee`, of which `tip` was added to the minimum inclusion fee, + * has been paid by `who`. + **/ + TransactionFeePaid: AugmentedEvent; + /** + * Generic event + **/ + [key: string]: AugmentedEvent; + }; treasury: { /** * Some funds have been allocated. @@ -531,6 +663,10 @@ declare module '@polkadot/api-base/types/events' { * Spending has finished; this is the amount that rolls over until next spend. **/ Rollover: AugmentedEvent; + /** + * A new spend proposal has been approved. + **/ + SpendApproved: AugmentedEvent; /** * We have ended a spend period and will now allocate funds. **/ @@ -542,90 +678,80 @@ declare module '@polkadot/api-base/types/events' { }; unique: { /** - * Address was add to allow list + * Address was added to the allow list * * # Arguments - * - * * collection_id: Globally unique collection identifier. - * - * * user: Address. + * * collection_id: ID of the affected collection. + * * user: Address of the added account. **/ AllowListAddressAdded: AugmentedEvent; /** - * Address was remove from allow list + * Address was removed from the allow list * * # Arguments - * - * * collection_id: Globally unique collection identifier. - * - * * user: Address. + * * collection_id: ID of the affected collection. + * * user: Address of the removed account. **/ AllowListAddressRemoved: AugmentedEvent; /** * Collection admin was added * * # Arguments - * - * * collection_id: Globally unique collection identifier. - * - * * admin: Admin address. + * * collection_id: ID of the affected collection. + * * admin: Admin address. **/ CollectionAdminAdded: AugmentedEvent; /** * Collection admin was removed * * # Arguments - * - * * collection_id: Globally unique collection identifier. - * - * * admin: Admin address. + * * collection_id: ID of the affected collection. + * * admin: Removed admin address. **/ CollectionAdminRemoved: AugmentedEvent; /** - * Collection limits was set + * Collection limits were set * * # Arguments - * - * * collection_id: Globally unique collection identifier. + * * collection_id: ID of the affected collection. **/ CollectionLimitSet: AugmentedEvent; /** - * Collection owned was change + * Collection owned was changed * * # Arguments - * - * * collection_id: Globally unique collection identifier. - * - * * owner: New owner address. + * * collection_id: ID of the affected collection. + * * owner: New owner address. **/ CollectionOwnedChanged: AugmentedEvent; + /** + * Collection permissions were set + * + * # Arguments + * * collection_id: ID of the affected collection. + **/ CollectionPermissionSet: AugmentedEvent; /** * Collection sponsor was removed * * # Arguments - * - * * collection_id: Globally unique collection identifier. + * * collection_id: ID of the affected collection. 
**/ CollectionSponsorRemoved: AugmentedEvent; /** * Collection sponsor was set * * # Arguments - * - * * collection_id: Globally unique collection identifier. - * - * * owner: New sponsor address. + * * collection_id: ID of the affected collection. + * * owner: New sponsor address. **/ CollectionSponsorSet: AugmentedEvent; /** * New sponsor was confirm * * # Arguments - * - * * collection_id: Globally unique collection identifier. - * - * * sponsor: New sponsor address. + * * collection_id: ID of the affected collection. + * * sponsor: New sponsor address. **/ SponsorshipConfirmed: AugmentedEvent; /** @@ -655,35 +781,45 @@ declare module '@polkadot/api-base/types/events' { /** * Bad XCM format used. **/ - BadFormat: AugmentedEvent]>; + BadFormat: AugmentedEvent], { messageHash: Option }>; /** * Bad XCM version used. **/ - BadVersion: AugmentedEvent]>; + BadVersion: AugmentedEvent], { messageHash: Option }>; /** * Some XCM failed. **/ - Fail: AugmentedEvent, XcmV2TraitsError]>; + Fail: AugmentedEvent, error: XcmV2TraitsError, weight: Weight], { messageHash: Option, error: XcmV2TraitsError, weight: Weight }>; /** * An XCM exceeded the individual message weight budget. **/ - OverweightEnqueued: AugmentedEvent; + OverweightEnqueued: AugmentedEvent; /** * An XCM from the overweight queue was executed with the given actual weight used. **/ - OverweightServiced: AugmentedEvent; + OverweightServiced: AugmentedEvent; /** * Some XCM was executed ok. **/ - Success: AugmentedEvent]>; + Success: AugmentedEvent, weight: Weight], { messageHash: Option, weight: Weight }>; /** * An upward message was sent to the relay chain. **/ - UpwardMessageSent: AugmentedEvent]>; + UpwardMessageSent: AugmentedEvent], { messageHash: Option }>; /** * An HRMP message was sent to a sibling parachain. **/ - XcmpMessageSent: AugmentedEvent]>; + XcmpMessageSent: AugmentedEvent], { messageHash: Option }>; + /** + * Generic event + **/ + [key: string]: AugmentedEvent; + }; + xTokens: { + /** + * Transferred `MultiAsset` with fee. 
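// ---------------------------------------------------------------------------
// Illustrative usage sketch (editor's note, not part of the generated file).
// The augmented event typings above let test code narrow runtime events with
// `api.events.<section>.<method>.is(...)`. A minimal sketch, assuming a local
// dev node at ws://127.0.0.1:9944 (placeholder) and that the generated
// augmentation modules under tests/src/interfaces are imported so these
// typings apply:
import { ApiPromise, WsProvider } from '@polkadot/api';

async function watchEvents(): Promise<void> {
  const api = await ApiPromise.create({ provider: new WsProvider('ws://127.0.0.1:9944') });
  // Subscribe to the system event storage and pick out events declared above.
  await api.query.system.events((records) => {
    for (const { event } of records) {
      if (api.events.foreignAssets.ForeignAssetRegistered.is(event)
        || api.events.unique.CollectionSponsorSet.is(event)) {
        console.log(`${event.section}.${event.method}:`, event.data.toHuman());
      }
    }
  });
}
// ---------------------------------------------------------------------------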
+ **/ + TransferredMultiAssets: AugmentedEvent; /** * Generic event **/ diff --git a/tests/src/interfaces/augment-api-query.ts b/tests/src/interfaces/augment-api-query.ts index 89ad50061d..2e642e7fda 100644 --- a/tests/src/interfaces/augment-api-query.ts +++ b/tests/src/interfaces/augment-api-query.ts @@ -1,15 +1,64 @@ // Auto-generated via `yarn polkadot-types-from-chain`, do not edit /* eslint-disable */ -import type { ApiTypes } from '@polkadot/api-base/types'; -import type { BTreeMap, Bytes, Option, U256, U8aFixed, Vec, bool, u128, u16, u32, u64 } from '@polkadot/types-codec'; +// import type lookup before we augment - in some environments +// this is required to allow for ambient/previous definitions +import '@polkadot/api-base/types/storage'; + +import type { ApiTypes, AugmentedQuery, QueryableStorageEntry } from '@polkadot/api-base/types'; +import type { BTreeMap, Bytes, Option, U256, U8aFixed, Vec, bool, u128, u16, u32, u64, u8 } from '@polkadot/types-codec'; import type { AnyNumber, ITuple } from '@polkadot/types-codec/types'; -import type { AccountId32, H160, H256 } from '@polkadot/types/interfaces/runtime'; -import type { CumulusPalletDmpQueueConfigData, CumulusPalletDmpQueuePageIndexData, CumulusPalletParachainSystemRelayStateSnapshotMessagingStateSnapshot, CumulusPalletXcmpQueueInboundChannelDetails, CumulusPalletXcmpQueueOutboundChannelDetails, CumulusPalletXcmpQueueQueueConfigData, EthereumBlock, EthereumLog, EthereumReceiptReceiptV3, EthereumTransactionTransactionV2, FpRpcTransactionStatus, FrameSupportWeightsPerDispatchClassU64, FrameSystemAccountInfo, FrameSystemEventRecord, FrameSystemLastRuntimeUpgradeInfo, FrameSystemPhase, OrmlVestingVestingSchedule, PalletBalancesAccountData, PalletBalancesBalanceLock, PalletBalancesReleases, PalletBalancesReserveData, PalletEvmAccountBasicCrossAccountIdRepr, PalletEvmContractHelpersSponsoringModeT, PalletNonfungibleItemData, PalletRefungibleItemData, PalletTransactionPaymentReleases, PalletTreasuryProposal, PalletUniqueSchedulerScheduledV3, PhantomTypeUpDataStructs, PolkadotCorePrimitivesOutboundHrmpMessage, PolkadotPrimitivesV2AbridgedHostConfiguration, PolkadotPrimitivesV2PersistedValidationData, PolkadotPrimitivesV2UpgradeRestriction, SpRuntimeDigest, SpTrieStorageProof, UpDataStructsCollection, UpDataStructsCollectionStats, UpDataStructsProperties, UpDataStructsPropertiesMapPropertyPermission, UpDataStructsPropertyPermission, UpDataStructsPropertyScope, UpDataStructsTokenChild } from '@polkadot/types/lookup'; +import type { AccountId32, H160, H256, Weight } from '@polkadot/types/interfaces/runtime'; +import type { CumulusPalletDmpQueueConfigData, CumulusPalletDmpQueuePageIndexData, CumulusPalletParachainSystemRelayStateSnapshotMessagingStateSnapshot, CumulusPalletXcmpQueueInboundChannelDetails, CumulusPalletXcmpQueueOutboundChannelDetails, CumulusPalletXcmpQueueQueueConfigData, EthereumBlock, EthereumLog, EthereumReceiptReceiptV3, EthereumTransactionTransactionV2, FpRpcTransactionStatus, FrameSupportDispatchPerDispatchClassWeight, FrameSystemAccountInfo, FrameSystemEventRecord, FrameSystemLastRuntimeUpgradeInfo, FrameSystemPhase, OrmlTokensAccountData, OrmlTokensBalanceLock, OrmlTokensReserveData, OrmlVestingVestingSchedule, PalletBalancesAccountData, PalletBalancesBalanceLock, PalletBalancesReleases, PalletBalancesReserveData, PalletEvmAccountBasicCrossAccountIdRepr, PalletEvmContractHelpersSponsoringModeT, PalletForeignAssetsAssetIds, PalletForeignAssetsModuleAssetMetadata, PalletNonfungibleItemData, PalletRefungibleItemData, 
PalletTransactionPaymentReleases, PalletTreasuryProposal, PalletUniqueSchedulerScheduledV3, PhantomTypeUpDataStructs, PolkadotCorePrimitivesOutboundHrmpMessage, PolkadotPrimitivesV2AbridgedHostConfiguration, PolkadotPrimitivesV2PersistedValidationData, PolkadotPrimitivesV2UpgradeRestriction, SpRuntimeDigest, SpTrieStorageProof, UpDataStructsCollection, UpDataStructsCollectionStats, UpDataStructsProperties, UpDataStructsPropertiesMapPropertyPermission, UpDataStructsPropertyPermission, UpDataStructsPropertyScope, UpDataStructsSponsorshipStateBasicCrossAccountIdRepr, UpDataStructsTokenChild, XcmV1MultiLocation } from '@polkadot/types/lookup'; import type { Observable } from '@polkadot/types/types'; +export type __AugmentedQuery = AugmentedQuery unknown>; +export type __QueryableStorageEntry = QueryableStorageEntry; + declare module '@polkadot/api-base/types/storage' { - export interface AugmentedQueries { + interface AugmentedQueries { + appPromotion: { + /** + * Stores the `admin` account. Some extrinsics can only be executed if they were signed by `admin`. + **/ + admin: AugmentedQuery Observable>, []> & QueryableStorageEntry; + /** + * Stores a key for record for which the next revenue recalculation would be performed. + * If `None`, then recalculation has not yet been performed or calculations have been completed for all stakers. + **/ + nextCalculatedRecord: AugmentedQuery Observable>>, []> & QueryableStorageEntry; + /** + * Stores amount of stakes for an `Account`. + * + * * **Key** - Staker account. + * * **Value** - Amount of stakes. + **/ + pendingUnstake: AugmentedQuery Observable>>, [u32]> & QueryableStorageEntry; + /** + * Stores the amount of tokens staked by account in the blocknumber. + * + * * **Key1** - Staker account. + * * **Key2** - Relay block number when the stake was made. + * * **(Balance, BlockNumber)** - Balance of the stake. + * The number of the relay block in which we must perform the interest recalculation + **/ + staked: AugmentedQuery Observable>, [AccountId32, u32]> & QueryableStorageEntry; + /** + * Stores amount of stakes for an `Account`. + * + * * **Key** - Staker account. + * * **Value** - Amount of stakes. + **/ + stakesPerAccount: AugmentedQuery Observable, [AccountId32]> & QueryableStorageEntry; + /** + * Stores the total staked amount. + **/ + totalStaked: AugmentedQuery Observable, []> & QueryableStorageEntry; + /** + * Generic query + **/ + [key: string]: QueryableStorageEntry; + }; balances: { /** * The Balances pallet example of storing the balance of an account. @@ -69,28 +118,40 @@ declare module '@polkadot/api-base/types/storage' { [key: string]: QueryableStorageEntry; }; common: { + /** + * Storage of the amount of collection admins. + **/ adminAmount: AugmentedQuery Observable, [u32]> & QueryableStorageEntry; /** - * Allowlisted collection users + * Allowlisted collection users. **/ allowlist: AugmentedQuery Observable, [u32, PalletEvmAccountBasicCrossAccountIdRepr]> & QueryableStorageEntry; /** - * Collection info + * Storage of collection info. **/ collectionById: AugmentedQuery Observable>, [u32]> & QueryableStorageEntry; /** - * Collection properties + * Storage of collection properties. **/ collectionProperties: AugmentedQuery Observable, [u32]> & QueryableStorageEntry; + /** + * Storage of token property permissions of a collection. + **/ collectionPropertyPermissions: AugmentedQuery Observable>, [u32]> & QueryableStorageEntry; + /** + * Storage of the count of created collections. Essentially contains the last collection ID. 
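// ---------------------------------------------------------------------------
// Illustrative usage sketch (editor's note, not part of the generated file).
// With the storage typings above, queries return strongly typed codecs. A
// small sketch, assuming a local dev node; collection ID 1 is a placeholder:
import { ApiPromise, WsProvider } from '@polkadot/api';

async function readStorage(): Promise<void> {
  const api = await ApiPromise.create({ provider: new WsProvider('ws://127.0.0.1:9944') });
  // Total amount staked through the app-promotion pallet (u128).
  const totalStaked = await api.query.appPromotion.totalStaked();
  // Collection info is an Option<UpDataStructsCollection>.
  const collection = await api.query.common.collectionById(1);
  console.log('total staked:', totalStaked.toBigInt());
  console.log('collection #1:', collection.isSome ? collection.unwrap().toHuman() : 'not found');
}
// ---------------------------------------------------------------------------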
+ **/ createdCollectionCount: AugmentedQuery Observable, []> & QueryableStorageEntry; + /** + * Storage of the count of deleted collections. + **/ destroyedCollectionCount: AugmentedQuery Observable, []> & QueryableStorageEntry; /** - * Not used by code, exists only to provide some types to metadata + * Not used by code, exists only to provide some types to metadata. **/ dummyStorageValue: AugmentedQuery Observable>>, []> & QueryableStorageEntry; /** - * List of collection admins + * List of collection admins. **/ isAdmin: AugmentedQuery Observable, [u32, PalletEvmAccountBasicCrossAccountIdRepr]> & QueryableStorageEntry; /** @@ -98,6 +159,14 @@ declare module '@polkadot/api-base/types/storage' { **/ [key: string]: QueryableStorageEntry; }; + configuration: { + minGasPriceOverride: AugmentedQuery Observable, []> & QueryableStorageEntry; + weightToFeeCoefficientOverride: AugmentedQuery Observable, []> & QueryableStorageEntry; + /** + * Generic query + **/ + [key: string]: QueryableStorageEntry; + }; dmpQueue: { /** * The configuration. @@ -167,12 +236,67 @@ declare module '@polkadot/api-base/types/storage' { [key: string]: QueryableStorageEntry; }; evmContractHelpers: { + /** + * Storage for users that allowed for sponsorship. + * + * ### Usage + * Prefer to delete record from storage if user no more allowed for sponsorship. + * + * * **Key1** - contract address. + * * **Key2** - user that allowed for sponsorship. + * * **Value** - allowance for sponsorship. + **/ allowlist: AugmentedQuery Observable, [H160, H160]> & QueryableStorageEntry; + /** + * Storege for contracts with [`Allowlisted`](SponsoringModeT::Allowlisted) sponsoring mode. + * + * ### Usage + * Prefer to delete collection from storage if mode chaged to non `Allowlisted`, than set **Value** to **false**. + * + * * **Key** - contract address. + * * **Value** - is contract in [`Allowlisted`](SponsoringModeT::Allowlisted) mode. + **/ allowlistEnabled: AugmentedQuery Observable, [H160]> & QueryableStorageEntry; + /** + * Store owner for contract. + * + * * **Key** - contract address. + * * **Value** - owner for contract. + **/ owner: AugmentedQuery Observable, [H160]> & QueryableStorageEntry; selfSponsoring: AugmentedQuery Observable, [H160]> & QueryableStorageEntry; sponsorBasket: AugmentedQuery Observable>, [H160, H160]> & QueryableStorageEntry; + /** + * Store for contract sponsorship state. + * + * * **Key** - contract address. + * * **Value** - sponsorship state. + **/ + sponsoring: AugmentedQuery Observable, [H160]> & QueryableStorageEntry; + /** + * Storage for last sponsored block. + * + * * **Key1** - contract address. + * * **Key2** - sponsored user address. + * * **Value** - last sponsored block number. + **/ + sponsoringFeeLimit: AugmentedQuery Observable>, [H160]> & QueryableStorageEntry; + /** + * Store for sponsoring mode. + * + * ### Usage + * Prefer to delete collection from storage if mode chaged to [`Disabled`](SponsoringModeT::Disabled). + * + * * **Key** - contract address. + * * **Value** - [`sponsoring mode`](SponsoringModeT). + **/ sponsoringMode: AugmentedQuery Observable>, [H160]> & QueryableStorageEntry; + /** + * Storage for sponsoring rate limit in blocks. + * + * * **Key** - contract address. + * * **Value** - amount of sponsored blocks. 
+ **/ sponsoringRateLimit: AugmentedQuery Observable, [H160]> & QueryableStorageEntry; /** * Generic query @@ -186,9 +310,53 @@ declare module '@polkadot/api-base/types/storage' { **/ [key: string]: QueryableStorageEntry; }; + foreignAssets: { + /** + * The storages for assets to fungible collection binding + * + **/ + assetBinding: AugmentedQuery Observable>, [u32]> & QueryableStorageEntry; + /** + * The storages for AssetMetadatas. + * + * AssetMetadatas: map AssetIds => Option + **/ + assetMetadatas: AugmentedQuery Observable>, [PalletForeignAssetsAssetIds]> & QueryableStorageEntry; + /** + * The storages for MultiLocations. + * + * ForeignAssetLocations: map ForeignAssetId => Option + **/ + foreignAssetLocations: AugmentedQuery Observable>, [u32]> & QueryableStorageEntry; + /** + * The storages for CurrencyIds. + * + * LocationToCurrencyIds: map MultiLocation => Option + **/ + locationToCurrencyIds: AugmentedQuery Observable>, [XcmV1MultiLocation]> & QueryableStorageEntry; + /** + * Next available Foreign AssetId ID. + * + * NextForeignAssetId: ForeignAssetId + **/ + nextForeignAssetId: AugmentedQuery Observable, []> & QueryableStorageEntry; + /** + * Generic query + **/ + [key: string]: QueryableStorageEntry; + }; fungible: { + /** + * Storage for assets delegated to a limited extent to other users. + **/ allowance: AugmentedQuery Observable, [u32, PalletEvmAccountBasicCrossAccountIdRepr, PalletEvmAccountBasicCrossAccountIdRepr]> & QueryableStorageEntry; + /** + * Amount of tokens owned by an account inside a collection. + **/ balance: AugmentedQuery Observable, [u32, PalletEvmAccountBasicCrossAccountIdRepr]> & QueryableStorageEntry; + /** + * Total amount of fungible tokens inside a collection. + **/ totalSupply: AugmentedQuery Observable, [u32]> & QueryableStorageEntry; /** * Generic query @@ -229,20 +397,49 @@ declare module '@polkadot/api-base/types/storage' { [key: string]: QueryableStorageEntry; }; nonfungible: { + /** + * Amount of tokens owned by an account in a collection. + **/ accountBalance: AugmentedQuery Observable, [u32, PalletEvmAccountBasicCrossAccountIdRepr]> & QueryableStorageEntry; + /** + * Allowance set by a token owner for another user to perform one of certain transactions on a token. + **/ allowance: AugmentedQuery Observable>, [u32, u32]> & QueryableStorageEntry; /** - * Used to enumerate tokens owned by account + * Used to enumerate tokens owned by account. **/ owned: AugmentedQuery Observable, [u32, PalletEvmAccountBasicCrossAccountIdRepr, u32]> & QueryableStorageEntry; + /** + * Custom data of a token that is serialized to bytes, + * primarily reserved for on-chain operations, + * normally obscured from the external users. + * + * Auxiliary properties are slightly different from + * usual [`TokenProperties`] due to an unlimited number + * and separately stored and written-to key-value pairs. + * + * Currently used to store RMRK data. + **/ tokenAuxProperties: AugmentedQuery Observable>, [u32, u32, UpDataStructsPropertyScope, Bytes]> & QueryableStorageEntry; /** - * Used to enumerate token's children + * Used to enumerate token's children. **/ tokenChildren: AugmentedQuery | [u32 | AnyNumber | Uint8Array, u32 | AnyNumber | Uint8Array]) => Observable, [u32, u32, ITuple<[u32, u32]>]> & QueryableStorageEntry]>; + /** + * Token data, used to partially describe a token. + **/ tokenData: AugmentedQuery Observable>, [u32, u32]> & QueryableStorageEntry; + /** + * Map of key-value pairs, describing the metadata of a token. 
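// ---------------------------------------------------------------------------
// Illustrative usage sketch (editor's note, not part of the generated file).
// A sketch of reading the foreign-assets and NFT storage declared above,
// assuming a local dev node; the asset, collection and token IDs are
// placeholders:
import { ApiPromise, WsProvider } from '@polkadot/api';

async function readAssetAndToken(): Promise<void> {
  const api = await ApiPromise.create({ provider: new WsProvider('ws://127.0.0.1:9944') });
  // Next unused foreign asset ID (u32).
  const nextId = await api.query.foreignAssets.nextForeignAssetId();
  // Registered MultiLocation for foreign asset 0, if any.
  const location = await api.query.foreignAssets.foreignAssetLocations(0);
  // Owner/data of NFT #1 in collection #1 (Option<PalletNonfungibleItemData>).
  const tokenData = await api.query.nonfungible.tokenData(1, 1);
  console.log(nextId.toNumber(), location.toHuman(), tokenData.toHuman());
}
// ---------------------------------------------------------------------------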
+ **/ tokenProperties: AugmentedQuery Observable, [u32, u32]> & QueryableStorageEntry; + /** + * Amount of burnt tokens in a collection. + **/ tokensBurnt: AugmentedQuery Observable, [u32]> & QueryableStorageEntry; + /** + * Total amount of minted tokens in a collection. + **/ tokensMinted: AugmentedQuery Observable, [u32]> & QueryableStorageEntry; /** * Generic query @@ -311,6 +508,10 @@ declare module '@polkadot/api-base/types/storage' { * by the system inherent. **/ lastHrmpMqcHeads: AugmentedQuery Observable>, []> & QueryableStorageEntry; + /** + * The relay chain block number associated with the last parachain block. + **/ + lastRelayChainBlockNumber: AugmentedQuery Observable, []> & QueryableStorageEntry; /** * Validation code that is set by the parachain and is to be communicated to collator and * consequently the relay-chain. @@ -361,12 +562,12 @@ declare module '@polkadot/api-base/types/storage' { * The weight we reserve at the beginning of the block for processing DMP messages. This * overrides the amount set in the Config trait. **/ - reservedDmpWeightOverride: AugmentedQuery Observable>, []> & QueryableStorageEntry; + reservedDmpWeightOverride: AugmentedQuery Observable>, []> & QueryableStorageEntry; /** * The weight we reserve at the beginning of the block for processing XCMP messages. This * overrides the amount set in the Config trait. **/ - reservedXcmpWeightOverride: AugmentedQuery Observable>, []> & QueryableStorageEntry; + reservedXcmpWeightOverride: AugmentedQuery Observable>, []> & QueryableStorageEntry; /** * An option which indicates if the relay-chain restricts signalling a validation code upgrade. * In other words, if this is `Some` and [`NewValidationCode`] is `Some` then the produced @@ -407,16 +608,41 @@ declare module '@polkadot/api-base/types/storage' { [key: string]: QueryableStorageEntry; }; refungible: { + /** + * Amount of tokens (not pieces) partially owned by an account within a collection. + **/ accountBalance: AugmentedQuery Observable, [u32, PalletEvmAccountBasicCrossAccountIdRepr]> & QueryableStorageEntry; + /** + * Allowance set by a token owner for another user to perform one of certain transactions on a number of pieces of a token. + **/ allowance: AugmentedQuery Observable, [u32, u32, PalletEvmAccountBasicCrossAccountIdRepr, PalletEvmAccountBasicCrossAccountIdRepr]> & QueryableStorageEntry; + /** + * Amount of token pieces owned by account. + **/ balance: AugmentedQuery Observable, [u32, u32, PalletEvmAccountBasicCrossAccountIdRepr]> & QueryableStorageEntry; /** - * Used to enumerate tokens owned by account + * Used to enumerate tokens owned by account. **/ owned: AugmentedQuery Observable, [u32, PalletEvmAccountBasicCrossAccountIdRepr, u32]> & QueryableStorageEntry; + /** + * Token data, used to partially describe a token. + **/ tokenData: AugmentedQuery Observable, [u32, u32]> & QueryableStorageEntry; + /** + * Amount of pieces a refungible token is split into. + **/ + tokenProperties: AugmentedQuery Observable, [u32, u32]> & QueryableStorageEntry; + /** + * Amount of tokens burnt in a collection. + **/ tokensBurnt: AugmentedQuery Observable, [u32]> & QueryableStorageEntry; + /** + * Total amount of minted tokens in a collection. 
+ **/ tokensMinted: AugmentedQuery Observable, [u32]> & QueryableStorageEntry; + /** + * Total amount of pieces for token + **/ totalSupply: AugmentedQuery Observable, [u32, u32]> & QueryableStorageEntry; /** * Generic query @@ -424,7 +650,13 @@ declare module '@polkadot/api-base/types/storage' { [key: string]: QueryableStorageEntry; }; rmrkCore: { + /** + * Latest yet-unused collection ID. + **/ collectionIndex: AugmentedQuery Observable, []> & QueryableStorageEntry; + /** + * Mapping from RMRK collection ID to Unique's. + **/ uniqueCollectionId: AugmentedQuery Observable, [u32]> & QueryableStorageEntry; /** * Generic query @@ -432,7 +664,13 @@ declare module '@polkadot/api-base/types/storage' { [key: string]: QueryableStorageEntry; }; rmrkEquip: { + /** + * Checkmark that a Base has a Theme NFT named "default". + **/ baseHasDefaultTheme: AugmentedQuery Observable, [u32]> & QueryableStorageEntry; + /** + * Map of a Base ID and a Part ID to an NFT in the Base collection serving as the Part. + **/ inernalPartId: AugmentedQuery Observable>, [u32, u32]> & QueryableStorageEntry; /** * Generic query @@ -485,7 +723,7 @@ declare module '@polkadot/api-base/types/storage' { /** * The current weight for the block. **/ - blockWeight: AugmentedQuery Observable, []> & QueryableStorageEntry; + blockWeight: AugmentedQuery Observable, []> & QueryableStorageEntry; /** * Digest of the current block, also part of the block header. **/ @@ -555,6 +793,14 @@ declare module '@polkadot/api-base/types/storage' { **/ [key: string]: QueryableStorageEntry; }; + testUtils: { + enabled: AugmentedQuery Observable, []> & QueryableStorageEntry; + testValue: AugmentedQuery Observable, []> & QueryableStorageEntry; + /** + * Generic query + **/ + [key: string]: QueryableStorageEntry; + }; timestamp: { /** * Did the timestamp get updated in this block? @@ -569,6 +815,34 @@ declare module '@polkadot/api-base/types/storage' { **/ [key: string]: QueryableStorageEntry; }; + tokens: { + /** + * The balance of a token type under an account. + * + * NOTE: If the total is ever zero, decrease account ref account. + * + * NOTE: This is only used in the case that this module is used to store + * balances. + **/ + accounts: AugmentedQuery Observable, [AccountId32, PalletForeignAssetsAssetIds]> & QueryableStorageEntry; + /** + * Any liquidity locks of a token type under an account. + * NOTE: Should only be accessed when setting, changing and freeing a lock. + **/ + locks: AugmentedQuery Observable>, [AccountId32, PalletForeignAssetsAssetIds]> & QueryableStorageEntry; + /** + * Named reserves on some account balances. + **/ + reserves: AugmentedQuery Observable>, [AccountId32, PalletForeignAssetsAssetIds]> & QueryableStorageEntry; + /** + * The total issuance of a token type. 
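// ---------------------------------------------------------------------------
// Illustrative usage sketch (editor's note, not part of the generated file).
// The orml-tokens storage above is keyed by PalletForeignAssetsAssetIds. A
// sketch of reading a balance, assuming that enum exposes a `ForeignAssetId`
// variant (check the generated lookup types for the exact shape) and using
// the well-known //Alice dev address as a placeholder:
import { ApiPromise, WsProvider } from '@polkadot/api';

async function readForeignBalance(): Promise<void> {
  const api = await ApiPromise.create({ provider: new WsProvider('ws://127.0.0.1:9944') });
  const alice = '5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY';
  const assetId = { ForeignAssetId: 0 }; // assumed variant name, adjust to the actual enum
  const account = await api.query.tokens.accounts(alice, assetId);
  const issuance = await api.query.tokens.totalIssuance(assetId);
  console.log('free:', account.free.toString(), 'issuance:', issuance.toString());
}
// ---------------------------------------------------------------------------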
+ **/ + totalIssuance: AugmentedQuery Observable, [PalletForeignAssetsAssetIds]> & QueryableStorageEntry; + /** + * Generic query + **/ + [key: string]: QueryableStorageEntry; + }; transactionPayment: { nextFeeMultiplier: AugmentedQuery Observable, []> & QueryableStorageEntry; storageVersion: AugmentedQuery Observable, []> & QueryableStorageEntry; @@ -605,24 +879,33 @@ declare module '@polkadot/api-base/types/storage' { * TODO: Off chain worker should remove from this map when collection gets removed **/ createItemBasket: AugmentedQuery | [u32 | AnyNumber | Uint8Array, AccountId32 | string | Uint8Array]) => Observable>, [ITuple<[u32, AccountId32]>]> & QueryableStorageEntry]>; + /** + * Last sponsoring of fungible tokens approval in a collection + **/ fungibleApproveBasket: AugmentedQuery Observable>, [u32, AccountId32]> & QueryableStorageEntry; /** * Collection id (controlled?2), owning user (real) **/ fungibleTransferBasket: AugmentedQuery Observable>, [u32, AccountId32]> & QueryableStorageEntry; /** - * Approval sponsoring + * Last sponsoring of NFT approval in a collection **/ nftApproveBasket: AugmentedQuery Observable>, [u32, u32]> & QueryableStorageEntry; /** * Collection id (controlled?2), token id (controlled?2) **/ nftTransferBasket: AugmentedQuery Observable>, [u32, u32]> & QueryableStorageEntry; + /** + * Last sponsoring of RFT approval in a collection + **/ refungibleApproveBasket: AugmentedQuery Observable>, [u32, u32, AccountId32]> & QueryableStorageEntry; /** * Collection id (controlled?2), token id (controlled?2) **/ reFungibleTransferBasket: AugmentedQuery Observable>, [u32, u32, AccountId32]> & QueryableStorageEntry; + /** + * Last sponsoring of token property setting // todo:doc rephrase this and the following + **/ tokenPropertyBasket: AugmentedQuery Observable>, [u32, u32]> & QueryableStorageEntry; /** * Variable metadata sponsoring diff --git a/tests/src/interfaces/augment-api-rpc.ts b/tests/src/interfaces/augment-api-rpc.ts index 07469fc4c4..28f0df926c 100644 --- a/tests/src/interfaces/augment-api-rpc.ts +++ b/tests/src/interfaces/augment-api-rpc.ts @@ -1,11 +1,15 @@ // Auto-generated via `yarn polkadot-types-from-chain`, do not edit /* eslint-disable */ +// import type lookup before we augment - in some environments +// this is required to allow for ambient/previous definitions +import '@polkadot/rpc-core/types/jsonrpc'; + import type { PalletEvmAccountBasicCrossAccountIdRepr, RmrkTraitsBaseBaseInfo, RmrkTraitsCollectionCollectionInfo, RmrkTraitsNftNftChild, RmrkTraitsNftNftInfo, RmrkTraitsPartPartType, RmrkTraitsPropertyPropertyInfo, RmrkTraitsResourceResourceInfo, RmrkTraitsTheme, UpDataStructsCollectionLimits, UpDataStructsCollectionStats, UpDataStructsProperty, UpDataStructsPropertyKeyPermission, UpDataStructsRpcCollection, UpDataStructsTokenChild, UpDataStructsTokenData } from './default'; import type { AugmentedRpc } from '@polkadot/rpc-core/types'; import type { Metadata, StorageKey } from '@polkadot/types'; -import type { Bytes, HashMap, Json, Null, Option, Text, U256, U64, Vec, bool, u128, u32, u64 } from '@polkadot/types-codec'; -import type { AnyNumber, Codec } from '@polkadot/types-codec/types'; +import type { Bytes, HashMap, Json, Null, Option, Text, U256, U64, Vec, bool, f64, u128, u32, u64 } from '@polkadot/types-codec'; +import type { AnyNumber, Codec, ITuple } from '@polkadot/types-codec/types'; import type { ExtrinsicOrHash, ExtrinsicStatus } from '@polkadot/types/interfaces/author'; import type { EpochAuthorship } from 
'@polkadot/types/interfaces/babe'; import type { BeefySignedCommitment } from '@polkadot/types/interfaces/beefy'; @@ -15,7 +19,7 @@ import type { AuthorityId } from '@polkadot/types/interfaces/consensus'; import type { CodeUploadRequest, CodeUploadResult, ContractCallRequest, ContractExecResult, ContractInstantiateResult, InstantiateRequest } from '@polkadot/types/interfaces/contracts'; import type { BlockStats } from '@polkadot/types/interfaces/dev'; import type { CreatedBlock } from '@polkadot/types/interfaces/engine'; -import type { EthAccount, EthCallRequest, EthFilter, EthFilterChanges, EthLog, EthReceipt, EthRichBlock, EthSubKind, EthSubParams, EthSyncStatus, EthTransaction, EthTransactionRequest, EthWork } from '@polkadot/types/interfaces/eth'; +import type { EthAccount, EthCallRequest, EthFeeHistory, EthFilter, EthFilterChanges, EthLog, EthReceipt, EthRichBlock, EthSubKind, EthSubParams, EthSyncStatus, EthTransaction, EthTransactionRequest, EthWork } from '@polkadot/types/interfaces/eth'; import type { Extrinsic } from '@polkadot/types/interfaces/extrinsics'; import type { EncodedFinalityProofs, JustificationNotification, ReportedRoundStates } from '@polkadot/types/interfaces/grandpa'; import type { MmrLeafBatchProof, MmrLeafProof } from '@polkadot/types/interfaces/mmr'; @@ -27,8 +31,28 @@ import type { MigrationStatusResult, ReadProof, RuntimeVersion, TraceBlockRespon import type { ApplyExtrinsicResult, ChainProperties, ChainType, Health, NetworkState, NodeRole, PeerInfo, SyncState } from '@polkadot/types/interfaces/system'; import type { IExtrinsic, Observable } from '@polkadot/types/types'; +export type __AugmentedRpc = AugmentedRpc<() => unknown>; + declare module '@polkadot/rpc-core/types/jsonrpc' { - export interface RpcInterface { + interface RpcInterface { + appPromotion: { + /** + * Returns the total amount of unstaked tokens + **/ + pendingUnstake: AugmentedRpc<(staker?: PalletEvmAccountBasicCrossAccountIdRepr | { Substrate: any } | { Ethereum: any } | string | Uint8Array, at?: Hash | string | Uint8Array) => Observable>; + /** + * Returns the total amount of unstaked tokens per block + **/ + pendingUnstakePerBlock: AugmentedRpc<(staker: PalletEvmAccountBasicCrossAccountIdRepr | { Substrate: any } | { Ethereum: any } | string | Uint8Array, at?: Hash | string | Uint8Array) => Observable>>>; + /** + * Returns the total amount of staked tokens + **/ + totalStaked: AugmentedRpc<(staker?: PalletEvmAccountBasicCrossAccountIdRepr | { Substrate: any } | { Ethereum: any } | string | Uint8Array, at?: Hash | string | Uint8Array) => Observable>; + /** + * Returns the total amount of staked tokens per block when staked + **/ + totalStakedPerBlock: AugmentedRpc<(staker: PalletEvmAccountBasicCrossAccountIdRepr | { Substrate: any } | { Ethereum: any } | string | Uint8Array, at?: Hash | string | Uint8Array) => Observable>>>; + }; author: { /** * Returns true if the keystore has private keys for the given public key and key type. 
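// ---------------------------------------------------------------------------
// Illustrative usage sketch (editor's note, not part of the generated file).
// The appPromotion RPC methods above are chain-specific: this sketch assumes
// the RPC definitions generated alongside these typings are registered on the
// Api instance at creation (omitted here for brevity) and uses the //Alice
// dev address as a placeholder staker:
import { ApiPromise, WsProvider } from '@polkadot/api';

async function stakingInfo(): Promise<void> {
  const api = await ApiPromise.create({ provider: new WsProvider('ws://127.0.0.1:9944') });
  const staker = { Substrate: '5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY' };
  const total = await api.rpc.appPromotion.totalStaked(staker);
  const pending = await api.rpc.appPromotion.pendingUnstake(staker);
  console.log('staked:', total.toString(), 'pending unstake:', pending.toString());
}
// ---------------------------------------------------------------------------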
@@ -57,11 +81,11 @@ declare module '@polkadot/rpc-core/types/jsonrpc' { /** * Submit and subscribe to watch an extrinsic until unsubscribed **/ - submitAndWatchExtrinsic: AugmentedRpc<(extrinsic: IExtrinsic) => Observable>; + submitAndWatchExtrinsic: AugmentedRpc<(extrinsic: Extrinsic | IExtrinsic | string | Uint8Array) => Observable>; /** * Submit a fully formatted extrinsic for block inclusion **/ - submitExtrinsic: AugmentedRpc<(extrinsic: IExtrinsic) => Observable>; + submitExtrinsic: AugmentedRpc<(extrinsic: Extrinsic | IExtrinsic | string | Uint8Array) => Observable>; }; babe: { /** @@ -137,22 +161,27 @@ declare module '@polkadot/rpc-core/types/jsonrpc' { }; contracts: { /** + * @deprecated Use the runtime interface `api.call.contractsApi.call` instead * Executes a call to a contract **/ call: AugmentedRpc<(callRequest: ContractCallRequest | { origin?: any; dest?: any; value?: any; gasLimit?: any; storageDepositLimit?: any; inputData?: any } | string | Uint8Array, at?: BlockHash | string | Uint8Array) => Observable>; /** + * @deprecated Use the runtime interface `api.call.contractsApi.getStorage` instead * Returns the value under a specified storage key in a contract **/ getStorage: AugmentedRpc<(address: AccountId | string | Uint8Array, key: H256 | string | Uint8Array, at?: BlockHash | string | Uint8Array) => Observable>>; /** + * @deprecated Use the runtime interface `api.call.contractsApi.instantiate` instead * Instantiate a new contract **/ instantiate: AugmentedRpc<(request: InstantiateRequest | { origin?: any; value?: any; gasLimit?: any; storageDepositLimit?: any; code?: any; data?: any; salt?: any } | string | Uint8Array, at?: BlockHash | string | Uint8Array) => Observable>; /** + * @deprecated Not available in newer versions of the contracts interfaces * Returns the projected time a given contract will be able to sustain paying its rent **/ rentProjection: AugmentedRpc<(address: AccountId | string | Uint8Array, at?: BlockHash | string | Uint8Array) => Observable>>; /** + * @deprecated Use the runtime interface `api.call.contractsApi.uploadCode` instead * Upload new code without instantiating a contract from it **/ uploadCode: AugmentedRpc<(uploadRequest: CodeUploadRequest | { origin?: any; code?: any; storageDepositLimit?: any } | string | Uint8Array, at?: BlockHash | string | Uint8Array) => Observable>; @@ -198,6 +227,10 @@ declare module '@polkadot/rpc-core/types/jsonrpc' { * Estimate gas needed for execution of given contract. **/ estimateGas: AugmentedRpc<(request: EthCallRequest | { from?: any; to?: any; gasPrice?: any; gas?: any; value?: any; data?: any; nonce?: any } | string | Uint8Array, number?: BlockNumber | AnyNumber | Uint8Array) => Observable>; + /** + * Returns fee history for given block count & reward percentiles + **/ + feeHistory: AugmentedRpc<(blockCount: U256 | AnyNumber | Uint8Array, newestBlock: BlockNumber | AnyNumber | Uint8Array, rewardPercentiles: Option> | null | Uint8Array | Vec | (f64)[]) => Observable>; /** * Returns current gas price. **/ @@ -290,6 +323,10 @@ declare module '@polkadot/rpc-core/types/jsonrpc' { * Returns the number of hashes per second that the node is mining with. **/ hashrate: AugmentedRpc<() => Observable>; + /** + * Returns max priority fee per gas + **/ + maxPriorityFeePerGas: AugmentedRpc<() => Observable>; /** * Returns true if client is actively mining new blocks. 
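// ---------------------------------------------------------------------------
// Illustrative usage sketch (editor's note, not part of the generated file).
// The eth_feeHistory / eth_maxPriorityFeePerGas entries above follow the
// standard Frontier-style Ethereum RPC. A sketch, assuming a local dev node
// that exposes the eth RPC namespace:
import { ApiPromise, WsProvider } from '@polkadot/api';

async function feeInfo(): Promise<void> {
  const api = await ApiPromise.create({ provider: new WsProvider('ws://127.0.0.1:9944') });
  const header = await api.rpc.chain.getHeader();
  // Fee history for the last 3 blocks, no reward percentiles requested.
  const history = await api.rpc.eth.feeHistory(3, header.number.unwrap().toNumber(), null);
  const tip = await api.rpc.eth.maxPriorityFeePerGas();
  console.log(history.toHuman(), tip.toString());
}
// ---------------------------------------------------------------------------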
**/ @@ -449,7 +486,7 @@ declare module '@polkadot/rpc-core/types/jsonrpc' { /** * Get Theme's keys values **/ - themes: AugmentedRpc<(baseId: u32 | AnyNumber | Uint8Array, themeName: Text | string, keys: Option> | null | object | string | Uint8Array, at?: Hash | string | Uint8Array) => Observable>>; + themes: AugmentedRpc<(baseId: u32 | AnyNumber | Uint8Array, themeName: Text | string, keys: Option> | null | Uint8Array | Vec | (Text | string)[], at?: Hash | string | Uint8Array) => Observable>>; }; rpc: { /** @@ -483,6 +520,7 @@ declare module '@polkadot/rpc-core/types/jsonrpc' { **/ getChildStorageSize: AugmentedRpc<(childStorageKey: StorageKey | string | Uint8Array | any, childDefinition: StorageKey | string | Uint8Array | any, childType: u32 | AnyNumber | Uint8Array, key: StorageKey | string | Uint8Array | any, at?: BlockHash | string | Uint8Array) => Observable>; /** + * @deprecated Use `api.rpc.state.getKeysPaged` to retrieve keys * Retrieves the keys with a certain prefix **/ getKeys: AugmentedRpc<(key: StorageKey | string | Uint8Array | any, at?: BlockHash | string | Uint8Array) => Observable>>; @@ -495,6 +533,7 @@ declare module '@polkadot/rpc-core/types/jsonrpc' { **/ getMetadata: AugmentedRpc<(at?: BlockHash | string | Uint8Array) => Observable>; /** + * @deprecated Use `api.rpc.state.getKeysPaged` to retrieve keys * Returns the keys with prefix, leave empty to get all the keys (deprecated: Use getKeysPaged) **/ getPairs: AugmentedRpc<(prefix: StorageKey | string | Uint8Array | any, at?: BlockHash | string | Uint8Array) => Observable>>; @@ -537,7 +576,7 @@ declare module '@polkadot/rpc-core/types/jsonrpc' { /** * Provides a way to trace the re-execution of a single block **/ - traceBlock: AugmentedRpc<(block: Hash | string | Uint8Array, targets: Option | null | object | string | Uint8Array, storageKeys: Option | null | object | string | Uint8Array, methods: Option | null | object | string | Uint8Array) => Observable>; + traceBlock: AugmentedRpc<(block: Hash | string | Uint8Array, targets: Option | null | Uint8Array | Text | string, storageKeys: Option | null | Uint8Array | Text | string, methods: Option | null | Uint8Array | Text | string) => Observable>; /** * Check current migration state **/ @@ -629,47 +668,47 @@ declare module '@polkadot/rpc-core/types/jsonrpc' { }; unique: { /** - * Get amount of different user tokens + * Get the amount of any user tokens owned by an account **/ accountBalance: AugmentedRpc<(collection: u32 | AnyNumber | Uint8Array, account: PalletEvmAccountBasicCrossAccountIdRepr | { Substrate: any } | { Ethereum: any } | string | Uint8Array, at?: Hash | string | Uint8Array) => Observable>; /** - * Get tokens owned by account + * Get tokens owned by an account in a collection **/ accountTokens: AugmentedRpc<(collection: u32 | AnyNumber | Uint8Array, account: PalletEvmAccountBasicCrossAccountIdRepr | { Substrate: any } | { Ethereum: any } | string | Uint8Array, at?: Hash | string | Uint8Array) => Observable>>; /** - * Get admin list + * Get the list of admin accounts of a collection **/ adminlist: AugmentedRpc<(collection: u32 | AnyNumber | Uint8Array, at?: Hash | string | Uint8Array) => Observable>>; /** - * Get allowed amount + * Get the amount of currently possible sponsored transactions on a token for the fee to be taken off a sponsor **/ allowance: AugmentedRpc<(collection: u32 | AnyNumber | Uint8Array, sender: PalletEvmAccountBasicCrossAccountIdRepr | { Substrate: any } | { Ethereum: any } | string | Uint8Array, spender: 
PalletEvmAccountBasicCrossAccountIdRepr | { Substrate: any } | { Ethereum: any } | string | Uint8Array, tokenId: u32 | AnyNumber | Uint8Array, at?: Hash | string | Uint8Array) => Observable>; /** - * Check if user is allowed to use collection + * Check if a user is allowed to operate within a collection **/ allowed: AugmentedRpc<(collection: u32 | AnyNumber | Uint8Array, account: PalletEvmAccountBasicCrossAccountIdRepr | { Substrate: any } | { Ethereum: any } | string | Uint8Array, at?: Hash | string | Uint8Array) => Observable>; /** - * Get allowlist + * Get the list of accounts allowed to operate within a collection **/ allowlist: AugmentedRpc<(collection: u32 | AnyNumber | Uint8Array, at?: Hash | string | Uint8Array) => Observable>>; /** - * Get amount of specific account token + * Get the amount of a specific token owned by an account **/ balance: AugmentedRpc<(collection: u32 | AnyNumber | Uint8Array, account: PalletEvmAccountBasicCrossAccountIdRepr | { Substrate: any } | { Ethereum: any } | string | Uint8Array, tokenId: u32 | AnyNumber | Uint8Array, at?: Hash | string | Uint8Array) => Observable>; /** - * Get collection by specified id + * Get a collection by the specified ID **/ collectionById: AugmentedRpc<(collection: u32 | AnyNumber | Uint8Array, at?: Hash | string | Uint8Array) => Observable>>; /** - * Get collection properties + * Get collection properties, optionally limited to the provided keys **/ - collectionProperties: AugmentedRpc<(collection: u32 | AnyNumber | Uint8Array, propertyKeys?: Vec | (Text | string)[], at?: Hash | string | Uint8Array) => Observable>>; + collectionProperties: AugmentedRpc<(collection: u32 | AnyNumber | Uint8Array, propertyKeys?: Option> | null | Uint8Array | Vec | (Text | string)[], at?: Hash | string | Uint8Array) => Observable>>; /** - * Get collection stats + * Get chain stats about collections **/ collectionStats: AugmentedRpc<(at?: Hash | string | Uint8Array) => Observable>; /** - * Get tokens contained in collection + * Get tokens contained within a collection **/ collectionTokens: AugmentedRpc<(collection: u32 | AnyNumber | Uint8Array, at?: Hash | string | Uint8Array) => Observable>>; /** @@ -681,43 +720,51 @@ declare module '@polkadot/rpc-core/types/jsonrpc' { **/ effectiveCollectionLimits: AugmentedRpc<(collection: u32 | AnyNumber | Uint8Array, at?: Hash | string | Uint8Array) => Observable>>; /** - * Get last token id + * Get the last token ID created in a collection **/ lastTokenId: AugmentedRpc<(collection: u32 | AnyNumber | Uint8Array, at?: Hash | string | Uint8Array) => Observable>; /** - * Get number of blocks when sponsored transaction is available + * Get the number of blocks until sponsoring a transaction is available **/ nextSponsored: AugmentedRpc<(collection: u32 | AnyNumber | Uint8Array, account: PalletEvmAccountBasicCrossAccountIdRepr | { Substrate: any } | { Ethereum: any } | string | Uint8Array, tokenId: u32 | AnyNumber | Uint8Array, at?: Hash | string | Uint8Array) => Observable>>; /** - * Get property permissions + * Get property permissions, optionally limited to the provided keys **/ - propertyPermissions: AugmentedRpc<(collection: u32 | AnyNumber | Uint8Array, propertyKeys?: Vec | (Text | string)[], at?: Hash | string | Uint8Array) => Observable>>; + propertyPermissions: AugmentedRpc<(collection: u32 | AnyNumber | Uint8Array, propertyKeys?: Option> | null | Uint8Array | Vec | (Text | string)[], at?: Hash | string | Uint8Array) => Observable>>; /** * Get tokens nested directly into the token **/ tokenChildren: 
AugmentedRpc<(collection: u32 | AnyNumber | Uint8Array, tokenId: u32 | AnyNumber | Uint8Array, at?: Hash | string | Uint8Array) => Observable>>; /** - * Get token data + * Get token data, including properties, optionally limited to the provided keys, and total pieces for an RFT **/ - tokenData: AugmentedRpc<(collection: u32 | AnyNumber | Uint8Array, tokenId: u32 | AnyNumber | Uint8Array, propertyKeys?: Vec | (Text | string)[], at?: Hash | string | Uint8Array) => Observable>; + tokenData: AugmentedRpc<(collection: u32 | AnyNumber | Uint8Array, tokenId: u32 | AnyNumber | Uint8Array, propertyKeys?: Option> | null | Uint8Array | Vec | (Text | string)[], at?: Hash | string | Uint8Array) => Observable>; /** - * Check if token exists + * Check if the token exists **/ tokenExists: AugmentedRpc<(collection: u32 | AnyNumber | Uint8Array, tokenId: u32 | AnyNumber | Uint8Array, at?: Hash | string | Uint8Array) => Observable>; /** - * Get token owner + * Get the token owner **/ tokenOwner: AugmentedRpc<(collection: u32 | AnyNumber | Uint8Array, tokenId: u32 | AnyNumber | Uint8Array, at?: Hash | string | Uint8Array) => Observable>>; /** - * Get token properties + * Returns 10 tokens owners in no particular order + **/ + tokenOwners: AugmentedRpc<(collection: u32 | AnyNumber | Uint8Array, tokenId: u32 | AnyNumber | Uint8Array, at?: Hash | string | Uint8Array) => Observable>>; + /** + * Get token properties, optionally limited to the provided keys **/ - tokenProperties: AugmentedRpc<(collection: u32 | AnyNumber | Uint8Array, tokenId: u32 | AnyNumber | Uint8Array, propertyKeys?: Vec | (Text | string)[], at?: Hash | string | Uint8Array) => Observable>>; + tokenProperties: AugmentedRpc<(collection: u32 | AnyNumber | Uint8Array, tokenId: u32 | AnyNumber | Uint8Array, propertyKeys?: Option> | null | Uint8Array | Vec | (Text | string)[], at?: Hash | string | Uint8Array) => Observable>>; /** - * Get token owner, in case of nested token - find parent recursive + * Get the topmost token owner in the hierarchy of a possibly nested token **/ topmostTokenOwner: AugmentedRpc<(collection: u32 | AnyNumber | Uint8Array, tokenId: u32 | AnyNumber | Uint8Array, at?: Hash | string | Uint8Array) => Observable>>; /** - * Get amount of unique collection tokens + * Get the total amount of pieces of an RFT + **/ + totalPieces: AugmentedRpc<(collection: u32 | AnyNumber | Uint8Array, tokenId: u32 | AnyNumber | Uint8Array, at?: Hash | string | Uint8Array) => Observable>>; + /** + * Get the amount of distinctive tokens present in a collection **/ totalSupply: AugmentedRpc<(collection: u32 | AnyNumber | Uint8Array, at?: Hash | string | Uint8Array) => Observable>; /** diff --git a/tests/src/interfaces/augment-api-tx.ts b/tests/src/interfaces/augment-api-tx.ts index 3bd2143e52..08a298b8e2 100644 --- a/tests/src/interfaces/augment-api-tx.ts +++ b/tests/src/interfaces/augment-api-tx.ts @@ -1,14 +1,124 @@ // Auto-generated via `yarn polkadot-types-from-chain`, do not edit /* eslint-disable */ -import type { ApiTypes } from '@polkadot/api-base/types'; +// import type lookup before we augment - in some environments +// this is required to allow for ambient/previous definitions +import '@polkadot/api-base/types/submittable'; + +import type { ApiTypes, AugmentedSubmittable, SubmittableExtrinsic, SubmittableExtrinsicFunction } from '@polkadot/api-base/types'; import type { Bytes, Compact, Option, U256, U8aFixed, Vec, bool, u128, u16, u32, u64, u8 } from '@polkadot/types-codec'; import type { AnyNumber, IMethod, ITuple } from 
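// ---------------------------------------------------------------------------
// Illustrative usage sketch (editor's note, not part of the generated file).
// The unique.* RPC methods above take optional property-key filters and an
// optional block hash. A sketch, assuming the chain-specific RPC definitions
// are registered on the Api instance; collection/token IDs are placeholders:
import { ApiPromise, WsProvider } from '@polkadot/api';

async function tokenInfo(): Promise<void> {
  const api = await ApiPromise.create({ provider: new WsProvider('ws://127.0.0.1:9944') });
  // Full token data; pass a key list instead of null to limit returned properties.
  const data = await api.rpc.unique.tokenData(1, 1, null);
  const owners = await api.rpc.unique.tokenOwners(1, 1);
  const supply = await api.rpc.unique.totalSupply(1);
  console.log(data.toHuman(), owners.toHuman(), supply.toNumber());
}
// ---------------------------------------------------------------------------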
'@polkadot/types-codec/types'; -import type { AccountId32, Call, H160, H256, MultiAddress, Perbill, Permill } from '@polkadot/types/interfaces/runtime'; -import type { CumulusPrimitivesParachainInherentParachainInherentData, EthereumTransactionTransactionV2, FrameSupportScheduleMaybeHashed, OrmlVestingVestingSchedule, PalletEvmAccountBasicCrossAccountIdRepr, RmrkTraitsNftAccountIdOrCollectionNftTuple, RmrkTraitsPartEquippableList, RmrkTraitsPartPartType, RmrkTraitsResourceBasicResource, RmrkTraitsResourceComposableResource, RmrkTraitsResourceResourceTypes, RmrkTraitsResourceSlotResource, RmrkTraitsTheme, UpDataStructsCollectionLimits, UpDataStructsCollectionMode, UpDataStructsCollectionPermissions, UpDataStructsCreateCollectionData, UpDataStructsCreateItemData, UpDataStructsCreateItemExData, UpDataStructsProperty, UpDataStructsPropertyKeyPermission, XcmV1MultiLocation, XcmV2WeightLimit, XcmVersionedMultiAssets, XcmVersionedMultiLocation, XcmVersionedXcm } from '@polkadot/types/lookup'; +import type { AccountId32, Call, H160, H256, MultiAddress, Perbill, Permill, Weight } from '@polkadot/types/interfaces/runtime'; +import type { CumulusPrimitivesParachainInherentParachainInherentData, EthereumTransactionTransactionV2, FrameSupportScheduleMaybeHashed, OrmlVestingVestingSchedule, PalletEvmAccountBasicCrossAccountIdRepr, PalletForeignAssetsAssetIds, PalletForeignAssetsModuleAssetMetadata, RmrkTraitsNftAccountIdOrCollectionNftTuple, RmrkTraitsPartEquippableList, RmrkTraitsPartPartType, RmrkTraitsResourceBasicResource, RmrkTraitsResourceComposableResource, RmrkTraitsResourceResourceTypes, RmrkTraitsResourceSlotResource, RmrkTraitsTheme, UpDataStructsCollectionLimits, UpDataStructsCollectionMode, UpDataStructsCollectionPermissions, UpDataStructsCreateCollectionData, UpDataStructsCreateItemData, UpDataStructsCreateItemExData, UpDataStructsProperty, UpDataStructsPropertyKeyPermission, XcmV1MultiLocation, XcmV2WeightLimit, XcmVersionedMultiAsset, XcmVersionedMultiAssets, XcmVersionedMultiLocation, XcmVersionedXcm } from '@polkadot/types/lookup'; + +export type __AugmentedSubmittable = AugmentedSubmittable<() => unknown>; +export type __SubmittableExtrinsic = SubmittableExtrinsic; +export type __SubmittableExtrinsicFunction = SubmittableExtrinsicFunction; declare module '@polkadot/api-base/types/submittable' { - export interface AugmentedSubmittables { + interface AugmentedSubmittables { + appPromotion: { + /** + * Recalculates interest for the specified number of stakers. + * If all stakers are not recalculated, the next call of the extrinsic + * will continue the recalculation, from those stakers for whom this + * was not perform in last call. + * + * # Permissions + * + * * Pallet admin + * + * # Arguments + * + * * `stakers_number`: the number of stakers for which recalculation will be performed + **/ + payoutStakers: AugmentedSubmittable<(stakersNumber: Option | null | Uint8Array | u8 | AnyNumber) => SubmittableExtrinsic, [Option]>; + /** + * Sets an address as the the admin. + * + * # Permissions + * + * * Sudo + * + * # Arguments + * + * * `admin`: account of the new admin. + **/ + setAdminAddress: AugmentedSubmittable<(admin: PalletEvmAccountBasicCrossAccountIdRepr | { Substrate: any } | { Ethereum: any } | string | Uint8Array) => SubmittableExtrinsic, [PalletEvmAccountBasicCrossAccountIdRepr]>; + /** + * Sets the pallet to be the sponsor for the collection. 
+ * + * # Permissions + * + * * Pallet admin + * + * # Arguments + * + * * `collection_id`: ID of the collection that will be sponsored by `pallet_id` + **/ + sponsorCollection: AugmentedSubmittable<(collectionId: u32 | AnyNumber | Uint8Array) => SubmittableExtrinsic, [u32]>; + /** + * Sets the pallet to be the sponsor for the contract. + * + * # Permissions + * + * * Pallet admin + * + * # Arguments + * + * * `contract_id`: the contract address that will be sponsored by `pallet_id` + **/ + sponsorContract: AugmentedSubmittable<(contractId: H160 | string | Uint8Array) => SubmittableExtrinsic, [H160]>; + /** + * Stakes the amount of native tokens. + * Sets `amount` to the locked state. + * The maximum number of stakes for a staker is 10. + * + * # Arguments + * + * * `amount`: in native tokens. + **/ + stake: AugmentedSubmittable<(amount: u128 | AnyNumber | Uint8Array) => SubmittableExtrinsic, [u128]>; + /** + * Removes the pallet as the sponsor for the collection. + * Returns [`NoPermission`][`Error::NoPermission`] + * if the pallet wasn't the sponsor. + * + * # Permissions + * + * * Pallet admin + * + * # Arguments + * + * * `collection_id`: ID of the collection that is sponsored by `pallet_id` + **/ + stopSponsoringCollection: AugmentedSubmittable<(collectionId: u32 | AnyNumber | Uint8Array) => SubmittableExtrinsic, [u32]>; + /** + * Removes the pallet as the sponsor for the contract. + * Returns [`NoPermission`][`Error::NoPermission`] + * if the pallet wasn't the sponsor. + * + * # Permissions + * + * * Pallet admin + * + * # Arguments + * + * * `contract_id`: the contract address that is sponsored by `pallet_id` + **/ + stopSponsoringContract: AugmentedSubmittable<(contractId: H160 | string | Uint8Array) => SubmittableExtrinsic, [H160]>; + /** + * Unstakes all stakes. + * Moves the sum of all stakes to the `reserved` state. + * After the end of `PendingInterval` this sum becomes completely + * free for further use. + **/ + unstake: AugmentedSubmittable<() => SubmittableExtrinsic, []>; + /** + * Generic tx + **/ + [key: string]: SubmittableExtrinsicFunction; + }; balances: { /** * Exactly as `transfer`, except the origin must be root and the source account may be @@ -104,6 +214,14 @@ declare module '@polkadot/api-base/types/submittable' { **/ [key: string]: SubmittableExtrinsicFunction; }; + configuration: { + setMinGasPriceOverride: AugmentedSubmittable<(coeff: Option | null | Uint8Array | u64 | AnyNumber) => SubmittableExtrinsic, [Option]>; + setWeightToFeeCoefficientOverride: AugmentedSubmittable<(coeff: Option | null | Uint8Array | u32 | AnyNumber) => SubmittableExtrinsic, [Option]>; + /** + * Generic tx + **/ + [key: string]: SubmittableExtrinsicFunction; + }; cumulusXcm: { /** * Generic tx @@ -125,7 +243,7 @@ declare module '@polkadot/api-base/types/submittable' { * Events: * - `OverweightServiced`: On success. **/ - serviceOverweight: AugmentedSubmittable<(index: u64 | AnyNumber | Uint8Array, weightLimit: u64 | AnyNumber | Uint8Array) => SubmittableExtrinsic, [u64, u64]>; + serviceOverweight: AugmentedSubmittable<(index: u64 | AnyNumber | Uint8Array, weightLimit: Weight | AnyNumber | Uint8Array) => SubmittableExtrinsic, [u64, Weight]>; /** * Generic tx **/ @@ -145,16 +263,16 @@ declare module '@polkadot/api-base/types/submittable' { /** * Issue an EVM call operation. This is similar to a message call transaction in Ethereum. 
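// ---------------------------------------------------------------------------
// Illustrative usage sketch (editor's note, not part of the generated file).
// The appPromotion extrinsics above are ordinary signed calls. A sketch of
// staking from the //Alice dev account; the amount and the assumption of
// 18 token decimals are placeholders:
import { ApiPromise, WsProvider } from '@polkadot/api';
import { Keyring } from '@polkadot/keyring';

async function stakeSome(): Promise<void> {
  const api = await ApiPromise.create({ provider: new WsProvider('ws://127.0.0.1:9944') });
  const alice = new Keyring({ type: 'sr25519' }).addFromUri('//Alice');
  // Lock 100 tokens (assuming 18 decimals) through the app-promotion pallet.
  const amount = 100n * 10n ** 18n;
  const unsub = await api.tx.appPromotion.stake(amount).signAndSend(alice, ({ status }) => {
    if (status.isInBlock) {
      console.log('included in', status.asInBlock.toHex());
      unsub();
    }
  });
}
// ---------------------------------------------------------------------------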
**/ - call: AugmentedSubmittable<(source: H160 | string | Uint8Array, target: H160 | string | Uint8Array, input: Bytes | string | Uint8Array, value: U256 | AnyNumber | Uint8Array, gasLimit: u64 | AnyNumber | Uint8Array, maxFeePerGas: U256 | AnyNumber | Uint8Array, maxPriorityFeePerGas: Option | null | object | string | Uint8Array, nonce: Option | null | object | string | Uint8Array, accessList: Vec]>> | ([H160 | string | Uint8Array, Vec | (H256 | string | Uint8Array)[]])[]) => SubmittableExtrinsic, [H160, H160, Bytes, U256, u64, U256, Option, Option, Vec]>>]>; + call: AugmentedSubmittable<(source: H160 | string | Uint8Array, target: H160 | string | Uint8Array, input: Bytes | string | Uint8Array, value: U256 | AnyNumber | Uint8Array, gasLimit: u64 | AnyNumber | Uint8Array, maxFeePerGas: U256 | AnyNumber | Uint8Array, maxPriorityFeePerGas: Option | null | Uint8Array | U256 | AnyNumber, nonce: Option | null | Uint8Array | U256 | AnyNumber, accessList: Vec]>> | ([H160 | string | Uint8Array, Vec | (H256 | string | Uint8Array)[]])[]) => SubmittableExtrinsic, [H160, H160, Bytes, U256, u64, U256, Option, Option, Vec]>>]>; /** * Issue an EVM create operation. This is similar to a contract creation transaction in * Ethereum. **/ - create: AugmentedSubmittable<(source: H160 | string | Uint8Array, init: Bytes | string | Uint8Array, value: U256 | AnyNumber | Uint8Array, gasLimit: u64 | AnyNumber | Uint8Array, maxFeePerGas: U256 | AnyNumber | Uint8Array, maxPriorityFeePerGas: Option | null | object | string | Uint8Array, nonce: Option | null | object | string | Uint8Array, accessList: Vec]>> | ([H160 | string | Uint8Array, Vec | (H256 | string | Uint8Array)[]])[]) => SubmittableExtrinsic, [H160, Bytes, U256, u64, U256, Option, Option, Vec]>>]>; + create: AugmentedSubmittable<(source: H160 | string | Uint8Array, init: Bytes | string | Uint8Array, value: U256 | AnyNumber | Uint8Array, gasLimit: u64 | AnyNumber | Uint8Array, maxFeePerGas: U256 | AnyNumber | Uint8Array, maxPriorityFeePerGas: Option | null | Uint8Array | U256 | AnyNumber, nonce: Option | null | Uint8Array | U256 | AnyNumber, accessList: Vec]>> | ([H160 | string | Uint8Array, Vec | (H256 | string | Uint8Array)[]])[]) => SubmittableExtrinsic, [H160, Bytes, U256, u64, U256, Option, Option, Vec]>>]>; /** * Issue an EVM create2 operation. **/ - create2: AugmentedSubmittable<(source: H160 | string | Uint8Array, init: Bytes | string | Uint8Array, salt: H256 | string | Uint8Array, value: U256 | AnyNumber | Uint8Array, gasLimit: u64 | AnyNumber | Uint8Array, maxFeePerGas: U256 | AnyNumber | Uint8Array, maxPriorityFeePerGas: Option | null | object | string | Uint8Array, nonce: Option | null | object | string | Uint8Array, accessList: Vec]>> | ([H160 | string | Uint8Array, Vec | (H256 | string | Uint8Array)[]])[]) => SubmittableExtrinsic, [H160, Bytes, H256, U256, u64, U256, Option, Option, Vec]>>]>; + create2: AugmentedSubmittable<(source: H160 | string | Uint8Array, init: Bytes | string | Uint8Array, salt: H256 | string | Uint8Array, value: U256 | AnyNumber | Uint8Array, gasLimit: u64 | AnyNumber | Uint8Array, maxFeePerGas: U256 | AnyNumber | Uint8Array, maxPriorityFeePerGas: Option | null | Uint8Array | U256 | AnyNumber, nonce: Option | null | Uint8Array | U256 | AnyNumber, accessList: Vec]>> | ([H160 | string | Uint8Array, Vec | (H256 | string | Uint8Array)[]])[]) => SubmittableExtrinsic, [H160, Bytes, H256, U256, u64, U256, Option, Option, Vec]>>]>; /** * Withdraw balance from EVM into currency/balances pallet. 
**/ @@ -165,14 +283,35 @@ declare module '@polkadot/api-base/types/submittable' { [key: string]: SubmittableExtrinsicFunction; }; evmMigration: { + /** + * Start contract migration, inserts contract stub at target address, + * and marks account as pending, allowing to insert storage + **/ begin: AugmentedSubmittable<(address: H160 | string | Uint8Array) => SubmittableExtrinsic, [H160]>; + /** + * Finish contract migration, allows it to be called. + * It is not possible to alter contract storage via [`Self::set_data`] + * after this call. + **/ finish: AugmentedSubmittable<(address: H160 | string | Uint8Array, code: Bytes | string | Uint8Array) => SubmittableExtrinsic, [H160, Bytes]>; + /** + * Insert items into contract storage, this method can be called + * multiple times + **/ setData: AugmentedSubmittable<(address: H160 | string | Uint8Array, data: Vec> | ([H256 | string | Uint8Array, H256 | string | Uint8Array])[]) => SubmittableExtrinsic, [H160, Vec>]>; /** * Generic tx **/ [key: string]: SubmittableExtrinsicFunction; }; + foreignAssets: { + registerForeignAsset: AugmentedSubmittable<(owner: AccountId32 | string | Uint8Array, location: XcmVersionedMultiLocation | { V0: any } | { V1: any } | string | Uint8Array, metadata: PalletForeignAssetsModuleAssetMetadata | { name?: any; symbol?: any; decimals?: any; minimalBalance?: any } | string | Uint8Array) => SubmittableExtrinsic, [AccountId32, XcmVersionedMultiLocation, PalletForeignAssetsModuleAssetMetadata]>; + updateForeignAsset: AugmentedSubmittable<(foreignAssetId: u32 | AnyNumber | Uint8Array, location: XcmVersionedMultiLocation | { V0: any } | { V1: any } | string | Uint8Array, metadata: PalletForeignAssetsModuleAssetMetadata | { name?: any; symbol?: any; decimals?: any; minimalBalance?: any } | string | Uint8Array) => SubmittableExtrinsic, [u32, XcmVersionedMultiLocation, PalletForeignAssetsModuleAssetMetadata]>; + /** + * Generic tx + **/ + [key: string]: SubmittableExtrinsicFunction; + }; inflation: { /** * This method sets the inflation start date. Can be only called once. @@ -236,7 +375,7 @@ declare module '@polkadot/api-base/types/submittable' { * NOTE: A successful return to this does *not* imply that the `msg` was executed successfully * to completion; only that *some* of it was executed. **/ - execute: AugmentedSubmittable<(message: XcmVersionedXcm | { V0: any } | { V1: any } | { V2: any } | string | Uint8Array, maxWeight: u64 | AnyNumber | Uint8Array) => SubmittableExtrinsic, [XcmVersionedXcm, u64]>; + execute: AugmentedSubmittable<(message: XcmVersionedXcm | { V0: any } | { V1: any } | { V2: any } | string | Uint8Array, maxWeight: Weight | AnyNumber | Uint8Array) => SubmittableExtrinsic, [XcmVersionedXcm, Weight]>; /** * Set a safe XCM version (the version that XCM should be encoded with if the most recent * version a destination can accept is unknown). @@ -244,7 +383,7 @@ declare module '@polkadot/api-base/types/submittable' { * - `origin`: Must be Root. * - `maybe_xcm_version`: The default XCM encoding version, or `None` to disable. **/ - forceDefaultXcmVersion: AugmentedSubmittable<(maybeXcmVersion: Option | null | object | string | Uint8Array) => SubmittableExtrinsic, [Option]>; + forceDefaultXcmVersion: AugmentedSubmittable<(maybeXcmVersion: Option | null | Uint8Array | u32 | AnyNumber) => SubmittableExtrinsic, [Option]>; /** * Ask a location to notify us regarding their XCM version and any changes to it. 
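// ---------------------------------------------------------------------------
// Illustrative usage sketch (editor's note, not part of the generated file).
// registerForeignAsset is a privileged call, so this sketch wraps it in sudo
// (an assumption: it presumes the dev runtime exposes pallet-sudo with //Alice
// as the key; adjust to the origin the runtime actually expects). The owner,
// location and metadata values are placeholders:
import { ApiPromise, WsProvider } from '@polkadot/api';
import { Keyring } from '@polkadot/keyring';

async function registerAsset(): Promise<void> {
  const api = await ApiPromise.create({ provider: new WsProvider('ws://127.0.0.1:9944') });
  const sudoKey = new Keyring({ type: 'sr25519' }).addFromUri('//Alice');
  const call = api.tx.foreignAssets.registerForeignAsset(
    sudoKey.address,                                              // owner
    { V1: { parents: 1, interior: 'Here' } },                     // relay-chain location
    { name: 'Relay', symbol: 'RLY', decimals: 12, minimalBalance: 1n }, // metadata
  );
  await api.tx.sudo.sudo(call).signAndSend(sudoKey);
}
// ---------------------------------------------------------------------------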
* @@ -356,105 +495,279 @@ declare module '@polkadot/api-base/types/submittable' { }; rmrkCore: { /** - * Accepts an NFT sent from another account to self or owned NFT + * Accept an NFT sent from another account to self or an owned NFT. + * + * The NFT in question must be pending, and, thus, be [sent](`Pallet::send`) first. + * + * # Permissions: + * - Token-owner-to-be * - * Parameters: + * # Arguments: * - `origin`: sender of the transaction - * - `rmrk_collection_id`: collection id of the nft to be accepted - * - `rmrk_nft_id`: nft id of the nft to be accepted - * - `new_owner`: either origin's account ID or origin-owned NFT, whichever the NFT was - * sent to + * - `rmrk_collection_id`: RMRK collection ID of the NFT to be accepted. + * - `rmrk_nft_id`: ID of the NFT to be accepted. + * - `new_owner`: Either the sender's account ID or a sender-owned NFT, + * whichever the accepted NFT was sent to. **/ acceptNft: AugmentedSubmittable<(rmrkCollectionId: u32 | AnyNumber | Uint8Array, rmrkNftId: u32 | AnyNumber | Uint8Array, newOwner: RmrkTraitsNftAccountIdOrCollectionNftTuple | { AccountId: any } | { CollectionAndNftTuple: any } | string | Uint8Array) => SubmittableExtrinsic, [u32, u32, RmrkTraitsNftAccountIdOrCollectionNftTuple]>; /** + * Accept the addition of a newly created pending resource to an existing NFT. + * + * This transaction is needed when a resource is created and assigned to an NFT + * by a non-owner, i.e. the collection issuer, with one of the + * [`add_...` transactions](Pallet::add_basic_resource). + * + * # Permissions: + * - Token owner + * + * # Arguments: + * - `origin`: sender of the transaction + * - `rmrk_collection_id`: RMRK collection ID of the NFT. + * - `rmrk_nft_id`: ID of the NFT with a pending resource to be accepted. + * - `resource_id`: ID of the newly created pending resource. * accept the addition of a new resource to an existing NFT **/ acceptResource: AugmentedSubmittable<(rmrkCollectionId: u32 | AnyNumber | Uint8Array, rmrkNftId: u32 | AnyNumber | Uint8Array, resourceId: u32 | AnyNumber | Uint8Array) => SubmittableExtrinsic, [u32, u32, u32]>; /** - * accept the removal of a resource of an existing NFT + * Accept the removal of a removal-pending resource from an NFT. + * + * This transaction is needed when a non-owner, i.e. the collection issuer, + * requests a [removal](`Pallet::remove_resource`) of a resource from an NFT. + * + * # Permissions: + * - Token owner + * + * # Arguments: + * - `origin`: sender of the transaction + * - `rmrk_collection_id`: RMRK collection ID of the NFT. + * - `rmrk_nft_id`: ID of the NFT with a resource to be removed. + * - `resource_id`: ID of the removal-pending resource. **/ acceptResourceRemoval: AugmentedSubmittable<(rmrkCollectionId: u32 | AnyNumber | Uint8Array, rmrkNftId: u32 | AnyNumber | Uint8Array, resourceId: u32 | AnyNumber | Uint8Array) => SubmittableExtrinsic, [u32, u32, u32]>; /** - * Create basic resource + * Create and set/propose a basic resource for an NFT. + * + * A basic resource is the simplest, lacking a Base and anything that comes with it. + * See RMRK docs for more information and examples. + * + * # Permissions: + * - Collection issuer - if not the token owner, adding the resource will warrant + * the owner's [acceptance](Pallet::accept_resource). + * + * # Arguments: + * - `origin`: sender of the transaction + * - `rmrk_collection_id`: RMRK collection ID of the NFT. + * - `nft_id`: ID of the NFT to assign a resource to. + * - `resource`: Data of the resource to be created. 
**/ addBasicResource: AugmentedSubmittable<(rmrkCollectionId: u32 | AnyNumber | Uint8Array, nftId: u32 | AnyNumber | Uint8Array, resource: RmrkTraitsResourceBasicResource | { src?: any; metadata?: any; license?: any; thumb?: any } | string | Uint8Array) => SubmittableExtrinsic, [u32, u32, RmrkTraitsResourceBasicResource]>; /** - * Create composable resource + * Create and set/propose a composable resource for an NFT. + * + * A composable resource links to a Base and has a subset of its Parts it is composed of. + * See RMRK docs for more information and examples. + * + * # Permissions: + * - Collection issuer - if not the token owner, adding the resource will warrant + * the owner's [acceptance](Pallet::accept_resource). + * + * # Arguments: + * - `origin`: sender of the transaction + * - `rmrk_collection_id`: RMRK collection ID of the NFT. + * - `nft_id`: ID of the NFT to assign a resource to. + * - `resource`: Data of the resource to be created. **/ addComposableResource: AugmentedSubmittable<(rmrkCollectionId: u32 | AnyNumber | Uint8Array, nftId: u32 | AnyNumber | Uint8Array, resource: RmrkTraitsResourceComposableResource | { parts?: any; base?: any; src?: any; metadata?: any; license?: any; thumb?: any } | string | Uint8Array) => SubmittableExtrinsic, [u32, u32, RmrkTraitsResourceComposableResource]>; /** - * Create slot resource + * Create and set/propose a slot resource for an NFT. + * + * A slot resource links to a Base and a slot ID in it which it can fit into. + * See RMRK docs for more information and examples. + * + * # Permissions: + * - Collection issuer - if not the token owner, adding the resource will warrant + * the owner's [acceptance](Pallet::accept_resource). + * + * # Arguments: + * - `origin`: sender of the transaction + * - `rmrk_collection_id`: RMRK collection ID of the NFT. + * - `nft_id`: ID of the NFT to assign a resource to. + * - `resource`: Data of the resource to be created. **/ addSlotResource: AugmentedSubmittable<(rmrkCollectionId: u32 | AnyNumber | Uint8Array, nftId: u32 | AnyNumber | Uint8Array, resource: RmrkTraitsResourceSlotResource | { base?: any; src?: any; metadata?: any; slot?: any; license?: any; thumb?: any } | string | Uint8Array) => SubmittableExtrinsic, [u32, u32, RmrkTraitsResourceSlotResource]>; /** - * burn nft + * Burn an NFT, destroying it and its nested tokens up to the specified limit. + * If the burning budget is exceeded, the transaction is reverted. + * + * This is the way to burn a nested token as well. + * + * For more information, see [`burn_recursively`](pallet_nonfungible::pallet::Pallet::burn_recursively). + * + * # Permissions: + * * Token owner + * + * # Arguments: + * - `origin`: sender of the transaction + * - `collection_id`: RMRK ID of the collection in which the NFT to burn belongs to. + * - `nft_id`: ID of the NFT to be destroyed. + * - `max_burns`: Maximum number of tokens to burn, assuming nesting. The transaction + * is reverted if there are more tokens to burn in the nesting tree than this number. + * This is primarily a mechanism of transaction weight control. **/ burnNft: AugmentedSubmittable<(collectionId: u32 | AnyNumber | Uint8Array, nftId: u32 | AnyNumber | Uint8Array, maxBurns: u32 | AnyNumber | Uint8Array) => SubmittableExtrinsic, [u32, u32, u32]>; /** - * Change the issuer of a collection + * Change the issuer of a collection. Analogous to Unique's collection's [`owner`](up_data_structs::Collection). 
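// Editorial example: a collection issuer attaches a basic resource to an NFT
// and later burns a token with a nesting budget, per the addBasicResource and
// burnNft docs above. `api` and `issuer` are assumed to exist; the IDs, the
// ipfs URLs and the burn budget are illustrative.
import type { ApiPromise } from '@polkadot/api';
import type { KeyringPair } from '@polkadot/keyring/types';

async function issuerActions(api: ApiPromise, issuer: KeyringPair): Promise<void> {
  // If the issuer does not own NFT (1, 10), the token owner still has to
  // approve the new resource with rmrkCore.acceptResource.
  await api.tx.rmrkCore.addBasicResource(1, 10, {
    src: 'ipfs://QmSomeSource',       // hypothetical content pointers
    metadata: 'ipfs://QmSomeMetadata',
    license: null,
    thumb: null,
  }).signAndSend(issuer, { nonce: -1 });

  // Burn NFT (1, 10) with a burn budget of 10 for its nesting tree; the call
  // is reverted if more tokens would have to be burned than the budget allows.
  await api.tx.rmrkCore.burnNft(1, 10, 10).signAndSend(issuer, { nonce: -1 });
}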
+ * + * # Permissions: + * * Collection issuer * - * Parameters: + * # Arguments: * - `origin`: sender of the transaction - * - `collection_id`: collection id of the nft to change issuer of - * - `new_issuer`: Collection's new issuer + * - `collection_id`: RMRK collection ID to change the issuer of. + * - `new_issuer`: Collection's new issuer. **/ changeCollectionIssuer: AugmentedSubmittable<(collectionId: u32 | AnyNumber | Uint8Array, newIssuer: MultiAddress | { Id: any } | { Index: any } | { Raw: any } | { Address32: any } | { Address20: any } | string | Uint8Array) => SubmittableExtrinsic, [u32, MultiAddress]>; /** - * Create a collection + * Create a new collection of NFTs. + * + * # Permissions: + * * Anyone - will be assigned as the issuer of the collection. + * + * # Arguments: + * - `origin`: sender of the transaction + * - `metadata`: Metadata describing the collection, e.g. IPFS hash. Cannot be changed. + * - `max`: Optional maximum number of tokens. + * - `symbol`: UTF-8 string with token prefix, by which to represent the token in wallets and UIs. + * Analogous to Unique's [`token_prefix`](up_data_structs::Collection). Cannot be changed. **/ - createCollection: AugmentedSubmittable<(metadata: Bytes | string | Uint8Array, max: Option | null | object | string | Uint8Array, symbol: Bytes | string | Uint8Array) => SubmittableExtrinsic, [Bytes, Option, Bytes]>; + createCollection: AugmentedSubmittable<(metadata: Bytes | string | Uint8Array, max: Option | null | Uint8Array | u32 | AnyNumber, symbol: Bytes | string | Uint8Array) => SubmittableExtrinsic, [Bytes, Option, Bytes]>; /** - * destroy collection + * Destroy a collection. + * + * Only empty collections can be destroyed. If it has any tokens, they must be burned first. + * + * # Permissions: + * * Collection issuer + * + * # Arguments: + * - `origin`: sender of the transaction + * - `collection_id`: RMRK ID of the collection to destroy. **/ destroyCollection: AugmentedSubmittable<(collectionId: u32 | AnyNumber | Uint8Array) => SubmittableExtrinsic, [u32]>; /** - * lock collection + * "Lock" the collection and prevent new token creation. Cannot be undone. + * + * # Permissions: + * * Collection issuer + * + * # Arguments: + * - `origin`: sender of the transaction + * - `collection_id`: RMRK ID of the collection to lock. **/ lockCollection: AugmentedSubmittable<(collectionId: u32 | AnyNumber | Uint8Array) => SubmittableExtrinsic, [u32]>; /** - * Mints an NFT in the specified collection - * Sets metadata and the royalty attribute + * Mint an NFT in a specified collection. + * + * # Permissions: + * * Collection issuer * - * Parameters: - * - `collection_id`: The class of the asset to be minted. - * - `nft_id`: The nft value of the asset to be minted. - * - `recipient`: Receiver of the royalty - * - `royalty`: Permillage reward from each trade for the Recipient - * - `metadata`: Arbitrary data about an nft, e.g. IPFS hash - * - `transferable`: Ability to transfer this NFT + * # Arguments: + * - `origin`: sender of the transaction + * - `owner`: Owner account of the NFT. If set to None, defaults to the sender (collection issuer). + * - `collection_id`: RMRK collection ID for the NFT to be minted within. Cannot be changed. + * - `recipient`: Receiver account of the royalty. Has no effect if the `royalty_amount` is not set. Cannot be changed. + * - `royalty_amount`: Optional permillage reward from each trade for the `recipient`. Cannot be changed. + * - `metadata`: Arbitrary data about an NFT, e.g. IPFS hash. Cannot be changed. 
+ * - `transferable`: Can this NFT be transferred? Cannot be changed. + * - `resources`: Resource data to be added to the NFT immediately after minting. **/ - mintNft: AugmentedSubmittable<(owner: Option | null | object | string | Uint8Array, collectionId: u32 | AnyNumber | Uint8Array, recipient: Option | null | object | string | Uint8Array, royaltyAmount: Option | null | object | string | Uint8Array, metadata: Bytes | string | Uint8Array, transferable: bool | boolean | Uint8Array, resources: Option> | null | object | string | Uint8Array) => SubmittableExtrinsic, [Option, u32, Option, Option, Bytes, bool, Option>]>; + mintNft: AugmentedSubmittable<(owner: Option | null | Uint8Array | AccountId32 | string, collectionId: u32 | AnyNumber | Uint8Array, recipient: Option | null | Uint8Array | AccountId32 | string, royaltyAmount: Option | null | Uint8Array | Permill | AnyNumber, metadata: Bytes | string | Uint8Array, transferable: bool | boolean | Uint8Array, resources: Option> | null | Uint8Array | Vec | (RmrkTraitsResourceResourceTypes | { Basic: any } | { Composable: any } | { Slot: any } | string | Uint8Array)[]) => SubmittableExtrinsic, [Option, u32, Option, Option, Bytes, bool, Option>]>; /** - * Rejects an NFT sent from another account to self or owned NFT + * Reject an NFT sent from another account to self or owned NFT. + * The NFT in question will not be sent back and burnt instead. + * + * The NFT in question must be pending, and, thus, be [sent](`Pallet::send`) first. + * + * # Permissions: + * - Token-owner-to-be-not * - * Parameters: + * # Arguments: * - `origin`: sender of the transaction - * - `rmrk_collection_id`: collection id of the nft to be accepted - * - `rmrk_nft_id`: nft id of the nft to be accepted + * - `rmrk_collection_id`: RMRK ID of the NFT to be rejected. + * - `rmrk_nft_id`: ID of the NFT to be rejected. **/ rejectNft: AugmentedSubmittable<(rmrkCollectionId: u32 | AnyNumber | Uint8Array, rmrkNftId: u32 | AnyNumber | Uint8Array) => SubmittableExtrinsic, [u32, u32]>; /** - * remove resource + * Remove and erase a resource from an NFT. + * + * If the sender does not own the NFT, then it will be pending confirmation, + * and will have to be [accepted](Pallet::accept_resource_removal) by the token owner. + * + * # Permissions + * - Collection issuer + * + * # Arguments + * - `origin`: sender of the transaction + * - `rmrk_collection_id`: RMRK ID of a collection to which the NFT making use of the resource belongs to. + * - `nft_id`: ID of the NFT with a resource to be removed. + * - `resource_id`: ID of the resource to be removed. **/ removeResource: AugmentedSubmittable<(rmrkCollectionId: u32 | AnyNumber | Uint8Array, nftId: u32 | AnyNumber | Uint8Array, resourceId: u32 | AnyNumber | Uint8Array) => SubmittableExtrinsic, [u32, u32, u32]>; /** - * Transfers a NFT from an Account or NFT A to another Account or NFT B + * Transfer an NFT from an account/NFT A to another account/NFT B. + * The token must be transferable. Nesting cannot occur deeper than the [`NESTING_BUDGET`]. + * + * If the target owner is an NFT owned by another account, then the NFT will enter + * the pending state and will have to be accepted by the other account. 
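// Editorial example: create an RMRK collection and mint an NFT with a royalty,
// following the createCollection and mintNft argument lists above. `api` and
// `issuer` are assumed; the metadata strings, the 100-token cap, the assumed
// collection ID 1 and the 2.5% royalty are all made up.
import type { ApiPromise } from '@polkadot/api';
import type { KeyringPair } from '@polkadot/keyring/types';

async function createAndMint(api: ApiPromise, issuer: KeyringPair): Promise<void> {
  await api.tx.rmrkCore
    .createCollection('ipfs://QmCollectionMeta', 100, 'RMRKDEMO')
    .signAndSend(issuer, { nonce: -1 });

  await api.tx.rmrkCore.mintNft(
    null,               // owner: None, defaults to the sender (the issuer)
    1,                  // RMRK collection ID, assuming the collection above got ID 1
    issuer.address,     // royalty recipient
    25_000,             // royalty as Permill: 25_000 / 1_000_000 = 2.5%
    'ipfs://QmTokenMeta',
    true,               // transferable
    null,               // no resources attached at mint time
  ).signAndSend(issuer, { nonce: -1 });
}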
+ * + * # Permissions: + * - Token owner * - * Parameters: + * # Arguments: * - `origin`: sender of the transaction - * - `rmrk_collection_id`: collection id of the nft to be transferred - * - `rmrk_nft_id`: nft id of the nft to be transferred - * - `new_owner`: new owner of the nft which can be either an account or a NFT + * - `rmrk_collection_id`: RMRK ID of the collection of the NFT to be transferred. + * - `rmrk_nft_id`: ID of the NFT to be transferred. + * - `new_owner`: New owner of the nft which can be either an account or a NFT. **/ send: AugmentedSubmittable<(rmrkCollectionId: u32 | AnyNumber | Uint8Array, rmrkNftId: u32 | AnyNumber | Uint8Array, newOwner: RmrkTraitsNftAccountIdOrCollectionNftTuple | { AccountId: any } | { CollectionAndNftTuple: any } | string | Uint8Array) => SubmittableExtrinsic, [u32, u32, RmrkTraitsNftAccountIdOrCollectionNftTuple]>; /** - * set a different order of resource priority + * Set a different order of resource priorities for an NFT. Priorities can be used, + * for example, for order of rendering. + * + * Note that the priorities are not updated automatically, and are an empty vector + * by default. There is no pre-set definition for the order to be particular, + * it can be interpreted arbitrarily use-case by use-case. + * + * # Permissions: + * - Token owner + * + * # Arguments: + * - `origin`: sender of the transaction + * - `rmrk_collection_id`: RMRK collection ID of the NFT. + * - `rmrk_nft_id`: ID of the NFT to rearrange resource priorities for. + * - `priorities`: Ordered vector of resource IDs. **/ setPriority: AugmentedSubmittable<(rmrkCollectionId: u32 | AnyNumber | Uint8Array, rmrkNftId: u32 | AnyNumber | Uint8Array, priorities: Vec | (u32 | AnyNumber | Uint8Array)[]) => SubmittableExtrinsic, [u32, u32, Vec]>; /** - * set a custom value on an NFT + * Add or edit a custom user property, a key-value pair, describing the metadata + * of a token or a collection, on either one of these. + * + * Note that in this proxy implementation many details regarding RMRK are stored + * as scoped properties prefixed with "rmrk:", normally inaccessible + * to external transactions and RPCs. + * + * # Permissions: + * - Collection issuer - in case of collection property + * - Token owner - in case of NFT property + * + * # Arguments: + * - `origin`: sender of the transaction + * - `rmrk_collection_id`: RMRK collection ID. + * - `maybe_nft_id`: Optional ID of the NFT. If left empty, then the property is set for the collection. + * - `key`: Key of the custom property to be referenced by. + * - `value`: Value of the custom property to be stored. **/ - setProperty: AugmentedSubmittable<(rmrkCollectionId: Compact | AnyNumber | Uint8Array, maybeNftId: Option | null | object | string | Uint8Array, key: Bytes | string | Uint8Array, value: Bytes | string | Uint8Array) => SubmittableExtrinsic, [Compact, Option, Bytes, Bytes]>; + setProperty: AugmentedSubmittable<(rmrkCollectionId: Compact | AnyNumber | Uint8Array, maybeNftId: Option | null | Uint8Array | u32 | AnyNumber, key: Bytes | string | Uint8Array, value: Bytes | string | Uint8Array) => SubmittableExtrinsic, [Compact, Option, Bytes, Bytes]>; /** * Generic tx **/ @@ -462,32 +775,53 @@ declare module '@polkadot/api-base/types/submittable' { }; rmrkEquip: { /** - * Creates a new Base. - * Modeled after [base interaction](https://github.com/rmrk-team/rmrk-spec/blob/master/standards/rmrk2.0.0/interactions/base.md) + * Create a new Base. 
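// Editorial example: nest one NFT under another with send() and store a custom
// property on the collection with setProperty(), as documented above. IDs and
// the key/value pair are illustrative; `api` and `owner` are assumed, and the
// signer is expected to be both the token owner and the collection issuer.
import type { ApiPromise } from '@polkadot/api';
import type { KeyringPair } from '@polkadot/keyring/types';

async function nestAndAnnotate(api: ApiPromise, owner: KeyringPair): Promise<void> {
  // Send NFT (1, 2) into NFT (1, 1). If the target NFT belongs to another
  // account, the transfer stays pending until that account accepts it.
  await api.tx.rmrkCore
    .send(1, 2, { CollectionAndNftTuple: [1, 1] })
    .signAndSend(owner, { nonce: -1 });

  // A collection-level property (maybeNftId = null); pass a token ID instead
  // to attach the property to a single NFT.
  await api.tx.rmrkCore
    .setProperty(1, null, 'royaltyInfo', '2.5-percent')
    .signAndSend(owner, { nonce: -1 });
}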
* - * Parameters: - * - origin: Caller, will be assigned as the issuer of the Base - * - base_type: media type, e.g. "svg" - * - symbol: arbitrary client-chosen symbol - * - parts: array of Fixed and Slot parts composing the base, confined in length by - * RmrkPartsLimit + * Modeled after the [Base interaction](https://github.com/rmrk-team/rmrk-spec/blob/master/standards/rmrk2.0.0/interactions/base.md) + * + * # Permissions + * - Anyone - will be assigned as the issuer of the Base. + * + * # Arguments: + * - `origin`: Caller, will be assigned as the issuer of the Base + * - `base_type`: Arbitrary media type, e.g. "svg". + * - `symbol`: Arbitrary client-chosen symbol. + * - `parts`: Array of Fixed and Slot Parts composing the Base, + * confined in length by [`RmrkPartsLimit`](up_data_structs::RmrkPartsLimit). **/ createBase: AugmentedSubmittable<(baseType: Bytes | string | Uint8Array, symbol: Bytes | string | Uint8Array, parts: Vec | (RmrkTraitsPartPartType | { FixedPart: any } | { SlotPart: any } | string | Uint8Array)[]) => SubmittableExtrinsic, [Bytes, Bytes, Vec]>; + /** + * Update the array of Collections allowed to be equipped to a Base's specified Slot Part. + * + * Modeled after [equippable interaction](https://github.com/rmrk-team/rmrk-spec/blob/master/standards/rmrk2.0.0/interactions/equippable.md). + * + * # Permissions: + * - Base issuer + * + * # Arguments: + * - `origin`: sender of the transaction + * - `base_id`: Base containing the Slot Part to be updated. + * - `slot_id`: Slot Part whose Equippable List is being updated . + * - `equippables`: List of equippables that will override the current Equippables list. + **/ equippable: AugmentedSubmittable<(baseId: u32 | AnyNumber | Uint8Array, slotId: u32 | AnyNumber | Uint8Array, equippables: RmrkTraitsPartEquippableList | { All: any } | { Empty: any } | { Custom: any } | string | Uint8Array) => SubmittableExtrinsic, [u32, u32, RmrkTraitsPartEquippableList]>; /** - * Adds a Theme to a Base. - * Modeled after [themeadd interaction](https://github.com/rmrk-team/rmrk-spec/blob/master/standards/rmrk2.0.0/interactions/themeadd.md) - * Themes are stored in the Themes storage + * Add a Theme to a Base. * A Theme named "default" is required prior to adding other Themes. * - * Parameters: - * - origin: The caller of the function, must be issuer of the base - * - base_id: The Base containing the Theme to be updated - * - theme: The Theme to add to the Base. A Theme has a name and properties, which are an + * Modeled after [Themeadd interaction](https://github.com/rmrk-team/rmrk-spec/blob/master/standards/rmrk2.0.0/interactions/themeadd.md). + * + * # Permissions: + * - Base issuer + * + * # Arguments: + * - `origin`: sender of the transaction + * - `base_id`: Base ID containing the Theme to be updated. + * - `theme`: Theme to add to the Base. A Theme has a name and properties, which are an * array of [key, value, inherit]. - * - key: arbitrary BoundedString, defined by client - * - value: arbitrary BoundedString, defined by client - * - inherit: optional bool + * - `key`: Arbitrary BoundedString, defined by client. + * - `value`: Arbitrary BoundedString, defined by client. + * - `inherit`: Optional bool. **/ themeAdd: AugmentedSubmittable<(baseId: u32 | AnyNumber | Uint8Array, theme: RmrkTraitsTheme | { name?: any; properties?: any; inherit?: any } | string | Uint8Array) => SubmittableExtrinsic, [u32, RmrkTraitsTheme]>; /** @@ -500,10 +834,11 @@ declare module '@polkadot/api-base/types/submittable' { * Cancel a named scheduled task. 
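// Editorial example: build a Base with one Fixed and one Slot Part, restrict
// the slot's Equippable List and add the mandatory "default" Theme, following
// the createBase / equippable / themeAdd docs above. The field layout of the
// Part objects follows rmrk-traits and is an assumption here, as are all IDs,
// sources and the resulting Base ID 0.
import type { ApiPromise } from '@polkadot/api';
import type { KeyringPair } from '@polkadot/keyring/types';

async function buildBase(api: ApiPromise, issuer: KeyringPair): Promise<void> {
  await api.tx.rmrkEquip.createBase('svg', 'DEMOBASE', [
    { FixedPart: { id: 1, z: 0, src: 'ipfs://QmBackground' } },
    { SlotPart: { id: 2, z: 1, src: 'ipfs://QmSlot', equippable: { Custom: [1] } } },
  ]).signAndSend(issuer, { nonce: -1 });

  // Later, replace the Equippable List of slot 2 on Base 0.
  await api.tx.rmrkEquip
    .equippable(0, 2, { Custom: [1, 2] })
    .signAndSend(issuer, { nonce: -1 });

  // A Theme named "default" must exist before any other Theme can be added.
  await api.tx.rmrkEquip.themeAdd(0, {
    name: 'default',
    properties: [{ key: 'primaryColor', value: '#ffffff' }],
    inherit: false,
  }).signAndSend(issuer, { nonce: -1 });
}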
**/ cancelNamed: AugmentedSubmittable<(id: U8aFixed | string | Uint8Array) => SubmittableExtrinsic, [U8aFixed]>; + changeNamedPriority: AugmentedSubmittable<(id: U8aFixed | string | Uint8Array, priority: u8 | AnyNumber | Uint8Array) => SubmittableExtrinsic, [U8aFixed, u8]>; /** * Schedule a named task. **/ - scheduleNamed: AugmentedSubmittable<(id: U8aFixed | string | Uint8Array, when: u32 | AnyNumber | Uint8Array, maybePeriodic: Option> | null | object | string | Uint8Array, priority: u8 | AnyNumber | Uint8Array, call: FrameSupportScheduleMaybeHashed | { Value: any } | { Hash: any } | string | Uint8Array) => SubmittableExtrinsic, [U8aFixed, u32, Option>, u8, FrameSupportScheduleMaybeHashed]>; + scheduleNamed: AugmentedSubmittable<(id: U8aFixed | string | Uint8Array, when: u32 | AnyNumber | Uint8Array, maybePeriodic: Option> | null | Uint8Array | ITuple<[u32, u32]> | [u32 | AnyNumber | Uint8Array, u32 | AnyNumber | Uint8Array], priority: Option | null | Uint8Array | u8 | AnyNumber, call: FrameSupportScheduleMaybeHashed | { Value: any } | { Hash: any } | string | Uint8Array) => SubmittableExtrinsic, [U8aFixed, u32, Option>, Option, FrameSupportScheduleMaybeHashed]>; /** * Schedule a named task after a delay. * @@ -511,7 +846,7 @@ declare module '@polkadot/api-base/types/submittable' { * Same as [`schedule_named`](Self::schedule_named). * # **/ - scheduleNamedAfter: AugmentedSubmittable<(id: U8aFixed | string | Uint8Array, after: u32 | AnyNumber | Uint8Array, maybePeriodic: Option> | null | object | string | Uint8Array, priority: u8 | AnyNumber | Uint8Array, call: FrameSupportScheduleMaybeHashed | { Value: any } | { Hash: any } | string | Uint8Array) => SubmittableExtrinsic, [U8aFixed, u32, Option>, u8, FrameSupportScheduleMaybeHashed]>; + scheduleNamedAfter: AugmentedSubmittable<(id: U8aFixed | string | Uint8Array, after: u32 | AnyNumber | Uint8Array, maybePeriodic: Option> | null | Uint8Array | ITuple<[u32, u32]> | [u32 | AnyNumber | Uint8Array, u32 | AnyNumber | Uint8Array], priority: Option | null | Uint8Array | u8 | AnyNumber, call: FrameSupportScheduleMaybeHashed | { Value: any } | { Hash: any } | string | Uint8Array) => SubmittableExtrinsic, [U8aFixed, u32, Option>, Option, FrameSupportScheduleMaybeHashed]>; /** * Generic tx **/ @@ -576,7 +911,7 @@ declare module '@polkadot/api-base/types/submittable' { * - The weight of this call is defined by the caller. 
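// Editorial example: schedule a named call with the updated scheduleNamed
// signature above, where `priority` is now an Option and the call is wrapped
// in MaybeHashed, then bump its priority with the new changeNamedPriority.
// The 16-byte task id, the use of balances.transfer as the inner call and the
// ability to schedule from a plain signed origin are all assumptions.
import type { ApiPromise } from '@polkadot/api';
import type { KeyringPair } from '@polkadot/keyring/types';

async function scheduleNamedTransfer(api: ApiPromise, signer: KeyringPair, dest: string): Promise<void> {
  const id = '0x7363686564756c65645f7472616e7366'; // "scheduled_transf", 16 bytes
  const inner = api.tx.balances.transfer(dest, 1_000_000_000n);

  await api.tx.scheduler.scheduleNamed(
    id,
    1_000,            // execute at block #1000
    null,             // not periodic
    null,             // priority: None, use the default
    { Value: inner }, // FrameSupportScheduleMaybeHashed::Value
  ).signAndSend(signer, { nonce: -1 });

  // Adjust the priority of the already-scheduled task.
  await api.tx.scheduler.changeNamedPriority(id, 1).signAndSend(signer, { nonce: -1 });
}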
* # **/ - sudoUncheckedWeight: AugmentedSubmittable<(call: Call | IMethod | string | Uint8Array, weight: u64 | AnyNumber | Uint8Array) => SubmittableExtrinsic, [Call, u64]>; + sudoUncheckedWeight: AugmentedSubmittable<(call: Call | IMethod | string | Uint8Array, weight: Weight | AnyNumber | Uint8Array) => SubmittableExtrinsic, [Call, Weight]>; /** * Generic tx **/ @@ -650,6 +985,18 @@ declare module '@polkadot/api-base/types/submittable' { **/ [key: string]: SubmittableExtrinsicFunction; }; + testUtils: { + enable: AugmentedSubmittable<() => SubmittableExtrinsic, []>; + incTestValue: AugmentedSubmittable<() => SubmittableExtrinsic, []>; + justTakeFee: AugmentedSubmittable<() => SubmittableExtrinsic, []>; + selfCancelingInc: AugmentedSubmittable<(id: U8aFixed | string | Uint8Array, maxTestValue: u32 | AnyNumber | Uint8Array) => SubmittableExtrinsic, [U8aFixed, u32]>; + setTestValue: AugmentedSubmittable<(value: u32 | AnyNumber | Uint8Array) => SubmittableExtrinsic, [u32]>; + setTestValueAndRollback: AugmentedSubmittable<(value: u32 | AnyNumber | Uint8Array) => SubmittableExtrinsic, [u32]>; + /** + * Generic tx + **/ + [key: string]: SubmittableExtrinsicFunction; + }; timestamp: { /** * Set the current time. @@ -675,6 +1022,87 @@ declare module '@polkadot/api-base/types/submittable' { **/ [key: string]: SubmittableExtrinsicFunction; }; + tokens: { + /** + * Exactly as `transfer`, except the origin must be root and the source + * account may be specified. + * + * The dispatch origin for this call must be _Root_. + * + * - `source`: The sender of the transfer. + * - `dest`: The recipient of the transfer. + * - `currency_id`: currency type. + * - `amount`: free balance amount to tranfer. + **/ + forceTransfer: AugmentedSubmittable<(source: MultiAddress | { Id: any } | { Index: any } | { Raw: any } | { Address32: any } | { Address20: any } | string | Uint8Array, dest: MultiAddress | { Id: any } | { Index: any } | { Raw: any } | { Address32: any } | { Address20: any } | string | Uint8Array, currencyId: PalletForeignAssetsAssetIds | { ForeignAssetId: any } | { NativeAssetId: any } | string | Uint8Array, amount: Compact | AnyNumber | Uint8Array) => SubmittableExtrinsic, [MultiAddress, MultiAddress, PalletForeignAssetsAssetIds, Compact]>; + /** + * Set the balances of a given account. + * + * This will alter `FreeBalance` and `ReservedBalance` in storage. it + * will also decrease the total issuance of the system + * (`TotalIssuance`). If the new free or reserved balance is below the + * existential deposit, it will reap the `AccountInfo`. + * + * The dispatch origin for this call is `root`. + **/ + setBalance: AugmentedSubmittable<(who: MultiAddress | { Id: any } | { Index: any } | { Raw: any } | { Address32: any } | { Address20: any } | string | Uint8Array, currencyId: PalletForeignAssetsAssetIds | { ForeignAssetId: any } | { NativeAssetId: any } | string | Uint8Array, newFree: Compact | AnyNumber | Uint8Array, newReserved: Compact | AnyNumber | Uint8Array) => SubmittableExtrinsic, [MultiAddress, PalletForeignAssetsAssetIds, Compact, Compact]>; + /** + * Transfer some liquid free balance to another account. + * + * `transfer` will set the `FreeBalance` of the sender and receiver. + * It will decrease the total issuance of the system by the + * `TransferFee`. If the sender's account is below the existential + * deposit as a result of the transfer, the account will be reaped. + * + * The dispatch origin for this call must be `Signed` by the + * transactor. 
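// Editorial example: the testUtils pallet added above is meant for test
// environments. This sketch enables it, writes a value, and then dispatches a
// call through sudoUncheckedWeight, whose second argument is now a Weight
// rather than a bare u64. Whether testUtils requires a privileged origin is
// not stated in the types and is assumed here; all values are placeholders.
import type { ApiPromise } from '@polkadot/api';
import type { KeyringPair } from '@polkadot/keyring/types';

async function pokeTestUtils(api: ApiPromise, sudoKey: KeyringPair): Promise<void> {
  await api.tx.testUtils.enable().signAndSend(sudoKey, { nonce: -1 });
  await api.tx.testUtils.setTestValue(42).signAndSend(sudoKey, { nonce: -1 });
  await api.tx.testUtils.incTestValue().signAndSend(sudoKey, { nonce: -1 });

  // Dispatch as root while overriding the weight accounting.
  await api.tx.sudo
    .sudoUncheckedWeight(api.tx.testUtils.setTestValueAndRollback(7), 1_000_000_000)
    .signAndSend(sudoKey, { nonce: -1 });
}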
+ * + * - `dest`: The recipient of the transfer. + * - `currency_id`: currency type. + * - `amount`: free balance amount to tranfer. + **/ + transfer: AugmentedSubmittable<(dest: MultiAddress | { Id: any } | { Index: any } | { Raw: any } | { Address32: any } | { Address20: any } | string | Uint8Array, currencyId: PalletForeignAssetsAssetIds | { ForeignAssetId: any } | { NativeAssetId: any } | string | Uint8Array, amount: Compact | AnyNumber | Uint8Array) => SubmittableExtrinsic, [MultiAddress, PalletForeignAssetsAssetIds, Compact]>; + /** + * Transfer all remaining balance to the given account. + * + * NOTE: This function only attempts to transfer _transferable_ + * balances. This means that any locked, reserved, or existential + * deposits (when `keep_alive` is `true`), will not be transferred by + * this function. To ensure that this function results in a killed + * account, you might need to prepare the account by removing any + * reference counters, storage deposits, etc... + * + * The dispatch origin for this call must be `Signed` by the + * transactor. + * + * - `dest`: The recipient of the transfer. + * - `currency_id`: currency type. + * - `keep_alive`: A boolean to determine if the `transfer_all` + * operation should send all of the funds the account has, causing + * the sender account to be killed (false), or transfer everything + * except at least the existential deposit, which will guarantee to + * keep the sender account alive (true). + **/ + transferAll: AugmentedSubmittable<(dest: MultiAddress | { Id: any } | { Index: any } | { Raw: any } | { Address32: any } | { Address20: any } | string | Uint8Array, currencyId: PalletForeignAssetsAssetIds | { ForeignAssetId: any } | { NativeAssetId: any } | string | Uint8Array, keepAlive: bool | boolean | Uint8Array) => SubmittableExtrinsic, [MultiAddress, PalletForeignAssetsAssetIds, bool]>; + /** + * Same as the [`transfer`] call, but with a check that the transfer + * will not kill the origin account. + * + * 99% of the time you want [`transfer`] instead. + * + * The dispatch origin for this call must be `Signed` by the + * transactor. + * + * - `dest`: The recipient of the transfer. + * - `currency_id`: currency type. + * - `amount`: free balance amount to tranfer. + **/ + transferKeepAlive: AugmentedSubmittable<(dest: MultiAddress | { Id: any } | { Index: any } | { Raw: any } | { Address32: any } | { Address20: any } | string | Uint8Array, currencyId: PalletForeignAssetsAssetIds | { ForeignAssetId: any } | { NativeAssetId: any } | string | Uint8Array, amount: Compact | AnyNumber | Uint8Array) => SubmittableExtrinsic, [MultiAddress, PalletForeignAssetsAssetIds, Compact]>; + /** + * Generic tx + **/ + [key: string]: SubmittableExtrinsicFunction; + }; treasury: { /** * Approve a proposal. At a later time, the proposal will be allocated to the beneficiary @@ -731,6 +1159,17 @@ declare module '@polkadot/api-base/types/submittable' { * exist altogether, thus there is no way it would have been approved in the first place. **/ removeApproval: AugmentedSubmittable<(proposalId: Compact | AnyNumber | Uint8Array) => SubmittableExtrinsic, [Compact]>; + /** + * Propose and approve a spend of treasury funds. + * + * - `origin`: Must be `SpendOrigin` with the `Success` value being at least `amount`. + * - `amount`: The amount to be transferred from the treasury to the `beneficiary`. + * - `beneficiary`: The destination account for the transfer. + * + * NOTE: For record-keeping purposes, the proposer is deemed to be equivalent to the + * beneficiary. 
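// Editorial example: move a foreign asset between accounts with the orml-style
// tokens pallet documented above, addressing the currency by its foreign-asset
// ID. Asset ID 0, the amounts and the `dest` address are made up; `api` and
// `sender` are assumed to exist.
import type { ApiPromise } from '@polkadot/api';
import type { KeyringPair } from '@polkadot/keyring/types';

async function sendForeignAsset(api: ApiPromise, sender: KeyringPair, dest: string): Promise<void> {
  const currencyId = { ForeignAssetId: 0 };

  await api.tx.tokens
    .transfer({ Id: dest }, currencyId, 1_000_000_000n)
    .signAndSend(sender, { nonce: -1 });

  // Same transfer, but guaranteed to keep the sender above the existential
  // deposit (see transferKeepAlive above).
  await api.tx.tokens
    .transferKeepAlive({ Id: dest }, currencyId, 1_000_000_000n)
    .signAndSend(sender, { nonce: -1 });
}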
+ **/ + spend: AugmentedSubmittable<(amount: Compact | AnyNumber | Uint8Array, beneficiary: MultiAddress | { Id: any } | { Index: any } | { Raw: any } | { Address32: any } | { Address20: any } | string | Uint8Array) => SubmittableExtrinsic, [Compact, MultiAddress]>; /** * Generic tx **/ @@ -738,19 +1177,22 @@ declare module '@polkadot/api-base/types/submittable' { }; unique: { /** - * Adds an admin of the Collection. - * NFT Collection can be controlled by multiple admin addresses (some which can also be servers, for example). Admins can issue and burn NFTs, as well as add and remove other admins, but cannot change NFT or Collection ownership. + * Add an admin to a collection. + * + * NFT Collection can be controlled by multiple admin addresses + * (some which can also be servers, for example). Admins can issue + * and burn NFTs, as well as add and remove other admins, + * but cannot change NFT or Collection ownership. * * # Permissions * - * * Collection Owner. - * * Collection Admin. + * * Collection owner + * * Collection admin * * # Arguments * - * * collection_id: ID of the Collection to add admin for. - * - * * new_admin_id: Address of new admin to add. + * * `collection_id`: ID of the Collection to add an admin for. + * * `new_admin`: Address of new admin to add. **/ addCollectionAdmin: AugmentedSubmittable<(collectionId: u32 | AnyNumber | Uint8Array, newAdminId: PalletEvmAccountBasicCrossAccountIdRepr | { Substrate: any } | { Ethereum: any } | string | Uint8Array) => SubmittableExtrinsic, [u32, PalletEvmAccountBasicCrossAccountIdRepr]>; /** @@ -758,67 +1200,76 @@ declare module '@polkadot/api-base/types/submittable' { * * # Permissions * - * * Collection Owner - * * Collection Admin + * * Collection owner + * * Collection admin * * # Arguments * - * * collection_id. - * - * * address. + * * `collection_id`: ID of the modified collection. + * * `address`: ID of the address to be added to the allowlist. **/ addToAllowList: AugmentedSubmittable<(collectionId: u32 | AnyNumber | Uint8Array, address: PalletEvmAccountBasicCrossAccountIdRepr | { Substrate: any } | { Ethereum: any } | string | Uint8Array) => SubmittableExtrinsic, [u32, PalletEvmAccountBasicCrossAccountIdRepr]>; /** - * Set, change, or remove approved address to transfer the ownership of the NFT. + * Allow a non-permissioned address to transfer or burn an item. * * # Permissions * - * * Collection Owner - * * Collection Admin - * * Current NFT owner + * * Collection owner + * * Collection admin + * * Current item owner * * # Arguments * - * * approved: Address that is approved to transfer this NFT or zero (if needed to remove approval). - * - * * collection_id. - * - * * item_id: ID of the item. + * * `spender`: Account to be approved to make specific transactions on non-owned tokens. + * * `collection_id`: ID of the collection the item belongs to. + * * `item_id`: ID of the item transactions on which are now approved. + * * `amount`: Number of pieces of the item approved for a transaction (maximum of 1 for NFTs). + * Set to 0 to revoke the approval. 
**/ approve: AugmentedSubmittable<(spender: PalletEvmAccountBasicCrossAccountIdRepr | { Substrate: any } | { Ethereum: any } | string | Uint8Array, collectionId: u32 | AnyNumber | Uint8Array, itemId: u32 | AnyNumber | Uint8Array, amount: u128 | AnyNumber | Uint8Array) => SubmittableExtrinsic, [PalletEvmAccountBasicCrossAccountIdRepr, u32, u32, u128]>; /** - * Destroys a concrete instance of NFT on behalf of the owner - * See also: [`approve`] + * Destroy a token on behalf of the owner as a non-owner account. * - * # Permissions + * See also: [`approve`][`Pallet::approve`]. * - * * Collection Owner. - * * Collection Admin. - * * Current NFT Owner. + * After this method executes, one approval is removed from the total so that + * the approved address will not be able to transfer this item again from this owner. * - * # Arguments + * # Permissions * - * * collection_id: ID of the collection. + * * Collection owner + * * Collection admin + * * Current token owner + * * Address approved by current item owner * - * * item_id: ID of NFT to burn. + * # Arguments * - * * from: owner of item + * * `from`: The owner of the burning item. + * * `collection_id`: ID of the collection to which the item belongs. + * * `item_id`: ID of item to burn. + * * `value`: Number of pieces to burn. + * * Non-Fungible Mode: An NFT is indivisible, there is always 1 corresponding to an ID. + * * Fungible Mode: The desired number of pieces to burn. + * * Re-Fungible Mode: The desired number of pieces to burn. **/ burnFrom: AugmentedSubmittable<(collectionId: u32 | AnyNumber | Uint8Array, from: PalletEvmAccountBasicCrossAccountIdRepr | { Substrate: any } | { Ethereum: any } | string | Uint8Array, itemId: u32 | AnyNumber | Uint8Array, value: u128 | AnyNumber | Uint8Array) => SubmittableExtrinsic, [u32, PalletEvmAccountBasicCrossAccountIdRepr, u32, u128]>; /** - * Destroys a concrete instance of NFT. + * Destroy an item. * * # Permissions * - * * Collection Owner. - * * Collection Admin. - * * Current NFT Owner. + * * Collection owner + * * Collection admin + * * Current item owner * * # Arguments * - * * collection_id: ID of the collection. - * - * * item_id: ID of NFT to burn. + * * `collection_id`: ID of the collection to which the item belongs. + * * `item_id`: ID of item to burn. + * * `value`: Number of pieces of the item to destroy. + * * Non-Fungible Mode: An NFT is indivisible, there is always 1 corresponding to an ID. + * * Fungible Mode: The desired number of pieces to burn. + * * Re-Fungible Mode: The desired number of pieces to burn. **/ burnItem: AugmentedSubmittable<(collectionId: u32 | AnyNumber | Uint8Array, itemId: u32 | AnyNumber | Uint8Array, value: u128 | AnyNumber | Uint8Array) => SubmittableExtrinsic, [u32, u32, u128]>; /** @@ -826,131 +1277,200 @@ declare module '@polkadot/api-base/types/submittable' { * * # Permissions * - * * Collection Owner. + * * Collection owner * * # Arguments * - * * collection_id. - * - * * new_owner. + * * `collection_id`: ID of the modified collection. + * * `new_owner`: ID of the account that will become the owner. **/ changeCollectionOwner: AugmentedSubmittable<(collectionId: u32 | AnyNumber | Uint8Array, newOwner: AccountId32 | string | Uint8Array) => SubmittableExtrinsic, [u32, AccountId32]>; /** + * Confirm own sponsorship of a collection, becoming the sponsor. + * + * An invitation must be pending, see [`set_collection_sponsor`][`Pallet::set_collection_sponsor`]. 
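// Editorial example: the owner of an item approves another account, which then
// burns the token on the owner's behalf, combining the approve and burnFrom
// docs above. Collection ID 1 and item ID 5 are made up; `api`, `owner` and
// `operator` are assumed to be set up elsewhere.
import type { ApiPromise } from '@polkadot/api';
import type { KeyringPair } from '@polkadot/keyring/types';

async function approveAndBurn(api: ApiPromise, owner: KeyringPair, operator: KeyringPair): Promise<void> {
  const collectionId = 1;
  const itemId = 5;

  // Amount 1 approves a single NFT; amount 0 would revoke the approval.
  await api.tx.unique
    .approve({ Substrate: operator.address }, collectionId, itemId, 1)
    .signAndSend(owner);

  // The approved account spends the approval by burning the token.
  await api.tx.unique
    .burnFrom(collectionId, { Substrate: owner.address }, itemId, 1)
    .signAndSend(operator);
}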
+ * Sponsor can pay the fees of a transaction instead of the sender, + * but only within specified limits. + * * # Permissions * - * * Sponsor. + * * Sponsor-to-be * * # Arguments * - * * collection_id. + * * `collection_id`: ID of the collection with the pending sponsor. **/ confirmSponsorship: AugmentedSubmittable<(collectionId: u32 | AnyNumber | Uint8Array) => SubmittableExtrinsic, [u32]>; /** - * This method creates a Collection of NFTs. Each Token may have multiple properties encoded as an array of bytes of certain length. The initial owner of the collection is set to the address that signed the transaction and can be changed later. + * Create a collection of tokens. * - * # Permissions + * Each Token may have multiple properties encoded as an array of bytes + * of certain length. The initial owner of the collection is set + * to the address that signed the transaction and can be changed later. * - * * Anyone. - * - * # Arguments + * Prefer the more advanced [`create_collection_ex`][`Pallet::create_collection_ex`] instead. * - * * collection_name: UTF-16 string with collection name (limit 64 characters), will be stored as zero-terminated. + * # Permissions * - * * collection_description: UTF-16 string with collection description (limit 256 characters), will be stored as zero-terminated. + * * Anyone - becomes the owner of the new collection. * - * * token_prefix: UTF-8 string with token prefix. + * # Arguments * - * * mode: [CollectionMode] collection type and type dependent data. + * * `collection_name`: Wide-character string with collection name + * (limit [`MAX_COLLECTION_NAME_LENGTH`]). + * * `collection_description`: Wide-character string with collection description + * (limit [`MAX_COLLECTION_DESCRIPTION_LENGTH`]). + * * `token_prefix`: Byte string containing the token prefix to mark a collection + * to which a token belongs (limit [`MAX_TOKEN_PREFIX_LENGTH`]). + * * `mode`: Type of items stored in the collection and type dependent data. **/ createCollection: AugmentedSubmittable<(collectionName: Vec | (u16 | AnyNumber | Uint8Array)[], collectionDescription: Vec | (u16 | AnyNumber | Uint8Array)[], tokenPrefix: Bytes | string | Uint8Array, mode: UpDataStructsCollectionMode | { NFT: any } | { Fungible: any } | { ReFungible: any } | string | Uint8Array) => SubmittableExtrinsic, [Vec, Vec, Bytes, UpDataStructsCollectionMode]>; /** - * This method creates a collection + * Create a collection with explicit parameters. + * + * Prefer it to the deprecated [`create_collection`][`Pallet::create_collection`] method. + * + * # Permissions + * + * * Anyone - becomes the owner of the new collection. * - * Prefer it to deprecated [`created_collection`] method + * # Arguments + * + * * `data`: Explicit data of a collection used for its creation. **/ createCollectionEx: AugmentedSubmittable<(data: UpDataStructsCreateCollectionData | { mode?: any; access?: any; name?: any; description?: any; tokenPrefix?: any; pendingSponsor?: any; limits?: any; permissions?: any; tokenPropertyPermissions?: any; properties?: any } | string | Uint8Array) => SubmittableExtrinsic, [UpDataStructsCreateCollectionData]>; /** - * This method creates a concrete instance of NFT Collection created with CreateCollection method. + * Mint an item within a collection. + * + * A collection must exist first, see [`create_collection_ex`][`Pallet::create_collection_ex`]. * * # Permissions * - * * Collection Owner. - * * Collection Admin. 
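// Editorial example: the docs above recommend createCollectionEx over the
// older createCollection; a minimal NFT collection created with explicit data.
// The name and description are UTF-16 (Vec<u16>), hence the small helper.
// Unset optional fields fall back to chain defaults; the names and the token
// limit are illustrative, `api` and `signer` are assumed.
import type { ApiPromise } from '@polkadot/api';
import type { KeyringPair } from '@polkadot/keyring/types';

const toUtf16 = (text: string): number[] => Array.from(text).map((c) => c.charCodeAt(0));

async function createNftCollection(api: ApiPromise, signer: KeyringPair): Promise<void> {
  await api.tx.unique.createCollectionEx({
    mode: { NFT: null },
    name: toUtf16('Demo collection'),
    description: toUtf16('Created via the augmented API types'),
    tokenPrefix: 'DEMO',
    limits: { tokenLimit: 1_000 },
  }).signAndSend(signer);
}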
+ * * Collection owner + * * Collection admin * * Anyone if * * Allow List is enabled, and * * Address is added to allow list, and - * * MintPermission is enabled (see SetMintPermission method) + * * MintPermission is enabled (see [`set_collection_permissions`][`Pallet::set_collection_permissions`]) * * # Arguments * - * * collection_id: ID of the collection. + * * `collection_id`: ID of the collection to which an item would belong. + * * `owner`: Address of the initial owner of the item. + * * `data`: Token data describing the item to store on chain. + **/ + createItem: AugmentedSubmittable<(collectionId: u32 | AnyNumber | Uint8Array, owner: PalletEvmAccountBasicCrossAccountIdRepr | { Substrate: any } | { Ethereum: any } | string | Uint8Array, data: UpDataStructsCreateItemData | { NFT: any } | { Fungible: any } | { ReFungible: any } | string | Uint8Array) => SubmittableExtrinsic, [u32, PalletEvmAccountBasicCrossAccountIdRepr, UpDataStructsCreateItemData]>; + /** + * Create multiple items within a collection. + * + * A collection must exist first, see [`create_collection_ex`][`Pallet::create_collection_ex`]. + * + * # Permissions + * + * * Collection owner + * * Collection admin + * * Anyone if + * * Allow List is enabled, and + * * Address is added to the allow list, and + * * MintPermission is enabled (see [`set_collection_permissions`][`Pallet::set_collection_permissions`]) * - * * owner: Address, initial owner of the NFT. + * # Arguments * - * * data: Token data to store on chain. + * * `collection_id`: ID of the collection to which the tokens would belong. + * * `owner`: Address of the initial owner of the tokens. + * * `items_data`: Vector of data describing each item to be created. **/ - createItem: AugmentedSubmittable<(collectionId: u32 | AnyNumber | Uint8Array, owner: PalletEvmAccountBasicCrossAccountIdRepr | { Substrate: any } | { Ethereum: any } | string | Uint8Array, data: UpDataStructsCreateItemData | { NFT: any } | { Fungible: any } | { ReFungible: any } | string | Uint8Array) => SubmittableExtrinsic, [u32, PalletEvmAccountBasicCrossAccountIdRepr, UpDataStructsCreateItemData]>; + createMultipleItems: AugmentedSubmittable<(collectionId: u32 | AnyNumber | Uint8Array, owner: PalletEvmAccountBasicCrossAccountIdRepr | { Substrate: any } | { Ethereum: any } | string | Uint8Array, itemsData: Vec | (UpDataStructsCreateItemData | { NFT: any } | { Fungible: any } | { ReFungible: any } | string | Uint8Array)[]) => SubmittableExtrinsic, [u32, PalletEvmAccountBasicCrossAccountIdRepr, Vec]>; /** - * This method creates multiple items in a collection created with CreateCollection method. + * Create multiple items within a collection with explicitly specified initial parameters. * * # Permissions * - * * Collection Owner. - * * Collection Admin. + * * Collection owner + * * Collection admin * * Anyone if * * Allow List is enabled, and * * Address is added to allow list, and - * * MintPermission is enabled (see SetMintPermission method) + * * MintPermission is enabled (see [`set_collection_permissions`][`Pallet::set_collection_permissions`]) * * # Arguments * - * * collection_id: ID of the collection. + * * `collection_id`: ID of the collection to which the tokens would belong. + * * `data`: Explicit item creation data. 
+ **/ + createMultipleItemsEx: AugmentedSubmittable<(collectionId: u32 | AnyNumber | Uint8Array, data: UpDataStructsCreateItemExData | { NFT: any } | { Fungible: any } | { RefungibleMultipleItems: any } | { RefungibleMultipleOwners: any } | string | Uint8Array) => SubmittableExtrinsic, [u32, UpDataStructsCreateItemExData]>; + /** + * Delete specified collection properties. + * + * # Permissions * - * * itemsData: Array items properties. Each property is an array of bytes itself, see [create_item]. + * * Collection Owner + * * Collection Admin * - * * owner: Address, initial owner of the NFT. + * # Arguments + * + * * `collection_id`: ID of the modified collection. + * * `property_keys`: Vector of keys of the properties to be deleted. + * Keys support Latin letters, `-`, `_`, and `.` as symbols. **/ - createMultipleItems: AugmentedSubmittable<(collectionId: u32 | AnyNumber | Uint8Array, owner: PalletEvmAccountBasicCrossAccountIdRepr | { Substrate: any } | { Ethereum: any } | string | Uint8Array, itemsData: Vec | (UpDataStructsCreateItemData | { NFT: any } | { Fungible: any } | { ReFungible: any } | string | Uint8Array)[]) => SubmittableExtrinsic, [u32, PalletEvmAccountBasicCrossAccountIdRepr, Vec]>; - createMultipleItemsEx: AugmentedSubmittable<(collectionId: u32 | AnyNumber | Uint8Array, data: UpDataStructsCreateItemExData | { NFT: any } | { Fungible: any } | { RefungibleMultipleItems: any } | { RefungibleMultipleOwners: any } | string | Uint8Array) => SubmittableExtrinsic, [u32, UpDataStructsCreateItemExData]>; deleteCollectionProperties: AugmentedSubmittable<(collectionId: u32 | AnyNumber | Uint8Array, propertyKeys: Vec | (Bytes | string | Uint8Array)[]) => SubmittableExtrinsic, [u32, Vec]>; + /** + * Delete specified token properties. Currently properties only work with NFTs. + * + * # Permissions + * + * * Depends on collection's token property permissions and specified property mutability: + * * Collection owner + * * Collection admin + * * Token owner + * + * # Arguments + * + * * `collection_id`: ID of the collection to which the token belongs. + * * `token_id`: ID of the modified token. + * * `property_keys`: Vector of keys of the properties to be deleted. + * Keys support Latin letters, `-`, `_`, and `.` as symbols. + **/ deleteTokenProperties: AugmentedSubmittable<(collectionId: u32 | AnyNumber | Uint8Array, tokenId: u32 | AnyNumber | Uint8Array, propertyKeys: Vec | (Bytes | string | Uint8Array)[]) => SubmittableExtrinsic, [u32, u32, Vec]>; /** - * Destroys collection if no tokens within this collection + * Destroy a collection if no tokens exist within. * * # Permissions * - * * Collection Owner. + * * Collection owner * * # Arguments * - * * collection_id: collection to destroy. + * * `collection_id`: Collection to destroy. **/ destroyCollection: AugmentedSubmittable<(collectionId: u32 | AnyNumber | Uint8Array) => SubmittableExtrinsic, [u32]>; /** - * Remove admin address of the Collection. An admin address can remove itself. List of admins may become empty, in which case only Collection Owner will be able to add an Admin. + * Remove admin of a collection. + * + * An admin address can remove itself. List of admins may become empty, + * in which case only Collection Owner will be able to add an Admin. * * # Permissions * - * * Collection Owner. - * * Collection Admin. + * * Collection owner + * * Collection admin * * # Arguments * - * * collection_id: ID of the Collection to remove admin for. - * - * * account_id: Address of admin to remove. 
+ * * `collection_id`: ID of the collection to remove the admin for. + * * `account_id`: Address of the admin to remove. **/ removeCollectionAdmin: AugmentedSubmittable<(collectionId: u32 | AnyNumber | Uint8Array, accountId: PalletEvmAccountBasicCrossAccountIdRepr | { Substrate: any } | { Ethereum: any } | string | Uint8Array) => SubmittableExtrinsic, [u32, PalletEvmAccountBasicCrossAccountIdRepr]>; /** - * Switch back to pay-per-own-transaction model. + * Remove a collection's a sponsor, making everyone pay for their own transactions. * * # Permissions * - * * Collection owner. + * * Collection owner * * # Arguments * - * * collection_id. + * * `collection_id`: ID of the collection with the sponsor to remove. **/ removeCollectionSponsor: AugmentedSubmittable<(collectionId: u32 | AnyNumber | Uint8Array) => SubmittableExtrinsic, [u32]>; /** @@ -958,46 +1478,140 @@ declare module '@polkadot/api-base/types/submittable' { * * # Permissions * - * * Collection Owner - * * Collection Admin + * * Collection owner + * * Collection admin * * # Arguments * - * * collection_id. - * - * * address. + * * `collection_id`: ID of the modified collection. + * * `address`: ID of the address to be removed from the allowlist. **/ removeFromAllowList: AugmentedSubmittable<(collectionId: u32 | AnyNumber | Uint8Array, address: PalletEvmAccountBasicCrossAccountIdRepr | { Substrate: any } | { Ethereum: any } | string | Uint8Array) => SubmittableExtrinsic, [u32, PalletEvmAccountBasicCrossAccountIdRepr]>; - repartition: AugmentedSubmittable<(collectionId: u32 | AnyNumber | Uint8Array, token: u32 | AnyNumber | Uint8Array, amount: u128 | AnyNumber | Uint8Array) => SubmittableExtrinsic, [u32, u32, u128]>; + /** + * Re-partition a refungible token, while owning all of its parts/pieces. + * + * # Permissions + * + * * Token owner (must own every part) + * + * # Arguments + * + * * `collection_id`: ID of the collection the RFT belongs to. + * * `token_id`: ID of the RFT. + * * `amount`: New number of parts/pieces into which the token shall be partitioned. + **/ + repartition: AugmentedSubmittable<(collectionId: u32 | AnyNumber | Uint8Array, tokenId: u32 | AnyNumber | Uint8Array, amount: u128 | AnyNumber | Uint8Array) => SubmittableExtrinsic, [u32, u32, u128]>; + /** + * Set specific limits of a collection. Empty, or None fields mean chain default. + * + * # Permissions + * + * * Collection owner + * * Collection admin + * + * # Arguments + * + * * `collection_id`: ID of the modified collection. + * * `new_limit`: New limits of the collection. Fields that are not set (None) + * will not overwrite the old ones. 
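// Editorial example: re-partition a refungible token into a new number of
// pieces and remove a collection sponsor, per the repartition and
// removeCollectionSponsor docs above. Collection ID 3, RFT ID 7 and the piece
// count are made up; the signer is assumed to own every piece and to be the
// collection owner.
import type { ApiPromise } from '@polkadot/api';
import type { KeyringPair } from '@polkadot/keyring/types';

async function rftHousekeeping(api: ApiPromise, owner: KeyringPair): Promise<void> {
  // Split RFT (3, 7) into 1000 pieces, all still held by the signer.
  await api.tx.unique.repartition(3, 7, 1_000).signAndSend(owner, { nonce: -1 });

  // Drop the sponsor: transactions on the collection become self-paid again.
  await api.tx.unique.removeCollectionSponsor(3).signAndSend(owner, { nonce: -1 });
}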
+ **/ setCollectionLimits: AugmentedSubmittable<(collectionId: u32 | AnyNumber | Uint8Array, newLimit: UpDataStructsCollectionLimits | { accountTokenOwnershipLimit?: any; sponsoredDataSize?: any; sponsoredDataRateLimit?: any; tokenLimit?: any; sponsorTransferTimeout?: any; sponsorApproveTimeout?: any; ownerCanTransfer?: any; ownerCanDestroy?: any; transfersEnabled?: any } | string | Uint8Array) => SubmittableExtrinsic, [u32, UpDataStructsCollectionLimits]>; - setCollectionPermissions: AugmentedSubmittable<(collectionId: u32 | AnyNumber | Uint8Array, newLimit: UpDataStructsCollectionPermissions | { access?: any; mintMode?: any; nesting?: any } | string | Uint8Array) => SubmittableExtrinsic, [u32, UpDataStructsCollectionPermissions]>; - setCollectionProperties: AugmentedSubmittable<(collectionId: u32 | AnyNumber | Uint8Array, properties: Vec | (UpDataStructsProperty | { key?: any; value?: any } | string | Uint8Array)[]) => SubmittableExtrinsic, [u32, Vec]>; /** + * Set specific permissions of a collection. Empty, or None fields mean chain default. + * * # Permissions * - * * Collection Owner + * * Collection owner + * * Collection admin * * # Arguments * - * * collection_id. + * * `collection_id`: ID of the modified collection. + * * `new_permission`: New permissions of the collection. Fields that are not set (None) + * will not overwrite the old ones. + **/ + setCollectionPermissions: AugmentedSubmittable<(collectionId: u32 | AnyNumber | Uint8Array, newPermission: UpDataStructsCollectionPermissions | { access?: any; mintMode?: any; nesting?: any } | string | Uint8Array) => SubmittableExtrinsic, [u32, UpDataStructsCollectionPermissions]>; + /** + * Add or change collection properties. + * + * # Permissions * - * * new_sponsor. + * * Collection owner + * * Collection admin + * + * # Arguments + * + * * `collection_id`: ID of the modified collection. + * * `properties`: Vector of key-value pairs stored as the collection's metadata. + * Keys support Latin letters, `-`, `_`, and `.` as symbols. + **/ + setCollectionProperties: AugmentedSubmittable<(collectionId: u32 | AnyNumber | Uint8Array, properties: Vec | (UpDataStructsProperty | { key?: any; value?: any } | string | Uint8Array)[]) => SubmittableExtrinsic, [u32, Vec]>; + /** + * Set (invite) a new collection sponsor. + * + * If successful, confirmation from the sponsor-to-be will be pending. + * + * # Permissions + * + * * Collection owner + * * Collection admin + * + * # Arguments + * + * * `collection_id`: ID of the modified collection. + * * `new_sponsor`: ID of the account of the sponsor-to-be. **/ setCollectionSponsor: AugmentedSubmittable<(collectionId: u32 | AnyNumber | Uint8Array, newSponsor: AccountId32 | string | Uint8Array) => SubmittableExtrinsic, [u32, AccountId32]>; + /** + * Add or change token properties according to collection's permissions. + * Currently properties only work with NFTs. + * + * # Permissions + * + * * Depends on collection's token property permissions and specified property mutability: + * * Collection owner + * * Collection admin + * * Token owner + * + * See [`set_token_property_permissions`][`Pallet::set_token_property_permissions`]. + * + * # Arguments + * + * * `collection_id: ID of the collection to which the token belongs. + * * `token_id`: ID of the modified token. + * * `properties`: Vector of key-value pairs stored as the token's metadata. + * Keys support Latin letters, `-`, `_`, and `.` as symbols. 
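// Editorial example: the two-step sponsorship flow (invite, then confirm) plus
// partial limit and permission updates described above, where fields left
// unset (None) keep their previous or default values. Collection ID 3 and all
// concrete values are illustrative; `api`, `owner` and `sponsor` are assumed.
import type { ApiPromise } from '@polkadot/api';
import type { KeyringPair } from '@polkadot/keyring/types';

async function configureCollection(api: ApiPromise, owner: KeyringPair, sponsor: KeyringPair): Promise<void> {
  const collectionId = 3;

  // 1. The owner invites a sponsor; 2. the sponsor confirms and starts paying fees.
  await api.tx.unique.setCollectionSponsor(collectionId, sponsor.address).signAndSend(owner, { nonce: -1 });
  await api.tx.unique.confirmSponsorship(collectionId).signAndSend(sponsor);

  // Partial updates: only the listed fields are overwritten.
  await api.tx.unique
    .setCollectionLimits(collectionId, { sponsorTransferTimeout: 0, transfersEnabled: true })
    .signAndSend(owner, { nonce: -1 });
  await api.tx.unique
    .setCollectionPermissions(collectionId, { mintMode: true, nesting: { tokenOwner: true } })
    .signAndSend(owner, { nonce: -1 });
}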
+ **/ setTokenProperties: AugmentedSubmittable<(collectionId: u32 | AnyNumber | Uint8Array, tokenId: u32 | AnyNumber | Uint8Array, properties: Vec | (UpDataStructsProperty | { key?: any; value?: any } | string | Uint8Array)[]) => SubmittableExtrinsic, [u32, u32, Vec]>; - setTokenPropertyPermissions: AugmentedSubmittable<(collectionId: u32 | AnyNumber | Uint8Array, propertyPermissions: Vec | (UpDataStructsPropertyKeyPermission | { key?: any; permission?: any } | string | Uint8Array)[]) => SubmittableExtrinsic, [u32, Vec]>; /** - * Set transfers_enabled value for particular collection + * Add or change token property permissions of a collection. + * + * Without a permission for a particular key, a property with that key + * cannot be created in a token. * * # Permissions * - * * Collection Owner. + * * Collection owner + * * Collection admin * * # Arguments * - * * collection_id: ID of the collection. + * * `collection_id`: ID of the modified collection. + * * `property_permissions`: Vector of permissions for property keys. + * Keys support Latin letters, `-`, `_`, and `.` as symbols. + **/ + setTokenPropertyPermissions: AugmentedSubmittable<(collectionId: u32 | AnyNumber | Uint8Array, propertyPermissions: Vec | (UpDataStructsPropertyKeyPermission | { key?: any; permission?: any } | string | Uint8Array)[]) => SubmittableExtrinsic, [u32, Vec]>; + /** + * Completely allow or disallow transfers for a particular collection. * - * * value: New flag value. + * # Permissions + * + * * Collection owner + * + * # Arguments + * + * * `collection_id`: ID of the collection. + * * `value`: New value of the flag, are transfers allowed? **/ setTransfersEnabledFlag: AugmentedSubmittable<(collectionId: u32 | AnyNumber | Uint8Array, value: bool | boolean | Uint8Array) => SubmittableExtrinsic, [u32, bool]>; /** @@ -1005,47 +1619,50 @@ declare module '@polkadot/api-base/types/submittable' { * * # Permissions * - * * Collection Owner - * * Collection Admin - * * Current NFT owner + * * Collection owner + * * Collection admin + * * Current token owner * * # Arguments * - * * recipient: Address of token recipient. - * - * * collection_id. - * - * * item_id: ID of the item + * * `recipient`: Address of token recipient. + * * `collection_id`: ID of the collection the item belongs to. + * * `item_id`: ID of the item. * * Non-Fungible Mode: Required. * * Fungible Mode: Ignored. * * Re-Fungible Mode: Required. * - * * value: Amount to transfer. - * * Non-Fungible Mode: Ignored - * * Fungible Mode: Must specify transferred amount - * * Re-Fungible Mode: Must specify transferred portion (between 0 and 1) + * * `value`: Amount to transfer. + * * Non-Fungible Mode: An NFT is indivisible, there is always 1 corresponding to an ID. + * * Fungible Mode: The desired number of pieces to transfer. + * * Re-Fungible Mode: The desired number of pieces to transfer. **/ transfer: AugmentedSubmittable<(recipient: PalletEvmAccountBasicCrossAccountIdRepr | { Substrate: any } | { Ethereum: any } | string | Uint8Array, collectionId: u32 | AnyNumber | Uint8Array, itemId: u32 | AnyNumber | Uint8Array, value: u128 | AnyNumber | Uint8Array) => SubmittableExtrinsic, [PalletEvmAccountBasicCrossAccountIdRepr, u32, u32, u128]>; /** - * Change ownership of a NFT on behalf of the owner. See Approve method for additional information. After this method executes, the approval is removed so that the approved address will not be able to transfer this NFT again from this owner. 
- * - * # Permissions - * * Collection Owner - * * Collection Admin - * * Current NFT owner - * * Address approved by current NFT owner + * Change ownership of an item on behalf of the owner as a non-owner account. * - * # Arguments + * See the [`approve`][`Pallet::approve`] method for additional information. * - * * from: Address that owns token. + * After this method executes, one approval is removed from the total so that + * the approved address will not be able to transfer this item again from this owner. * - * * recipient: Address of token recipient. + * # Permissions * - * * collection_id. + * * Collection owner + * * Collection admin + * * Current item owner + * * Address approved by current item owner * - * * item_id: ID of the item. + * # Arguments * - * * value: Amount to transfer. + * * `from`: Address that currently owns the token. + * * `recipient`: Address of the new token-owner-to-be. + * * `collection_id`: ID of the collection the item. + * * `item_id`: ID of the item to be transferred. + * * `value`: Amount to transfer. + * * Non-Fungible Mode: An NFT is indivisible, there is always 1 corresponding to an ID. + * * Fungible Mode: The desired number of pieces to transfer. + * * Re-Fungible Mode: The desired number of pieces to transfer. **/ transferFrom: AugmentedSubmittable<(from: PalletEvmAccountBasicCrossAccountIdRepr | { Substrate: any } | { Ethereum: any } | string | Uint8Array, recipient: PalletEvmAccountBasicCrossAccountIdRepr | { Substrate: any } | { Ethereum: any } | string | Uint8Array, collectionId: u32 | AnyNumber | Uint8Array, itemId: u32 | AnyNumber | Uint8Array, value: u128 | AnyNumber | Uint8Array) => SubmittableExtrinsic, [PalletEvmAccountBasicCrossAccountIdRepr, PalletEvmAccountBasicCrossAccountIdRepr, u32, u32, u128]>; /** @@ -1087,7 +1704,7 @@ declare module '@polkadot/api-base/types/submittable' { * Events: * - `OverweightServiced`: On success. **/ - serviceOverweight: AugmentedSubmittable<(index: u64 | AnyNumber | Uint8Array, weightLimit: u64 | AnyNumber | Uint8Array) => SubmittableExtrinsic, [u64, u64]>; + serviceOverweight: AugmentedSubmittable<(index: u64 | AnyNumber | Uint8Array, weightLimit: Weight | AnyNumber | Uint8Array) => SubmittableExtrinsic, [u64, Weight]>; /** * Suspends all XCM executions for the XCMP queue, regardless of the sender's origin. * @@ -1124,7 +1741,7 @@ declare module '@polkadot/api-base/types/submittable' { * - `origin`: Must pass `Root`. * - `new`: Desired value for `QueueConfigData.threshold_weight` **/ - updateThresholdWeight: AugmentedSubmittable<(updated: u64 | AnyNumber | Uint8Array) => SubmittableExtrinsic, [u64]>; + updateThresholdWeight: AugmentedSubmittable<(updated: Weight | AnyNumber | Uint8Array) => SubmittableExtrinsic, [Weight]>; /** * Overwrites the speed to which the available weight approaches the maximum weight. * A lower number results in a faster progression. A value of 1 makes the entire weight available initially. @@ -1132,7 +1749,7 @@ declare module '@polkadot/api-base/types/submittable' { * - `origin`: Must pass `Root`. * - `new`: Desired value for `QueueConfigData.weight_restrict_decay`. **/ - updateWeightRestrictDecay: AugmentedSubmittable<(updated: u64 | AnyNumber | Uint8Array) => SubmittableExtrinsic, [u64]>; + updateWeightRestrictDecay: AugmentedSubmittable<(updated: Weight | AnyNumber | Uint8Array) => SubmittableExtrinsic, [Weight]>; /** * Overwrite the maximum amount of weight any individual message may consume. 
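// Editorial example: the xcmpQueue weight parameters above are Root-only, so
// they are wrapped in sudo here; note that they now take a Weight instead of a
// bare u64. The numeric values are placeholders with no particular meaning.
import type { ApiPromise } from '@polkadot/api';
import type { KeyringPair } from '@polkadot/keyring/types';

async function tuneXcmpQueue(api: ApiPromise, sudoKey: KeyringPair): Promise<void> {
  const calls = [
    api.tx.xcmpQueue.updateThresholdWeight(100_000_000),
    api.tx.xcmpQueue.updateWeightRestrictDecay(2),
    api.tx.xcmpQueue.updateXcmpMaxIndividualWeight(20_000_000_000),
  ];

  for (const call of calls) {
    await api.tx.sudo.sudo(call).signAndSend(sudoKey, { nonce: -1 });
  }
}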
* Messages above this weight go into the overweight queue and may only be serviced explicitly. @@ -1140,7 +1757,127 @@ declare module '@polkadot/api-base/types/submittable' { * - `origin`: Must pass `Root`. * - `new`: Desired value for `QueueConfigData.xcmp_max_individual_weight`. **/ - updateXcmpMaxIndividualWeight: AugmentedSubmittable<(updated: u64 | AnyNumber | Uint8Array) => SubmittableExtrinsic, [u64]>; + updateXcmpMaxIndividualWeight: AugmentedSubmittable<(updated: Weight | AnyNumber | Uint8Array) => SubmittableExtrinsic, [Weight]>; + /** + * Generic tx + **/ + [key: string]: SubmittableExtrinsicFunction; + }; + xTokens: { + /** + * Transfer native currencies. + * + * `dest_weight` is the weight for XCM execution on the dest chain, and + * it would be charged from the transferred assets. If set below + * requirements, the execution may fail and assets wouldn't be + * received. + * + * It's a no-op if any error on local XCM execution or message sending. + * Note sending assets out per se doesn't guarantee they would be + * received. Receiving depends on if the XCM message could be delivered + * by the network, and if the receiving chain would handle + * messages correctly. + **/ + transfer: AugmentedSubmittable<(currencyId: PalletForeignAssetsAssetIds | { ForeignAssetId: any } | { NativeAssetId: any } | string | Uint8Array, amount: u128 | AnyNumber | Uint8Array, dest: XcmVersionedMultiLocation | { V0: any } | { V1: any } | string | Uint8Array, destWeight: u64 | AnyNumber | Uint8Array) => SubmittableExtrinsic, [PalletForeignAssetsAssetIds, u128, XcmVersionedMultiLocation, u64]>; + /** + * Transfer `MultiAsset`. + * + * `dest_weight` is the weight for XCM execution on the dest chain, and + * it would be charged from the transferred assets. If set below + * requirements, the execution may fail and assets wouldn't be + * received. + * + * It's a no-op if any error on local XCM execution or message sending. + * Note sending assets out per se doesn't guarantee they would be + * received. Receiving depends on if the XCM message could be delivered + * by the network, and if the receiving chain would handle + * messages correctly. + **/ + transferMultiasset: AugmentedSubmittable<(asset: XcmVersionedMultiAsset | { V0: any } | { V1: any } | string | Uint8Array, dest: XcmVersionedMultiLocation | { V0: any } | { V1: any } | string | Uint8Array, destWeight: u64 | AnyNumber | Uint8Array) => SubmittableExtrinsic, [XcmVersionedMultiAsset, XcmVersionedMultiLocation, u64]>; + /** + * Transfer several `MultiAsset` specifying the item to be used as fee + * + * `dest_weight` is the weight for XCM execution on the dest chain, and + * it would be charged from the transferred assets. If set below + * requirements, the execution may fail and assets wouldn't be + * received. + * + * `fee_item` is index of the MultiAssets that we want to use for + * payment + * + * It's a no-op if any error on local XCM execution or message sending. + * Note sending assets out per se doesn't guarantee they would be + * received. Receiving depends on if the XCM message could be delivered + * by the network, and if the receiving chain would handle + * messages correctly. 
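// Editorial example: send an asset to an account on a sibling parachain with
// xTokens.transfer, as documented above. Parachain 2000, foreign-asset ID 0,
// the amount and the destination weight are placeholders; the V1 MultiLocation
// junction layout is the standard XCM v1 form and `api`/`sender` are assumed.
import type { ApiPromise } from '@polkadot/api';
import type { KeyringPair } from '@polkadot/keyring/types';
import { decodeAddress } from '@polkadot/util-crypto';

async function crossChainTransfer(api: ApiPromise, sender: KeyringPair, beneficiary: string): Promise<void> {
  const dest = {
    V1: {
      parents: 1,
      interior: {
        X2: [
          { Parachain: 2000 },
          { AccountId32: { network: 'Any', id: decodeAddress(beneficiary) } },
        ],
      },
    },
  };

  // dest_weight pays for execution on the destination chain and is deducted
  // from the transferred assets if required.
  await api.tx.xTokens
    .transfer({ ForeignAssetId: 0 }, 1_000_000_000_000n, dest, 5_000_000_000)
    .signAndSend(sender);
}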
+ **/ + transferMultiassets: AugmentedSubmittable<(assets: XcmVersionedMultiAssets | { V0: any } | { V1: any } | string | Uint8Array, feeItem: u32 | AnyNumber | Uint8Array, dest: XcmVersionedMultiLocation | { V0: any } | { V1: any } | string | Uint8Array, destWeight: u64 | AnyNumber | Uint8Array) => SubmittableExtrinsic, [XcmVersionedMultiAssets, u32, XcmVersionedMultiLocation, u64]>; + /** + * Transfer `MultiAsset` specifying the fee and amount as separate. + * + * `dest_weight` is the weight for XCM execution on the dest chain, and + * it would be charged from the transferred assets. If set below + * requirements, the execution may fail and assets wouldn't be + * received. + * + * `fee` is the multiasset to be spent to pay for execution in + * destination chain. Both fee and amount will be subtracted form the + * callers balance For now we only accept fee and asset having the same + * `MultiLocation` id. + * + * If `fee` is not high enough to cover for the execution costs in the + * destination chain, then the assets will be trapped in the + * destination chain + * + * It's a no-op if any error on local XCM execution or message sending. + * Note sending assets out per se doesn't guarantee they would be + * received. Receiving depends on if the XCM message could be delivered + * by the network, and if the receiving chain would handle + * messages correctly. + **/ + transferMultiassetWithFee: AugmentedSubmittable<(asset: XcmVersionedMultiAsset | { V0: any } | { V1: any } | string | Uint8Array, fee: XcmVersionedMultiAsset | { V0: any } | { V1: any } | string | Uint8Array, dest: XcmVersionedMultiLocation | { V0: any } | { V1: any } | string | Uint8Array, destWeight: u64 | AnyNumber | Uint8Array) => SubmittableExtrinsic, [XcmVersionedMultiAsset, XcmVersionedMultiAsset, XcmVersionedMultiLocation, u64]>; + /** + * Transfer several currencies specifying the item to be used as fee + * + * `dest_weight` is the weight for XCM execution on the dest chain, and + * it would be charged from the transferred assets. If set below + * requirements, the execution may fail and assets wouldn't be + * received. + * + * `fee_item` is index of the currencies tuple that we want to use for + * payment + * + * It's a no-op if any error on local XCM execution or message sending. + * Note sending assets out per se doesn't guarantee they would be + * received. Receiving depends on if the XCM message could be delivered + * by the network, and if the receiving chain would handle + * messages correctly. + **/ + transferMulticurrencies: AugmentedSubmittable<(currencies: Vec> | ([PalletForeignAssetsAssetIds | { ForeignAssetId: any } | { NativeAssetId: any } | string | Uint8Array, u128 | AnyNumber | Uint8Array])[], feeItem: u32 | AnyNumber | Uint8Array, dest: XcmVersionedMultiLocation | { V0: any } | { V1: any } | string | Uint8Array, destWeight: u64 | AnyNumber | Uint8Array) => SubmittableExtrinsic, [Vec>, u32, XcmVersionedMultiLocation, u64]>; + /** + * Transfer native currencies specifying the fee and amount as + * separate. + * + * `dest_weight` is the weight for XCM execution on the dest chain, and + * it would be charged from the transferred assets. If set below + * requirements, the execution may fail and assets wouldn't be + * received. + * + * `fee` is the amount to be spent to pay for execution in destination + * chain. Both fee and amount will be subtracted form the callers + * balance. 
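For the variants that take explicit XCM assets (`transferMultiassets`, `transferMultiassetWithFee`), the asset and the fee are passed as versioned `MultiAsset`s. The sketch below, again with placeholder locations, amounts and dest weight, moves the relay-chain token and pays execution with the same asset, mirroring the note above that both fee and amount are withdrawn from the caller.

// Sketch: transferMultiassetWithFee(asset, fee, dest, destWeight) with XCM v1 values.
// Locations, amounts and destWeight are placeholders; an underestimated fee can leave
// the transferred assets trapped on the destination chain, as documented here.
import { ApiPromise, WsProvider } from '@polkadot/api';
import { Keyring } from '@polkadot/keyring';

async function transferWithExplicitFee(): Promise<void> {
  const api = await ApiPromise.create({ provider: new WsProvider('ws://127.0.0.1:9944') });
  const sender = new Keyring({ type: 'sr25519' }).addFromUri('//Alice');

  // Both multiassets point at the relay-chain token (parents: 1, interior: Here).
  const relayToken = { Concrete: { parents: 1, interior: 'Here' } };
  const asset = { V1: { id: relayToken, fun: { Fungible: 1_000_000_000_000n } } };
  const fee = { V1: { id: relayToken, fun: { Fungible: 10_000_000_000n } } };

  // Beneficiary account on the relay chain, addressed by its 32-byte public key.
  const dest = {
    V1: {
      parents: 1,
      interior: { X1: { AccountId32: { network: 'Any', id: sender.addressRaw } } }
    }
  };

  await api.tx.xTokens
    .transferMultiassetWithFee(asset, fee, dest, 5_000_000_000)
    .signAndSend(sender);
}

transferWithExplicitFee().catch(console.error);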
+ * + * If `fee` is not high enough to cover for the execution costs in the + * destination chain, then the assets will be trapped in the + * destination chain + * + * It's a no-op if any error on local XCM execution or message sending. + * Note sending assets out per se doesn't guarantee they would be + * received. Receiving depends on if the XCM message could be delivered + * by the network, and if the receiving chain would handle + * messages correctly. + **/ + transferWithFee: AugmentedSubmittable<(currencyId: PalletForeignAssetsAssetIds | { ForeignAssetId: any } | { NativeAssetId: any } | string | Uint8Array, amount: u128 | AnyNumber | Uint8Array, fee: u128 | AnyNumber | Uint8Array, dest: XcmVersionedMultiLocation | { V0: any } | { V1: any } | string | Uint8Array, destWeight: u64 | AnyNumber | Uint8Array) => SubmittableExtrinsic, [PalletForeignAssetsAssetIds, u128, u128, XcmVersionedMultiLocation, u64]>; /** * Generic tx **/ diff --git a/tests/src/interfaces/augment-api.ts b/tests/src/interfaces/augment-api.ts index 921d2f824d..7cafd228bd 100644 --- a/tests/src/interfaces/augment-api.ts +++ b/tests/src/interfaces/augment-api.ts @@ -7,3 +7,4 @@ import './augment-api-events'; import './augment-api-query'; import './augment-api-tx'; import './augment-api-rpc'; +import './augment-api-runtime'; diff --git a/tests/src/interfaces/augment-types.ts b/tests/src/interfaces/augment-types.ts index 3a4a2f6b0f..86e965ba4d 100644 --- a/tests/src/interfaces/augment-types.ts +++ b/tests/src/interfaces/augment-types.ts @@ -1,17 +1,23 @@ // Auto-generated via `yarn polkadot-types-from-defs`, do not edit /* eslint-disable */ -import type { CumulusPalletDmpQueueCall, CumulusPalletDmpQueueConfigData, CumulusPalletDmpQueueError, CumulusPalletDmpQueueEvent, CumulusPalletDmpQueuePageIndexData, CumulusPalletParachainSystemCall, CumulusPalletParachainSystemError, CumulusPalletParachainSystemEvent, CumulusPalletParachainSystemRelayStateSnapshotMessagingStateSnapshot, CumulusPalletXcmCall, CumulusPalletXcmError, CumulusPalletXcmEvent, CumulusPalletXcmOrigin, CumulusPalletXcmpQueueCall, CumulusPalletXcmpQueueError, CumulusPalletXcmpQueueEvent, CumulusPalletXcmpQueueInboundChannelDetails, CumulusPalletXcmpQueueInboundState, CumulusPalletXcmpQueueOutboundChannelDetails, CumulusPalletXcmpQueueOutboundState, CumulusPalletXcmpQueueQueueConfigData, CumulusPrimitivesParachainInherentParachainInherentData, EthbloomBloom, EthereumBlock, EthereumHeader, EthereumLog, EthereumReceiptEip658ReceiptData, EthereumReceiptReceiptV3, EthereumTransactionAccessListItem, EthereumTransactionEip1559Transaction, EthereumTransactionEip2930Transaction, EthereumTransactionLegacyTransaction, EthereumTransactionTransactionAction, EthereumTransactionTransactionSignature, EthereumTransactionTransactionV2, EthereumTypesHashH64, EvmCoreErrorExitError, EvmCoreErrorExitFatal, EvmCoreErrorExitReason, EvmCoreErrorExitRevert, EvmCoreErrorExitSucceed, FpRpcTransactionStatus, FrameSupportDispatchRawOrigin, FrameSupportPalletId, FrameSupportScheduleLookupError, FrameSupportScheduleMaybeHashed, FrameSupportTokensMiscBalanceStatus, FrameSupportWeightsDispatchClass, FrameSupportWeightsDispatchInfo, FrameSupportWeightsPays, FrameSupportWeightsPerDispatchClassU32, FrameSupportWeightsPerDispatchClassU64, FrameSupportWeightsPerDispatchClassWeightsPerClass, FrameSupportWeightsRuntimeDbWeight, FrameSystemAccountInfo, FrameSystemCall, FrameSystemError, FrameSystemEvent, FrameSystemEventRecord, FrameSystemExtensionsCheckGenesis, FrameSystemExtensionsCheckNonce, 
FrameSystemExtensionsCheckSpecVersion, FrameSystemExtensionsCheckWeight, FrameSystemLastRuntimeUpgradeInfo, FrameSystemLimitsBlockLength, FrameSystemLimitsBlockWeights, FrameSystemLimitsWeightsPerClass, FrameSystemPhase, OpalRuntimeCheckMaintenance, OpalRuntimeOriginCaller, OpalRuntimeRuntime, OrmlVestingModuleCall, OrmlVestingModuleError, OrmlVestingModuleEvent, OrmlVestingVestingSchedule, PalletBalancesAccountData, PalletBalancesBalanceLock, PalletBalancesCall, PalletBalancesError, PalletBalancesEvent, PalletBalancesReasons, PalletBalancesReleases, PalletBalancesReserveData, PalletCommonError, PalletCommonEvent, PalletEthereumCall, PalletEthereumError, PalletEthereumEvent, PalletEthereumFakeTransactionFinalizer, PalletEthereumRawOrigin, PalletEvmAccountBasicCrossAccountIdRepr, PalletEvmCall, PalletEvmCoderSubstrateError, PalletEvmContractHelpersError, PalletEvmContractHelpersSponsoringModeT, PalletEvmError, PalletEvmEvent, PalletEvmMigrationCall, PalletEvmMigrationError, PalletFungibleError, PalletInflationCall, PalletMaintenanceCall, PalletMaintenanceError, PalletMaintenanceEvent, PalletNonfungibleError, PalletNonfungibleItemData, PalletRefungibleError, PalletRefungibleItemData, PalletRmrkCoreCall, PalletRmrkCoreError, PalletRmrkCoreEvent, PalletRmrkEquipCall, PalletRmrkEquipError, PalletRmrkEquipEvent, PalletStructureCall, PalletStructureError, PalletStructureEvent, PalletSudoCall, PalletSudoError, PalletSudoEvent, PalletTemplateTransactionPaymentCall, PalletTemplateTransactionPaymentChargeTransactionPayment, PalletTimestampCall, PalletTransactionPaymentReleases, PalletTreasuryCall, PalletTreasuryError, PalletTreasuryEvent, PalletTreasuryProposal, PalletUniqueCall, PalletUniqueError, PalletUniqueRawEvent, PalletUniqueSchedulerCall, PalletUniqueSchedulerError, PalletUniqueSchedulerEvent, PalletUniqueSchedulerScheduledV3, PalletXcmCall, PalletXcmError, PalletXcmEvent, PalletXcmOrigin, PhantomTypeUpDataStructs, PolkadotCorePrimitivesInboundDownwardMessage, PolkadotCorePrimitivesInboundHrmpMessage, PolkadotCorePrimitivesOutboundHrmpMessage, PolkadotParachainPrimitivesXcmpMessageFormat, PolkadotPrimitivesV2AbridgedHostConfiguration, PolkadotPrimitivesV2AbridgedHrmpChannel, PolkadotPrimitivesV2PersistedValidationData, PolkadotPrimitivesV2UpgradeRestriction, RmrkTraitsBaseBaseInfo, RmrkTraitsCollectionCollectionInfo, RmrkTraitsNftAccountIdOrCollectionNftTuple, RmrkTraitsNftNftChild, RmrkTraitsNftNftInfo, RmrkTraitsNftRoyaltyInfo, RmrkTraitsPartEquippableList, RmrkTraitsPartFixedPart, RmrkTraitsPartPartType, RmrkTraitsPartSlotPart, RmrkTraitsPropertyPropertyInfo, RmrkTraitsResourceBasicResource, RmrkTraitsResourceComposableResource, RmrkTraitsResourceResourceInfo, RmrkTraitsResourceResourceTypes, RmrkTraitsResourceSlotResource, RmrkTraitsTheme, RmrkTraitsThemeThemeProperty, SpCoreEcdsaSignature, SpCoreEd25519Signature, SpCoreSr25519Signature, SpCoreVoid, SpRuntimeArithmeticError, SpRuntimeDigest, SpRuntimeDigestDigestItem, SpRuntimeDispatchError, SpRuntimeModuleError, SpRuntimeMultiSignature, SpRuntimeTokenError, SpRuntimeTransactionalError, SpTrieStorageProof, SpVersionRuntimeVersion, UpDataStructsAccessMode, UpDataStructsCollection, UpDataStructsCollectionLimits, UpDataStructsCollectionMode, UpDataStructsCollectionPermissions, UpDataStructsCollectionStats, UpDataStructsCreateCollectionData, UpDataStructsCreateFungibleData, UpDataStructsCreateItemData, UpDataStructsCreateItemExData, UpDataStructsCreateNftData, UpDataStructsCreateNftExData, UpDataStructsCreateReFungibleData, 
UpDataStructsCreateRefungibleExData, UpDataStructsNestingPermissions, UpDataStructsOwnerRestrictedSet, UpDataStructsProperties, UpDataStructsPropertiesMapBoundedVec, UpDataStructsPropertiesMapPropertyPermission, UpDataStructsProperty, UpDataStructsPropertyKeyPermission, UpDataStructsPropertyPermission, UpDataStructsPropertyScope, UpDataStructsRpcCollection, UpDataStructsSponsoringRateLimit, UpDataStructsSponsorshipState, UpDataStructsTokenChild, UpDataStructsTokenData, XcmDoubleEncoded, XcmV0Junction, XcmV0JunctionBodyId, XcmV0JunctionBodyPart, XcmV0JunctionNetworkId, XcmV0MultiAsset, XcmV0MultiLocation, XcmV0Order, XcmV0OriginKind, XcmV0Response, XcmV0Xcm, XcmV1Junction, XcmV1MultiAsset, XcmV1MultiLocation, XcmV1MultiassetAssetId, XcmV1MultiassetAssetInstance, XcmV1MultiassetFungibility, XcmV1MultiassetMultiAssetFilter, XcmV1MultiassetMultiAssets, XcmV1MultiassetWildFungibility, XcmV1MultiassetWildMultiAsset, XcmV1MultilocationJunctions, XcmV1Order, XcmV1Response, XcmV1Xcm, XcmV2Instruction, XcmV2Response, XcmV2TraitsError, XcmV2TraitsOutcome, XcmV2WeightLimit, XcmV2Xcm, XcmVersionedMultiAssets, XcmVersionedMultiLocation, XcmVersionedXcm } from './default'; +// import type lookup before we augment - in some environments +// this is required to allow for ambient/previous definitions +import '@polkadot/types/types/registry'; + +import type { CumulusPalletDmpQueueCall, CumulusPalletDmpQueueConfigData, CumulusPalletDmpQueueError, CumulusPalletDmpQueueEvent, CumulusPalletDmpQueuePageIndexData, CumulusPalletParachainSystemCall, CumulusPalletParachainSystemError, CumulusPalletParachainSystemEvent, CumulusPalletParachainSystemRelayStateSnapshotMessagingStateSnapshot, CumulusPalletXcmCall, CumulusPalletXcmError, CumulusPalletXcmEvent, CumulusPalletXcmOrigin, CumulusPalletXcmpQueueCall, CumulusPalletXcmpQueueError, CumulusPalletXcmpQueueEvent, CumulusPalletXcmpQueueInboundChannelDetails, CumulusPalletXcmpQueueInboundState, CumulusPalletXcmpQueueOutboundChannelDetails, CumulusPalletXcmpQueueOutboundState, CumulusPalletXcmpQueueQueueConfigData, CumulusPrimitivesParachainInherentParachainInherentData, EthbloomBloom, EthereumBlock, EthereumHeader, EthereumLog, EthereumReceiptEip658ReceiptData, EthereumReceiptReceiptV3, EthereumTransactionAccessListItem, EthereumTransactionEip1559Transaction, EthereumTransactionEip2930Transaction, EthereumTransactionLegacyTransaction, EthereumTransactionTransactionAction, EthereumTransactionTransactionSignature, EthereumTransactionTransactionV2, EthereumTypesHashH64, EvmCoreErrorExitError, EvmCoreErrorExitFatal, EvmCoreErrorExitReason, EvmCoreErrorExitRevert, EvmCoreErrorExitSucceed, FpRpcTransactionStatus, FrameSupportDispatchDispatchClass, FrameSupportDispatchDispatchInfo, FrameSupportDispatchPays, FrameSupportDispatchPerDispatchClassU32, FrameSupportDispatchPerDispatchClassWeight, FrameSupportDispatchPerDispatchClassWeightsPerClass, FrameSupportDispatchRawOrigin, FrameSupportPalletId, FrameSupportScheduleLookupError, FrameSupportScheduleMaybeHashed, FrameSupportTokensMiscBalanceStatus, FrameSystemAccountInfo, FrameSystemCall, FrameSystemError, FrameSystemEvent, FrameSystemEventRecord, FrameSystemExtensionsCheckGenesis, FrameSystemExtensionsCheckNonce, FrameSystemExtensionsCheckSpecVersion, FrameSystemExtensionsCheckTxVersion, FrameSystemExtensionsCheckWeight, FrameSystemLastRuntimeUpgradeInfo, FrameSystemLimitsBlockLength, FrameSystemLimitsBlockWeights, FrameSystemLimitsWeightsPerClass, FrameSystemPhase, OpalRuntimeOriginCaller, OpalRuntimeRuntime, 
OpalRuntimeRuntimeCommonMaintenanceCheckMaintenance, OrmlTokensAccountData, OrmlTokensBalanceLock, OrmlTokensModuleCall, OrmlTokensModuleError, OrmlTokensModuleEvent, OrmlTokensReserveData, OrmlVestingModuleCall, OrmlVestingModuleError, OrmlVestingModuleEvent, OrmlVestingVestingSchedule, OrmlXtokensModuleCall, OrmlXtokensModuleError, OrmlXtokensModuleEvent, PalletAppPromotionCall, PalletAppPromotionError, PalletAppPromotionEvent, PalletBalancesAccountData, PalletBalancesBalanceLock, PalletBalancesCall, PalletBalancesError, PalletBalancesEvent, PalletBalancesReasons, PalletBalancesReleases, PalletBalancesReserveData, PalletCommonError, PalletCommonEvent, PalletConfigurationCall, PalletEthereumCall, PalletEthereumError, PalletEthereumEvent, PalletEthereumFakeTransactionFinalizer, PalletEthereumRawOrigin, PalletEvmAccountBasicCrossAccountIdRepr, PalletEvmCall, PalletEvmCoderSubstrateError, PalletEvmContractHelpersError, PalletEvmContractHelpersEvent, PalletEvmContractHelpersSponsoringModeT, PalletEvmError, PalletEvmEvent, PalletEvmMigrationCall, PalletEvmMigrationError, PalletForeignAssetsAssetIds, PalletForeignAssetsModuleAssetMetadata, PalletForeignAssetsModuleCall, PalletForeignAssetsModuleError, PalletForeignAssetsModuleEvent, PalletForeignAssetsNativeCurrency, PalletFungibleError, PalletInflationCall, PalletMaintenanceCall, PalletMaintenanceError, PalletMaintenanceEvent, PalletNonfungibleError, PalletNonfungibleItemData, PalletRefungibleError, PalletRefungibleItemData, PalletRmrkCoreCall, PalletRmrkCoreError, PalletRmrkCoreEvent, PalletRmrkEquipCall, PalletRmrkEquipError, PalletRmrkEquipEvent, PalletStructureCall, PalletStructureError, PalletStructureEvent, PalletSudoCall, PalletSudoError, PalletSudoEvent, PalletTemplateTransactionPaymentCall, PalletTemplateTransactionPaymentChargeTransactionPayment, PalletTestUtilsCall, PalletTestUtilsError, PalletTestUtilsEvent, PalletTimestampCall, PalletTransactionPaymentEvent, PalletTransactionPaymentReleases, PalletTreasuryCall, PalletTreasuryError, PalletTreasuryEvent, PalletTreasuryProposal, PalletUniqueCall, PalletUniqueError, PalletUniqueRawEvent, PalletUniqueSchedulerCall, PalletUniqueSchedulerError, PalletUniqueSchedulerEvent, PalletUniqueSchedulerScheduledV3, PalletXcmCall, PalletXcmError, PalletXcmEvent, PalletXcmOrigin, PhantomTypeUpDataStructs, PolkadotCorePrimitivesInboundDownwardMessage, PolkadotCorePrimitivesInboundHrmpMessage, PolkadotCorePrimitivesOutboundHrmpMessage, PolkadotParachainPrimitivesXcmpMessageFormat, PolkadotPrimitivesV2AbridgedHostConfiguration, PolkadotPrimitivesV2AbridgedHrmpChannel, PolkadotPrimitivesV2PersistedValidationData, PolkadotPrimitivesV2UpgradeRestriction, RmrkTraitsBaseBaseInfo, RmrkTraitsCollectionCollectionInfo, RmrkTraitsNftAccountIdOrCollectionNftTuple, RmrkTraitsNftNftChild, RmrkTraitsNftNftInfo, RmrkTraitsNftRoyaltyInfo, RmrkTraitsPartEquippableList, RmrkTraitsPartFixedPart, RmrkTraitsPartPartType, RmrkTraitsPartSlotPart, RmrkTraitsPropertyPropertyInfo, RmrkTraitsResourceBasicResource, RmrkTraitsResourceComposableResource, RmrkTraitsResourceResourceInfo, RmrkTraitsResourceResourceTypes, RmrkTraitsResourceSlotResource, RmrkTraitsTheme, RmrkTraitsThemeThemeProperty, SpCoreEcdsaSignature, SpCoreEd25519Signature, SpCoreSr25519Signature, SpCoreVoid, SpRuntimeArithmeticError, SpRuntimeDigest, SpRuntimeDigestDigestItem, SpRuntimeDispatchError, SpRuntimeModuleError, SpRuntimeMultiSignature, SpRuntimeTokenError, SpRuntimeTransactionalError, SpTrieStorageProof, SpVersionRuntimeVersion, 
SpWeightsRuntimeDbWeight, UpDataStructsAccessMode, UpDataStructsCollection, UpDataStructsCollectionLimits, UpDataStructsCollectionMode, UpDataStructsCollectionPermissions, UpDataStructsCollectionStats, UpDataStructsCreateCollectionData, UpDataStructsCreateFungibleData, UpDataStructsCreateItemData, UpDataStructsCreateItemExData, UpDataStructsCreateNftData, UpDataStructsCreateNftExData, UpDataStructsCreateReFungibleData, UpDataStructsCreateRefungibleExMultipleOwners, UpDataStructsCreateRefungibleExSingleOwner, UpDataStructsNestingPermissions, UpDataStructsOwnerRestrictedSet, UpDataStructsProperties, UpDataStructsPropertiesMapBoundedVec, UpDataStructsPropertiesMapPropertyPermission, UpDataStructsProperty, UpDataStructsPropertyKeyPermission, UpDataStructsPropertyPermission, UpDataStructsPropertyScope, UpDataStructsRpcCollection, UpDataStructsRpcCollectionFlags, UpDataStructsSponsoringRateLimit, UpDataStructsSponsorshipStateAccountId32, UpDataStructsSponsorshipStateBasicCrossAccountIdRepr, UpDataStructsTokenChild, UpDataStructsTokenData, XcmDoubleEncoded, XcmV0Junction, XcmV0JunctionBodyId, XcmV0JunctionBodyPart, XcmV0JunctionNetworkId, XcmV0MultiAsset, XcmV0MultiLocation, XcmV0Order, XcmV0OriginKind, XcmV0Response, XcmV0Xcm, XcmV1Junction, XcmV1MultiAsset, XcmV1MultiLocation, XcmV1MultiassetAssetId, XcmV1MultiassetAssetInstance, XcmV1MultiassetFungibility, XcmV1MultiassetMultiAssetFilter, XcmV1MultiassetMultiAssets, XcmV1MultiassetWildFungibility, XcmV1MultiassetWildMultiAsset, XcmV1MultilocationJunctions, XcmV1Order, XcmV1Response, XcmV1Xcm, XcmV2Instruction, XcmV2Response, XcmV2TraitsError, XcmV2TraitsOutcome, XcmV2WeightLimit, XcmV2Xcm, XcmVersionedMultiAsset, XcmVersionedMultiAssets, XcmVersionedMultiLocation, XcmVersionedXcm } from './default'; import type { Data, StorageKey } from '@polkadot/types'; -import type { BitVec, Bool, Bytes, I128, I16, I256, I32, I64, I8, Json, Null, OptionBool, Raw, Text, Type, U128, U16, U256, U32, U64, U8, USize, bool, i128, i16, i256, i32, i64, i8, u128, u16, u256, u32, u64, u8, usize } from '@polkadot/types-codec'; +import type { BitVec, Bool, Bytes, F32, F64, I128, I16, I256, I32, I64, I8, Json, Null, OptionBool, Raw, Text, Type, U128, U16, U256, U32, U64, U8, USize, bool, f32, f64, i128, i16, i256, i32, i64, i8, u128, u16, u256, u32, u64, u8, usize } from '@polkadot/types-codec'; import type { AssetApproval, AssetApprovalKey, AssetBalance, AssetDestroyWitness, AssetDetails, AssetMetadata, TAssetBalance, TAssetDepositBalance } from '@polkadot/types/interfaces/assets'; import type { BlockAttestations, IncludedBlocks, MoreAttestations } from '@polkadot/types/interfaces/attestations'; import type { RawAuraPreDigest } from '@polkadot/types/interfaces/aura'; import type { ExtrinsicOrHash, ExtrinsicStatus } from '@polkadot/types/interfaces/author'; import type { UncleEntryItem } from '@polkadot/types/interfaces/authorship'; -import type { AllowedSlots, BabeAuthorityWeight, BabeBlockWeight, BabeEpochConfiguration, BabeEquivocationProof, BabeWeight, EpochAuthorship, MaybeRandomness, MaybeVrf, NextConfigDescriptor, NextConfigDescriptorV1, Randomness, RawBabePreDigest, RawBabePreDigestCompat, RawBabePreDigestPrimary, RawBabePreDigestPrimaryTo159, RawBabePreDigestSecondaryPlain, RawBabePreDigestSecondaryTo159, RawBabePreDigestSecondaryVRF, RawBabePreDigestTo159, SlotNumber, VrfData, VrfOutput, VrfProof } from '@polkadot/types/interfaces/babe'; +import type { AllowedSlots, BabeAuthorityWeight, BabeBlockWeight, BabeEpochConfiguration, BabeEquivocationProof, 
BabeGenesisConfiguration, BabeGenesisConfigurationV1, BabeWeight, Epoch, EpochAuthorship, MaybeRandomness, MaybeVrf, NextConfigDescriptor, NextConfigDescriptorV1, OpaqueKeyOwnershipProof, Randomness, RawBabePreDigest, RawBabePreDigestCompat, RawBabePreDigestPrimary, RawBabePreDigestPrimaryTo159, RawBabePreDigestSecondaryPlain, RawBabePreDigestSecondaryTo159, RawBabePreDigestSecondaryVRF, RawBabePreDigestTo159, SlotNumber, VrfData, VrfOutput, VrfProof } from '@polkadot/types/interfaces/babe'; import type { AccountData, BalanceLock, BalanceLockTo212, BalanceStatus, Reasons, ReserveData, ReserveIdentifier, VestingSchedule, WithdrawReasons } from '@polkadot/types/interfaces/balances'; -import type { BeefyCommitment, BeefyId, BeefyNextAuthoritySet, BeefyPayload, BeefySignedCommitment, MmrRootHash, ValidatorSetId } from '@polkadot/types/interfaces/beefy'; +import type { BeefyAuthoritySet, BeefyCommitment, BeefyId, BeefyNextAuthoritySet, BeefyPayload, BeefyPayloadId, BeefySignedCommitment, MmrRootHash, ValidatorSet, ValidatorSetId } from '@polkadot/types/interfaces/beefy'; +import type { BenchmarkBatch, BenchmarkConfig, BenchmarkList, BenchmarkMetadata, BenchmarkParameter, BenchmarkResult } from '@polkadot/types/interfaces/benchmark'; +import type { CheckInherentsResult, InherentData, InherentIdentifier } from '@polkadot/types/interfaces/blockbuilder'; import type { BridgeMessageId, BridgedBlockHash, BridgedBlockNumber, BridgedHeader, CallOrigin, ChainId, DeliveredMessages, DispatchFeePayment, InboundLaneData, InboundRelayer, InitializationData, LaneId, MessageData, MessageKey, MessageNonce, MessagesDeliveryProofOf, MessagesProofOf, OperatingMode, OutboundLaneData, OutboundMessageFee, OutboundPayload, Parameter, RelayerId, UnrewardedRelayer, UnrewardedRelayersState } from '@polkadot/types/interfaces/bridges'; import type { BlockHash } from '@polkadot/types/interfaces/chain'; import type { PrefixedStorageKey } from '@polkadot/types/interfaces/childstate'; @@ -19,15 +25,15 @@ import type { StatementKind } from '@polkadot/types/interfaces/claims'; import type { CollectiveOrigin, MemberCount, ProposalIndex, Votes, VotesTo230 } from '@polkadot/types/interfaces/collective'; import type { AuthorityId, RawVRFOutput } from '@polkadot/types/interfaces/consensus'; import type { AliveContractInfo, CodeHash, CodeSource, CodeUploadRequest, CodeUploadResult, CodeUploadResultValue, ContractCallFlags, ContractCallRequest, ContractExecResult, ContractExecResultOk, ContractExecResultResult, ContractExecResultSuccessTo255, ContractExecResultSuccessTo260, ContractExecResultTo255, ContractExecResultTo260, ContractExecResultTo267, ContractInfo, ContractInstantiateResult, ContractInstantiateResultTo267, ContractInstantiateResultTo299, ContractReturnFlags, ContractStorageKey, DeletedContract, ExecReturnValue, Gas, HostFnWeights, HostFnWeightsTo264, InstantiateRequest, InstantiateRequestV1, InstantiateRequestV2, InstantiateReturnValue, InstantiateReturnValueOk, InstantiateReturnValueTo267, InstructionWeights, Limits, LimitsTo264, PrefabWasmModule, RentProjection, Schedule, ScheduleTo212, ScheduleTo258, ScheduleTo264, SeedOf, StorageDeposit, TombstoneContractInfo, TrieId } from '@polkadot/types/interfaces/contracts'; -import type { ContractConstructorSpecLatest, ContractConstructorSpecV0, ContractConstructorSpecV1, ContractConstructorSpecV2, ContractConstructorSpecV3, ContractContractSpecV0, ContractContractSpecV1, ContractContractSpecV2, ContractContractSpecV3, ContractCryptoHasher, ContractDiscriminant, 
ContractDisplayName, ContractEventParamSpecLatest, ContractEventParamSpecV0, ContractEventParamSpecV2, ContractEventSpecLatest, ContractEventSpecV0, ContractEventSpecV1, ContractEventSpecV2, ContractLayoutArray, ContractLayoutCell, ContractLayoutEnum, ContractLayoutHash, ContractLayoutHashingStrategy, ContractLayoutKey, ContractLayoutStruct, ContractLayoutStructField, ContractMessageParamSpecLatest, ContractMessageParamSpecV0, ContractMessageParamSpecV2, ContractMessageSpecLatest, ContractMessageSpecV0, ContractMessageSpecV1, ContractMessageSpecV2, ContractMetadata, ContractMetadataLatest, ContractMetadataV0, ContractMetadataV1, ContractMetadataV2, ContractMetadataV3, ContractProject, ContractProjectContract, ContractProjectInfo, ContractProjectSource, ContractProjectV0, ContractSelector, ContractStorageLayout, ContractTypeSpec } from '@polkadot/types/interfaces/contractsAbi'; +import type { ContractConstructorSpecLatest, ContractConstructorSpecV0, ContractConstructorSpecV1, ContractConstructorSpecV2, ContractConstructorSpecV3, ContractContractSpecV0, ContractContractSpecV1, ContractContractSpecV2, ContractContractSpecV3, ContractContractSpecV4, ContractCryptoHasher, ContractDiscriminant, ContractDisplayName, ContractEventParamSpecLatest, ContractEventParamSpecV0, ContractEventParamSpecV2, ContractEventSpecLatest, ContractEventSpecV0, ContractEventSpecV1, ContractEventSpecV2, ContractLayoutArray, ContractLayoutCell, ContractLayoutEnum, ContractLayoutHash, ContractLayoutHashingStrategy, ContractLayoutKey, ContractLayoutStruct, ContractLayoutStructField, ContractMessageParamSpecLatest, ContractMessageParamSpecV0, ContractMessageParamSpecV2, ContractMessageSpecLatest, ContractMessageSpecV0, ContractMessageSpecV1, ContractMessageSpecV2, ContractMetadata, ContractMetadataLatest, ContractMetadataV0, ContractMetadataV1, ContractMetadataV2, ContractMetadataV3, ContractMetadataV4, ContractProject, ContractProjectContract, ContractProjectInfo, ContractProjectSource, ContractProjectV0, ContractSelector, ContractStorageLayout, ContractTypeSpec } from '@polkadot/types/interfaces/contractsAbi'; import type { FundIndex, FundInfo, LastContribution, TrieIndex } from '@polkadot/types/interfaces/crowdloan'; -import type { ConfigData, MessageId, OverweightIndex, PageCounter, PageIndexData } from '@polkadot/types/interfaces/cumulus'; +import type { CollationInfo, CollationInfoV1, ConfigData, MessageId, OverweightIndex, PageCounter, PageIndexData } from '@polkadot/types/interfaces/cumulus'; import type { AccountVote, AccountVoteSplit, AccountVoteStandard, Conviction, Delegations, PreimageStatus, PreimageStatusAvailable, PriorLock, PropIndex, Proposal, ProxyState, ReferendumIndex, ReferendumInfo, ReferendumInfoFinished, ReferendumInfoTo239, ReferendumStatus, Tally, Voting, VotingDelegating, VotingDirect, VotingDirectVote } from '@polkadot/types/interfaces/democracy'; import type { BlockStats } from '@polkadot/types/interfaces/dev'; import type { ApprovalFlag, DefunctVoter, Renouncing, SetIndex, Vote, VoteIndex, VoteThreshold, VoterInfo } from '@polkadot/types/interfaces/elections'; import type { CreatedBlock, ImportedAux } from '@polkadot/types/interfaces/engine'; -import type { BlockV0, BlockV1, BlockV2, EIP1559Transaction, EIP2930Transaction, EthAccessList, EthAccessListItem, EthAccount, EthAddress, EthBlock, EthBloom, EthCallRequest, EthFilter, EthFilterAddress, EthFilterChanges, EthFilterTopic, EthFilterTopicEntry, EthFilterTopicInner, EthHeader, EthLog, EthReceipt, EthRichBlock, EthRichHeader, 
EthStorageProof, EthSubKind, EthSubParams, EthSubResult, EthSyncInfo, EthSyncStatus, EthTransaction, EthTransactionAction, EthTransactionCondition, EthTransactionRequest, EthTransactionSignature, EthTransactionStatus, EthWork, EthereumAccountId, EthereumAddress, EthereumLookupSource, EthereumSignature, LegacyTransaction, TransactionV0, TransactionV1, TransactionV2 } from '@polkadot/types/interfaces/eth'; -import type { EvmAccount, EvmLog, EvmVicinity, ExitError, ExitFatal, ExitReason, ExitRevert, ExitSucceed } from '@polkadot/types/interfaces/evm'; +import type { BlockV0, BlockV1, BlockV2, EIP1559Transaction, EIP2930Transaction, EthAccessList, EthAccessListItem, EthAccount, EthAddress, EthBlock, EthBloom, EthCallRequest, EthFeeHistory, EthFilter, EthFilterAddress, EthFilterChanges, EthFilterTopic, EthFilterTopicEntry, EthFilterTopicInner, EthHeader, EthLog, EthReceipt, EthReceiptV0, EthReceiptV3, EthRichBlock, EthRichHeader, EthStorageProof, EthSubKind, EthSubParams, EthSubResult, EthSyncInfo, EthSyncStatus, EthTransaction, EthTransactionAction, EthTransactionCondition, EthTransactionRequest, EthTransactionSignature, EthTransactionStatus, EthWork, EthereumAccountId, EthereumAddress, EthereumLookupSource, EthereumSignature, LegacyTransaction, TransactionV0, TransactionV1, TransactionV2 } from '@polkadot/types/interfaces/eth'; +import type { EvmAccount, EvmCallInfo, EvmCreateInfo, EvmLog, EvmVicinity, ExitError, ExitFatal, ExitReason, ExitRevert, ExitSucceed } from '@polkadot/types/interfaces/evm'; import type { AnySignature, EcdsaSignature, Ed25519Signature, Era, Extrinsic, ExtrinsicEra, ExtrinsicPayload, ExtrinsicPayloadUnknown, ExtrinsicPayloadV4, ExtrinsicSignature, ExtrinsicSignatureV4, ExtrinsicUnknown, ExtrinsicV4, ImmortalEra, MortalEra, MultiSignature, Signature, SignerPayload, Sr25519Signature } from '@polkadot/types/interfaces/extrinsics'; import type { AssetOptions, Owner, PermissionLatest, PermissionVersions, PermissionsV1 } from '@polkadot/types/interfaces/genericAsset'; import type { ActiveGilt, ActiveGiltsTotal, ActiveIndex, GiltBid } from '@polkadot/types/interfaces/gilt'; @@ -35,35 +41,37 @@ import type { AuthorityIndex, AuthorityList, AuthoritySet, AuthoritySetChange, A import type { IdentityFields, IdentityInfo, IdentityInfoAdditional, IdentityInfoTo198, IdentityJudgement, RegistrarIndex, RegistrarInfo, Registration, RegistrationJudgement, RegistrationTo198 } from '@polkadot/types/interfaces/identity'; import type { AuthIndex, AuthoritySignature, Heartbeat, HeartbeatTo244, OpaqueMultiaddr, OpaqueNetworkState, OpaquePeerId } from '@polkadot/types/interfaces/imOnline'; import type { CallIndex, LotteryConfig } from '@polkadot/types/interfaces/lottery'; -import type { ErrorMetadataLatest, ErrorMetadataV10, ErrorMetadataV11, ErrorMetadataV12, ErrorMetadataV13, ErrorMetadataV14, ErrorMetadataV9, EventMetadataLatest, EventMetadataV10, EventMetadataV11, EventMetadataV12, EventMetadataV13, EventMetadataV14, EventMetadataV9, ExtrinsicMetadataLatest, ExtrinsicMetadataV11, ExtrinsicMetadataV12, ExtrinsicMetadataV13, ExtrinsicMetadataV14, FunctionArgumentMetadataLatest, FunctionArgumentMetadataV10, FunctionArgumentMetadataV11, FunctionArgumentMetadataV12, FunctionArgumentMetadataV13, FunctionArgumentMetadataV14, FunctionArgumentMetadataV9, FunctionMetadataLatest, FunctionMetadataV10, FunctionMetadataV11, FunctionMetadataV12, FunctionMetadataV13, FunctionMetadataV14, FunctionMetadataV9, MetadataAll, MetadataLatest, MetadataV10, MetadataV11, MetadataV12, MetadataV13, MetadataV14, 
MetadataV9, ModuleConstantMetadataV10, ModuleConstantMetadataV11, ModuleConstantMetadataV12, ModuleConstantMetadataV13, ModuleConstantMetadataV9, ModuleMetadataV10, ModuleMetadataV11, ModuleMetadataV12, ModuleMetadataV13, ModuleMetadataV9, PalletCallMetadataLatest, PalletCallMetadataV14, PalletConstantMetadataLatest, PalletConstantMetadataV14, PalletErrorMetadataLatest, PalletErrorMetadataV14, PalletEventMetadataLatest, PalletEventMetadataV14, PalletMetadataLatest, PalletMetadataV14, PalletStorageMetadataLatest, PalletStorageMetadataV14, PortableType, PortableTypeV14, SignedExtensionMetadataLatest, SignedExtensionMetadataV14, StorageEntryMetadataLatest, StorageEntryMetadataV10, StorageEntryMetadataV11, StorageEntryMetadataV12, StorageEntryMetadataV13, StorageEntryMetadataV14, StorageEntryMetadataV9, StorageEntryModifierLatest, StorageEntryModifierV10, StorageEntryModifierV11, StorageEntryModifierV12, StorageEntryModifierV13, StorageEntryModifierV14, StorageEntryModifierV9, StorageEntryTypeLatest, StorageEntryTypeV10, StorageEntryTypeV11, StorageEntryTypeV12, StorageEntryTypeV13, StorageEntryTypeV14, StorageEntryTypeV9, StorageHasher, StorageHasherV10, StorageHasherV11, StorageHasherV12, StorageHasherV13, StorageHasherV14, StorageHasherV9, StorageMetadataV10, StorageMetadataV11, StorageMetadataV12, StorageMetadataV13, StorageMetadataV9 } from '@polkadot/types/interfaces/metadata'; -import type { MmrLeafBatchProof, MmrLeafProof } from '@polkadot/types/interfaces/mmr'; +import type { ErrorMetadataLatest, ErrorMetadataV10, ErrorMetadataV11, ErrorMetadataV12, ErrorMetadataV13, ErrorMetadataV14, ErrorMetadataV9, EventMetadataLatest, EventMetadataV10, EventMetadataV11, EventMetadataV12, EventMetadataV13, EventMetadataV14, EventMetadataV9, ExtrinsicMetadataLatest, ExtrinsicMetadataV11, ExtrinsicMetadataV12, ExtrinsicMetadataV13, ExtrinsicMetadataV14, FunctionArgumentMetadataLatest, FunctionArgumentMetadataV10, FunctionArgumentMetadataV11, FunctionArgumentMetadataV12, FunctionArgumentMetadataV13, FunctionArgumentMetadataV14, FunctionArgumentMetadataV9, FunctionMetadataLatest, FunctionMetadataV10, FunctionMetadataV11, FunctionMetadataV12, FunctionMetadataV13, FunctionMetadataV14, FunctionMetadataV9, MetadataAll, MetadataLatest, MetadataV10, MetadataV11, MetadataV12, MetadataV13, MetadataV14, MetadataV9, ModuleConstantMetadataV10, ModuleConstantMetadataV11, ModuleConstantMetadataV12, ModuleConstantMetadataV13, ModuleConstantMetadataV9, ModuleMetadataV10, ModuleMetadataV11, ModuleMetadataV12, ModuleMetadataV13, ModuleMetadataV9, OpaqueMetadata, PalletCallMetadataLatest, PalletCallMetadataV14, PalletConstantMetadataLatest, PalletConstantMetadataV14, PalletErrorMetadataLatest, PalletErrorMetadataV14, PalletEventMetadataLatest, PalletEventMetadataV14, PalletMetadataLatest, PalletMetadataV14, PalletStorageMetadataLatest, PalletStorageMetadataV14, PortableType, PortableTypeV14, SignedExtensionMetadataLatest, SignedExtensionMetadataV14, StorageEntryMetadataLatest, StorageEntryMetadataV10, StorageEntryMetadataV11, StorageEntryMetadataV12, StorageEntryMetadataV13, StorageEntryMetadataV14, StorageEntryMetadataV9, StorageEntryModifierLatest, StorageEntryModifierV10, StorageEntryModifierV11, StorageEntryModifierV12, StorageEntryModifierV13, StorageEntryModifierV14, StorageEntryModifierV9, StorageEntryTypeLatest, StorageEntryTypeV10, StorageEntryTypeV11, StorageEntryTypeV12, StorageEntryTypeV13, StorageEntryTypeV14, StorageEntryTypeV9, StorageHasher, StorageHasherV10, StorageHasherV11, StorageHasherV12, 
StorageHasherV13, StorageHasherV14, StorageHasherV9, StorageMetadataV10, StorageMetadataV11, StorageMetadataV12, StorageMetadataV13, StorageMetadataV9 } from '@polkadot/types/interfaces/metadata'; +import type { MmrBatchProof, MmrEncodableOpaqueLeaf, MmrError, MmrLeafBatchProof, MmrLeafIndex, MmrLeafProof, MmrNodeIndex, MmrProof } from '@polkadot/types/interfaces/mmr'; +import type { NpApiError } from '@polkadot/types/interfaces/nompools'; import type { StorageKind } from '@polkadot/types/interfaces/offchain'; import type { DeferredOffenceOf, Kind, OffenceDetails, Offender, OpaqueTimeSlot, ReportIdOf, Reporter } from '@polkadot/types/interfaces/offences'; -import type { AbridgedCandidateReceipt, AbridgedHostConfiguration, AbridgedHrmpChannel, AssignmentId, AssignmentKind, AttestedCandidate, AuctionIndex, AuthorityDiscoveryId, AvailabilityBitfield, AvailabilityBitfieldRecord, BackedCandidate, Bidder, BufferedSessionChange, CandidateCommitments, CandidateDescriptor, CandidateHash, CandidateInfo, CandidatePendingAvailability, CandidateReceipt, CollatorId, CollatorSignature, CommittedCandidateReceipt, CoreAssignment, CoreIndex, CoreOccupied, DisputeLocation, DisputeResult, DisputeState, DisputeStatement, DisputeStatementSet, DoubleVoteReport, DownwardMessage, ExplicitDisputeStatement, GlobalValidationData, GlobalValidationSchedule, GroupIndex, HeadData, HostConfiguration, HrmpChannel, HrmpChannelId, HrmpOpenChannelRequest, InboundDownwardMessage, InboundHrmpMessage, InboundHrmpMessages, IncomingParachain, IncomingParachainDeploy, IncomingParachainFixed, InvalidDisputeStatementKind, LeasePeriod, LeasePeriodOf, LocalValidationData, MessageIngestionType, MessageQueueChain, MessagingStateSnapshot, MessagingStateSnapshotEgressEntry, MultiDisputeStatementSet, NewBidder, OutboundHrmpMessage, ParaGenesisArgs, ParaId, ParaInfo, ParaLifecycle, ParaPastCodeMeta, ParaScheduling, ParaValidatorIndex, ParachainDispatchOrigin, ParachainInherentData, ParachainProposal, ParachainsInherentData, ParathreadClaim, ParathreadClaimQueue, ParathreadEntry, PersistedValidationData, QueuedParathread, RegisteredParachainInfo, RelayBlockNumber, RelayChainBlockNumber, RelayChainHash, RelayHash, Remark, ReplacementTimes, Retriable, Scheduling, ServiceQuality, SessionInfo, SessionInfoValidatorGroup, SignedAvailabilityBitfield, SignedAvailabilityBitfields, SigningContext, SlotRange, SlotRange10, Statement, SubId, SystemInherentData, TransientValidationData, UpgradeGoAhead, UpgradeRestriction, UpwardMessage, ValidDisputeStatementKind, ValidationCode, ValidationCodeHash, ValidationData, ValidationDataType, ValidationFunctionParams, ValidatorSignature, ValidityAttestation, VecInboundHrmpMessage, WinnersData, WinnersData10, WinnersDataTuple, WinnersDataTuple10, WinningData, WinningData10, WinningDataEntry } from '@polkadot/types/interfaces/parachains'; +import type { AbridgedCandidateReceipt, AbridgedHostConfiguration, AbridgedHrmpChannel, AssignmentId, AssignmentKind, AttestedCandidate, AuctionIndex, AuthorityDiscoveryId, AvailabilityBitfield, AvailabilityBitfieldRecord, BackedCandidate, Bidder, BufferedSessionChange, CandidateCommitments, CandidateDescriptor, CandidateEvent, CandidateHash, CandidateInfo, CandidatePendingAvailability, CandidateReceipt, CollatorId, CollatorSignature, CommittedCandidateReceipt, CoreAssignment, CoreIndex, CoreOccupied, CoreState, DisputeLocation, DisputeResult, DisputeState, DisputeStatement, DisputeStatementSet, DoubleVoteReport, DownwardMessage, ExplicitDisputeStatement, GlobalValidationData, 
GlobalValidationSchedule, GroupIndex, GroupRotationInfo, HeadData, HostConfiguration, HrmpChannel, HrmpChannelId, HrmpOpenChannelRequest, InboundDownwardMessage, InboundHrmpMessage, InboundHrmpMessages, IncomingParachain, IncomingParachainDeploy, IncomingParachainFixed, InvalidDisputeStatementKind, LeasePeriod, LeasePeriodOf, LocalValidationData, MessageIngestionType, MessageQueueChain, MessagingStateSnapshot, MessagingStateSnapshotEgressEntry, MultiDisputeStatementSet, NewBidder, OccupiedCore, OccupiedCoreAssumption, OldV1SessionInfo, OutboundHrmpMessage, ParaGenesisArgs, ParaId, ParaInfo, ParaLifecycle, ParaPastCodeMeta, ParaScheduling, ParaValidatorIndex, ParachainDispatchOrigin, ParachainInherentData, ParachainProposal, ParachainsInherentData, ParathreadClaim, ParathreadClaimQueue, ParathreadEntry, PersistedValidationData, PvfCheckStatement, QueuedParathread, RegisteredParachainInfo, RelayBlockNumber, RelayChainBlockNumber, RelayChainHash, RelayHash, Remark, ReplacementTimes, Retriable, ScheduledCore, Scheduling, ScrapedOnChainVotes, ServiceQuality, SessionInfo, SessionInfoValidatorGroup, SignedAvailabilityBitfield, SignedAvailabilityBitfields, SigningContext, SlotRange, SlotRange10, Statement, SubId, SystemInherentData, TransientValidationData, UpgradeGoAhead, UpgradeRestriction, UpwardMessage, ValidDisputeStatementKind, ValidationCode, ValidationCodeHash, ValidationData, ValidationDataType, ValidationFunctionParams, ValidatorSignature, ValidityAttestation, VecInboundHrmpMessage, WinnersData, WinnersData10, WinnersDataTuple, WinnersDataTuple10, WinningData, WinningData10, WinningDataEntry } from '@polkadot/types/interfaces/parachains'; import type { FeeDetails, InclusionFee, RuntimeDispatchInfo } from '@polkadot/types/interfaces/payment'; import type { Approvals } from '@polkadot/types/interfaces/poll'; import type { ProxyAnnouncement, ProxyDefinition, ProxyType } from '@polkadot/types/interfaces/proxy'; import type { AccountStatus, AccountValidity } from '@polkadot/types/interfaces/purchase'; import type { ActiveRecovery, RecoveryConfig } from '@polkadot/types/interfaces/recovery'; import type { RpcMethods } from '@polkadot/types/interfaces/rpc'; -import type { AccountId, AccountId20, AccountId32, AccountIdOf, AccountIndex, Address, AssetId, Balance, BalanceOf, Block, BlockNumber, BlockNumberFor, BlockNumberOf, Call, CallHash, CallHashOf, ChangesTrieConfiguration, ChangesTrieSignal, CodecHash, Consensus, ConsensusEngineId, CrateVersion, Digest, DigestItem, EncodedJustification, ExtrinsicsWeight, Fixed128, Fixed64, FixedI128, FixedI64, FixedU128, FixedU64, H1024, H128, H160, H2048, H256, H32, H512, H64, Hash, Header, HeaderPartial, I32F32, Index, IndicesLookupSource, Justification, Justifications, KeyTypeId, KeyValue, LockIdentifier, LookupSource, LookupTarget, ModuleId, Moment, MultiAddress, MultiSigner, OpaqueCall, Origin, OriginCaller, PalletId, PalletVersion, PalletsOrigin, Pays, PerU16, Perbill, Percent, Permill, Perquintill, Phantom, PhantomData, PreRuntime, Releases, RuntimeDbWeight, Seal, SealV0, SignedBlock, SignedBlockWithJustification, SignedBlockWithJustifications, Slot, StorageData, StorageProof, TransactionInfo, TransactionPriority, TransactionStorageProof, U32F32, ValidatorId, ValidatorIdOf, Weight, WeightMultiplier } from '@polkadot/types/interfaces/runtime'; +import type { AccountId, AccountId20, AccountId32, AccountId33, AccountIdOf, AccountIndex, Address, AssetId, Balance, BalanceOf, Block, BlockNumber, BlockNumberFor, BlockNumberOf, Call, CallHash, CallHashOf, 
ChangesTrieConfiguration, ChangesTrieSignal, CodecHash, Consensus, ConsensusEngineId, CrateVersion, Digest, DigestItem, EncodedJustification, ExtrinsicsWeight, Fixed128, Fixed64, FixedI128, FixedI64, FixedU128, FixedU64, H1024, H128, H160, H2048, H256, H32, H512, H64, Hash, Header, HeaderPartial, I32F32, Index, IndicesLookupSource, Justification, Justifications, KeyTypeId, KeyValue, LockIdentifier, LookupSource, LookupTarget, ModuleId, Moment, MultiAddress, MultiSigner, OpaqueCall, Origin, OriginCaller, PalletId, PalletVersion, PalletsOrigin, Pays, PerU16, Perbill, Percent, Permill, Perquintill, Phantom, PhantomData, PreRuntime, Releases, RuntimeDbWeight, Seal, SealV0, SignedBlock, SignedBlockWithJustification, SignedBlockWithJustifications, Slot, SlotDuration, StorageData, StorageInfo, StorageProof, TransactionInfo, TransactionLongevity, TransactionPriority, TransactionStorageProof, TransactionTag, U32F32, ValidatorId, ValidatorIdOf, Weight, WeightMultiplier, WeightV1, WeightV2 } from '@polkadot/types/interfaces/runtime'; import type { Si0Field, Si0LookupTypeId, Si0Path, Si0Type, Si0TypeDef, Si0TypeDefArray, Si0TypeDefBitSequence, Si0TypeDefCompact, Si0TypeDefComposite, Si0TypeDefPhantom, Si0TypeDefPrimitive, Si0TypeDefSequence, Si0TypeDefTuple, Si0TypeDefVariant, Si0TypeParameter, Si0Variant, Si1Field, Si1LookupTypeId, Si1Path, Si1Type, Si1TypeDef, Si1TypeDefArray, Si1TypeDefBitSequence, Si1TypeDefCompact, Si1TypeDefComposite, Si1TypeDefPrimitive, Si1TypeDefSequence, Si1TypeDefTuple, Si1TypeDefVariant, Si1TypeParameter, Si1Variant, SiField, SiLookupTypeId, SiPath, SiType, SiTypeDef, SiTypeDefArray, SiTypeDefBitSequence, SiTypeDefCompact, SiTypeDefComposite, SiTypeDefPrimitive, SiTypeDefSequence, SiTypeDefTuple, SiTypeDefVariant, SiTypeParameter, SiVariant } from '@polkadot/types/interfaces/scaleInfo'; import type { Period, Priority, SchedulePeriod, SchedulePriority, Scheduled, ScheduledTo254, TaskAddress } from '@polkadot/types/interfaces/scheduler'; import type { BeefyKey, FullIdentification, IdentificationTuple, Keys, MembershipProof, SessionIndex, SessionKeys1, SessionKeys10, SessionKeys10B, SessionKeys2, SessionKeys3, SessionKeys4, SessionKeys5, SessionKeys6, SessionKeys6B, SessionKeys7, SessionKeys7B, SessionKeys8, SessionKeys8B, SessionKeys9, SessionKeys9B, ValidatorCount } from '@polkadot/types/interfaces/session'; import type { Bid, BidKind, SocietyJudgement, SocietyVote, StrikeCount, VouchingStatus } from '@polkadot/types/interfaces/society'; import type { ActiveEraInfo, CompactAssignments, CompactAssignmentsTo257, CompactAssignmentsTo265, CompactAssignmentsWith16, CompactAssignmentsWith24, CompactScore, CompactScoreCompact, ElectionCompute, ElectionPhase, ElectionResult, ElectionScore, ElectionSize, ElectionStatus, EraIndex, EraPoints, EraRewardPoints, EraRewards, Exposure, ExtendedBalance, Forcing, IndividualExposure, KeyType, MomentOf, Nominations, NominatorIndex, NominatorIndexCompact, OffchainAccuracy, OffchainAccuracyCompact, PhragmenScore, Points, RawSolution, RawSolutionTo265, RawSolutionWith16, RawSolutionWith24, ReadySolution, RewardDestination, RewardPoint, RoundSnapshot, SeatHolder, SignedSubmission, SignedSubmissionOf, SignedSubmissionTo276, SlashJournalEntry, SlashingSpans, SlashingSpansTo204, SolutionOrSnapshotSize, SolutionSupport, SolutionSupports, SpanIndex, SpanRecord, StakingLedger, StakingLedgerTo223, StakingLedgerTo240, SubmissionIndicesOf, Supports, UnappliedSlash, UnappliedSlashOther, UnlockChunk, ValidatorIndex, ValidatorIndexCompact, ValidatorPrefs, 
ValidatorPrefsTo145, ValidatorPrefsTo196, ValidatorPrefsWithBlocked, ValidatorPrefsWithCommission, VoteWeight, Voter } from '@polkadot/types/interfaces/staking'; -import type { ApiId, BlockTrace, BlockTraceEvent, BlockTraceEventData, BlockTraceSpan, KeyValueOption, MigrationStatusResult, ReadProof, RuntimeVersion, RuntimeVersionApi, RuntimeVersionPartial, SpecVersion, StorageChangeSet, TraceBlockResponse, TraceError } from '@polkadot/types/interfaces/state'; +import type { ApiId, BlockTrace, BlockTraceEvent, BlockTraceEventData, BlockTraceSpan, KeyValueOption, MigrationStatusResult, ReadProof, RuntimeVersion, RuntimeVersionApi, RuntimeVersionPartial, RuntimeVersionPre3, RuntimeVersionPre4, SpecVersion, StorageChangeSet, TraceBlockResponse, TraceError } from '@polkadot/types/interfaces/state'; import type { WeightToFeeCoefficient } from '@polkadot/types/interfaces/support'; -import type { AccountInfo, AccountInfoWithDualRefCount, AccountInfoWithProviders, AccountInfoWithRefCount, AccountInfoWithRefCountU8, AccountInfoWithTripleRefCount, ApplyExtrinsicResult, ArithmeticError, BlockLength, BlockWeights, ChainProperties, ChainType, ConsumedWeight, DigestOf, DispatchClass, DispatchError, DispatchErrorModule, DispatchErrorModuleU8, DispatchErrorModuleU8a, DispatchErrorTo198, DispatchInfo, DispatchInfoTo190, DispatchInfoTo244, DispatchOutcome, DispatchResult, DispatchResultOf, DispatchResultTo198, Event, EventId, EventIndex, EventRecord, Health, InvalidTransaction, Key, LastRuntimeUpgradeInfo, NetworkState, NetworkStatePeerset, NetworkStatePeersetInfo, NodeRole, NotConnectedPeer, Peer, PeerEndpoint, PeerEndpointAddr, PeerInfo, PeerPing, PerDispatchClassU32, PerDispatchClassWeight, PerDispatchClassWeightsPerClass, Phase, RawOrigin, RefCount, RefCountTo259, SyncState, SystemOrigin, TokenError, TransactionValidityError, TransactionalError, UnknownTransaction, WeightPerClass } from '@polkadot/types/interfaces/system'; +import type { AccountInfo, AccountInfoWithDualRefCount, AccountInfoWithProviders, AccountInfoWithRefCount, AccountInfoWithRefCountU8, AccountInfoWithTripleRefCount, ApplyExtrinsicResult, ApplyExtrinsicResultPre6, ArithmeticError, BlockLength, BlockWeights, ChainProperties, ChainType, ConsumedWeight, DigestOf, DispatchClass, DispatchError, DispatchErrorModule, DispatchErrorModulePre6, DispatchErrorModuleU8, DispatchErrorModuleU8a, DispatchErrorPre6, DispatchErrorPre6First, DispatchErrorTo198, DispatchInfo, DispatchInfoTo190, DispatchInfoTo244, DispatchOutcome, DispatchOutcomePre6, DispatchResult, DispatchResultOf, DispatchResultTo198, Event, EventId, EventIndex, EventRecord, Health, InvalidTransaction, Key, LastRuntimeUpgradeInfo, NetworkState, NetworkStatePeerset, NetworkStatePeersetInfo, NodeRole, NotConnectedPeer, Peer, PeerEndpoint, PeerEndpointAddr, PeerInfo, PeerPing, PerDispatchClassU32, PerDispatchClassWeight, PerDispatchClassWeightsPerClass, Phase, RawOrigin, RefCount, RefCountTo259, SyncState, SystemOrigin, TokenError, TransactionValidityError, TransactionalError, UnknownTransaction, WeightPerClass } from '@polkadot/types/interfaces/system'; import type { Bounty, BountyIndex, BountyStatus, BountyStatusActive, BountyStatusCuratorProposed, BountyStatusPendingPayout, OpenTip, OpenTipFinderTo225, OpenTipTip, OpenTipTo225, TreasuryProposal } from '@polkadot/types/interfaces/treasury'; import type { Multiplier } from '@polkadot/types/interfaces/txpayment'; +import type { TransactionSource, TransactionValidity, ValidTransaction } from '@polkadot/types/interfaces/txqueue'; import type 
{ ClassDetails, ClassId, ClassMetadata, DepositBalance, DepositBalanceOf, DestroyWitness, InstanceDetails, InstanceId, InstanceMetadata } from '@polkadot/types/interfaces/uniques'; import type { Multisig, Timepoint } from '@polkadot/types/interfaces/utility'; import type { VestingInfo } from '@polkadot/types/interfaces/vesting'; import type { AssetInstance, AssetInstanceV0, AssetInstanceV1, AssetInstanceV2, BodyId, BodyPart, DoubleEncodedCall, Fungibility, FungibilityV0, FungibilityV1, FungibilityV2, InboundStatus, InstructionV2, InteriorMultiLocation, Junction, JunctionV0, JunctionV1, JunctionV2, Junctions, JunctionsV1, JunctionsV2, MultiAsset, MultiAssetFilter, MultiAssetFilterV1, MultiAssetFilterV2, MultiAssetV0, MultiAssetV1, MultiAssetV2, MultiAssets, MultiAssetsV1, MultiAssetsV2, MultiLocation, MultiLocationV0, MultiLocationV1, MultiLocationV2, NetworkId, OriginKindV0, OriginKindV1, OriginKindV2, OutboundStatus, Outcome, QueryId, QueryStatus, QueueConfigData, Response, ResponseV0, ResponseV1, ResponseV2, ResponseV2Error, ResponseV2Result, VersionMigrationStage, VersionedMultiAsset, VersionedMultiAssets, VersionedMultiLocation, VersionedResponse, VersionedXcm, WeightLimitV2, WildFungibility, WildFungibilityV0, WildFungibilityV1, WildFungibilityV2, WildMultiAsset, WildMultiAssetV1, WildMultiAssetV2, Xcm, XcmAssetId, XcmError, XcmErrorV0, XcmErrorV1, XcmErrorV2, XcmOrder, XcmOrderV0, XcmOrderV1, XcmOrderV2, XcmOrigin, XcmOriginKind, XcmV0, XcmV1, XcmV2, XcmVersion, XcmpMessageFormat } from '@polkadot/types/interfaces/xcm'; declare module '@polkadot/types/types/registry' { - export interface InterfaceTypes { + interface InterfaceTypes { AbridgedCandidateReceipt: AbridgedCandidateReceipt; AbridgedHostConfiguration: AbridgedHostConfiguration; AbridgedHrmpChannel: AbridgedHrmpChannel; @@ -71,6 +79,7 @@ declare module '@polkadot/types/types/registry' { AccountId: AccountId; AccountId20: AccountId20; AccountId32: AccountId32; + AccountId33: AccountId33; AccountIdOf: AccountIdOf; AccountIndex: AccountIndex; AccountInfo: AccountInfo; @@ -95,6 +104,7 @@ declare module '@polkadot/types/types/registry' { AnySignature: AnySignature; ApiId: ApiId; ApplyExtrinsicResult: ApplyExtrinsicResult; + ApplyExtrinsicResultPre6: ApplyExtrinsicResultPre6; ApprovalFlag: ApprovalFlag; Approvals: Approvals; ArithmeticError: ArithmeticError; @@ -130,6 +140,8 @@ declare module '@polkadot/types/types/registry' { BabeBlockWeight: BabeBlockWeight; BabeEpochConfiguration: BabeEpochConfiguration; BabeEquivocationProof: BabeEquivocationProof; + BabeGenesisConfiguration: BabeGenesisConfiguration; + BabeGenesisConfigurationV1: BabeGenesisConfigurationV1; BabeWeight: BabeWeight; BackedCandidate: BackedCandidate; Balance: Balance; @@ -137,12 +149,20 @@ declare module '@polkadot/types/types/registry' { BalanceLockTo212: BalanceLockTo212; BalanceOf: BalanceOf; BalanceStatus: BalanceStatus; + BeefyAuthoritySet: BeefyAuthoritySet; BeefyCommitment: BeefyCommitment; BeefyId: BeefyId; BeefyKey: BeefyKey; BeefyNextAuthoritySet: BeefyNextAuthoritySet; BeefyPayload: BeefyPayload; + BeefyPayloadId: BeefyPayloadId; BeefySignedCommitment: BeefySignedCommitment; + BenchmarkBatch: BenchmarkBatch; + BenchmarkConfig: BenchmarkConfig; + BenchmarkList: BenchmarkList; + BenchmarkMetadata: BenchmarkMetadata; + BenchmarkParameter: BenchmarkParameter; + BenchmarkResult: BenchmarkResult; Bid: Bid; Bidder: Bidder; BidKind: BidKind; @@ -186,6 +206,7 @@ declare module '@polkadot/types/types/registry' { CallOrigin: CallOrigin; CandidateCommitments: 
CandidateCommitments; CandidateDescriptor: CandidateDescriptor; + CandidateEvent: CandidateEvent; CandidateHash: CandidateHash; CandidateInfo: CandidateInfo; CandidatePendingAvailability: CandidatePendingAvailability; @@ -195,6 +216,7 @@ declare module '@polkadot/types/types/registry' { ChainType: ChainType; ChangesTrieConfiguration: ChangesTrieConfiguration; ChangesTrieSignal: ChangesTrieSignal; + CheckInherentsResult: CheckInherentsResult; ClassDetails: ClassDetails; ClassId: ClassId; ClassMetadata: ClassMetadata; @@ -204,6 +226,8 @@ declare module '@polkadot/types/types/registry' { CodeUploadRequest: CodeUploadRequest; CodeUploadResult: CodeUploadResult; CodeUploadResultValue: CodeUploadResultValue; + CollationInfo: CollationInfo; + CollationInfoV1: CollationInfoV1; CollatorId: CollatorId; CollatorSignature: CollatorSignature; CollectiveOrigin: CollectiveOrigin; @@ -230,6 +254,7 @@ declare module '@polkadot/types/types/registry' { ContractContractSpecV1: ContractContractSpecV1; ContractContractSpecV2: ContractContractSpecV2; ContractContractSpecV3: ContractContractSpecV3; + ContractContractSpecV4: ContractContractSpecV4; ContractCryptoHasher: ContractCryptoHasher; ContractDiscriminant: ContractDiscriminant; ContractDisplayName: ContractDisplayName; @@ -273,6 +298,7 @@ declare module '@polkadot/types/types/registry' { ContractMetadataV1: ContractMetadataV1; ContractMetadataV2: ContractMetadataV2; ContractMetadataV3: ContractMetadataV3; + ContractMetadataV4: ContractMetadataV4; ContractProject: ContractProject; ContractProjectContract: ContractProjectContract; ContractProjectInfo: ContractProjectInfo; @@ -287,6 +313,7 @@ declare module '@polkadot/types/types/registry' { CoreAssignment: CoreAssignment; CoreIndex: CoreIndex; CoreOccupied: CoreOccupied; + CoreState: CoreState; CrateVersion: CrateVersion; CreatedBlock: CreatedBlock; CumulusPalletDmpQueueCall: CumulusPalletDmpQueueCall; @@ -328,14 +355,18 @@ declare module '@polkadot/types/types/registry' { DispatchClass: DispatchClass; DispatchError: DispatchError; DispatchErrorModule: DispatchErrorModule; + DispatchErrorModulePre6: DispatchErrorModulePre6; DispatchErrorModuleU8: DispatchErrorModuleU8; DispatchErrorModuleU8a: DispatchErrorModuleU8a; + DispatchErrorPre6: DispatchErrorPre6; + DispatchErrorPre6First: DispatchErrorPre6First; DispatchErrorTo198: DispatchErrorTo198; DispatchFeePayment: DispatchFeePayment; DispatchInfo: DispatchInfo; DispatchInfoTo190: DispatchInfoTo190; DispatchInfoTo244: DispatchInfoTo244; DispatchOutcome: DispatchOutcome; + DispatchOutcomePre6: DispatchOutcomePre6; DispatchResult: DispatchResult; DispatchResultOf: DispatchResultOf; DispatchResultTo198: DispatchResultTo198; @@ -359,6 +390,7 @@ declare module '@polkadot/types/types/registry' { ElectionStatus: ElectionStatus; EncodedFinalityProofs: EncodedFinalityProofs; EncodedJustification: EncodedJustification; + Epoch: Epoch; EpochAuthorship: EpochAuthorship; Era: Era; EraIndex: EraIndex; @@ -397,6 +429,7 @@ declare module '@polkadot/types/types/registry' { EthereumTransactionTransactionSignature: EthereumTransactionTransactionSignature; EthereumTransactionTransactionV2: EthereumTransactionTransactionV2; EthereumTypesHashH64: EthereumTypesHashH64; + EthFeeHistory: EthFeeHistory; EthFilter: EthFilter; EthFilterAddress: EthFilterAddress; EthFilterChanges: EthFilterChanges; @@ -406,6 +439,8 @@ declare module '@polkadot/types/types/registry' { EthHeader: EthHeader; EthLog: EthLog; EthReceipt: EthReceipt; + EthReceiptV0: EthReceiptV0; + EthReceiptV3: EthReceiptV3; 
EthRichBlock: EthRichBlock; EthRichHeader: EthRichHeader; EthStorageProof: EthStorageProof; @@ -433,11 +468,13 @@ declare module '@polkadot/types/types/registry' { EventMetadataV9: EventMetadataV9; EventRecord: EventRecord; EvmAccount: EvmAccount; + EvmCallInfo: EvmCallInfo; EvmCoreErrorExitError: EvmCoreErrorExitError; EvmCoreErrorExitFatal: EvmCoreErrorExitFatal; EvmCoreErrorExitReason: EvmCoreErrorExitReason; EvmCoreErrorExitRevert: EvmCoreErrorExitRevert; EvmCoreErrorExitSucceed: EvmCoreErrorExitSucceed; + EvmCreateInfo: EvmCreateInfo; EvmLog: EvmLog; EvmVicinity: EvmVicinity; ExecReturnValue: ExecReturnValue; @@ -466,6 +503,10 @@ declare module '@polkadot/types/types/registry' { ExtrinsicsWeight: ExtrinsicsWeight; ExtrinsicUnknown: ExtrinsicUnknown; ExtrinsicV4: ExtrinsicV4; + f32: f32; + F32: F32; + f64: f64; + F64: F64; FeeDetails: FeeDetails; Fixed128: Fixed128; Fixed64: Fixed64; @@ -477,18 +518,17 @@ declare module '@polkadot/types/types/registry' { ForkTreePendingChange: ForkTreePendingChange; ForkTreePendingChangeNode: ForkTreePendingChangeNode; FpRpcTransactionStatus: FpRpcTransactionStatus; + FrameSupportDispatchDispatchClass: FrameSupportDispatchDispatchClass; + FrameSupportDispatchDispatchInfo: FrameSupportDispatchDispatchInfo; + FrameSupportDispatchPays: FrameSupportDispatchPays; + FrameSupportDispatchPerDispatchClassU32: FrameSupportDispatchPerDispatchClassU32; + FrameSupportDispatchPerDispatchClassWeight: FrameSupportDispatchPerDispatchClassWeight; + FrameSupportDispatchPerDispatchClassWeightsPerClass: FrameSupportDispatchPerDispatchClassWeightsPerClass; FrameSupportDispatchRawOrigin: FrameSupportDispatchRawOrigin; FrameSupportPalletId: FrameSupportPalletId; FrameSupportScheduleLookupError: FrameSupportScheduleLookupError; FrameSupportScheduleMaybeHashed: FrameSupportScheduleMaybeHashed; FrameSupportTokensMiscBalanceStatus: FrameSupportTokensMiscBalanceStatus; - FrameSupportWeightsDispatchClass: FrameSupportWeightsDispatchClass; - FrameSupportWeightsDispatchInfo: FrameSupportWeightsDispatchInfo; - FrameSupportWeightsPays: FrameSupportWeightsPays; - FrameSupportWeightsPerDispatchClassU32: FrameSupportWeightsPerDispatchClassU32; - FrameSupportWeightsPerDispatchClassU64: FrameSupportWeightsPerDispatchClassU64; - FrameSupportWeightsPerDispatchClassWeightsPerClass: FrameSupportWeightsPerDispatchClassWeightsPerClass; - FrameSupportWeightsRuntimeDbWeight: FrameSupportWeightsRuntimeDbWeight; FrameSystemAccountInfo: FrameSystemAccountInfo; FrameSystemCall: FrameSystemCall; FrameSystemError: FrameSystemError; @@ -497,6 +537,7 @@ declare module '@polkadot/types/types/registry' { FrameSystemExtensionsCheckGenesis: FrameSystemExtensionsCheckGenesis; FrameSystemExtensionsCheckNonce: FrameSystemExtensionsCheckNonce; FrameSystemExtensionsCheckSpecVersion: FrameSystemExtensionsCheckSpecVersion; + FrameSystemExtensionsCheckTxVersion: FrameSystemExtensionsCheckTxVersion; FrameSystemExtensionsCheckWeight: FrameSystemExtensionsCheckWeight; FrameSystemLastRuntimeUpgradeInfo: FrameSystemLastRuntimeUpgradeInfo; FrameSystemLimitsBlockLength: FrameSystemLimitsBlockLength; @@ -537,6 +578,7 @@ declare module '@polkadot/types/types/registry' { GrandpaPrevote: GrandpaPrevote; GrandpaSignedPrecommit: GrandpaSignedPrecommit; GroupIndex: GroupIndex; + GroupRotationInfo: GroupRotationInfo; H1024: H1024; H128: H128; H160: H160; @@ -593,6 +635,8 @@ declare module '@polkadot/types/types/registry' { Index: Index; IndicesLookupSource: IndicesLookupSource; IndividualExposure: IndividualExposure; + InherentData: 
InherentData; + InherentIdentifier: InherentIdentifier; InitializationData: InitializationData; InstanceDetails: InstanceDetails; InstanceId: InstanceId; @@ -663,8 +707,14 @@ declare module '@polkadot/types/types/registry' { MetadataV14: MetadataV14; MetadataV9: MetadataV9; MigrationStatusResult: MigrationStatusResult; + MmrBatchProof: MmrBatchProof; + MmrEncodableOpaqueLeaf: MmrEncodableOpaqueLeaf; + MmrError: MmrError; MmrLeafBatchProof: MmrLeafBatchProof; + MmrLeafIndex: MmrLeafIndex; MmrLeafProof: MmrLeafProof; + MmrNodeIndex: MmrNodeIndex; + MmrProof: MmrProof; MmrRootHash: MmrRootHash; ModuleConstantMetadataV10: ModuleConstantMetadataV10; ModuleConstantMetadataV11: ModuleConstantMetadataV11; @@ -714,15 +764,21 @@ declare module '@polkadot/types/types/registry' { NominatorIndex: NominatorIndex; NominatorIndexCompact: NominatorIndexCompact; NotConnectedPeer: NotConnectedPeer; + NpApiError: NpApiError; Null: Null; + OccupiedCore: OccupiedCore; + OccupiedCoreAssumption: OccupiedCoreAssumption; OffchainAccuracy: OffchainAccuracy; OffchainAccuracyCompact: OffchainAccuracyCompact; OffenceDetails: OffenceDetails; Offender: Offender; - OpalRuntimeCheckMaintenance: OpalRuntimeCheckMaintenance; + OldV1SessionInfo: OldV1SessionInfo; OpalRuntimeOriginCaller: OpalRuntimeOriginCaller; OpalRuntimeRuntime: OpalRuntimeRuntime; + OpalRuntimeRuntimeCommonMaintenanceCheckMaintenance: OpalRuntimeRuntimeCommonMaintenanceCheckMaintenance; OpaqueCall: OpaqueCall; + OpaqueKeyOwnershipProof: OpaqueKeyOwnershipProof; + OpaqueMetadata: OpaqueMetadata; OpaqueMultiaddr: OpaqueMultiaddr; OpaqueNetworkState: OpaqueNetworkState; OpaquePeerId: OpaquePeerId; @@ -738,10 +794,19 @@ declare module '@polkadot/types/types/registry' { OriginKindV0: OriginKindV0; OriginKindV1: OriginKindV1; OriginKindV2: OriginKindV2; + OrmlTokensAccountData: OrmlTokensAccountData; + OrmlTokensBalanceLock: OrmlTokensBalanceLock; + OrmlTokensModuleCall: OrmlTokensModuleCall; + OrmlTokensModuleError: OrmlTokensModuleError; + OrmlTokensModuleEvent: OrmlTokensModuleEvent; + OrmlTokensReserveData: OrmlTokensReserveData; OrmlVestingModuleCall: OrmlVestingModuleCall; OrmlVestingModuleError: OrmlVestingModuleError; OrmlVestingModuleEvent: OrmlVestingModuleEvent; OrmlVestingVestingSchedule: OrmlVestingVestingSchedule; + OrmlXtokensModuleCall: OrmlXtokensModuleCall; + OrmlXtokensModuleError: OrmlXtokensModuleError; + OrmlXtokensModuleEvent: OrmlXtokensModuleEvent; OutboundHrmpMessage: OutboundHrmpMessage; OutboundLaneData: OutboundLaneData; OutboundMessageFee: OutboundMessageFee; @@ -752,6 +817,9 @@ declare module '@polkadot/types/types/registry' { Owner: Owner; PageCounter: PageCounter; PageIndexData: PageIndexData; + PalletAppPromotionCall: PalletAppPromotionCall; + PalletAppPromotionError: PalletAppPromotionError; + PalletAppPromotionEvent: PalletAppPromotionEvent; PalletBalancesAccountData: PalletBalancesAccountData; PalletBalancesBalanceLock: PalletBalancesBalanceLock; PalletBalancesCall: PalletBalancesCall; @@ -764,6 +832,7 @@ declare module '@polkadot/types/types/registry' { PalletCallMetadataV14: PalletCallMetadataV14; PalletCommonError: PalletCommonError; PalletCommonEvent: PalletCommonEvent; + PalletConfigurationCall: PalletConfigurationCall; PalletConstantMetadataLatest: PalletConstantMetadataLatest; PalletConstantMetadataV14: PalletConstantMetadataV14; PalletErrorMetadataLatest: PalletErrorMetadataLatest; @@ -779,11 +848,18 @@ declare module '@polkadot/types/types/registry' { PalletEvmCall: PalletEvmCall; PalletEvmCoderSubstrateError: 
PalletEvmCoderSubstrateError; PalletEvmContractHelpersError: PalletEvmContractHelpersError; + PalletEvmContractHelpersEvent: PalletEvmContractHelpersEvent; PalletEvmContractHelpersSponsoringModeT: PalletEvmContractHelpersSponsoringModeT; PalletEvmError: PalletEvmError; PalletEvmEvent: PalletEvmEvent; PalletEvmMigrationCall: PalletEvmMigrationCall; PalletEvmMigrationError: PalletEvmMigrationError; + PalletForeignAssetsAssetIds: PalletForeignAssetsAssetIds; + PalletForeignAssetsModuleAssetMetadata: PalletForeignAssetsModuleAssetMetadata; + PalletForeignAssetsModuleCall: PalletForeignAssetsModuleCall; + PalletForeignAssetsModuleError: PalletForeignAssetsModuleError; + PalletForeignAssetsModuleEvent: PalletForeignAssetsModuleEvent; + PalletForeignAssetsNativeCurrency: PalletForeignAssetsNativeCurrency; PalletFungibleError: PalletFungibleError; PalletId: PalletId; PalletInflationCall: PalletInflationCall; @@ -813,7 +889,11 @@ declare module '@polkadot/types/types/registry' { PalletSudoEvent: PalletSudoEvent; PalletTemplateTransactionPaymentCall: PalletTemplateTransactionPaymentCall; PalletTemplateTransactionPaymentChargeTransactionPayment: PalletTemplateTransactionPaymentChargeTransactionPayment; + PalletTestUtilsCall: PalletTestUtilsCall; + PalletTestUtilsError: PalletTestUtilsError; + PalletTestUtilsEvent: PalletTestUtilsEvent; PalletTimestampCall: PalletTimestampCall; + PalletTransactionPaymentEvent: PalletTransactionPaymentEvent; PalletTransactionPaymentReleases: PalletTransactionPaymentReleases; PalletTreasuryCall: PalletTreasuryCall; PalletTreasuryError: PalletTreasuryError; @@ -900,6 +980,7 @@ declare module '@polkadot/types/types/registry' { ProxyDefinition: ProxyDefinition; ProxyState: ProxyState; ProxyType: ProxyType; + PvfCheckStatement: PvfCheckStatement; QueryId: QueryId; QueryStatus: QueryStatus; QueueConfigData: QueueConfigData; @@ -988,8 +1069,11 @@ declare module '@polkadot/types/types/registry' { RuntimeVersion: RuntimeVersion; RuntimeVersionApi: RuntimeVersionApi; RuntimeVersionPartial: RuntimeVersionPartial; + RuntimeVersionPre3: RuntimeVersionPre3; + RuntimeVersionPre4: RuntimeVersionPre4; Schedule: Schedule; Scheduled: Scheduled; + ScheduledCore: ScheduledCore; ScheduledTo254: ScheduledTo254; SchedulePeriod: SchedulePeriod; SchedulePriority: SchedulePriority; @@ -997,6 +1081,7 @@ declare module '@polkadot/types/types/registry' { ScheduleTo258: ScheduleTo258; ScheduleTo264: ScheduleTo264; Scheduling: Scheduling; + ScrapedOnChainVotes: ScrapedOnChainVotes; Seal: Seal; SealV0: SealV0; SeatHolder: SeatHolder; @@ -1085,6 +1170,7 @@ declare module '@polkadot/types/types/registry' { SlashingSpansTo204: SlashingSpansTo204; SlashJournalEntry: SlashJournalEntry; Slot: Slot; + SlotDuration: SlotDuration; SlotNumber: SlotNumber; SlotRange: SlotRange; SlotRange10: SlotRange10; @@ -1110,6 +1196,7 @@ declare module '@polkadot/types/types/registry' { SpRuntimeTransactionalError: SpRuntimeTransactionalError; SpTrieStorageProof: SpTrieStorageProof; SpVersionRuntimeVersion: SpVersionRuntimeVersion; + SpWeightsRuntimeDbWeight: SpWeightsRuntimeDbWeight; Sr25519Signature: Sr25519Signature; StakingLedger: StakingLedger; StakingLedgerTo223: StakingLedgerTo223; @@ -1147,6 +1234,7 @@ declare module '@polkadot/types/types/registry' { StorageHasherV13: StorageHasherV13; StorageHasherV14: StorageHasherV14; StorageHasherV9: StorageHasherV9; + StorageInfo: StorageInfo; StorageKey: StorageKey; StorageKind: StorageKind; StorageMetadataV10: StorageMetadataV10; @@ -1176,11 +1264,15 @@ declare module 
'@polkadot/types/types/registry' { TraceError: TraceError; TransactionalError: TransactionalError; TransactionInfo: TransactionInfo; + TransactionLongevity: TransactionLongevity; TransactionPriority: TransactionPriority; + TransactionSource: TransactionSource; TransactionStorageProof: TransactionStorageProof; + TransactionTag: TransactionTag; TransactionV0: TransactionV0; TransactionV1: TransactionV1; TransactionV2: TransactionV2; + TransactionValidity: TransactionValidity; TransactionValidityError: TransactionValidityError; TransientValidationData: TransientValidationData; TreasuryProposal: TreasuryProposal; @@ -1220,7 +1312,8 @@ declare module '@polkadot/types/types/registry' { UpDataStructsCreateNftData: UpDataStructsCreateNftData; UpDataStructsCreateNftExData: UpDataStructsCreateNftExData; UpDataStructsCreateReFungibleData: UpDataStructsCreateReFungibleData; - UpDataStructsCreateRefungibleExData: UpDataStructsCreateRefungibleExData; + UpDataStructsCreateRefungibleExMultipleOwners: UpDataStructsCreateRefungibleExMultipleOwners; + UpDataStructsCreateRefungibleExSingleOwner: UpDataStructsCreateRefungibleExSingleOwner; UpDataStructsNestingPermissions: UpDataStructsNestingPermissions; UpDataStructsOwnerRestrictedSet: UpDataStructsOwnerRestrictedSet; UpDataStructsProperties: UpDataStructsProperties; @@ -1231,8 +1324,10 @@ declare module '@polkadot/types/types/registry' { UpDataStructsPropertyPermission: UpDataStructsPropertyPermission; UpDataStructsPropertyScope: UpDataStructsPropertyScope; UpDataStructsRpcCollection: UpDataStructsRpcCollection; + UpDataStructsRpcCollectionFlags: UpDataStructsRpcCollectionFlags; UpDataStructsSponsoringRateLimit: UpDataStructsSponsoringRateLimit; - UpDataStructsSponsorshipState: UpDataStructsSponsorshipState; + UpDataStructsSponsorshipStateAccountId32: UpDataStructsSponsorshipStateAccountId32; + UpDataStructsSponsorshipStateBasicCrossAccountIdRepr: UpDataStructsSponsorshipStateBasicCrossAccountIdRepr; UpDataStructsTokenChild: UpDataStructsTokenChild; UpDataStructsTokenData: UpDataStructsTokenData; UpgradeGoAhead: UpgradeGoAhead; @@ -1255,10 +1350,12 @@ declare module '@polkadot/types/types/registry' { ValidatorPrefsTo196: ValidatorPrefsTo196; ValidatorPrefsWithBlocked: ValidatorPrefsWithBlocked; ValidatorPrefsWithCommission: ValidatorPrefsWithCommission; + ValidatorSet: ValidatorSet; ValidatorSetId: ValidatorSetId; ValidatorSignature: ValidatorSignature; ValidDisputeStatementKind: ValidDisputeStatementKind; ValidityAttestation: ValidityAttestation; + ValidTransaction: ValidTransaction; VecInboundHrmpMessage: VecInboundHrmpMessage; VersionedMultiAsset: VersionedMultiAsset; VersionedMultiAssets: VersionedMultiAssets; @@ -1289,6 +1386,8 @@ declare module '@polkadot/types/types/registry' { WeightMultiplier: WeightMultiplier; WeightPerClass: WeightPerClass; WeightToFeeCoefficient: WeightToFeeCoefficient; + WeightV1: WeightV1; + WeightV2: WeightV2; WildFungibility: WildFungibility; WildFungibilityV0: WildFungibilityV0; WildFungibilityV1: WildFungibilityV1; @@ -1352,6 +1451,7 @@ declare module '@polkadot/types/types/registry' { XcmV2WeightLimit: XcmV2WeightLimit; XcmV2Xcm: XcmV2Xcm; XcmVersion: XcmVersion; + XcmVersionedMultiAsset: XcmVersionedMultiAsset; XcmVersionedMultiAssets: XcmVersionedMultiAssets; XcmVersionedMultiLocation: XcmVersionedMultiLocation; XcmVersionedXcm: XcmVersionedXcm; diff --git a/tests/src/interfaces/default/types.ts b/tests/src/interfaces/default/types.ts index 569c605e01..7e6ce66ff5 100644 --- a/tests/src/interfaces/default/types.ts +++ 
b/tests/src/interfaces/default/types.ts @@ -3,7 +3,7 @@ import type { BTreeMap, BTreeSet, Bytes, Compact, Enum, Null, Option, Result, Struct, Text, U256, U8aFixed, Vec, bool, u128, u16, u32, u64, u8 } from '@polkadot/types-codec'; import type { ITuple } from '@polkadot/types-codec/types'; -import type { AccountId32, Call, H160, H256, MultiAddress, Perbill, Permill } from '@polkadot/types/interfaces/runtime'; +import type { AccountId32, Call, H160, H256, MultiAddress, Perbill, Permill, Weight } from '@polkadot/types/interfaces/runtime'; import type { Event } from '@polkadot/types/interfaces/system'; /** @name CumulusPalletDmpQueueCall */ @@ -11,14 +11,14 @@ export interface CumulusPalletDmpQueueCall extends Enum { readonly isServiceOverweight: boolean; readonly asServiceOverweight: { readonly index: u64; - readonly weightLimit: u64; + readonly weightLimit: Weight; } & Struct; readonly type: 'ServiceOverweight'; } /** @name CumulusPalletDmpQueueConfigData */ export interface CumulusPalletDmpQueueConfigData extends Struct { - readonly maxIndividual: u64; + readonly maxIndividual: Weight; } /** @name CumulusPalletDmpQueueError */ @@ -46,19 +46,19 @@ export interface CumulusPalletDmpQueueEvent extends Enum { readonly isWeightExhausted: boolean; readonly asWeightExhausted: { readonly messageId: U8aFixed; - readonly remainingWeight: u64; - readonly requiredWeight: u64; + readonly remainingWeight: Weight; + readonly requiredWeight: Weight; } & Struct; readonly isOverweightEnqueued: boolean; readonly asOverweightEnqueued: { readonly messageId: U8aFixed; readonly overweightIndex: u64; - readonly requiredWeight: u64; + readonly requiredWeight: Weight; } & Struct; readonly isOverweightServiced: boolean; readonly asOverweightServiced: { readonly overweightIndex: u64; - readonly weightUsed: u64; + readonly weightUsed: Weight; } & Struct; readonly type: 'InvalidFormat' | 'UnsupportedVersion' | 'ExecutedDownward' | 'WeightExhausted' | 'OverweightEnqueued' | 'OverweightServiced'; } @@ -122,7 +122,7 @@ export interface CumulusPalletParachainSystemEvent extends Enum { } & Struct; readonly isDownwardMessagesProcessed: boolean; readonly asDownwardMessagesProcessed: { - readonly weightUsed: u64; + readonly weightUsed: Weight; readonly dmqHead: H256; } & Struct; readonly type: 'ValidationFunctionStored' | 'ValidationFunctionApplied' | 'ValidationFunctionDiscarded' | 'UpgradeAuthorized' | 'DownwardMessagesReceived' | 'DownwardMessagesProcessed'; @@ -166,7 +166,7 @@ export interface CumulusPalletXcmpQueueCall extends Enum { readonly isServiceOverweight: boolean; readonly asServiceOverweight: { readonly index: u64; - readonly weightLimit: u64; + readonly weightLimit: Weight; } & Struct; readonly isSuspendXcmExecution: boolean; readonly isResumeXcmExecution: boolean; @@ -184,15 +184,15 @@ export interface CumulusPalletXcmpQueueCall extends Enum { } & Struct; readonly isUpdateThresholdWeight: boolean; readonly asUpdateThresholdWeight: { - readonly new_: u64; + readonly new_: Weight; } & Struct; readonly isUpdateWeightRestrictDecay: boolean; readonly asUpdateWeightRestrictDecay: { - readonly new_: u64; + readonly new_: Weight; } & Struct; readonly isUpdateXcmpMaxIndividualWeight: boolean; readonly asUpdateXcmpMaxIndividualWeight: { - readonly new_: u64; + readonly new_: Weight; } & Struct; readonly type: 'ServiceOverweight' | 'SuspendXcmExecution' | 'ResumeXcmExecution' | 'UpdateSuspendThreshold' | 'UpdateDropThreshold' | 'UpdateResumeThreshold' | 'UpdateThresholdWeight' | 'UpdateWeightRestrictDecay' | 
'UpdateXcmpMaxIndividualWeight'; } @@ -210,21 +210,44 @@ export interface CumulusPalletXcmpQueueError extends Enum { /** @name CumulusPalletXcmpQueueEvent */ export interface CumulusPalletXcmpQueueEvent extends Enum { readonly isSuccess: boolean; - readonly asSuccess: Option; + readonly asSuccess: { + readonly messageHash: Option; + readonly weight: Weight; + } & Struct; readonly isFail: boolean; - readonly asFail: ITuple<[Option, XcmV2TraitsError]>; + readonly asFail: { + readonly messageHash: Option; + readonly error: XcmV2TraitsError; + readonly weight: Weight; + } & Struct; readonly isBadVersion: boolean; - readonly asBadVersion: Option; + readonly asBadVersion: { + readonly messageHash: Option; + } & Struct; readonly isBadFormat: boolean; - readonly asBadFormat: Option; + readonly asBadFormat: { + readonly messageHash: Option; + } & Struct; readonly isUpwardMessageSent: boolean; - readonly asUpwardMessageSent: Option; + readonly asUpwardMessageSent: { + readonly messageHash: Option; + } & Struct; readonly isXcmpMessageSent: boolean; - readonly asXcmpMessageSent: Option; + readonly asXcmpMessageSent: { + readonly messageHash: Option; + } & Struct; readonly isOverweightEnqueued: boolean; - readonly asOverweightEnqueued: ITuple<[u32, u32, u64, u64]>; + readonly asOverweightEnqueued: { + readonly sender: u32; + readonly sentAt: u32; + readonly index: u64; + readonly required: Weight; + } & Struct; readonly isOverweightServiced: boolean; - readonly asOverweightServiced: ITuple<[u64, u64]>; + readonly asOverweightServiced: { + readonly index: u64; + readonly used: Weight; + } & Struct; readonly type: 'Success' | 'Fail' | 'BadVersion' | 'BadFormat' | 'UpwardMessageSent' | 'XcmpMessageSent' | 'OverweightEnqueued' | 'OverweightServiced'; } @@ -263,9 +286,9 @@ export interface CumulusPalletXcmpQueueQueueConfigData extends Struct { readonly suspendThreshold: u32; readonly dropThreshold: u32; readonly resumeThreshold: u32; - readonly thresholdWeight: u64; - readonly weightRestrictDecay: u64; - readonly xcmpMaxIndividualWeight: u64; + readonly thresholdWeight: Weight; + readonly weightRestrictDecay: Weight; + readonly xcmpMaxIndividualWeight: Weight; } /** @name CumulusPrimitivesParachainInherentParachainInherentData */ @@ -478,6 +501,49 @@ export interface FpRpcTransactionStatus extends Struct { readonly logsBloom: EthbloomBloom; } +/** @name FrameSupportDispatchDispatchClass */ +export interface FrameSupportDispatchDispatchClass extends Enum { + readonly isNormal: boolean; + readonly isOperational: boolean; + readonly isMandatory: boolean; + readonly type: 'Normal' | 'Operational' | 'Mandatory'; +} + +/** @name FrameSupportDispatchDispatchInfo */ +export interface FrameSupportDispatchDispatchInfo extends Struct { + readonly weight: Weight; + readonly class: FrameSupportDispatchDispatchClass; + readonly paysFee: FrameSupportDispatchPays; +} + +/** @name FrameSupportDispatchPays */ +export interface FrameSupportDispatchPays extends Enum { + readonly isYes: boolean; + readonly isNo: boolean; + readonly type: 'Yes' | 'No'; +} + +/** @name FrameSupportDispatchPerDispatchClassU32 */ +export interface FrameSupportDispatchPerDispatchClassU32 extends Struct { + readonly normal: u32; + readonly operational: u32; + readonly mandatory: u32; +} + +/** @name FrameSupportDispatchPerDispatchClassWeight */ +export interface FrameSupportDispatchPerDispatchClassWeight extends Struct { + readonly normal: Weight; + readonly operational: Weight; + readonly mandatory: Weight; +} + +/** @name 
FrameSupportDispatchPerDispatchClassWeightsPerClass */ +export interface FrameSupportDispatchPerDispatchClassWeightsPerClass extends Struct { + readonly normal: FrameSystemLimitsWeightsPerClass; + readonly operational: FrameSystemLimitsWeightsPerClass; + readonly mandatory: FrameSystemLimitsWeightsPerClass; +} + /** @name FrameSupportDispatchRawOrigin */ export interface FrameSupportDispatchRawOrigin extends Enum { readonly isRoot: boolean; @@ -513,55 +579,6 @@ export interface FrameSupportTokensMiscBalanceStatus extends Enum { readonly type: 'Free' | 'Reserved'; } -/** @name FrameSupportWeightsDispatchClass */ -export interface FrameSupportWeightsDispatchClass extends Enum { - readonly isNormal: boolean; - readonly isOperational: boolean; - readonly isMandatory: boolean; - readonly type: 'Normal' | 'Operational' | 'Mandatory'; -} - -/** @name FrameSupportWeightsDispatchInfo */ -export interface FrameSupportWeightsDispatchInfo extends Struct { - readonly weight: u64; - readonly class: FrameSupportWeightsDispatchClass; - readonly paysFee: FrameSupportWeightsPays; -} - -/** @name FrameSupportWeightsPays */ -export interface FrameSupportWeightsPays extends Enum { - readonly isYes: boolean; - readonly isNo: boolean; - readonly type: 'Yes' | 'No'; -} - -/** @name FrameSupportWeightsPerDispatchClassU32 */ -export interface FrameSupportWeightsPerDispatchClassU32 extends Struct { - readonly normal: u32; - readonly operational: u32; - readonly mandatory: u32; -} - -/** @name FrameSupportWeightsPerDispatchClassU64 */ -export interface FrameSupportWeightsPerDispatchClassU64 extends Struct { - readonly normal: u64; - readonly operational: u64; - readonly mandatory: u64; -} - -/** @name FrameSupportWeightsPerDispatchClassWeightsPerClass */ -export interface FrameSupportWeightsPerDispatchClassWeightsPerClass extends Struct { - readonly normal: FrameSystemLimitsWeightsPerClass; - readonly operational: FrameSystemLimitsWeightsPerClass; - readonly mandatory: FrameSystemLimitsWeightsPerClass; -} - -/** @name FrameSupportWeightsRuntimeDbWeight */ -export interface FrameSupportWeightsRuntimeDbWeight extends Struct { - readonly read: u64; - readonly write: u64; -} - /** @name FrameSystemAccountInfo */ export interface FrameSystemAccountInfo extends Struct { readonly nonce: u32; @@ -628,12 +645,12 @@ export interface FrameSystemError extends Enum { export interface FrameSystemEvent extends Enum { readonly isExtrinsicSuccess: boolean; readonly asExtrinsicSuccess: { - readonly dispatchInfo: FrameSupportWeightsDispatchInfo; + readonly dispatchInfo: FrameSupportDispatchDispatchInfo; } & Struct; readonly isExtrinsicFailed: boolean; readonly asExtrinsicFailed: { readonly dispatchError: SpRuntimeDispatchError; - readonly dispatchInfo: FrameSupportWeightsDispatchInfo; + readonly dispatchInfo: FrameSupportDispatchDispatchInfo; } & Struct; readonly isCodeUpdated: boolean; readonly isNewAccount: boolean; @@ -668,6 +685,9 @@ export interface FrameSystemExtensionsCheckNonce extends Compact {} /** @name FrameSystemExtensionsCheckSpecVersion */ export interface FrameSystemExtensionsCheckSpecVersion extends Null {} +/** @name FrameSystemExtensionsCheckTxVersion */ +export interface FrameSystemExtensionsCheckTxVersion extends Null {} + /** @name FrameSystemExtensionsCheckWeight */ export interface FrameSystemExtensionsCheckWeight extends Null {} @@ -679,22 +699,22 @@ export interface FrameSystemLastRuntimeUpgradeInfo extends Struct { /** @name FrameSystemLimitsBlockLength */ export interface FrameSystemLimitsBlockLength extends 
Struct { - readonly max: FrameSupportWeightsPerDispatchClassU32; + readonly max: FrameSupportDispatchPerDispatchClassU32; } /** @name FrameSystemLimitsBlockWeights */ export interface FrameSystemLimitsBlockWeights extends Struct { - readonly baseBlock: u64; - readonly maxBlock: u64; - readonly perClass: FrameSupportWeightsPerDispatchClassWeightsPerClass; + readonly baseBlock: Weight; + readonly maxBlock: Weight; + readonly perClass: FrameSupportDispatchPerDispatchClassWeightsPerClass; } /** @name FrameSystemLimitsWeightsPerClass */ export interface FrameSystemLimitsWeightsPerClass extends Struct { - readonly baseExtrinsic: u64; - readonly maxExtrinsic: Option; - readonly maxTotal: Option; - readonly reserved: Option; + readonly baseExtrinsic: Weight; + readonly maxExtrinsic: Option; + readonly maxTotal: Option; + readonly reserved: Option; } /** @name FrameSystemPhase */ @@ -706,27 +726,184 @@ export interface FrameSystemPhase extends Enum { readonly type: 'ApplyExtrinsic' | 'Finalization' | 'Initialization'; } -/** @name OpalRuntimeCheckMaintenance */ -export interface OpalRuntimeCheckMaintenance extends Null {} - /** @name OpalRuntimeOriginCaller */ export interface OpalRuntimeOriginCaller extends Enum { - readonly isVoid: boolean; - readonly asVoid: SpCoreVoid; readonly isSystem: boolean; readonly asSystem: FrameSupportDispatchRawOrigin; + readonly isVoid: boolean; + readonly asVoid: SpCoreVoid; readonly isPolkadotXcm: boolean; readonly asPolkadotXcm: PalletXcmOrigin; readonly isCumulusXcm: boolean; readonly asCumulusXcm: CumulusPalletXcmOrigin; readonly isEthereum: boolean; readonly asEthereum: PalletEthereumRawOrigin; - readonly type: 'Void' | 'System' | 'PolkadotXcm' | 'CumulusXcm' | 'Ethereum'; + readonly type: 'System' | 'Void' | 'PolkadotXcm' | 'CumulusXcm' | 'Ethereum'; } /** @name OpalRuntimeRuntime */ export interface OpalRuntimeRuntime extends Null {} +/** @name OpalRuntimeRuntimeCommonMaintenanceCheckMaintenance */ +export interface OpalRuntimeRuntimeCommonMaintenanceCheckMaintenance extends Null {} + +/** @name OrmlTokensAccountData */ +export interface OrmlTokensAccountData extends Struct { + readonly free: u128; + readonly reserved: u128; + readonly frozen: u128; +} + +/** @name OrmlTokensBalanceLock */ +export interface OrmlTokensBalanceLock extends Struct { + readonly id: U8aFixed; + readonly amount: u128; +} + +/** @name OrmlTokensModuleCall */ +export interface OrmlTokensModuleCall extends Enum { + readonly isTransfer: boolean; + readonly asTransfer: { + readonly dest: MultiAddress; + readonly currencyId: PalletForeignAssetsAssetIds; + readonly amount: Compact; + } & Struct; + readonly isTransferAll: boolean; + readonly asTransferAll: { + readonly dest: MultiAddress; + readonly currencyId: PalletForeignAssetsAssetIds; + readonly keepAlive: bool; + } & Struct; + readonly isTransferKeepAlive: boolean; + readonly asTransferKeepAlive: { + readonly dest: MultiAddress; + readonly currencyId: PalletForeignAssetsAssetIds; + readonly amount: Compact; + } & Struct; + readonly isForceTransfer: boolean; + readonly asForceTransfer: { + readonly source: MultiAddress; + readonly dest: MultiAddress; + readonly currencyId: PalletForeignAssetsAssetIds; + readonly amount: Compact; + } & Struct; + readonly isSetBalance: boolean; + readonly asSetBalance: { + readonly who: MultiAddress; + readonly currencyId: PalletForeignAssetsAssetIds; + readonly newFree: Compact; + readonly newReserved: Compact; + } & Struct; + readonly type: 'Transfer' | 'TransferAll' | 'TransferKeepAlive' | 
'ForceTransfer' | 'SetBalance'; +} + +/** @name OrmlTokensModuleError */ +export interface OrmlTokensModuleError extends Enum { + readonly isBalanceTooLow: boolean; + readonly isAmountIntoBalanceFailed: boolean; + readonly isLiquidityRestrictions: boolean; + readonly isMaxLocksExceeded: boolean; + readonly isKeepAlive: boolean; + readonly isExistentialDeposit: boolean; + readonly isDeadAccount: boolean; + readonly isTooManyReserves: boolean; + readonly type: 'BalanceTooLow' | 'AmountIntoBalanceFailed' | 'LiquidityRestrictions' | 'MaxLocksExceeded' | 'KeepAlive' | 'ExistentialDeposit' | 'DeadAccount' | 'TooManyReserves'; +} + +/** @name OrmlTokensModuleEvent */ +export interface OrmlTokensModuleEvent extends Enum { + readonly isEndowed: boolean; + readonly asEndowed: { + readonly currencyId: PalletForeignAssetsAssetIds; + readonly who: AccountId32; + readonly amount: u128; + } & Struct; + readonly isDustLost: boolean; + readonly asDustLost: { + readonly currencyId: PalletForeignAssetsAssetIds; + readonly who: AccountId32; + readonly amount: u128; + } & Struct; + readonly isTransfer: boolean; + readonly asTransfer: { + readonly currencyId: PalletForeignAssetsAssetIds; + readonly from: AccountId32; + readonly to: AccountId32; + readonly amount: u128; + } & Struct; + readonly isReserved: boolean; + readonly asReserved: { + readonly currencyId: PalletForeignAssetsAssetIds; + readonly who: AccountId32; + readonly amount: u128; + } & Struct; + readonly isUnreserved: boolean; + readonly asUnreserved: { + readonly currencyId: PalletForeignAssetsAssetIds; + readonly who: AccountId32; + readonly amount: u128; + } & Struct; + readonly isReserveRepatriated: boolean; + readonly asReserveRepatriated: { + readonly currencyId: PalletForeignAssetsAssetIds; + readonly from: AccountId32; + readonly to: AccountId32; + readonly amount: u128; + readonly status: FrameSupportTokensMiscBalanceStatus; + } & Struct; + readonly isBalanceSet: boolean; + readonly asBalanceSet: { + readonly currencyId: PalletForeignAssetsAssetIds; + readonly who: AccountId32; + readonly free: u128; + readonly reserved: u128; + } & Struct; + readonly isTotalIssuanceSet: boolean; + readonly asTotalIssuanceSet: { + readonly currencyId: PalletForeignAssetsAssetIds; + readonly amount: u128; + } & Struct; + readonly isWithdrawn: boolean; + readonly asWithdrawn: { + readonly currencyId: PalletForeignAssetsAssetIds; + readonly who: AccountId32; + readonly amount: u128; + } & Struct; + readonly isSlashed: boolean; + readonly asSlashed: { + readonly currencyId: PalletForeignAssetsAssetIds; + readonly who: AccountId32; + readonly freeAmount: u128; + readonly reservedAmount: u128; + } & Struct; + readonly isDeposited: boolean; + readonly asDeposited: { + readonly currencyId: PalletForeignAssetsAssetIds; + readonly who: AccountId32; + readonly amount: u128; + } & Struct; + readonly isLockSet: boolean; + readonly asLockSet: { + readonly lockId: U8aFixed; + readonly currencyId: PalletForeignAssetsAssetIds; + readonly who: AccountId32; + readonly amount: u128; + } & Struct; + readonly isLockRemoved: boolean; + readonly asLockRemoved: { + readonly lockId: U8aFixed; + readonly currencyId: PalletForeignAssetsAssetIds; + readonly who: AccountId32; + } & Struct; + readonly type: 'Endowed' | 'DustLost' | 'Transfer' | 'Reserved' | 'Unreserved' | 'ReserveRepatriated' | 'BalanceSet' | 'TotalIssuanceSet' | 'Withdrawn' | 'Slashed' | 'Deposited' | 'LockSet' | 'LockRemoved'; +} + +/** @name OrmlTokensReserveData */ +export interface OrmlTokensReserveData extends 
Struct { + readonly id: Null; + readonly amount: u128; +} + /** @name OrmlVestingModuleCall */ export interface OrmlVestingModuleCall extends Enum { readonly isClaim: boolean; @@ -786,6 +963,147 @@ export interface OrmlVestingVestingSchedule extends Struct { readonly perPeriod: Compact; } +/** @name OrmlXtokensModuleCall */ +export interface OrmlXtokensModuleCall extends Enum { + readonly isTransfer: boolean; + readonly asTransfer: { + readonly currencyId: PalletForeignAssetsAssetIds; + readonly amount: u128; + readonly dest: XcmVersionedMultiLocation; + readonly destWeight: u64; + } & Struct; + readonly isTransferMultiasset: boolean; + readonly asTransferMultiasset: { + readonly asset: XcmVersionedMultiAsset; + readonly dest: XcmVersionedMultiLocation; + readonly destWeight: u64; + } & Struct; + readonly isTransferWithFee: boolean; + readonly asTransferWithFee: { + readonly currencyId: PalletForeignAssetsAssetIds; + readonly amount: u128; + readonly fee: u128; + readonly dest: XcmVersionedMultiLocation; + readonly destWeight: u64; + } & Struct; + readonly isTransferMultiassetWithFee: boolean; + readonly asTransferMultiassetWithFee: { + readonly asset: XcmVersionedMultiAsset; + readonly fee: XcmVersionedMultiAsset; + readonly dest: XcmVersionedMultiLocation; + readonly destWeight: u64; + } & Struct; + readonly isTransferMulticurrencies: boolean; + readonly asTransferMulticurrencies: { + readonly currencies: Vec>; + readonly feeItem: u32; + readonly dest: XcmVersionedMultiLocation; + readonly destWeight: u64; + } & Struct; + readonly isTransferMultiassets: boolean; + readonly asTransferMultiassets: { + readonly assets: XcmVersionedMultiAssets; + readonly feeItem: u32; + readonly dest: XcmVersionedMultiLocation; + readonly destWeight: u64; + } & Struct; + readonly type: 'Transfer' | 'TransferMultiasset' | 'TransferWithFee' | 'TransferMultiassetWithFee' | 'TransferMulticurrencies' | 'TransferMultiassets'; +} + +/** @name OrmlXtokensModuleError */ +export interface OrmlXtokensModuleError extends Enum { + readonly isAssetHasNoReserve: boolean; + readonly isNotCrossChainTransfer: boolean; + readonly isInvalidDest: boolean; + readonly isNotCrossChainTransferableCurrency: boolean; + readonly isUnweighableMessage: boolean; + readonly isXcmExecutionFailed: boolean; + readonly isCannotReanchor: boolean; + readonly isInvalidAncestry: boolean; + readonly isInvalidAsset: boolean; + readonly isDestinationNotInvertible: boolean; + readonly isBadVersion: boolean; + readonly isDistinctReserveForAssetAndFee: boolean; + readonly isZeroFee: boolean; + readonly isZeroAmount: boolean; + readonly isTooManyAssetsBeingSent: boolean; + readonly isAssetIndexNonExistent: boolean; + readonly isFeeNotEnough: boolean; + readonly isNotSupportedMultiLocation: boolean; + readonly isMinXcmFeeNotDefined: boolean; + readonly type: 'AssetHasNoReserve' | 'NotCrossChainTransfer' | 'InvalidDest' | 'NotCrossChainTransferableCurrency' | 'UnweighableMessage' | 'XcmExecutionFailed' | 'CannotReanchor' | 'InvalidAncestry' | 'InvalidAsset' | 'DestinationNotInvertible' | 'BadVersion' | 'DistinctReserveForAssetAndFee' | 'ZeroFee' | 'ZeroAmount' | 'TooManyAssetsBeingSent' | 'AssetIndexNonExistent' | 'FeeNotEnough' | 'NotSupportedMultiLocation' | 'MinXcmFeeNotDefined'; +} + +/** @name OrmlXtokensModuleEvent */ +export interface OrmlXtokensModuleEvent extends Enum { + readonly isTransferredMultiAssets: boolean; + readonly asTransferredMultiAssets: { + readonly sender: AccountId32; + readonly assets: XcmV1MultiassetMultiAssets; + readonly fee: 
XcmV1MultiAsset; + readonly dest: XcmV1MultiLocation; + } & Struct; + readonly type: 'TransferredMultiAssets'; +} + +/** @name PalletAppPromotionCall */ +export interface PalletAppPromotionCall extends Enum { + readonly isSetAdminAddress: boolean; + readonly asSetAdminAddress: { + readonly admin: PalletEvmAccountBasicCrossAccountIdRepr; + } & Struct; + readonly isStake: boolean; + readonly asStake: { + readonly amount: u128; + } & Struct; + readonly isUnstake: boolean; + readonly isSponsorCollection: boolean; + readonly asSponsorCollection: { + readonly collectionId: u32; + } & Struct; + readonly isStopSponsoringCollection: boolean; + readonly asStopSponsoringCollection: { + readonly collectionId: u32; + } & Struct; + readonly isSponsorContract: boolean; + readonly asSponsorContract: { + readonly contractId: H160; + } & Struct; + readonly isStopSponsoringContract: boolean; + readonly asStopSponsoringContract: { + readonly contractId: H160; + } & Struct; + readonly isPayoutStakers: boolean; + readonly asPayoutStakers: { + readonly stakersNumber: Option; + } & Struct; + readonly type: 'SetAdminAddress' | 'Stake' | 'Unstake' | 'SponsorCollection' | 'StopSponsoringCollection' | 'SponsorContract' | 'StopSponsoringContract' | 'PayoutStakers'; +} + +/** @name PalletAppPromotionError */ +export interface PalletAppPromotionError extends Enum { + readonly isAdminNotSet: boolean; + readonly isNoPermission: boolean; + readonly isNotSufficientFunds: boolean; + readonly isPendingForBlockOverflow: boolean; + readonly isSponsorNotSet: boolean; + readonly isIncorrectLockedBalanceOperation: boolean; + readonly type: 'AdminNotSet' | 'NoPermission' | 'NotSufficientFunds' | 'PendingForBlockOverflow' | 'SponsorNotSet' | 'IncorrectLockedBalanceOperation'; +} + +/** @name PalletAppPromotionEvent */ +export interface PalletAppPromotionEvent extends Enum { + readonly isStakingRecalculation: boolean; + readonly asStakingRecalculation: ITuple<[AccountId32, u128, u128]>; + readonly isStake: boolean; + readonly asStake: ITuple<[AccountId32, u128]>; + readonly isUnstake: boolean; + readonly asUnstake: ITuple<[AccountId32, u128]>; + readonly isSetAdmin: boolean; + readonly asSetAdmin: AccountId32; + readonly type: 'StakingRecalculation' | 'Stake' | 'Unstake' | 'SetAdmin'; +} + /** @name PalletBalancesAccountData */ export interface PalletBalancesAccountData extends Struct { readonly free: u128; @@ -997,6 +1315,19 @@ export interface PalletCommonEvent extends Enum { readonly type: 'CollectionCreated' | 'CollectionDestroyed' | 'ItemCreated' | 'ItemDestroyed' | 'Transfer' | 'Approved' | 'CollectionPropertySet' | 'CollectionPropertyDeleted' | 'TokenPropertySet' | 'TokenPropertyDeleted' | 'PropertyPermissionSet'; } +/** @name PalletConfigurationCall */ +export interface PalletConfigurationCall extends Enum { + readonly isSetWeightToFeeCoefficientOverride: boolean; + readonly asSetWeightToFeeCoefficientOverride: { + readonly coeff: Option; + } & Struct; + readonly isSetMinGasPriceOverride: boolean; + readonly asSetMinGasPriceOverride: { + readonly coeff: Option; + } & Struct; + readonly type: 'SetWeightToFeeCoefficientOverride' | 'SetMinGasPriceOverride'; +} + /** @name PalletEthereumCall */ export interface PalletEthereumCall extends Enum { readonly isTransact: boolean; @@ -1094,7 +1425,20 @@ export interface PalletEvmCoderSubstrateError extends Enum { /** @name PalletEvmContractHelpersError */ export interface PalletEvmContractHelpersError extends Enum { readonly isNoPermission: boolean; - readonly type: 'NoPermission'; + 
readonly isNoPendingSponsor: boolean; + readonly isTooManyMethodsHaveSponsoredLimit: boolean; + readonly type: 'NoPermission' | 'NoPendingSponsor' | 'TooManyMethodsHaveSponsoredLimit'; +} + +/** @name PalletEvmContractHelpersEvent */ +export interface PalletEvmContractHelpersEvent extends Enum { + readonly isContractSponsorSet: boolean; + readonly asContractSponsorSet: ITuple<[H160, AccountId32]>; + readonly isContractSponsorshipConfirmed: boolean; + readonly asContractSponsorshipConfirmed: ITuple<[H160, AccountId32]>; + readonly isContractSponsorRemoved: boolean; + readonly asContractSponsorRemoved: H160; + readonly type: 'ContractSponsorSet' | 'ContractSponsorshipConfirmed' | 'ContractSponsorRemoved'; } /** @name PalletEvmContractHelpersSponsoringModeT */ @@ -1161,6 +1505,83 @@ export interface PalletEvmMigrationError extends Enum { readonly type: 'AccountNotEmpty' | 'AccountIsNotMigrating'; } +/** @name PalletForeignAssetsAssetIds */ +export interface PalletForeignAssetsAssetIds extends Enum { + readonly isForeignAssetId: boolean; + readonly asForeignAssetId: u32; + readonly isNativeAssetId: boolean; + readonly asNativeAssetId: PalletForeignAssetsNativeCurrency; + readonly type: 'ForeignAssetId' | 'NativeAssetId'; +} + +/** @name PalletForeignAssetsModuleAssetMetadata */ +export interface PalletForeignAssetsModuleAssetMetadata extends Struct { + readonly name: Bytes; + readonly symbol: Bytes; + readonly decimals: u8; + readonly minimalBalance: u128; +} + +/** @name PalletForeignAssetsModuleCall */ +export interface PalletForeignAssetsModuleCall extends Enum { + readonly isRegisterForeignAsset: boolean; + readonly asRegisterForeignAsset: { + readonly owner: AccountId32; + readonly location: XcmVersionedMultiLocation; + readonly metadata: PalletForeignAssetsModuleAssetMetadata; + } & Struct; + readonly isUpdateForeignAsset: boolean; + readonly asUpdateForeignAsset: { + readonly foreignAssetId: u32; + readonly location: XcmVersionedMultiLocation; + readonly metadata: PalletForeignAssetsModuleAssetMetadata; + } & Struct; + readonly type: 'RegisterForeignAsset' | 'UpdateForeignAsset'; +} + +/** @name PalletForeignAssetsModuleError */ +export interface PalletForeignAssetsModuleError extends Enum { + readonly isBadLocation: boolean; + readonly isMultiLocationExisted: boolean; + readonly isAssetIdNotExists: boolean; + readonly isAssetIdExisted: boolean; + readonly type: 'BadLocation' | 'MultiLocationExisted' | 'AssetIdNotExists' | 'AssetIdExisted'; +} + +/** @name PalletForeignAssetsModuleEvent */ +export interface PalletForeignAssetsModuleEvent extends Enum { + readonly isForeignAssetRegistered: boolean; + readonly asForeignAssetRegistered: { + readonly assetId: u32; + readonly assetAddress: XcmV1MultiLocation; + readonly metadata: PalletForeignAssetsModuleAssetMetadata; + } & Struct; + readonly isForeignAssetUpdated: boolean; + readonly asForeignAssetUpdated: { + readonly assetId: u32; + readonly assetAddress: XcmV1MultiLocation; + readonly metadata: PalletForeignAssetsModuleAssetMetadata; + } & Struct; + readonly isAssetRegistered: boolean; + readonly asAssetRegistered: { + readonly assetId: PalletForeignAssetsAssetIds; + readonly metadata: PalletForeignAssetsModuleAssetMetadata; + } & Struct; + readonly isAssetUpdated: boolean; + readonly asAssetUpdated: { + readonly assetId: PalletForeignAssetsAssetIds; + readonly metadata: PalletForeignAssetsModuleAssetMetadata; + } & Struct; + readonly type: 'ForeignAssetRegistered' | 'ForeignAssetUpdated' | 'AssetRegistered' | 'AssetUpdated'; +} + +/** 
@name PalletForeignAssetsNativeCurrency */ +export interface PalletForeignAssetsNativeCurrency extends Enum { + readonly isHere: boolean; + readonly isParent: boolean; + readonly type: 'Here' | 'Parent'; +} + /** @name PalletFungibleError */ export interface PalletFungibleError extends Enum { readonly isNotFungibleDataUsedToMintFungibleCollectionToken: boolean; @@ -1334,7 +1755,6 @@ export interface PalletRmrkCoreCall extends Enum { /** @name PalletRmrkCoreError */ export interface PalletRmrkCoreError extends Enum { readonly isCorruptedCollectionType: boolean; - readonly isNftTypeEncodeError: boolean; readonly isRmrkPropertyKeyIsTooLong: boolean; readonly isRmrkPropertyValueIsTooLong: boolean; readonly isRmrkPropertyIsNotFound: boolean; @@ -1353,7 +1773,7 @@ export interface PalletRmrkCoreError extends Enum { readonly isCannotRejectNonPendingNft: boolean; readonly isResourceNotPending: boolean; readonly isNoAvailableResourceId: boolean; - readonly type: 'CorruptedCollectionType' | 'NftTypeEncodeError' | 'RmrkPropertyKeyIsTooLong' | 'RmrkPropertyValueIsTooLong' | 'RmrkPropertyIsNotFound' | 'UnableToDecodeRmrkData' | 'CollectionNotEmpty' | 'NoAvailableCollectionId' | 'NoAvailableNftId' | 'CollectionUnknown' | 'NoPermission' | 'NonTransferable' | 'CollectionFullOrLocked' | 'ResourceDoesntExist' | 'CannotSendToDescendentOrSelf' | 'CannotAcceptNonOwnedNft' | 'CannotRejectNonOwnedNft' | 'CannotRejectNonPendingNft' | 'ResourceNotPending' | 'NoAvailableResourceId'; + readonly type: 'CorruptedCollectionType' | 'RmrkPropertyKeyIsTooLong' | 'RmrkPropertyValueIsTooLong' | 'RmrkPropertyIsNotFound' | 'UnableToDecodeRmrkData' | 'CollectionNotEmpty' | 'NoAvailableCollectionId' | 'NoAvailableNftId' | 'CollectionUnknown' | 'NoPermission' | 'NonTransferable' | 'CollectionFullOrLocked' | 'ResourceDoesntExist' | 'CannotSendToDescendentOrSelf' | 'CannotAcceptNonOwnedNft' | 'CannotRejectNonOwnedNft' | 'CannotRejectNonPendingNft' | 'ResourceNotPending' | 'NoAvailableResourceId'; } /** @name PalletRmrkCoreEvent */ @@ -1523,7 +1943,7 @@ export interface PalletSudoCall extends Enum { readonly isSudoUncheckedWeight: boolean; readonly asSudoUncheckedWeight: { readonly call: Call; - readonly weight: u64; + readonly weight: Weight; } & Struct; readonly isSetKey: boolean; readonly asSetKey: { @@ -1566,6 +1986,41 @@ export interface PalletTemplateTransactionPaymentCall extends Null {} /** @name PalletTemplateTransactionPaymentChargeTransactionPayment */ export interface PalletTemplateTransactionPaymentChargeTransactionPayment extends Compact {} +/** @name PalletTestUtilsCall */ +export interface PalletTestUtilsCall extends Enum { + readonly isEnable: boolean; + readonly isSetTestValue: boolean; + readonly asSetTestValue: { + readonly value: u32; + } & Struct; + readonly isSetTestValueAndRollback: boolean; + readonly asSetTestValueAndRollback: { + readonly value: u32; + } & Struct; + readonly isIncTestValue: boolean; + readonly isSelfCancelingInc: boolean; + readonly asSelfCancelingInc: { + readonly id: U8aFixed; + readonly maxTestValue: u32; + } & Struct; + readonly isJustTakeFee: boolean; + readonly type: 'Enable' | 'SetTestValue' | 'SetTestValueAndRollback' | 'IncTestValue' | 'SelfCancelingInc' | 'JustTakeFee'; +} + +/** @name PalletTestUtilsError */ +export interface PalletTestUtilsError extends Enum { + readonly isTestPalletDisabled: boolean; + readonly isTriggerRollback: boolean; + readonly type: 'TestPalletDisabled' | 'TriggerRollback'; +} + +/** @name PalletTestUtilsEvent */ +export interface PalletTestUtilsEvent 
extends Enum { + readonly isValueIsSet: boolean; + readonly isShouldRollback: boolean; + readonly type: 'ValueIsSet' | 'ShouldRollback'; +} + /** @name PalletTimestampCall */ export interface PalletTimestampCall extends Enum { readonly isSet: boolean; @@ -1575,6 +2030,17 @@ export interface PalletTimestampCall extends Enum { readonly type: 'Set'; } +/** @name PalletTransactionPaymentEvent */ +export interface PalletTransactionPaymentEvent extends Enum { + readonly isTransactionFeePaid: boolean; + readonly asTransactionFeePaid: { + readonly who: AccountId32; + readonly actualFee: u128; + readonly tip: u128; + } & Struct; + readonly type: 'TransactionFeePaid'; +} + /** @name PalletTransactionPaymentReleases */ export interface PalletTransactionPaymentReleases extends Enum { readonly isV1Ancient: boolean; @@ -1597,11 +2063,16 @@ export interface PalletTreasuryCall extends Enum { readonly asApproveProposal: { readonly proposalId: Compact; } & Struct; + readonly isSpend: boolean; + readonly asSpend: { + readonly amount: Compact; + readonly beneficiary: MultiAddress; + } & Struct; readonly isRemoveApproval: boolean; readonly asRemoveApproval: { readonly proposalId: Compact; } & Struct; - readonly type: 'ProposeSpend' | 'RejectProposal' | 'ApproveProposal' | 'RemoveApproval'; + readonly type: 'ProposeSpend' | 'RejectProposal' | 'ApproveProposal' | 'Spend' | 'RemoveApproval'; } /** @name PalletTreasuryError */ @@ -1609,8 +2080,9 @@ export interface PalletTreasuryError extends Enum { readonly isInsufficientProposersBalance: boolean; readonly isInvalidIndex: boolean; readonly isTooManyApprovals: boolean; + readonly isInsufficientPermission: boolean; readonly isProposalNotApproved: boolean; - readonly type: 'InsufficientProposersBalance' | 'InvalidIndex' | 'TooManyApprovals' | 'ProposalNotApproved'; + readonly type: 'InsufficientProposersBalance' | 'InvalidIndex' | 'TooManyApprovals' | 'InsufficientPermission' | 'ProposalNotApproved'; } /** @name PalletTreasuryEvent */ @@ -1646,7 +2118,13 @@ export interface PalletTreasuryEvent extends Enum { readonly asDeposit: { readonly value: u128; } & Struct; - readonly type: 'Proposed' | 'Spending' | 'Awarded' | 'Rejected' | 'Burnt' | 'Rollover' | 'Deposit'; + readonly isSpendApproved: boolean; + readonly asSpendApproved: { + readonly proposalIndex: u32; + readonly amount: u128; + readonly beneficiary: AccountId32; + } & Struct; + readonly type: 'Proposed' | 'Spending' | 'Awarded' | 'Rejected' | 'Burnt' | 'Rollover' | 'Deposit' | 'SpendApproved'; } /** @name PalletTreasuryProposal */ @@ -1804,12 +2282,12 @@ export interface PalletUniqueCall extends Enum { readonly isSetCollectionPermissions: boolean; readonly asSetCollectionPermissions: { readonly collectionId: u32; - readonly newLimit: UpDataStructsCollectionPermissions; + readonly newPermission: UpDataStructsCollectionPermissions; } & Struct; readonly isRepartition: boolean; readonly asRepartition: { readonly collectionId: u32; - readonly token: u32; + readonly tokenId: u32; readonly amount: u128; } & Struct; readonly type: 'CreateCollection' | 'CreateCollectionEx' | 'DestroyCollection' | 'AddToAllowList' | 'RemoveFromAllowList' | 'ChangeCollectionOwner' | 'AddCollectionAdmin' | 'RemoveCollectionAdmin' | 'SetCollectionSponsor' | 'ConfirmSponsorship' | 'RemoveCollectionSponsor' | 'CreateItem' | 'CreateMultipleItems' | 'SetCollectionProperties' | 'DeleteCollectionProperties' | 'SetTokenProperties' | 'DeleteTokenProperties' | 'SetTokenPropertyPermissions' | 'CreateMultipleItemsEx' | 'SetTransfersEnabledFlag' | 
'BurnItem' | 'BurnFrom' | 'Transfer' | 'Approve' | 'TransferFrom' | 'SetCollectionLimits' | 'SetCollectionPermissions' | 'Repartition'; @@ -1856,7 +2334,7 @@ export interface PalletUniqueSchedulerCall extends Enum { readonly id: U8aFixed; readonly when: u32; readonly maybePeriodic: Option>; - readonly priority: u8; + readonly priority: Option; readonly call: FrameSupportScheduleMaybeHashed; } & Struct; readonly isCancelNamed: boolean; @@ -1868,10 +2346,15 @@ export interface PalletUniqueSchedulerCall extends Enum { readonly id: U8aFixed; readonly after: u32; readonly maybePeriodic: Option>; - readonly priority: u8; + readonly priority: Option; readonly call: FrameSupportScheduleMaybeHashed; } & Struct; - readonly type: 'ScheduleNamed' | 'CancelNamed' | 'ScheduleNamedAfter'; + readonly isChangeNamedPriority: boolean; + readonly asChangeNamedPriority: { + readonly id: U8aFixed; + readonly priority: u8; + } & Struct; + readonly type: 'ScheduleNamed' | 'CancelNamed' | 'ScheduleNamedAfter' | 'ChangeNamedPriority'; } /** @name PalletUniqueSchedulerError */ @@ -1895,6 +2378,12 @@ export interface PalletUniqueSchedulerEvent extends Enum { readonly when: u32; readonly index: u32; } & Struct; + readonly isPriorityChanged: boolean; + readonly asPriorityChanged: { + readonly when: u32; + readonly index: u32; + readonly priority: u8; + } & Struct; readonly isDispatched: boolean; readonly asDispatched: { readonly task: ITuple<[u32, u32]>; @@ -1907,7 +2396,7 @@ export interface PalletUniqueSchedulerEvent extends Enum { readonly id: Option; readonly error: FrameSupportScheduleLookupError; } & Struct; - readonly type: 'Scheduled' | 'Canceled' | 'Dispatched' | 'CallLookupFailed'; + readonly type: 'Scheduled' | 'Canceled' | 'PriorityChanged' | 'Dispatched' | 'CallLookupFailed'; } /** @name PalletUniqueSchedulerScheduledV3 */ @@ -1943,7 +2432,7 @@ export interface PalletXcmCall extends Enum { readonly isExecute: boolean; readonly asExecute: { readonly message: XcmVersionedXcm; - readonly maxWeight: u64; + readonly maxWeight: Weight; } & Struct; readonly isForceXcmVersion: boolean; readonly asForceXcmVersion: { @@ -2012,7 +2501,7 @@ export interface PalletXcmEvent extends Enum { readonly isNotified: boolean; readonly asNotified: ITuple<[u64, u8, u8]>; readonly isNotifyOverweight: boolean; - readonly asNotifyOverweight: ITuple<[u64, u8, u8, u64, u64]>; + readonly asNotifyOverweight: ITuple<[u64, u8, u8, Weight, Weight]>; readonly isNotifyDispatchError: boolean; readonly asNotifyDispatchError: ITuple<[u64, u8, u8]>; readonly isNotifyDecodeFailed: boolean; @@ -2367,6 +2856,12 @@ export interface SpVersionRuntimeVersion extends Struct { readonly stateVersion: u8; } +/** @name SpWeightsRuntimeDbWeight */ +export interface SpWeightsRuntimeDbWeight extends Struct { + readonly read: u64; + readonly write: u64; +} + /** @name UpDataStructsAccessMode */ export interface UpDataStructsAccessMode extends Enum { readonly isNormal: boolean; @@ -2381,10 +2876,10 @@ export interface UpDataStructsCollection extends Struct { readonly name: Vec; readonly description: Vec; readonly tokenPrefix: Bytes; - readonly sponsorship: UpDataStructsSponsorshipState; + readonly sponsorship: UpDataStructsSponsorshipStateAccountId32; readonly limits: UpDataStructsCollectionLimits; readonly permissions: UpDataStructsCollectionPermissions; - readonly externalCollection: bool; + readonly flags: U8aFixed; } /** @name UpDataStructsCollectionLimits */ @@ -2460,9 +2955,9 @@ export interface UpDataStructsCreateItemExData extends Enum { readonly 
isFungible: boolean; readonly asFungible: BTreeMap; readonly isRefungibleMultipleItems: boolean; - readonly asRefungibleMultipleItems: Vec; + readonly asRefungibleMultipleItems: Vec; readonly isRefungibleMultipleOwners: boolean; - readonly asRefungibleMultipleOwners: UpDataStructsCreateRefungibleExData; + readonly asRefungibleMultipleOwners: UpDataStructsCreateRefungibleExMultipleOwners; readonly type: 'Nft' | 'Fungible' | 'RefungibleMultipleItems' | 'RefungibleMultipleOwners'; } @@ -2479,14 +2974,21 @@ export interface UpDataStructsCreateNftExData extends Struct { /** @name UpDataStructsCreateReFungibleData */ export interface UpDataStructsCreateReFungibleData extends Struct { - readonly constData: Bytes; readonly pieces: u128; + readonly properties: Vec; } -/** @name UpDataStructsCreateRefungibleExData */ -export interface UpDataStructsCreateRefungibleExData extends Struct { - readonly constData: Bytes; +/** @name UpDataStructsCreateRefungibleExMultipleOwners */ +export interface UpDataStructsCreateRefungibleExMultipleOwners extends Struct { readonly users: BTreeMap; + readonly properties: Vec; +} + +/** @name UpDataStructsCreateRefungibleExSingleOwner */ +export interface UpDataStructsCreateRefungibleExSingleOwner extends Struct { + readonly user: PalletEvmAccountBasicCrossAccountIdRepr; + readonly pieces: u128; + readonly properties: Vec; } /** @name UpDataStructsNestingPermissions */ @@ -2545,12 +3047,19 @@ export interface UpDataStructsRpcCollection extends Struct { readonly name: Vec; readonly description: Vec; readonly tokenPrefix: Bytes; - readonly sponsorship: UpDataStructsSponsorshipState; + readonly sponsorship: UpDataStructsSponsorshipStateAccountId32; readonly limits: UpDataStructsCollectionLimits; readonly permissions: UpDataStructsCollectionPermissions; readonly tokenPropertyPermissions: Vec; readonly properties: Vec; readonly readOnly: bool; + readonly flags: UpDataStructsRpcCollectionFlags; +} + +/** @name UpDataStructsRpcCollectionFlags */ +export interface UpDataStructsRpcCollectionFlags extends Struct { + readonly foreign: bool; + readonly erc721metadata: bool; } /** @name UpDataStructsSponsoringRateLimit */ @@ -2561,8 +3070,8 @@ export interface UpDataStructsSponsoringRateLimit extends Enum { readonly type: 'SponsoringDisabled' | 'Blocks'; } -/** @name UpDataStructsSponsorshipState */ -export interface UpDataStructsSponsorshipState extends Enum { +/** @name UpDataStructsSponsorshipStateAccountId32 */ +export interface UpDataStructsSponsorshipStateAccountId32 extends Enum { readonly isDisabled: boolean; readonly isUnconfirmed: boolean; readonly asUnconfirmed: AccountId32; @@ -2571,6 +3080,16 @@ export interface UpDataStructsSponsorshipState extends Enum { readonly type: 'Disabled' | 'Unconfirmed' | 'Confirmed'; } +/** @name UpDataStructsSponsorshipStateBasicCrossAccountIdRepr */ +export interface UpDataStructsSponsorshipStateBasicCrossAccountIdRepr extends Enum { + readonly isDisabled: boolean; + readonly isUnconfirmed: boolean; + readonly asUnconfirmed: PalletEvmAccountBasicCrossAccountIdRepr; + readonly isConfirmed: boolean; + readonly asConfirmed: PalletEvmAccountBasicCrossAccountIdRepr; + readonly type: 'Disabled' | 'Unconfirmed' | 'Confirmed'; +} + /** @name UpDataStructsTokenChild */ export interface UpDataStructsTokenChild extends Struct { readonly token: u32; @@ -2581,6 +3100,7 @@ export interface UpDataStructsTokenChild extends Struct { export interface UpDataStructsTokenData extends Struct { readonly properties: Vec; readonly owner: Option; + readonly pieces: 
u128; } /** @name XcmDoubleEncoded */ @@ -3315,6 +3835,15 @@ export interface XcmV2WeightLimit extends Enum { /** @name XcmV2Xcm */ export interface XcmV2Xcm extends Vec {} +/** @name XcmVersionedMultiAsset */ +export interface XcmVersionedMultiAsset extends Enum { + readonly isV0: boolean; + readonly asV0: XcmV0MultiAsset; + readonly isV1: boolean; + readonly asV1: XcmV1MultiAsset; + readonly type: 'V0' | 'V1'; +} + /** @name XcmVersionedMultiAssets */ export interface XcmVersionedMultiAssets extends Enum { readonly isV0: boolean; diff --git a/tests/src/interfaces/definitions.ts b/tests/src/interfaces/definitions.ts index fe782eaecc..3b01bbf892 100644 --- a/tests/src/interfaces/definitions.ts +++ b/tests/src/interfaces/definitions.ts @@ -15,5 +15,6 @@ // along with Unique Network. If not, see . export {default as unique} from './unique/definitions'; +export {default as appPromotion} from './appPromotion/definitions'; export {default as rmrk} from './rmrk/definitions'; export {default as default} from './default/definitions'; \ No newline at end of file diff --git a/tests/src/interfaces/lookup.ts b/tests/src/interfaces/lookup.ts index 79f5365e5c..9f38ea791f 100644 --- a/tests/src/interfaces/lookup.ts +++ b/tests/src/interfaces/lookup.ts @@ -5,208 +5,176 @@ export default { /** - * Lookup2: polkadot_primitives::v2::PersistedValidationData + * Lookup3: frame_system::AccountInfo> **/ - PolkadotPrimitivesV2PersistedValidationData: { - parentHead: 'Bytes', - relayParentNumber: 'u32', - relayParentStorageRoot: 'H256', - maxPovSize: 'u32' - }, - /** - * Lookup9: polkadot_primitives::v2::UpgradeRestriction - **/ - PolkadotPrimitivesV2UpgradeRestriction: { - _enum: ['Present'] + FrameSystemAccountInfo: { + nonce: 'u32', + consumers: 'u32', + providers: 'u32', + sufficients: 'u32', + data: 'PalletBalancesAccountData' }, /** - * Lookup10: sp_trie::storage_proof::StorageProof + * Lookup5: pallet_balances::AccountData **/ - SpTrieStorageProof: { - trieNodes: 'BTreeSet' + PalletBalancesAccountData: { + free: 'u128', + reserved: 'u128', + miscFrozen: 'u128', + feeFrozen: 'u128' }, /** - * Lookup13: cumulus_pallet_parachain_system::relay_state_snapshot::MessagingStateSnapshot + * Lookup7: frame_support::dispatch::PerDispatchClass **/ - CumulusPalletParachainSystemRelayStateSnapshotMessagingStateSnapshot: { - dmqMqcHead: 'H256', - relayDispatchQueueSize: '(u32,u32)', - ingressChannels: 'Vec<(u32,PolkadotPrimitivesV2AbridgedHrmpChannel)>', - egressChannels: 'Vec<(u32,PolkadotPrimitivesV2AbridgedHrmpChannel)>' + FrameSupportDispatchPerDispatchClassWeight: { + normal: 'Weight', + operational: 'Weight', + mandatory: 'Weight' }, /** - * Lookup18: polkadot_primitives::v2::AbridgedHrmpChannel + * Lookup12: sp_runtime::generic::digest::Digest **/ - PolkadotPrimitivesV2AbridgedHrmpChannel: { - maxCapacity: 'u32', - maxTotalSize: 'u32', - maxMessageSize: 'u32', - msgCount: 'u32', - totalSize: 'u32', - mqcHead: 'Option' + SpRuntimeDigest: { + logs: 'Vec' }, /** - * Lookup20: polkadot_primitives::v2::AbridgedHostConfiguration + * Lookup14: sp_runtime::generic::digest::DigestItem **/ - PolkadotPrimitivesV2AbridgedHostConfiguration: { - maxCodeSize: 'u32', - maxHeadDataSize: 'u32', - maxUpwardQueueCount: 'u32', - maxUpwardQueueSize: 'u32', - maxUpwardMessageSize: 'u32', - maxUpwardMessageNumPerCandidate: 'u32', - hrmpMaxMessageNumPerCandidate: 'u32', - validationUpgradeCooldown: 'u32', - validationUpgradeDelay: 'u32' + SpRuntimeDigestDigestItem: { + _enum: { + Other: 'Bytes', + __Unused1: 'Null', + __Unused2: 'Null', + 
__Unused3: 'Null', + Consensus: '([u8;4],Bytes)', + Seal: '([u8;4],Bytes)', + PreRuntime: '([u8;4],Bytes)', + __Unused7: 'Null', + RuntimeEnvironmentUpdated: 'Null' + } }, /** - * Lookup26: polkadot_core_primitives::OutboundHrmpMessage + * Lookup17: frame_system::EventRecord **/ - PolkadotCorePrimitivesOutboundHrmpMessage: { - recipient: 'u32', - data: 'Bytes' + FrameSystemEventRecord: { + phase: 'FrameSystemPhase', + event: 'Event', + topics: 'Vec' }, /** - * Lookup28: cumulus_pallet_parachain_system::pallet::Call + * Lookup19: frame_system::pallet::Event **/ - CumulusPalletParachainSystemCall: { + FrameSystemEvent: { _enum: { - set_validation_data: { - data: 'CumulusPrimitivesParachainInherentParachainInherentData', + ExtrinsicSuccess: { + dispatchInfo: 'FrameSupportDispatchDispatchInfo', }, - sudo_send_upward_message: { - message: 'Bytes', + ExtrinsicFailed: { + dispatchError: 'SpRuntimeDispatchError', + dispatchInfo: 'FrameSupportDispatchDispatchInfo', }, - authorize_upgrade: { - codeHash: 'H256', + CodeUpdated: 'Null', + NewAccount: { + account: 'AccountId32', }, - enact_authorized_upgrade: { - code: 'Bytes' + KilledAccount: { + account: 'AccountId32', + }, + Remarked: { + _alias: { + hash_: 'hash', + }, + sender: 'AccountId32', + hash_: 'H256' } } }, /** - * Lookup29: cumulus_primitives_parachain_inherent::ParachainInherentData + * Lookup20: frame_support::dispatch::DispatchInfo **/ - CumulusPrimitivesParachainInherentParachainInherentData: { - validationData: 'PolkadotPrimitivesV2PersistedValidationData', - relayChainState: 'SpTrieStorageProof', - downwardMessages: 'Vec', - horizontalMessages: 'BTreeMap>' + FrameSupportDispatchDispatchInfo: { + weight: 'Weight', + class: 'FrameSupportDispatchDispatchClass', + paysFee: 'FrameSupportDispatchPays' }, /** - * Lookup31: polkadot_core_primitives::InboundDownwardMessage + * Lookup21: frame_support::dispatch::DispatchClass **/ - PolkadotCorePrimitivesInboundDownwardMessage: { - sentAt: 'u32', - msg: 'Bytes' + FrameSupportDispatchDispatchClass: { + _enum: ['Normal', 'Operational', 'Mandatory'] }, /** - * Lookup34: polkadot_core_primitives::InboundHrmpMessage + * Lookup22: frame_support::dispatch::Pays **/ - PolkadotCorePrimitivesInboundHrmpMessage: { - sentAt: 'u32', - data: 'Bytes' + FrameSupportDispatchPays: { + _enum: ['Yes', 'No'] }, /** - * Lookup37: cumulus_pallet_parachain_system::pallet::Event + * Lookup23: sp_runtime::DispatchError **/ - CumulusPalletParachainSystemEvent: { + SpRuntimeDispatchError: { _enum: { - ValidationFunctionStored: 'Null', - ValidationFunctionApplied: { - relayChainBlockNum: 'u32', - }, - ValidationFunctionDiscarded: 'Null', - UpgradeAuthorized: { - codeHash: 'H256', - }, - DownwardMessagesReceived: { - count: 'u32', - }, - DownwardMessagesProcessed: { - weightUsed: 'u64', - dmqHead: 'H256' - } + Other: 'Null', + CannotLookup: 'Null', + BadOrigin: 'Null', + Module: 'SpRuntimeModuleError', + ConsumerRemaining: 'Null', + NoProviders: 'Null', + TooManyConsumers: 'Null', + Token: 'SpRuntimeTokenError', + Arithmetic: 'SpRuntimeArithmeticError', + Transactional: 'SpRuntimeTransactionalError' } }, /** - * Lookup38: cumulus_pallet_parachain_system::pallet::Error - **/ - CumulusPalletParachainSystemError: { - _enum: ['OverlappingUpgrades', 'ProhibitedByPolkadot', 'TooBig', 'ValidationDataNotAvailable', 'HostConfigurationNotAvailable', 'NotScheduled', 'NothingAuthorized', 'Unauthorized'] - }, - /** - * Lookup41: pallet_balances::AccountData - **/ - PalletBalancesAccountData: { - free: 'u128', - reserved: 'u128', - 
miscFrozen: 'u128', - feeFrozen: 'u128' - }, - /** - * Lookup43: pallet_balances::BalanceLock + * Lookup24: sp_runtime::ModuleError **/ - PalletBalancesBalanceLock: { - id: '[u8;8]', - amount: 'u128', - reasons: 'PalletBalancesReasons' + SpRuntimeModuleError: { + index: 'u8', + error: '[u8;4]' }, /** - * Lookup45: pallet_balances::Reasons + * Lookup25: sp_runtime::TokenError **/ - PalletBalancesReasons: { - _enum: ['Fee', 'Misc', 'All'] + SpRuntimeTokenError: { + _enum: ['NoFunds', 'WouldDie', 'BelowMinimum', 'CannotCreate', 'UnknownAsset', 'Frozen', 'Unsupported'] }, /** - * Lookup48: pallet_balances::ReserveData + * Lookup26: sp_runtime::ArithmeticError **/ - PalletBalancesReserveData: { - id: '[u8;16]', - amount: 'u128' + SpRuntimeArithmeticError: { + _enum: ['Underflow', 'Overflow', 'DivisionByZero'] }, /** - * Lookup51: pallet_balances::Releases + * Lookup27: sp_runtime::TransactionalError **/ - PalletBalancesReleases: { - _enum: ['V1_0_0', 'V2_0_0'] + SpRuntimeTransactionalError: { + _enum: ['LimitReached', 'NoLayer'] }, /** - * Lookup52: pallet_balances::pallet::Call + * Lookup28: cumulus_pallet_parachain_system::pallet::Event **/ - PalletBalancesCall: { + CumulusPalletParachainSystemEvent: { _enum: { - transfer: { - dest: 'MultiAddress', - value: 'Compact', - }, - set_balance: { - who: 'MultiAddress', - newFree: 'Compact', - newReserved: 'Compact', - }, - force_transfer: { - source: 'MultiAddress', - dest: 'MultiAddress', - value: 'Compact', + ValidationFunctionStored: 'Null', + ValidationFunctionApplied: { + relayChainBlockNum: 'u32', }, - transfer_keep_alive: { - dest: 'MultiAddress', - value: 'Compact', + ValidationFunctionDiscarded: 'Null', + UpgradeAuthorized: { + codeHash: 'H256', }, - transfer_all: { - dest: 'MultiAddress', - keepAlive: 'bool', + DownwardMessagesReceived: { + count: 'u32', }, - force_unreserve: { - who: 'MultiAddress', - amount: 'u128' + DownwardMessagesProcessed: { + weightUsed: 'Weight', + dmqHead: 'H256' } } }, /** - * Lookup58: pallet_balances::pallet::Event + * Lookup29: pallet_balances::pallet::Event **/ PalletBalancesEvent: { _enum: { @@ -257,64 +225,25 @@ export default { } }, /** - * Lookup59: frame_support::traits::tokens::misc::BalanceStatus + * Lookup30: frame_support::traits::tokens::misc::BalanceStatus **/ FrameSupportTokensMiscBalanceStatus: { _enum: ['Free', 'Reserved'] }, /** - * Lookup60: pallet_balances::pallet::Error - **/ - PalletBalancesError: { - _enum: ['VestingBalance', 'LiquidityRestrictions', 'InsufficientBalance', 'ExistentialDeposit', 'KeepAlive', 'ExistingVestingSchedule', 'DeadAccount', 'TooManyReserves'] - }, - /** - * Lookup63: pallet_timestamp::pallet::Call - **/ - PalletTimestampCall: { - _enum: { - set: { - now: 'Compact' - } - } - }, - /** - * Lookup66: pallet_transaction_payment::Releases - **/ - PalletTransactionPaymentReleases: { - _enum: ['V1Ancient', 'V2'] - }, - /** - * Lookup67: pallet_treasury::Proposal - **/ - PalletTreasuryProposal: { - proposer: 'AccountId32', - value: 'u128', - beneficiary: 'AccountId32', - bond: 'u128' - }, - /** - * Lookup70: pallet_treasury::pallet::Call + * Lookup31: pallet_transaction_payment::pallet::Event **/ - PalletTreasuryCall: { + PalletTransactionPaymentEvent: { _enum: { - propose_spend: { - value: 'Compact', - beneficiary: 'MultiAddress', - }, - reject_proposal: { - proposalId: 'Compact', - }, - approve_proposal: { - proposalId: 'Compact', - }, - remove_approval: { - proposalId: 'Compact' + TransactionFeePaid: { + who: 'AccountId32', + actualFee: 'u128', + tip: 'u128' } } }, /** - 
* Lookup72: pallet_treasury::pallet::Event + * Lookup32: pallet_treasury::pallet::Event **/ PalletTreasuryEvent: { _enum: { @@ -340,103 +269,52 @@ export default { rolloverBalance: 'u128', }, Deposit: { - value: 'u128' + value: 'u128', + }, + SpendApproved: { + proposalIndex: 'u32', + amount: 'u128', + beneficiary: 'AccountId32' } } }, /** - * Lookup75: frame_support::PalletId - **/ - FrameSupportPalletId: '[u8;8]', - /** - * Lookup76: pallet_treasury::pallet::Error - **/ - PalletTreasuryError: { - _enum: ['InsufficientProposersBalance', 'InvalidIndex', 'TooManyApprovals', 'ProposalNotApproved'] - }, - /** - * Lookup77: pallet_sudo::pallet::Call + * Lookup33: pallet_sudo::pallet::Event **/ - PalletSudoCall: { + PalletSudoEvent: { _enum: { - sudo: { - call: 'Call', + Sudid: { + sudoResult: 'Result', }, - sudo_unchecked_weight: { - call: 'Call', - weight: 'u64', - }, - set_key: { - _alias: { - new_: 'new', - }, - new_: 'MultiAddress', - }, - sudo_as: { - who: 'MultiAddress', - call: 'Call' - } - } - }, - /** - * Lookup79: frame_system::pallet::Call - **/ - FrameSystemCall: { - _enum: { - fill_block: { - ratio: 'Perbill', - }, - remark: { - remark: 'Bytes', - }, - set_heap_pages: { - pages: 'u64', - }, - set_code: { - code: 'Bytes', - }, - set_code_without_checks: { - code: 'Bytes', - }, - set_storage: { - items: 'Vec<(Bytes,Bytes)>', - }, - kill_storage: { - _alias: { - keys_: 'keys', - }, - keys_: 'Vec', - }, - kill_prefix: { - prefix: 'Bytes', - subkeys: 'u32', + KeyChanged: { + oldSudoer: 'Option', }, - remark_with_event: { - remark: 'Bytes' + SudoAsDone: { + sudoResult: 'Result' } } }, /** - * Lookup83: orml_vesting::module::Call + * Lookup37: orml_vesting::module::Event **/ - OrmlVestingModuleCall: { + OrmlVestingModuleEvent: { _enum: { - claim: 'Null', - vested_transfer: { - dest: 'MultiAddress', - schedule: 'OrmlVestingVestingSchedule', + VestingScheduleAdded: { + from: 'AccountId32', + to: 'AccountId32', + vestingSchedule: 'OrmlVestingVestingSchedule', }, - update_vesting_schedules: { - who: 'MultiAddress', - vestingSchedules: 'Vec', + Claimed: { + who: 'AccountId32', + amount: 'u128', }, - claim_for: { - dest: 'MultiAddress' + VestingSchedulesUpdated: { + who: 'AccountId32' } } }, /** - * Lookup84: orml_vesting::VestingSchedule + * Lookup38: orml_vesting::VestingSchedule **/ OrmlVestingVestingSchedule: { start: 'u32', @@ -445,139 +323,66 @@ export default { perPeriod: 'Compact' }, /** - * Lookup86: cumulus_pallet_xcmp_queue::pallet::Call + * Lookup40: orml_xtokens::module::Event **/ - CumulusPalletXcmpQueueCall: { + OrmlXtokensModuleEvent: { _enum: { - service_overweight: { - index: 'u64', - weightLimit: 'u64', - }, - suspend_xcm_execution: 'Null', - resume_xcm_execution: 'Null', - update_suspend_threshold: { - _alias: { - new_: 'new', - }, - new_: 'u32', - }, - update_drop_threshold: { - _alias: { - new_: 'new', - }, - new_: 'u32', - }, - update_resume_threshold: { - _alias: { - new_: 'new', - }, - new_: 'u32', - }, - update_threshold_weight: { - _alias: { - new_: 'new', - }, - new_: 'u64', - }, - update_weight_restrict_decay: { - _alias: { - new_: 'new', - }, - new_: 'u64', - }, - update_xcmp_max_individual_weight: { - _alias: { - new_: 'new', - }, - new_: 'u64' + TransferredMultiAssets: { + sender: 'AccountId32', + assets: 'XcmV1MultiassetMultiAssets', + fee: 'XcmV1MultiAsset', + dest: 'XcmV1MultiLocation' } } }, /** - * Lookup87: pallet_xcm::pallet::Call + * Lookup41: xcm::v1::multiasset::MultiAssets **/ - PalletXcmCall: { - _enum: { - send: { - dest: 
'XcmVersionedMultiLocation', - message: 'XcmVersionedXcm', - }, - teleport_assets: { - dest: 'XcmVersionedMultiLocation', - beneficiary: 'XcmVersionedMultiLocation', - assets: 'XcmVersionedMultiAssets', - feeAssetItem: 'u32', - }, - reserve_transfer_assets: { - dest: 'XcmVersionedMultiLocation', - beneficiary: 'XcmVersionedMultiLocation', - assets: 'XcmVersionedMultiAssets', - feeAssetItem: 'u32', - }, - execute: { - message: 'XcmVersionedXcm', - maxWeight: 'u64', - }, - force_xcm_version: { - location: 'XcmV1MultiLocation', - xcmVersion: 'u32', - }, - force_default_xcm_version: { - maybeXcmVersion: 'Option', - }, - force_subscribe_version_notify: { - location: 'XcmVersionedMultiLocation', - }, - force_unsubscribe_version_notify: { - location: 'XcmVersionedMultiLocation', - }, - limited_reserve_transfer_assets: { - dest: 'XcmVersionedMultiLocation', - beneficiary: 'XcmVersionedMultiLocation', - assets: 'XcmVersionedMultiAssets', - feeAssetItem: 'u32', - weightLimit: 'XcmV2WeightLimit', - }, - limited_teleport_assets: { - dest: 'XcmVersionedMultiLocation', - beneficiary: 'XcmVersionedMultiLocation', - assets: 'XcmVersionedMultiAssets', - feeAssetItem: 'u32', - weightLimit: 'XcmV2WeightLimit' - } - } + XcmV1MultiassetMultiAssets: 'Vec', + /** + * Lookup43: xcm::v1::multiasset::MultiAsset + **/ + XcmV1MultiAsset: { + id: 'XcmV1MultiassetAssetId', + fun: 'XcmV1MultiassetFungibility' }, /** - * Lookup88: xcm::VersionedMultiLocation + * Lookup44: xcm::v1::multiasset::AssetId **/ - XcmVersionedMultiLocation: { + XcmV1MultiassetAssetId: { _enum: { - V0: 'XcmV0MultiLocation', - V1: 'XcmV1MultiLocation' + Concrete: 'XcmV1MultiLocation', + Abstract: 'Bytes' } }, /** - * Lookup89: xcm::v0::multi_location::MultiLocation + * Lookup45: xcm::v1::multilocation::MultiLocation **/ - XcmV0MultiLocation: { + XcmV1MultiLocation: { + parents: 'u8', + interior: 'XcmV1MultilocationJunctions' + }, + /** + * Lookup46: xcm::v1::multilocation::Junctions + **/ + XcmV1MultilocationJunctions: { _enum: { - Null: 'Null', - X1: 'XcmV0Junction', - X2: '(XcmV0Junction,XcmV0Junction)', - X3: '(XcmV0Junction,XcmV0Junction,XcmV0Junction)', - X4: '(XcmV0Junction,XcmV0Junction,XcmV0Junction,XcmV0Junction)', - X5: '(XcmV0Junction,XcmV0Junction,XcmV0Junction,XcmV0Junction,XcmV0Junction)', - X6: '(XcmV0Junction,XcmV0Junction,XcmV0Junction,XcmV0Junction,XcmV0Junction,XcmV0Junction)', - X7: '(XcmV0Junction,XcmV0Junction,XcmV0Junction,XcmV0Junction,XcmV0Junction,XcmV0Junction,XcmV0Junction)', - X8: '(XcmV0Junction,XcmV0Junction,XcmV0Junction,XcmV0Junction,XcmV0Junction,XcmV0Junction,XcmV0Junction,XcmV0Junction)' + Here: 'Null', + X1: 'XcmV1Junction', + X2: '(XcmV1Junction,XcmV1Junction)', + X3: '(XcmV1Junction,XcmV1Junction,XcmV1Junction)', + X4: '(XcmV1Junction,XcmV1Junction,XcmV1Junction,XcmV1Junction)', + X5: '(XcmV1Junction,XcmV1Junction,XcmV1Junction,XcmV1Junction,XcmV1Junction)', + X6: '(XcmV1Junction,XcmV1Junction,XcmV1Junction,XcmV1Junction,XcmV1Junction,XcmV1Junction)', + X7: '(XcmV1Junction,XcmV1Junction,XcmV1Junction,XcmV1Junction,XcmV1Junction,XcmV1Junction,XcmV1Junction)', + X8: '(XcmV1Junction,XcmV1Junction,XcmV1Junction,XcmV1Junction,XcmV1Junction,XcmV1Junction,XcmV1Junction,XcmV1Junction)' } }, /** - * Lookup90: xcm::v0::junction::Junction + * Lookup47: xcm::v1::junction::Junction **/ - XcmV0Junction: { + XcmV1Junction: { _enum: { - Parent: 'Null', Parachain: 'Compact', AccountId32: { network: 'XcmV0JunctionNetworkId', @@ -602,7 +407,7 @@ export default { } }, /** - * Lookup91: xcm::v0::junction::NetworkId + * Lookup49: 
xcm::v0::junction::NetworkId **/ XcmV0JunctionNetworkId: { _enum: { @@ -613,7 +418,7 @@ export default { } }, /** - * Lookup92: xcm::v0::junction::BodyId + * Lookup53: xcm::v0::junction::BodyId **/ XcmV0JunctionBodyId: { _enum: { @@ -627,7 +432,7 @@ export default { } }, /** - * Lookup93: xcm::v0::junction::BodyPart + * Lookup54: xcm::v0::junction::BodyPart **/ XcmV0JunctionBodyPart: { _enum: { @@ -650,256 +455,240 @@ export default { } }, /** - * Lookup94: xcm::v1::multilocation::MultiLocation + * Lookup55: xcm::v1::multiasset::Fungibility **/ - XcmV1MultiLocation: { - parents: 'u8', - interior: 'XcmV1MultilocationJunctions' + XcmV1MultiassetFungibility: { + _enum: { + Fungible: 'Compact', + NonFungible: 'XcmV1MultiassetAssetInstance' + } }, /** - * Lookup95: xcm::v1::multilocation::Junctions + * Lookup56: xcm::v1::multiasset::AssetInstance **/ - XcmV1MultilocationJunctions: { + XcmV1MultiassetAssetInstance: { _enum: { - Here: 'Null', - X1: 'XcmV1Junction', - X2: '(XcmV1Junction,XcmV1Junction)', - X3: '(XcmV1Junction,XcmV1Junction,XcmV1Junction)', - X4: '(XcmV1Junction,XcmV1Junction,XcmV1Junction,XcmV1Junction)', - X5: '(XcmV1Junction,XcmV1Junction,XcmV1Junction,XcmV1Junction,XcmV1Junction)', - X6: '(XcmV1Junction,XcmV1Junction,XcmV1Junction,XcmV1Junction,XcmV1Junction,XcmV1Junction)', - X7: '(XcmV1Junction,XcmV1Junction,XcmV1Junction,XcmV1Junction,XcmV1Junction,XcmV1Junction,XcmV1Junction)', - X8: '(XcmV1Junction,XcmV1Junction,XcmV1Junction,XcmV1Junction,XcmV1Junction,XcmV1Junction,XcmV1Junction,XcmV1Junction)' + Undefined: 'Null', + Index: 'Compact', + Array4: '[u8;4]', + Array8: '[u8;8]', + Array16: '[u8;16]', + Array32: '[u8;32]', + Blob: 'Bytes' } }, /** - * Lookup96: xcm::v1::junction::Junction + * Lookup59: orml_tokens::module::Event **/ - XcmV1Junction: { + OrmlTokensModuleEvent: { _enum: { - Parachain: 'Compact', - AccountId32: { - network: 'XcmV0JunctionNetworkId', - id: '[u8;32]', + Endowed: { + currencyId: 'PalletForeignAssetsAssetIds', + who: 'AccountId32', + amount: 'u128', }, - AccountIndex64: { - network: 'XcmV0JunctionNetworkId', - index: 'Compact', + DustLost: { + currencyId: 'PalletForeignAssetsAssetIds', + who: 'AccountId32', + amount: 'u128', }, - AccountKey20: { - network: 'XcmV0JunctionNetworkId', - key: '[u8;20]', + Transfer: { + currencyId: 'PalletForeignAssetsAssetIds', + from: 'AccountId32', + to: 'AccountId32', + amount: 'u128', }, - PalletInstance: 'u8', - GeneralIndex: 'Compact', - GeneralKey: 'Bytes', - OnlyChild: 'Null', - Plurality: { - id: 'XcmV0JunctionBodyId', - part: 'XcmV0JunctionBodyPart' - } - } - }, - /** - * Lookup97: xcm::VersionedXcm - **/ - XcmVersionedXcm: { - _enum: { - V0: 'XcmV0Xcm', - V1: 'XcmV1Xcm', - V2: 'XcmV2Xcm' - } - }, - /** - * Lookup98: xcm::v0::Xcm - **/ - XcmV0Xcm: { - _enum: { - WithdrawAsset: { - assets: 'Vec', - effects: 'Vec', - }, - ReserveAssetDeposit: { - assets: 'Vec', - effects: 'Vec', + Reserved: { + currencyId: 'PalletForeignAssetsAssetIds', + who: 'AccountId32', + amount: 'u128', }, - TeleportAsset: { - assets: 'Vec', - effects: 'Vec', + Unreserved: { + currencyId: 'PalletForeignAssetsAssetIds', + who: 'AccountId32', + amount: 'u128', }, - QueryResponse: { - queryId: 'Compact', - response: 'XcmV0Response', + ReserveRepatriated: { + currencyId: 'PalletForeignAssetsAssetIds', + from: 'AccountId32', + to: 'AccountId32', + amount: 'u128', + status: 'FrameSupportTokensMiscBalanceStatus', }, - TransferAsset: { - assets: 'Vec', - dest: 'XcmV0MultiLocation', + BalanceSet: { + currencyId: 'PalletForeignAssetsAssetIds', + who: 
'AccountId32', + free: 'u128', + reserved: 'u128', }, - TransferReserveAsset: { - assets: 'Vec', - dest: 'XcmV0MultiLocation', - effects: 'Vec', + TotalIssuanceSet: { + currencyId: 'PalletForeignAssetsAssetIds', + amount: 'u128', }, - Transact: { - originType: 'XcmV0OriginKind', - requireWeightAtMost: 'u64', - call: 'XcmDoubleEncoded', + Withdrawn: { + currencyId: 'PalletForeignAssetsAssetIds', + who: 'AccountId32', + amount: 'u128', }, - HrmpNewChannelOpenRequest: { - sender: 'Compact', - maxMessageSize: 'Compact', - maxCapacity: 'Compact', + Slashed: { + currencyId: 'PalletForeignAssetsAssetIds', + who: 'AccountId32', + freeAmount: 'u128', + reservedAmount: 'u128', }, - HrmpChannelAccepted: { - recipient: 'Compact', + Deposited: { + currencyId: 'PalletForeignAssetsAssetIds', + who: 'AccountId32', + amount: 'u128', }, - HrmpChannelClosing: { - initiator: 'Compact', - sender: 'Compact', - recipient: 'Compact', + LockSet: { + lockId: '[u8;8]', + currencyId: 'PalletForeignAssetsAssetIds', + who: 'AccountId32', + amount: 'u128', }, - RelayedFrom: { - who: 'XcmV0MultiLocation', - message: 'XcmV0Xcm' + LockRemoved: { + lockId: '[u8;8]', + currencyId: 'PalletForeignAssetsAssetIds', + who: 'AccountId32' } } }, /** - * Lookup100: xcm::v0::multi_asset::MultiAsset + * Lookup60: pallet_foreign_assets::AssetIds **/ - XcmV0MultiAsset: { + PalletForeignAssetsAssetIds: { _enum: { - None: 'Null', - All: 'Null', - AllFungible: 'Null', - AllNonFungible: 'Null', - AllAbstractFungible: { - id: 'Bytes', - }, - AllAbstractNonFungible: { - class: 'Bytes', - }, - AllConcreteFungible: { - id: 'XcmV0MultiLocation', - }, - AllConcreteNonFungible: { - class: 'XcmV0MultiLocation', - }, - AbstractFungible: { - id: 'Bytes', - amount: 'Compact', - }, - AbstractNonFungible: { - class: 'Bytes', - instance: 'XcmV1MultiassetAssetInstance', - }, - ConcreteFungible: { - id: 'XcmV0MultiLocation', - amount: 'Compact', - }, - ConcreteNonFungible: { - class: 'XcmV0MultiLocation', - instance: 'XcmV1MultiassetAssetInstance' - } + ForeignAssetId: 'u32', + NativeAssetId: 'PalletForeignAssetsNativeCurrency' } }, /** - * Lookup101: xcm::v1::multiasset::AssetInstance + * Lookup61: pallet_foreign_assets::NativeCurrency **/ - XcmV1MultiassetAssetInstance: { - _enum: { - Undefined: 'Null', - Index: 'Compact', - Array4: '[u8;4]', - Array8: '[u8;8]', - Array16: '[u8;16]', - Array32: '[u8;32]', - Blob: 'Bytes' - } + PalletForeignAssetsNativeCurrency: { + _enum: ['Here', 'Parent'] }, /** - * Lookup104: xcm::v0::order::Order + * Lookup62: cumulus_pallet_xcmp_queue::pallet::Event **/ - XcmV0Order: { + CumulusPalletXcmpQueueEvent: { _enum: { - Null: 'Null', - DepositAsset: { - assets: 'Vec', - dest: 'XcmV0MultiLocation', + Success: { + messageHash: 'Option', + weight: 'Weight', }, - DepositReserveAsset: { - assets: 'Vec', - dest: 'XcmV0MultiLocation', - effects: 'Vec', + Fail: { + messageHash: 'Option', + error: 'XcmV2TraitsError', + weight: 'Weight', }, - ExchangeAsset: { - give: 'Vec', - receive: 'Vec', + BadVersion: { + messageHash: 'Option', }, - InitiateReserveWithdraw: { - assets: 'Vec', - reserve: 'XcmV0MultiLocation', - effects: 'Vec', + BadFormat: { + messageHash: 'Option', }, - InitiateTeleport: { - assets: 'Vec', - dest: 'XcmV0MultiLocation', - effects: 'Vec', + UpwardMessageSent: { + messageHash: 'Option', }, - QueryHolding: { - queryId: 'Compact', - dest: 'XcmV0MultiLocation', - assets: 'Vec', + XcmpMessageSent: { + messageHash: 'Option', }, - BuyExecution: { - fees: 'XcmV0MultiAsset', - weight: 'u64', - debt: 'u64', - haltOnError: 
'bool', - xcm: 'Vec' + OverweightEnqueued: { + sender: 'u32', + sentAt: 'u32', + index: 'u64', + required: 'Weight', + }, + OverweightServiced: { + index: 'u64', + used: 'Weight' } } }, /** - * Lookup106: xcm::v0::Response + * Lookup64: xcm::v2::traits::Error **/ - XcmV0Response: { + XcmV2TraitsError: { _enum: { - Assets: 'Vec' + Overflow: 'Null', + Unimplemented: 'Null', + UntrustedReserveLocation: 'Null', + UntrustedTeleportLocation: 'Null', + MultiLocationFull: 'Null', + MultiLocationNotInvertible: 'Null', + BadOrigin: 'Null', + InvalidLocation: 'Null', + AssetNotFound: 'Null', + FailedToTransactAsset: 'Null', + NotWithdrawable: 'Null', + LocationCannotHold: 'Null', + ExceedsMaxMessageSize: 'Null', + DestinationUnsupported: 'Null', + Transport: 'Null', + Unroutable: 'Null', + UnknownClaim: 'Null', + FailedToDecode: 'Null', + MaxWeightInvalid: 'Null', + NotHoldingFees: 'Null', + TooExpensive: 'Null', + Trap: 'u64', + UnhandledXcmVersion: 'Null', + WeightLimitReached: 'u64', + Barrier: 'Null', + WeightNotComputable: 'Null' } }, /** - * Lookup107: xcm::v0::OriginKind + * Lookup66: pallet_xcm::pallet::Event **/ - XcmV0OriginKind: { - _enum: ['Native', 'SovereignAccount', 'Superuser', 'Xcm'] + PalletXcmEvent: { + _enum: { + Attempted: 'XcmV2TraitsOutcome', + Sent: '(XcmV1MultiLocation,XcmV1MultiLocation,XcmV2Xcm)', + UnexpectedResponse: '(XcmV1MultiLocation,u64)', + ResponseReady: '(u64,XcmV2Response)', + Notified: '(u64,u8,u8)', + NotifyOverweight: '(u64,u8,u8,Weight,Weight)', + NotifyDispatchError: '(u64,u8,u8)', + NotifyDecodeFailed: '(u64,u8,u8)', + InvalidResponder: '(XcmV1MultiLocation,u64,Option)', + InvalidResponderVersion: '(XcmV1MultiLocation,u64)', + ResponseTaken: 'u64', + AssetsTrapped: '(H256,XcmV1MultiLocation,XcmVersionedMultiAssets)', + VersionChangeNotified: '(XcmV1MultiLocation,u32)', + SupportedVersionChanged: '(XcmV1MultiLocation,u32)', + NotifyTargetSendFail: '(XcmV1MultiLocation,u64,XcmV2TraitsError)', + NotifyTargetMigrationFail: '(XcmVersionedMultiLocation,u64)' + } }, /** - * Lookup108: xcm::double_encoded::DoubleEncoded + * Lookup67: xcm::v2::traits::Outcome **/ - XcmDoubleEncoded: { - encoded: 'Bytes' + XcmV2TraitsOutcome: { + _enum: { + Complete: 'u64', + Incomplete: '(u64,XcmV2TraitsError)', + Error: 'XcmV2TraitsError' + } }, /** - * Lookup109: xcm::v1::Xcm + * Lookup68: xcm::v2::Xcm **/ - XcmV1Xcm: { + XcmV2Xcm: 'Vec', + /** + * Lookup70: xcm::v2::Instruction + **/ + XcmV2Instruction: { _enum: { - WithdrawAsset: { - assets: 'XcmV1MultiassetMultiAssets', - effects: 'Vec', - }, - ReserveAssetDeposited: { - assets: 'XcmV1MultiassetMultiAssets', - effects: 'Vec', - }, - ReceiveTeleportedAsset: { - assets: 'XcmV1MultiassetMultiAssets', - effects: 'Vec', - }, + WithdrawAsset: 'XcmV1MultiassetMultiAssets', + ReserveAssetDeposited: 'XcmV1MultiassetMultiAssets', + ReceiveTeleportedAsset: 'XcmV1MultiassetMultiAssets', QueryResponse: { queryId: 'Compact', - response: 'XcmV1Response', + response: 'XcmV2Response', + maxWeight: 'Compact', }, TransferAsset: { assets: 'XcmV1MultiassetMultiAssets', @@ -908,11 +697,11 @@ export default { TransferReserveAsset: { assets: 'XcmV1MultiassetMultiAssets', dest: 'XcmV1MultiLocation', - effects: 'Vec', + xcm: 'XcmV2Xcm', }, Transact: { originType: 'XcmV0OriginKind', - requireWeightAtMost: 'u64', + requireWeightAtMost: 'Compact', call: 'XcmDoubleEncoded', }, HrmpNewChannelOpenRequest: { @@ -928,176 +717,11 @@ export default { sender: 'Compact', recipient: 'Compact', }, - RelayedFrom: { - who: 'XcmV1MultilocationJunctions', - message: 
'XcmV1Xcm', - }, - SubscribeVersion: { + ClearOrigin: 'Null', + DescendOrigin: 'XcmV1MultilocationJunctions', + ReportError: { queryId: 'Compact', - maxResponseWeight: 'Compact', - }, - UnsubscribeVersion: 'Null' - } - }, - /** - * Lookup110: xcm::v1::multiasset::MultiAssets - **/ - XcmV1MultiassetMultiAssets: 'Vec', - /** - * Lookup112: xcm::v1::multiasset::MultiAsset - **/ - XcmV1MultiAsset: { - id: 'XcmV1MultiassetAssetId', - fun: 'XcmV1MultiassetFungibility' - }, - /** - * Lookup113: xcm::v1::multiasset::AssetId - **/ - XcmV1MultiassetAssetId: { - _enum: { - Concrete: 'XcmV1MultiLocation', - Abstract: 'Bytes' - } - }, - /** - * Lookup114: xcm::v1::multiasset::Fungibility - **/ - XcmV1MultiassetFungibility: { - _enum: { - Fungible: 'Compact', - NonFungible: 'XcmV1MultiassetAssetInstance' - } - }, - /** - * Lookup116: xcm::v1::order::Order - **/ - XcmV1Order: { - _enum: { - Noop: 'Null', - DepositAsset: { - assets: 'XcmV1MultiassetMultiAssetFilter', - maxAssets: 'u32', - beneficiary: 'XcmV1MultiLocation', - }, - DepositReserveAsset: { - assets: 'XcmV1MultiassetMultiAssetFilter', - maxAssets: 'u32', - dest: 'XcmV1MultiLocation', - effects: 'Vec', - }, - ExchangeAsset: { - give: 'XcmV1MultiassetMultiAssetFilter', - receive: 'XcmV1MultiassetMultiAssets', - }, - InitiateReserveWithdraw: { - assets: 'XcmV1MultiassetMultiAssetFilter', - reserve: 'XcmV1MultiLocation', - effects: 'Vec', - }, - InitiateTeleport: { - assets: 'XcmV1MultiassetMultiAssetFilter', - dest: 'XcmV1MultiLocation', - effects: 'Vec', - }, - QueryHolding: { - queryId: 'Compact', - dest: 'XcmV1MultiLocation', - assets: 'XcmV1MultiassetMultiAssetFilter', - }, - BuyExecution: { - fees: 'XcmV1MultiAsset', - weight: 'u64', - debt: 'u64', - haltOnError: 'bool', - instructions: 'Vec' - } - } - }, - /** - * Lookup117: xcm::v1::multiasset::MultiAssetFilter - **/ - XcmV1MultiassetMultiAssetFilter: { - _enum: { - Definite: 'XcmV1MultiassetMultiAssets', - Wild: 'XcmV1MultiassetWildMultiAsset' - } - }, - /** - * Lookup118: xcm::v1::multiasset::WildMultiAsset - **/ - XcmV1MultiassetWildMultiAsset: { - _enum: { - All: 'Null', - AllOf: { - id: 'XcmV1MultiassetAssetId', - fun: 'XcmV1MultiassetWildFungibility' - } - } - }, - /** - * Lookup119: xcm::v1::multiasset::WildFungibility - **/ - XcmV1MultiassetWildFungibility: { - _enum: ['Fungible', 'NonFungible'] - }, - /** - * Lookup121: xcm::v1::Response - **/ - XcmV1Response: { - _enum: { - Assets: 'XcmV1MultiassetMultiAssets', - Version: 'u32' - } - }, - /** - * Lookup122: xcm::v2::Xcm - **/ - XcmV2Xcm: 'Vec', - /** - * Lookup124: xcm::v2::Instruction - **/ - XcmV2Instruction: { - _enum: { - WithdrawAsset: 'XcmV1MultiassetMultiAssets', - ReserveAssetDeposited: 'XcmV1MultiassetMultiAssets', - ReceiveTeleportedAsset: 'XcmV1MultiassetMultiAssets', - QueryResponse: { - queryId: 'Compact', - response: 'XcmV2Response', - maxWeight: 'Compact', - }, - TransferAsset: { - assets: 'XcmV1MultiassetMultiAssets', - beneficiary: 'XcmV1MultiLocation', - }, - TransferReserveAsset: { - assets: 'XcmV1MultiassetMultiAssets', - dest: 'XcmV1MultiLocation', - xcm: 'XcmV2Xcm', - }, - Transact: { - originType: 'XcmV0OriginKind', - requireWeightAtMost: 'Compact', - call: 'XcmDoubleEncoded', - }, - HrmpNewChannelOpenRequest: { - sender: 'Compact', - maxMessageSize: 'Compact', - maxCapacity: 'Compact', - }, - HrmpChannelAccepted: { - recipient: 'Compact', - }, - HrmpChannelClosing: { - initiator: 'Compact', - sender: 'Compact', - recipient: 'Compact', - }, - ClearOrigin: 'Null', - DescendOrigin: 
'XcmV1MultilocationJunctions', - ReportError: { - queryId: 'Compact', - dest: 'XcmV1MultiLocation', + dest: 'XcmV1MultiLocation', maxResponseWeight: 'Compact', }, DepositAsset: { @@ -1152,7 +776,7 @@ export default { } }, /** - * Lookup125: xcm::v2::Response + * Lookup71: xcm::v2::Response **/ XcmV2Response: { _enum: { @@ -1163,40 +787,46 @@ export default { } }, /** - * Lookup128: xcm::v2::traits::Error + * Lookup74: xcm::v0::OriginKind **/ - XcmV2TraitsError: { + XcmV0OriginKind: { + _enum: ['Native', 'SovereignAccount', 'Superuser', 'Xcm'] + }, + /** + * Lookup75: xcm::double_encoded::DoubleEncoded + **/ + XcmDoubleEncoded: { + encoded: 'Bytes' + }, + /** + * Lookup76: xcm::v1::multiasset::MultiAssetFilter + **/ + XcmV1MultiassetMultiAssetFilter: { _enum: { - Overflow: 'Null', - Unimplemented: 'Null', - UntrustedReserveLocation: 'Null', - UntrustedTeleportLocation: 'Null', - MultiLocationFull: 'Null', - MultiLocationNotInvertible: 'Null', - BadOrigin: 'Null', - InvalidLocation: 'Null', - AssetNotFound: 'Null', - FailedToTransactAsset: 'Null', - NotWithdrawable: 'Null', - LocationCannotHold: 'Null', - ExceedsMaxMessageSize: 'Null', - DestinationUnsupported: 'Null', - Transport: 'Null', - Unroutable: 'Null', - UnknownClaim: 'Null', - FailedToDecode: 'Null', - MaxWeightInvalid: 'Null', - NotHoldingFees: 'Null', - TooExpensive: 'Null', - Trap: 'u64', - UnhandledXcmVersion: 'Null', - WeightLimitReached: 'u64', - Barrier: 'Null', - WeightNotComputable: 'Null' + Definite: 'XcmV1MultiassetMultiAssets', + Wild: 'XcmV1MultiassetWildMultiAsset' + } + }, + /** + * Lookup77: xcm::v1::multiasset::WildMultiAsset + **/ + XcmV1MultiassetWildMultiAsset: { + _enum: { + All: 'Null', + AllOf: { + id: 'XcmV1MultiassetAssetId', + fun: 'XcmV1MultiassetWildFungibility' + } } }, /** - * Lookup129: xcm::v2::WeightLimit + * Lookup78: xcm::v1::multiasset::WildFungibility + **/ + XcmV1MultiassetWildFungibility: { + _enum: ['Fungible', 'NonFungible'] + }, + /** + * Lookup79: xcm::v2::WeightLimit **/ XcmV2WeightLimit: { _enum: { @@ -1205,7 +835,7 @@ export default { } }, /** - * Lookup130: xcm::VersionedMultiAssets + * Lookup81: xcm::VersionedMultiAssets **/ XcmVersionedMultiAssets: { _enum: { @@ -1214,1347 +844,2143 @@ export default { } }, /** - * Lookup145: cumulus_pallet_xcm::pallet::Call - **/ - CumulusPalletXcmCall: 'Null', - /** - * Lookup146: cumulus_pallet_dmp_queue::pallet::Call + * Lookup83: xcm::v0::multi_asset::MultiAsset **/ - CumulusPalletDmpQueueCall: { + XcmV0MultiAsset: { _enum: { - service_overweight: { - index: 'u64', - weightLimit: 'u64' + None: 'Null', + All: 'Null', + AllFungible: 'Null', + AllNonFungible: 'Null', + AllAbstractFungible: { + id: 'Bytes', + }, + AllAbstractNonFungible: { + class: 'Bytes', + }, + AllConcreteFungible: { + id: 'XcmV0MultiLocation', + }, + AllConcreteNonFungible: { + class: 'XcmV0MultiLocation', + }, + AbstractFungible: { + id: 'Bytes', + amount: 'Compact', + }, + AbstractNonFungible: { + class: 'Bytes', + instance: 'XcmV1MultiassetAssetInstance', + }, + ConcreteFungible: { + id: 'XcmV0MultiLocation', + amount: 'Compact', + }, + ConcreteNonFungible: { + class: 'XcmV0MultiLocation', + instance: 'XcmV1MultiassetAssetInstance' } } }, /** - * Lookup147: pallet_inflation::pallet::Call + * Lookup84: xcm::v0::multi_location::MultiLocation **/ - PalletInflationCall: { + XcmV0MultiLocation: { _enum: { - start_inflation: { - inflationStartRelayBlock: 'u32' - } + Null: 'Null', + X1: 'XcmV0Junction', + X2: '(XcmV0Junction,XcmV0Junction)', + X3: 
'(XcmV0Junction,XcmV0Junction,XcmV0Junction)', + X4: '(XcmV0Junction,XcmV0Junction,XcmV0Junction,XcmV0Junction)', + X5: '(XcmV0Junction,XcmV0Junction,XcmV0Junction,XcmV0Junction,XcmV0Junction)', + X6: '(XcmV0Junction,XcmV0Junction,XcmV0Junction,XcmV0Junction,XcmV0Junction,XcmV0Junction)', + X7: '(XcmV0Junction,XcmV0Junction,XcmV0Junction,XcmV0Junction,XcmV0Junction,XcmV0Junction,XcmV0Junction)', + X8: '(XcmV0Junction,XcmV0Junction,XcmV0Junction,XcmV0Junction,XcmV0Junction,XcmV0Junction,XcmV0Junction,XcmV0Junction)' } }, /** - * Lookup148: pallet_unique::Call + * Lookup85: xcm::v0::junction::Junction **/ - PalletUniqueCall: { + XcmV0Junction: { _enum: { - create_collection: { - collectionName: 'Vec', - collectionDescription: 'Vec', - tokenPrefix: 'Bytes', - mode: 'UpDataStructsCollectionMode', - }, - create_collection_ex: { - data: 'UpDataStructsCreateCollectionData', + Parent: 'Null', + Parachain: 'Compact', + AccountId32: { + network: 'XcmV0JunctionNetworkId', + id: '[u8;32]', }, - destroy_collection: { - collectionId: 'u32', + AccountIndex64: { + network: 'XcmV0JunctionNetworkId', + index: 'Compact', }, - add_to_allow_list: { - collectionId: 'u32', - address: 'PalletEvmAccountBasicCrossAccountIdRepr', + AccountKey20: { + network: 'XcmV0JunctionNetworkId', + key: '[u8;20]', }, - remove_from_allow_list: { - collectionId: 'u32', - address: 'PalletEvmAccountBasicCrossAccountIdRepr', + PalletInstance: 'u8', + GeneralIndex: 'Compact', + GeneralKey: 'Bytes', + OnlyChild: 'Null', + Plurality: { + id: 'XcmV0JunctionBodyId', + part: 'XcmV0JunctionBodyPart' + } + } + }, + /** + * Lookup86: xcm::VersionedMultiLocation + **/ + XcmVersionedMultiLocation: { + _enum: { + V0: 'XcmV0MultiLocation', + V1: 'XcmV1MultiLocation' + } + }, + /** + * Lookup87: cumulus_pallet_xcm::pallet::Event + **/ + CumulusPalletXcmEvent: { + _enum: { + InvalidFormat: '[u8;8]', + UnsupportedVersion: '[u8;8]', + ExecutedDownward: '([u8;8],XcmV2TraitsOutcome)' + } + }, + /** + * Lookup88: cumulus_pallet_dmp_queue::pallet::Event + **/ + CumulusPalletDmpQueueEvent: { + _enum: { + InvalidFormat: { + messageId: '[u8;32]', }, - change_collection_owner: { - collectionId: 'u32', - newOwner: 'AccountId32', + UnsupportedVersion: { + messageId: '[u8;32]', }, - add_collection_admin: { - collectionId: 'u32', - newAdminId: 'PalletEvmAccountBasicCrossAccountIdRepr', + ExecutedDownward: { + messageId: '[u8;32]', + outcome: 'XcmV2TraitsOutcome', }, - remove_collection_admin: { - collectionId: 'u32', - accountId: 'PalletEvmAccountBasicCrossAccountIdRepr', + WeightExhausted: { + messageId: '[u8;32]', + remainingWeight: 'Weight', + requiredWeight: 'Weight', }, - set_collection_sponsor: { - collectionId: 'u32', - newSponsor: 'AccountId32', + OverweightEnqueued: { + messageId: '[u8;32]', + overweightIndex: 'u64', + requiredWeight: 'Weight', }, - confirm_sponsorship: { - collectionId: 'u32', + OverweightServiced: { + overweightIndex: 'u64', + weightUsed: 'Weight' + } + } + }, + /** + * Lookup89: pallet_unique::RawEvent> + **/ + PalletUniqueRawEvent: { + _enum: { + CollectionSponsorRemoved: 'u32', + CollectionAdminAdded: '(u32,PalletEvmAccountBasicCrossAccountIdRepr)', + CollectionOwnedChanged: '(u32,AccountId32)', + CollectionSponsorSet: '(u32,AccountId32)', + SponsorshipConfirmed: '(u32,AccountId32)', + CollectionAdminRemoved: '(u32,PalletEvmAccountBasicCrossAccountIdRepr)', + AllowListAddressRemoved: '(u32,PalletEvmAccountBasicCrossAccountIdRepr)', + AllowListAddressAdded: '(u32,PalletEvmAccountBasicCrossAccountIdRepr)', + CollectionLimitSet: 
'u32', + CollectionPermissionSet: 'u32' + } + }, + /** + * Lookup90: pallet_evm::account::BasicCrossAccountIdRepr + **/ + PalletEvmAccountBasicCrossAccountIdRepr: { + _enum: { + Substrate: 'AccountId32', + Ethereum: 'H160' + } + }, + /** + * Lookup93: pallet_unique_scheduler::pallet::Event + **/ + PalletUniqueSchedulerEvent: { + _enum: { + Scheduled: { + when: 'u32', + index: 'u32', }, - remove_collection_sponsor: { - collectionId: 'u32', + Canceled: { + when: 'u32', + index: 'u32', }, - create_item: { - collectionId: 'u32', - owner: 'PalletEvmAccountBasicCrossAccountIdRepr', - data: 'UpDataStructsCreateItemData', + PriorityChanged: { + when: 'u32', + index: 'u32', + priority: 'u8', }, - create_multiple_items: { - collectionId: 'u32', - owner: 'PalletEvmAccountBasicCrossAccountIdRepr', - itemsData: 'Vec', + Dispatched: { + task: '(u32,u32)', + id: 'Option<[u8;16]>', + result: 'Result', }, - set_collection_properties: { + CallLookupFailed: { + task: '(u32,u32)', + id: 'Option<[u8;16]>', + error: 'FrameSupportScheduleLookupError' + } + } + }, + /** + * Lookup96: frame_support::traits::schedule::LookupError + **/ + FrameSupportScheduleLookupError: { + _enum: ['Unknown', 'BadFormat'] + }, + /** + * Lookup97: pallet_common::pallet::Event + **/ + PalletCommonEvent: { + _enum: { + CollectionCreated: '(u32,u8,AccountId32)', + CollectionDestroyed: 'u32', + ItemCreated: '(u32,u32,PalletEvmAccountBasicCrossAccountIdRepr,u128)', + ItemDestroyed: '(u32,u32,PalletEvmAccountBasicCrossAccountIdRepr,u128)', + Transfer: '(u32,u32,PalletEvmAccountBasicCrossAccountIdRepr,PalletEvmAccountBasicCrossAccountIdRepr,u128)', + Approved: '(u32,u32,PalletEvmAccountBasicCrossAccountIdRepr,PalletEvmAccountBasicCrossAccountIdRepr,u128)', + CollectionPropertySet: '(u32,Bytes)', + CollectionPropertyDeleted: '(u32,Bytes)', + TokenPropertySet: '(u32,u32,Bytes)', + TokenPropertyDeleted: '(u32,u32,Bytes)', + PropertyPermissionSet: '(u32,Bytes)' + } + }, + /** + * Lookup100: pallet_structure::pallet::Event + **/ + PalletStructureEvent: { + _enum: { + Executed: 'Result' + } + }, + /** + * Lookup101: pallet_rmrk_core::pallet::Event + **/ + PalletRmrkCoreEvent: { + _enum: { + CollectionCreated: { + issuer: 'AccountId32', collectionId: 'u32', - properties: 'Vec', }, - delete_collection_properties: { + CollectionDestroyed: { + issuer: 'AccountId32', collectionId: 'u32', - propertyKeys: 'Vec', }, - set_token_properties: { + IssuerChanged: { + oldIssuer: 'AccountId32', + newIssuer: 'AccountId32', collectionId: 'u32', - tokenId: 'u32', - properties: 'Vec', }, - delete_token_properties: { + CollectionLocked: { + issuer: 'AccountId32', collectionId: 'u32', - tokenId: 'u32', - propertyKeys: 'Vec', }, - set_token_property_permissions: { + NftMinted: { + owner: 'AccountId32', collectionId: 'u32', - propertyPermissions: 'Vec', + nftId: 'u32', }, - create_multiple_items_ex: { - collectionId: 'u32', - data: 'UpDataStructsCreateItemExData', + NFTBurned: { + owner: 'AccountId32', + nftId: 'u32', }, - set_transfers_enabled_flag: { + NFTSent: { + sender: 'AccountId32', + recipient: 'RmrkTraitsNftAccountIdOrCollectionNftTuple', collectionId: 'u32', - value: 'bool', + nftId: 'u32', + approvalRequired: 'bool', }, - burn_item: { + NFTAccepted: { + sender: 'AccountId32', + recipient: 'RmrkTraitsNftAccountIdOrCollectionNftTuple', collectionId: 'u32', - itemId: 'u32', - value: 'u128', + nftId: 'u32', }, - burn_from: { + NFTRejected: { + sender: 'AccountId32', collectionId: 'u32', - from: 'PalletEvmAccountBasicCrossAccountIdRepr', - itemId: 'u32', - 
value: 'u128', + nftId: 'u32', }, - transfer: { - recipient: 'PalletEvmAccountBasicCrossAccountIdRepr', + PropertySet: { collectionId: 'u32', - itemId: 'u32', - value: 'u128', + maybeNftId: 'Option', + key: 'Bytes', + value: 'Bytes', }, - approve: { - spender: 'PalletEvmAccountBasicCrossAccountIdRepr', - collectionId: 'u32', - itemId: 'u32', - amount: 'u128', + ResourceAdded: { + nftId: 'u32', + resourceId: 'u32', }, - transfer_from: { - from: 'PalletEvmAccountBasicCrossAccountIdRepr', - recipient: 'PalletEvmAccountBasicCrossAccountIdRepr', - collectionId: 'u32', - itemId: 'u32', - value: 'u128', + ResourceRemoval: { + nftId: 'u32', + resourceId: 'u32', }, - set_collection_limits: { - collectionId: 'u32', - newLimit: 'UpDataStructsCollectionLimits', + ResourceAccepted: { + nftId: 'u32', + resourceId: 'u32', }, - set_collection_permissions: { - collectionId: 'u32', - newLimit: 'UpDataStructsCollectionPermissions', + ResourceRemovalAccepted: { + nftId: 'u32', + resourceId: 'u32', }, - repartition: { + PrioritySet: { collectionId: 'u32', - token: 'u32', - amount: 'u128' + nftId: 'u32' } } }, /** - * Lookup154: up_data_structs::CollectionMode + * Lookup102: rmrk_traits::nft::AccountIdOrCollectionNftTuple **/ - UpDataStructsCollectionMode: { + RmrkTraitsNftAccountIdOrCollectionNftTuple: { _enum: { - NFT: 'Null', - Fungible: 'u8', - ReFungible: 'Null' + AccountId: 'AccountId32', + CollectionAndNftTuple: '(u32,u32)' } }, /** - * Lookup155: up_data_structs::CreateCollectionData + * Lookup107: pallet_rmrk_equip::pallet::Event **/ - UpDataStructsCreateCollectionData: { - mode: 'UpDataStructsCollectionMode', - access: 'Option', - name: 'Vec', - description: 'Vec', - tokenPrefix: 'Bytes', - pendingSponsor: 'Option', - limits: 'Option', - permissions: 'Option', - tokenPropertyPermissions: 'Vec', - properties: 'Vec' + PalletRmrkEquipEvent: { + _enum: { + BaseCreated: { + issuer: 'AccountId32', + baseId: 'u32', + }, + EquippablesUpdated: { + baseId: 'u32', + slotId: 'u32' + } + } }, /** - * Lookup157: up_data_structs::AccessMode + * Lookup108: pallet_app_promotion::pallet::Event **/ - UpDataStructsAccessMode: { - _enum: ['Normal', 'AllowList'] + PalletAppPromotionEvent: { + _enum: { + StakingRecalculation: '(AccountId32,u128,u128)', + Stake: '(AccountId32,u128)', + Unstake: '(AccountId32,u128)', + SetAdmin: 'AccountId32' + } }, /** - * Lookup160: up_data_structs::CollectionLimits + * Lookup109: pallet_foreign_assets::module::Event **/ - UpDataStructsCollectionLimits: { - accountTokenOwnershipLimit: 'Option', - sponsoredDataSize: 'Option', - sponsoredDataRateLimit: 'Option', - tokenLimit: 'Option', - sponsorTransferTimeout: 'Option', - sponsorApproveTimeout: 'Option', - ownerCanTransfer: 'Option', - ownerCanDestroy: 'Option', - transfersEnabled: 'Option' + PalletForeignAssetsModuleEvent: { + _enum: { + ForeignAssetRegistered: { + assetId: 'u32', + assetAddress: 'XcmV1MultiLocation', + metadata: 'PalletForeignAssetsModuleAssetMetadata', + }, + ForeignAssetUpdated: { + assetId: 'u32', + assetAddress: 'XcmV1MultiLocation', + metadata: 'PalletForeignAssetsModuleAssetMetadata', + }, + AssetRegistered: { + assetId: 'PalletForeignAssetsAssetIds', + metadata: 'PalletForeignAssetsModuleAssetMetadata', + }, + AssetUpdated: { + assetId: 'PalletForeignAssetsAssetIds', + metadata: 'PalletForeignAssetsModuleAssetMetadata' + } + } }, /** - * Lookup162: up_data_structs::SponsoringRateLimit + * Lookup110: pallet_foreign_assets::module::AssetMetadata **/ - UpDataStructsSponsoringRateLimit: { + 
PalletForeignAssetsModuleAssetMetadata: { + name: 'Bytes', + symbol: 'Bytes', + decimals: 'u8', + minimalBalance: 'u128' + }, + /** + * Lookup111: pallet_evm::pallet::Event + **/ + PalletEvmEvent: { _enum: { - SponsoringDisabled: 'Null', - Blocks: 'u32' + Log: 'EthereumLog', + Created: 'H160', + CreatedFailed: 'H160', + Executed: 'H160', + ExecutedFailed: 'H160', + BalanceDeposit: '(AccountId32,H160,U256)', + BalanceWithdraw: '(AccountId32,H160,U256)' } }, /** - * Lookup165: up_data_structs::CollectionPermissions + * Lookup112: ethereum::log::Log **/ - UpDataStructsCollectionPermissions: { - access: 'Option', - mintMode: 'Option', - nesting: 'Option' + EthereumLog: { + address: 'H160', + topics: 'Vec', + data: 'Bytes' }, /** - * Lookup167: up_data_structs::NestingPermissions + * Lookup116: pallet_ethereum::pallet::Event **/ - UpDataStructsNestingPermissions: { - tokenOwner: 'bool', - collectionAdmin: 'bool', - restricted: 'Option' + PalletEthereumEvent: { + _enum: { + Executed: '(H160,H160,H256,EvmCoreErrorExitReason)' + } + }, + /** + * Lookup117: evm_core::error::ExitReason + **/ + EvmCoreErrorExitReason: { + _enum: { + Succeed: 'EvmCoreErrorExitSucceed', + Error: 'EvmCoreErrorExitError', + Revert: 'EvmCoreErrorExitRevert', + Fatal: 'EvmCoreErrorExitFatal' + } + }, + /** + * Lookup118: evm_core::error::ExitSucceed + **/ + EvmCoreErrorExitSucceed: { + _enum: ['Stopped', 'Returned', 'Suicided'] + }, + /** + * Lookup119: evm_core::error::ExitError + **/ + EvmCoreErrorExitError: { + _enum: { + StackUnderflow: 'Null', + StackOverflow: 'Null', + InvalidJump: 'Null', + InvalidRange: 'Null', + DesignatedInvalid: 'Null', + CallTooDeep: 'Null', + CreateCollision: 'Null', + CreateContractLimit: 'Null', + OutOfOffset: 'Null', + OutOfGas: 'Null', + OutOfFund: 'Null', + PCUnderflow: 'Null', + CreateEmpty: 'Null', + Other: 'Text', + InvalidCode: 'Null' + } + }, + /** + * Lookup122: evm_core::error::ExitRevert + **/ + EvmCoreErrorExitRevert: { + _enum: ['Reverted'] + }, + /** + * Lookup123: evm_core::error::ExitFatal + **/ + EvmCoreErrorExitFatal: { + _enum: { + NotSupported: 'Null', + UnhandledInterrupt: 'Null', + CallErrorAsFatal: 'EvmCoreErrorExitError', + Other: 'Text' + } + }, + /** + * Lookup124: pallet_evm_contract_helpers::pallet::Event + **/ + PalletEvmContractHelpersEvent: { + _enum: { + ContractSponsorSet: '(H160,AccountId32)', + ContractSponsorshipConfirmed: '(H160,AccountId32)', + ContractSponsorRemoved: 'H160' + } + }, + /** + * Lookup125: pallet_maintenance::pallet::Event + **/ + PalletMaintenanceEvent: { + _enum: ['MaintenanceEnabled', 'MaintenanceDisabled'] + }, + /** + * Lookup126: pallet_test_utils::pallet::Event + **/ + PalletTestUtilsEvent: { + _enum: ['ValueIsSet', 'ShouldRollback'] + }, + /** + * Lookup127: frame_system::Phase + **/ + FrameSystemPhase: { + _enum: { + ApplyExtrinsic: 'u32', + Finalization: 'Null', + Initialization: 'Null' + } + }, + /** + * Lookup129: frame_system::LastRuntimeUpgradeInfo + **/ + FrameSystemLastRuntimeUpgradeInfo: { + specVersion: 'Compact', + specName: 'Text' + }, + /** + * Lookup130: frame_system::pallet::Call + **/ + FrameSystemCall: { + _enum: { + fill_block: { + ratio: 'Perbill', + }, + remark: { + remark: 'Bytes', + }, + set_heap_pages: { + pages: 'u64', + }, + set_code: { + code: 'Bytes', + }, + set_code_without_checks: { + code: 'Bytes', + }, + set_storage: { + items: 'Vec<(Bytes,Bytes)>', + }, + kill_storage: { + _alias: { + keys_: 'keys', + }, + keys_: 'Vec', + }, + kill_prefix: { + prefix: 'Bytes', + subkeys: 'u32', + }, + 
remark_with_event: { + remark: 'Bytes' + } + } + }, + /** + * Lookup135: frame_system::limits::BlockWeights + **/ + FrameSystemLimitsBlockWeights: { + baseBlock: 'Weight', + maxBlock: 'Weight', + perClass: 'FrameSupportDispatchPerDispatchClassWeightsPerClass' + }, + /** + * Lookup136: frame_support::dispatch::PerDispatchClass + **/ + FrameSupportDispatchPerDispatchClassWeightsPerClass: { + normal: 'FrameSystemLimitsWeightsPerClass', + operational: 'FrameSystemLimitsWeightsPerClass', + mandatory: 'FrameSystemLimitsWeightsPerClass' + }, + /** + * Lookup137: frame_system::limits::WeightsPerClass + **/ + FrameSystemLimitsWeightsPerClass: { + baseExtrinsic: 'Weight', + maxExtrinsic: 'Option', + maxTotal: 'Option', + reserved: 'Option' + }, + /** + * Lookup139: frame_system::limits::BlockLength + **/ + FrameSystemLimitsBlockLength: { + max: 'FrameSupportDispatchPerDispatchClassU32' + }, + /** + * Lookup140: frame_support::dispatch::PerDispatchClass + **/ + FrameSupportDispatchPerDispatchClassU32: { + normal: 'u32', + operational: 'u32', + mandatory: 'u32' + }, + /** + * Lookup141: sp_weights::RuntimeDbWeight + **/ + SpWeightsRuntimeDbWeight: { + read: 'u64', + write: 'u64' + }, + /** + * Lookup142: sp_version::RuntimeVersion + **/ + SpVersionRuntimeVersion: { + specName: 'Text', + implName: 'Text', + authoringVersion: 'u32', + specVersion: 'u32', + implVersion: 'u32', + apis: 'Vec<([u8;8],u32)>', + transactionVersion: 'u32', + stateVersion: 'u8' + }, + /** + * Lookup147: frame_system::pallet::Error + **/ + FrameSystemError: { + _enum: ['InvalidSpecName', 'SpecVersionNeedsToIncrease', 'FailedToExtractRuntimeVersion', 'NonDefaultComposite', 'NonZeroRefCount', 'CallFiltered'] + }, + /** + * Lookup148: polkadot_primitives::v2::PersistedValidationData + **/ + PolkadotPrimitivesV2PersistedValidationData: { + parentHead: 'Bytes', + relayParentNumber: 'u32', + relayParentStorageRoot: 'H256', + maxPovSize: 'u32' + }, + /** + * Lookup151: polkadot_primitives::v2::UpgradeRestriction + **/ + PolkadotPrimitivesV2UpgradeRestriction: { + _enum: ['Present'] + }, + /** + * Lookup152: sp_trie::storage_proof::StorageProof + **/ + SpTrieStorageProof: { + trieNodes: 'BTreeSet' + }, + /** + * Lookup154: cumulus_pallet_parachain_system::relay_state_snapshot::MessagingStateSnapshot + **/ + CumulusPalletParachainSystemRelayStateSnapshotMessagingStateSnapshot: { + dmqMqcHead: 'H256', + relayDispatchQueueSize: '(u32,u32)', + ingressChannels: 'Vec<(u32,PolkadotPrimitivesV2AbridgedHrmpChannel)>', + egressChannels: 'Vec<(u32,PolkadotPrimitivesV2AbridgedHrmpChannel)>' + }, + /** + * Lookup157: polkadot_primitives::v2::AbridgedHrmpChannel + **/ + PolkadotPrimitivesV2AbridgedHrmpChannel: { + maxCapacity: 'u32', + maxTotalSize: 'u32', + maxMessageSize: 'u32', + msgCount: 'u32', + totalSize: 'u32', + mqcHead: 'Option' + }, + /** + * Lookup158: polkadot_primitives::v2::AbridgedHostConfiguration + **/ + PolkadotPrimitivesV2AbridgedHostConfiguration: { + maxCodeSize: 'u32', + maxHeadDataSize: 'u32', + maxUpwardQueueCount: 'u32', + maxUpwardQueueSize: 'u32', + maxUpwardMessageSize: 'u32', + maxUpwardMessageNumPerCandidate: 'u32', + hrmpMaxMessageNumPerCandidate: 'u32', + validationUpgradeCooldown: 'u32', + validationUpgradeDelay: 'u32' + }, + /** + * Lookup164: polkadot_core_primitives::OutboundHrmpMessage + **/ + PolkadotCorePrimitivesOutboundHrmpMessage: { + recipient: 'u32', + data: 'Bytes' + }, + /** + * Lookup165: cumulus_pallet_parachain_system::pallet::Call + **/ + CumulusPalletParachainSystemCall: { + _enum: { + 
set_validation_data: { + data: 'CumulusPrimitivesParachainInherentParachainInherentData', + }, + sudo_send_upward_message: { + message: 'Bytes', + }, + authorize_upgrade: { + codeHash: 'H256', + }, + enact_authorized_upgrade: { + code: 'Bytes' + } + } + }, + /** + * Lookup166: cumulus_primitives_parachain_inherent::ParachainInherentData + **/ + CumulusPrimitivesParachainInherentParachainInherentData: { + validationData: 'PolkadotPrimitivesV2PersistedValidationData', + relayChainState: 'SpTrieStorageProof', + downwardMessages: 'Vec', + horizontalMessages: 'BTreeMap>' + }, + /** + * Lookup168: polkadot_core_primitives::InboundDownwardMessage + **/ + PolkadotCorePrimitivesInboundDownwardMessage: { + sentAt: 'u32', + msg: 'Bytes' + }, + /** + * Lookup171: polkadot_core_primitives::InboundHrmpMessage + **/ + PolkadotCorePrimitivesInboundHrmpMessage: { + sentAt: 'u32', + data: 'Bytes' + }, + /** + * Lookup174: cumulus_pallet_parachain_system::pallet::Error + **/ + CumulusPalletParachainSystemError: { + _enum: ['OverlappingUpgrades', 'ProhibitedByPolkadot', 'TooBig', 'ValidationDataNotAvailable', 'HostConfigurationNotAvailable', 'NotScheduled', 'NothingAuthorized', 'Unauthorized'] + }, + /** + * Lookup176: pallet_balances::BalanceLock + **/ + PalletBalancesBalanceLock: { + id: '[u8;8]', + amount: 'u128', + reasons: 'PalletBalancesReasons' + }, + /** + * Lookup177: pallet_balances::Reasons + **/ + PalletBalancesReasons: { + _enum: ['Fee', 'Misc', 'All'] + }, + /** + * Lookup180: pallet_balances::ReserveData + **/ + PalletBalancesReserveData: { + id: '[u8;16]', + amount: 'u128' + }, + /** + * Lookup182: pallet_balances::Releases + **/ + PalletBalancesReleases: { + _enum: ['V1_0_0', 'V2_0_0'] + }, + /** + * Lookup183: pallet_balances::pallet::Call + **/ + PalletBalancesCall: { + _enum: { + transfer: { + dest: 'MultiAddress', + value: 'Compact', + }, + set_balance: { + who: 'MultiAddress', + newFree: 'Compact', + newReserved: 'Compact', + }, + force_transfer: { + source: 'MultiAddress', + dest: 'MultiAddress', + value: 'Compact', + }, + transfer_keep_alive: { + dest: 'MultiAddress', + value: 'Compact', + }, + transfer_all: { + dest: 'MultiAddress', + keepAlive: 'bool', + }, + force_unreserve: { + who: 'MultiAddress', + amount: 'u128' + } + } + }, + /** + * Lookup186: pallet_balances::pallet::Error + **/ + PalletBalancesError: { + _enum: ['VestingBalance', 'LiquidityRestrictions', 'InsufficientBalance', 'ExistentialDeposit', 'KeepAlive', 'ExistingVestingSchedule', 'DeadAccount', 'TooManyReserves'] + }, + /** + * Lookup188: pallet_timestamp::pallet::Call + **/ + PalletTimestampCall: { + _enum: { + set: { + now: 'Compact' + } + } + }, + /** + * Lookup190: pallet_transaction_payment::Releases + **/ + PalletTransactionPaymentReleases: { + _enum: ['V1Ancient', 'V2'] + }, + /** + * Lookup191: pallet_treasury::Proposal + **/ + PalletTreasuryProposal: { + proposer: 'AccountId32', + value: 'u128', + beneficiary: 'AccountId32', + bond: 'u128' + }, + /** + * Lookup194: pallet_treasury::pallet::Call + **/ + PalletTreasuryCall: { + _enum: { + propose_spend: { + value: 'Compact', + beneficiary: 'MultiAddress', + }, + reject_proposal: { + proposalId: 'Compact', + }, + approve_proposal: { + proposalId: 'Compact', + }, + spend: { + amount: 'Compact', + beneficiary: 'MultiAddress', + }, + remove_approval: { + proposalId: 'Compact' + } + } }, /** - * Lookup169: up_data_structs::OwnerRestrictedSet - **/ - UpDataStructsOwnerRestrictedSet: 'BTreeSet', - /** - * Lookup175: up_data_structs::PropertyKeyPermission + * 
Lookup197: frame_support::PalletId **/ - UpDataStructsPropertyKeyPermission: { - key: 'Bytes', - permission: 'UpDataStructsPropertyPermission' - }, + FrameSupportPalletId: '[u8;8]', /** - * Lookup177: up_data_structs::PropertyPermission + * Lookup198: pallet_treasury::pallet::Error **/ - UpDataStructsPropertyPermission: { - mutable: 'bool', - collectionAdmin: 'bool', - tokenOwner: 'bool' + PalletTreasuryError: { + _enum: ['InsufficientProposersBalance', 'InvalidIndex', 'TooManyApprovals', 'InsufficientPermission', 'ProposalNotApproved'] }, /** - * Lookup180: up_data_structs::Property + * Lookup199: pallet_sudo::pallet::Call **/ - UpDataStructsProperty: { - key: 'Bytes', - value: 'Bytes' + PalletSudoCall: { + _enum: { + sudo: { + call: 'Call', + }, + sudo_unchecked_weight: { + call: 'Call', + weight: 'Weight', + }, + set_key: { + _alias: { + new_: 'new', + }, + new_: 'MultiAddress', + }, + sudo_as: { + who: 'MultiAddress', + call: 'Call' + } + } }, /** - * Lookup183: pallet_evm::account::BasicCrossAccountIdRepr + * Lookup201: orml_vesting::module::Call **/ - PalletEvmAccountBasicCrossAccountIdRepr: { + OrmlVestingModuleCall: { _enum: { - Substrate: 'AccountId32', - Ethereum: 'H160' + claim: 'Null', + vested_transfer: { + dest: 'MultiAddress', + schedule: 'OrmlVestingVestingSchedule', + }, + update_vesting_schedules: { + who: 'MultiAddress', + vestingSchedules: 'Vec', + }, + claim_for: { + dest: 'MultiAddress' + } } }, /** - * Lookup185: up_data_structs::CreateItemData + * Lookup203: orml_xtokens::module::Call **/ - UpDataStructsCreateItemData: { + OrmlXtokensModuleCall: { _enum: { - NFT: 'UpDataStructsCreateNftData', - Fungible: 'UpDataStructsCreateFungibleData', - ReFungible: 'UpDataStructsCreateReFungibleData' + transfer: { + currencyId: 'PalletForeignAssetsAssetIds', + amount: 'u128', + dest: 'XcmVersionedMultiLocation', + destWeight: 'u64', + }, + transfer_multiasset: { + asset: 'XcmVersionedMultiAsset', + dest: 'XcmVersionedMultiLocation', + destWeight: 'u64', + }, + transfer_with_fee: { + currencyId: 'PalletForeignAssetsAssetIds', + amount: 'u128', + fee: 'u128', + dest: 'XcmVersionedMultiLocation', + destWeight: 'u64', + }, + transfer_multiasset_with_fee: { + asset: 'XcmVersionedMultiAsset', + fee: 'XcmVersionedMultiAsset', + dest: 'XcmVersionedMultiLocation', + destWeight: 'u64', + }, + transfer_multicurrencies: { + currencies: 'Vec<(PalletForeignAssetsAssetIds,u128)>', + feeItem: 'u32', + dest: 'XcmVersionedMultiLocation', + destWeight: 'u64', + }, + transfer_multiassets: { + assets: 'XcmVersionedMultiAssets', + feeItem: 'u32', + dest: 'XcmVersionedMultiLocation', + destWeight: 'u64' + } } }, /** - * Lookup186: up_data_structs::CreateNftData + * Lookup204: xcm::VersionedMultiAsset **/ - UpDataStructsCreateNftData: { - properties: 'Vec' + XcmVersionedMultiAsset: { + _enum: { + V0: 'XcmV0MultiAsset', + V1: 'XcmV1MultiAsset' + } }, /** - * Lookup187: up_data_structs::CreateFungibleData + * Lookup207: orml_tokens::module::Call **/ - UpDataStructsCreateFungibleData: { - value: 'u128' + OrmlTokensModuleCall: { + _enum: { + transfer: { + dest: 'MultiAddress', + currencyId: 'PalletForeignAssetsAssetIds', + amount: 'Compact', + }, + transfer_all: { + dest: 'MultiAddress', + currencyId: 'PalletForeignAssetsAssetIds', + keepAlive: 'bool', + }, + transfer_keep_alive: { + dest: 'MultiAddress', + currencyId: 'PalletForeignAssetsAssetIds', + amount: 'Compact', + }, + force_transfer: { + source: 'MultiAddress', + dest: 'MultiAddress', + currencyId: 'PalletForeignAssetsAssetIds', + amount: 
'Compact', + }, + set_balance: { + who: 'MultiAddress', + currencyId: 'PalletForeignAssetsAssetIds', + newFree: 'Compact', + newReserved: 'Compact' + } + } }, /** - * Lookup188: up_data_structs::CreateReFungibleData + * Lookup208: cumulus_pallet_xcmp_queue::pallet::Call **/ - UpDataStructsCreateReFungibleData: { - constData: 'Bytes', - pieces: 'u128' + CumulusPalletXcmpQueueCall: { + _enum: { + service_overweight: { + index: 'u64', + weightLimit: 'Weight', + }, + suspend_xcm_execution: 'Null', + resume_xcm_execution: 'Null', + update_suspend_threshold: { + _alias: { + new_: 'new', + }, + new_: 'u32', + }, + update_drop_threshold: { + _alias: { + new_: 'new', + }, + new_: 'u32', + }, + update_resume_threshold: { + _alias: { + new_: 'new', + }, + new_: 'u32', + }, + update_threshold_weight: { + _alias: { + new_: 'new', + }, + new_: 'Weight', + }, + update_weight_restrict_decay: { + _alias: { + new_: 'new', + }, + new_: 'Weight', + }, + update_xcmp_max_individual_weight: { + _alias: { + new_: 'new', + }, + new_: 'Weight' + } + } }, /** - * Lookup193: up_data_structs::CreateItemExData> + * Lookup209: pallet_xcm::pallet::Call **/ - UpDataStructsCreateItemExData: { + PalletXcmCall: { _enum: { - NFT: 'Vec', - Fungible: 'BTreeMap', - RefungibleMultipleItems: 'Vec', - RefungibleMultipleOwners: 'UpDataStructsCreateRefungibleExData' + send: { + dest: 'XcmVersionedMultiLocation', + message: 'XcmVersionedXcm', + }, + teleport_assets: { + dest: 'XcmVersionedMultiLocation', + beneficiary: 'XcmVersionedMultiLocation', + assets: 'XcmVersionedMultiAssets', + feeAssetItem: 'u32', + }, + reserve_transfer_assets: { + dest: 'XcmVersionedMultiLocation', + beneficiary: 'XcmVersionedMultiLocation', + assets: 'XcmVersionedMultiAssets', + feeAssetItem: 'u32', + }, + execute: { + message: 'XcmVersionedXcm', + maxWeight: 'Weight', + }, + force_xcm_version: { + location: 'XcmV1MultiLocation', + xcmVersion: 'u32', + }, + force_default_xcm_version: { + maybeXcmVersion: 'Option', + }, + force_subscribe_version_notify: { + location: 'XcmVersionedMultiLocation', + }, + force_unsubscribe_version_notify: { + location: 'XcmVersionedMultiLocation', + }, + limited_reserve_transfer_assets: { + dest: 'XcmVersionedMultiLocation', + beneficiary: 'XcmVersionedMultiLocation', + assets: 'XcmVersionedMultiAssets', + feeAssetItem: 'u32', + weightLimit: 'XcmV2WeightLimit', + }, + limited_teleport_assets: { + dest: 'XcmVersionedMultiLocation', + beneficiary: 'XcmVersionedMultiLocation', + assets: 'XcmVersionedMultiAssets', + feeAssetItem: 'u32', + weightLimit: 'XcmV2WeightLimit' + } } }, /** - * Lookup195: up_data_structs::CreateNftExData> + * Lookup210: xcm::VersionedXcm **/ - UpDataStructsCreateNftExData: { - properties: 'Vec', - owner: 'PalletEvmAccountBasicCrossAccountIdRepr' + XcmVersionedXcm: { + _enum: { + V0: 'XcmV0Xcm', + V1: 'XcmV1Xcm', + V2: 'XcmV2Xcm' + } }, /** - * Lookup202: up_data_structs::CreateRefungibleExData> + * Lookup211: xcm::v0::Xcm **/ - UpDataStructsCreateRefungibleExData: { - constData: 'Bytes', - users: 'BTreeMap' + XcmV0Xcm: { + _enum: { + WithdrawAsset: { + assets: 'Vec', + effects: 'Vec', + }, + ReserveAssetDeposit: { + assets: 'Vec', + effects: 'Vec', + }, + TeleportAsset: { + assets: 'Vec', + effects: 'Vec', + }, + QueryResponse: { + queryId: 'Compact', + response: 'XcmV0Response', + }, + TransferAsset: { + assets: 'Vec', + dest: 'XcmV0MultiLocation', + }, + TransferReserveAsset: { + assets: 'Vec', + dest: 'XcmV0MultiLocation', + effects: 'Vec', + }, + Transact: { + originType: 'XcmV0OriginKind', + 
requireWeightAtMost: 'u64', + call: 'XcmDoubleEncoded', + }, + HrmpNewChannelOpenRequest: { + sender: 'Compact', + maxMessageSize: 'Compact', + maxCapacity: 'Compact', + }, + HrmpChannelAccepted: { + recipient: 'Compact', + }, + HrmpChannelClosing: { + initiator: 'Compact', + sender: 'Compact', + recipient: 'Compact', + }, + RelayedFrom: { + who: 'XcmV0MultiLocation', + message: 'XcmV0Xcm' + } + } }, /** - * Lookup204: pallet_unique_scheduler::pallet::Call + * Lookup213: xcm::v0::order::Order **/ - PalletUniqueSchedulerCall: { + XcmV0Order: { _enum: { - schedule_named: { - id: '[u8;16]', - when: 'u32', - maybePeriodic: 'Option<(u32,u32)>', - priority: 'u8', - call: 'FrameSupportScheduleMaybeHashed', + Null: 'Null', + DepositAsset: { + assets: 'Vec', + dest: 'XcmV0MultiLocation', }, - cancel_named: { - id: '[u8;16]', + DepositReserveAsset: { + assets: 'Vec', + dest: 'XcmV0MultiLocation', + effects: 'Vec', }, - schedule_named_after: { - id: '[u8;16]', - after: 'u32', - maybePeriodic: 'Option<(u32,u32)>', - priority: 'u8', - call: 'FrameSupportScheduleMaybeHashed' + ExchangeAsset: { + give: 'Vec', + receive: 'Vec', + }, + InitiateReserveWithdraw: { + assets: 'Vec', + reserve: 'XcmV0MultiLocation', + effects: 'Vec', + }, + InitiateTeleport: { + assets: 'Vec', + dest: 'XcmV0MultiLocation', + effects: 'Vec', + }, + QueryHolding: { + queryId: 'Compact', + dest: 'XcmV0MultiLocation', + assets: 'Vec', + }, + BuyExecution: { + fees: 'XcmV0MultiAsset', + weight: 'u64', + debt: 'u64', + haltOnError: 'bool', + xcm: 'Vec' } } }, /** - * Lookup206: frame_support::traits::schedule::MaybeHashed + * Lookup215: xcm::v0::Response **/ - FrameSupportScheduleMaybeHashed: { + XcmV0Response: { _enum: { - Value: 'Call', - Hash: 'H256' + Assets: 'Vec' } }, /** - * Lookup207: pallet_template_transaction_payment::Call - **/ - PalletTemplateTransactionPaymentCall: 'Null', - /** - * Lookup208: pallet_structure::pallet::Call - **/ - PalletStructureCall: 'Null', - /** - * Lookup209: pallet_rmrk_core::pallet::Call + * Lookup216: xcm::v1::Xcm **/ - PalletRmrkCoreCall: { + XcmV1Xcm: { _enum: { - create_collection: { - metadata: 'Bytes', - max: 'Option', - symbol: 'Bytes', - }, - destroy_collection: { - collectionId: 'u32', - }, - change_collection_issuer: { - collectionId: 'u32', - newIssuer: 'MultiAddress', - }, - lock_collection: { - collectionId: 'u32', - }, - mint_nft: { - owner: 'Option', - collectionId: 'u32', - recipient: 'Option', - royaltyAmount: 'Option', - metadata: 'Bytes', - transferable: 'bool', - resources: 'Option>', + WithdrawAsset: { + assets: 'XcmV1MultiassetMultiAssets', + effects: 'Vec', }, - burn_nft: { - collectionId: 'u32', - nftId: 'u32', - maxBurns: 'u32', + ReserveAssetDeposited: { + assets: 'XcmV1MultiassetMultiAssets', + effects: 'Vec', }, - send: { - rmrkCollectionId: 'u32', - rmrkNftId: 'u32', - newOwner: 'RmrkTraitsNftAccountIdOrCollectionNftTuple', + ReceiveTeleportedAsset: { + assets: 'XcmV1MultiassetMultiAssets', + effects: 'Vec', }, - accept_nft: { - rmrkCollectionId: 'u32', - rmrkNftId: 'u32', - newOwner: 'RmrkTraitsNftAccountIdOrCollectionNftTuple', + QueryResponse: { + queryId: 'Compact', + response: 'XcmV1Response', }, - reject_nft: { - rmrkCollectionId: 'u32', - rmrkNftId: 'u32', + TransferAsset: { + assets: 'XcmV1MultiassetMultiAssets', + beneficiary: 'XcmV1MultiLocation', }, - accept_resource: { - rmrkCollectionId: 'u32', - rmrkNftId: 'u32', - resourceId: 'u32', + TransferReserveAsset: { + assets: 'XcmV1MultiassetMultiAssets', + dest: 'XcmV1MultiLocation', + effects: 'Vec', }, - 
accept_resource_removal: { - rmrkCollectionId: 'u32', - rmrkNftId: 'u32', - resourceId: 'u32', + Transact: { + originType: 'XcmV0OriginKind', + requireWeightAtMost: 'u64', + call: 'XcmDoubleEncoded', }, - set_property: { - rmrkCollectionId: 'Compact', - maybeNftId: 'Option', - key: 'Bytes', - value: 'Bytes', + HrmpNewChannelOpenRequest: { + sender: 'Compact', + maxMessageSize: 'Compact', + maxCapacity: 'Compact', }, - set_priority: { - rmrkCollectionId: 'u32', - rmrkNftId: 'u32', - priorities: 'Vec', + HrmpChannelAccepted: { + recipient: 'Compact', }, - add_basic_resource: { - rmrkCollectionId: 'u32', - nftId: 'u32', - resource: 'RmrkTraitsResourceBasicResource', + HrmpChannelClosing: { + initiator: 'Compact', + sender: 'Compact', + recipient: 'Compact', }, - add_composable_resource: { - rmrkCollectionId: 'u32', - nftId: 'u32', - resource: 'RmrkTraitsResourceComposableResource', + RelayedFrom: { + who: 'XcmV1MultilocationJunctions', + message: 'XcmV1Xcm', }, - add_slot_resource: { - rmrkCollectionId: 'u32', - nftId: 'u32', - resource: 'RmrkTraitsResourceSlotResource', + SubscribeVersion: { + queryId: 'Compact', + maxResponseWeight: 'Compact', }, - remove_resource: { - rmrkCollectionId: 'u32', - nftId: 'u32', - resourceId: 'u32' - } - } - }, - /** - * Lookup215: rmrk_traits::resource::ResourceTypes, frame_support::storage::bounded_vec::BoundedVec> - **/ - RmrkTraitsResourceResourceTypes: { - _enum: { - Basic: 'RmrkTraitsResourceBasicResource', - Composable: 'RmrkTraitsResourceComposableResource', - Slot: 'RmrkTraitsResourceSlotResource' - } - }, - /** - * Lookup217: rmrk_traits::resource::BasicResource> - **/ - RmrkTraitsResourceBasicResource: { - src: 'Option', - metadata: 'Option', - license: 'Option', - thumb: 'Option' - }, - /** - * Lookup219: rmrk_traits::resource::ComposableResource, frame_support::storage::bounded_vec::BoundedVec> - **/ - RmrkTraitsResourceComposableResource: { - parts: 'Vec', - base: 'u32', - src: 'Option', - metadata: 'Option', - license: 'Option', - thumb: 'Option' - }, - /** - * Lookup220: rmrk_traits::resource::SlotResource> - **/ - RmrkTraitsResourceSlotResource: { - base: 'u32', - src: 'Option', - metadata: 'Option', - slot: 'u32', - license: 'Option', - thumb: 'Option' - }, - /** - * Lookup222: rmrk_traits::nft::AccountIdOrCollectionNftTuple - **/ - RmrkTraitsNftAccountIdOrCollectionNftTuple: { - _enum: { - AccountId: 'AccountId32', - CollectionAndNftTuple: '(u32,u32)' + UnsubscribeVersion: 'Null' } }, /** - * Lookup226: pallet_rmrk_equip::pallet::Call + * Lookup218: xcm::v1::order::Order **/ - PalletRmrkEquipCall: { + XcmV1Order: { _enum: { - create_base: { - baseType: 'Bytes', - symbol: 'Bytes', - parts: 'Vec', + Noop: 'Null', + DepositAsset: { + assets: 'XcmV1MultiassetMultiAssetFilter', + maxAssets: 'u32', + beneficiary: 'XcmV1MultiLocation', }, - theme_add: { - baseId: 'u32', - theme: 'RmrkTraitsTheme', + DepositReserveAsset: { + assets: 'XcmV1MultiassetMultiAssetFilter', + maxAssets: 'u32', + dest: 'XcmV1MultiLocation', + effects: 'Vec', + }, + ExchangeAsset: { + give: 'XcmV1MultiassetMultiAssetFilter', + receive: 'XcmV1MultiassetMultiAssets', + }, + InitiateReserveWithdraw: { + assets: 'XcmV1MultiassetMultiAssetFilter', + reserve: 'XcmV1MultiLocation', + effects: 'Vec', + }, + InitiateTeleport: { + assets: 'XcmV1MultiassetMultiAssetFilter', + dest: 'XcmV1MultiLocation', + effects: 'Vec', + }, + QueryHolding: { + queryId: 'Compact', + dest: 'XcmV1MultiLocation', + assets: 'XcmV1MultiassetMultiAssetFilter', }, - equippable: { - baseId: 'u32', - slotId: 
'u32', - equippables: 'RmrkTraitsPartEquippableList' + BuyExecution: { + fees: 'XcmV1MultiAsset', + weight: 'u64', + debt: 'u64', + haltOnError: 'bool', + instructions: 'Vec' } } }, /** - * Lookup229: rmrk_traits::part::PartType, frame_support::storage::bounded_vec::BoundedVec> + * Lookup220: xcm::v1::Response **/ - RmrkTraitsPartPartType: { + XcmV1Response: { _enum: { - FixedPart: 'RmrkTraitsPartFixedPart', - SlotPart: 'RmrkTraitsPartSlotPart' + Assets: 'XcmV1MultiassetMultiAssets', + Version: 'u32' } }, /** - * Lookup231: rmrk_traits::part::FixedPart> - **/ - RmrkTraitsPartFixedPart: { - id: 'u32', - z: 'u32', - src: 'Bytes' - }, - /** - * Lookup232: rmrk_traits::part::SlotPart, frame_support::storage::bounded_vec::BoundedVec> + * Lookup234: cumulus_pallet_xcm::pallet::Call **/ - RmrkTraitsPartSlotPart: { - id: 'u32', - equippable: 'RmrkTraitsPartEquippableList', - src: 'Bytes', - z: 'u32' - }, + CumulusPalletXcmCall: 'Null', /** - * Lookup233: rmrk_traits::part::EquippableList> + * Lookup235: cumulus_pallet_dmp_queue::pallet::Call **/ - RmrkTraitsPartEquippableList: { + CumulusPalletDmpQueueCall: { _enum: { - All: 'Null', - Empty: 'Null', - Custom: 'Vec' + service_overweight: { + index: 'u64', + weightLimit: 'Weight' + } } }, /** - * Lookup235: rmrk_traits::theme::Theme, frame_support::storage::bounded_vec::BoundedVec>, S>> - **/ - RmrkTraitsTheme: { - name: 'Bytes', - properties: 'Vec', - inherit: 'bool' - }, - /** - * Lookup237: rmrk_traits::theme::ThemeProperty> + * Lookup236: pallet_inflation::pallet::Call **/ - RmrkTraitsThemeThemeProperty: { - key: 'Bytes', - value: 'Bytes' + PalletInflationCall: { + _enum: { + start_inflation: { + inflationStartRelayBlock: 'u32' + } + } }, /** - * Lookup239: pallet_evm::pallet::Call + * Lookup237: pallet_unique::Call **/ - PalletEvmCall: { + PalletUniqueCall: { _enum: { - withdraw: { - address: 'H160', + create_collection: { + collectionName: 'Vec', + collectionDescription: 'Vec', + tokenPrefix: 'Bytes', + mode: 'UpDataStructsCollectionMode', + }, + create_collection_ex: { + data: 'UpDataStructsCreateCollectionData', + }, + destroy_collection: { + collectionId: 'u32', + }, + add_to_allow_list: { + collectionId: 'u32', + address: 'PalletEvmAccountBasicCrossAccountIdRepr', + }, + remove_from_allow_list: { + collectionId: 'u32', + address: 'PalletEvmAccountBasicCrossAccountIdRepr', + }, + change_collection_owner: { + collectionId: 'u32', + newOwner: 'AccountId32', + }, + add_collection_admin: { + collectionId: 'u32', + newAdminId: 'PalletEvmAccountBasicCrossAccountIdRepr', + }, + remove_collection_admin: { + collectionId: 'u32', + accountId: 'PalletEvmAccountBasicCrossAccountIdRepr', + }, + set_collection_sponsor: { + collectionId: 'u32', + newSponsor: 'AccountId32', + }, + confirm_sponsorship: { + collectionId: 'u32', + }, + remove_collection_sponsor: { + collectionId: 'u32', + }, + create_item: { + collectionId: 'u32', + owner: 'PalletEvmAccountBasicCrossAccountIdRepr', + data: 'UpDataStructsCreateItemData', + }, + create_multiple_items: { + collectionId: 'u32', + owner: 'PalletEvmAccountBasicCrossAccountIdRepr', + itemsData: 'Vec', + }, + set_collection_properties: { + collectionId: 'u32', + properties: 'Vec', + }, + delete_collection_properties: { + collectionId: 'u32', + propertyKeys: 'Vec', + }, + set_token_properties: { + collectionId: 'u32', + tokenId: 'u32', + properties: 'Vec', + }, + delete_token_properties: { + collectionId: 'u32', + tokenId: 'u32', + propertyKeys: 'Vec', + }, + set_token_property_permissions: { + collectionId: 'u32', + 
propertyPermissions: 'Vec', + }, + create_multiple_items_ex: { + collectionId: 'u32', + data: 'UpDataStructsCreateItemExData', + }, + set_transfers_enabled_flag: { + collectionId: 'u32', + value: 'bool', + }, + burn_item: { + collectionId: 'u32', + itemId: 'u32', value: 'u128', }, - call: { - source: 'H160', - target: 'H160', - input: 'Bytes', - value: 'U256', - gasLimit: 'u64', - maxFeePerGas: 'U256', - maxPriorityFeePerGas: 'Option', - nonce: 'Option', - accessList: 'Vec<(H160,Vec)>', + burn_from: { + collectionId: 'u32', + from: 'PalletEvmAccountBasicCrossAccountIdRepr', + itemId: 'u32', + value: 'u128', }, - create: { - source: 'H160', - init: 'Bytes', - value: 'U256', - gasLimit: 'u64', - maxFeePerGas: 'U256', - maxPriorityFeePerGas: 'Option', - nonce: 'Option', - accessList: 'Vec<(H160,Vec)>', + transfer: { + recipient: 'PalletEvmAccountBasicCrossAccountIdRepr', + collectionId: 'u32', + itemId: 'u32', + value: 'u128', }, - create2: { - source: 'H160', - init: 'Bytes', - salt: 'H256', - value: 'U256', - gasLimit: 'u64', - maxFeePerGas: 'U256', - maxPriorityFeePerGas: 'Option', - nonce: 'Option', - accessList: 'Vec<(H160,Vec)>' + approve: { + spender: 'PalletEvmAccountBasicCrossAccountIdRepr', + collectionId: 'u32', + itemId: 'u32', + amount: 'u128', + }, + transfer_from: { + from: 'PalletEvmAccountBasicCrossAccountIdRepr', + recipient: 'PalletEvmAccountBasicCrossAccountIdRepr', + collectionId: 'u32', + itemId: 'u32', + value: 'u128', + }, + set_collection_limits: { + collectionId: 'u32', + newLimit: 'UpDataStructsCollectionLimits', + }, + set_collection_permissions: { + collectionId: 'u32', + newPermission: 'UpDataStructsCollectionPermissions', + }, + repartition: { + collectionId: 'u32', + tokenId: 'u32', + amount: 'u128' } } }, /** - * Lookup245: pallet_ethereum::pallet::Call + * Lookup242: up_data_structs::CollectionMode **/ - PalletEthereumCall: { + UpDataStructsCollectionMode: { _enum: { - transact: { - transaction: 'EthereumTransactionTransactionV2' - } + NFT: 'Null', + Fungible: 'u8', + ReFungible: 'Null' } }, /** - * Lookup246: ethereum::transaction::TransactionV2 + * Lookup243: up_data_structs::CreateCollectionData **/ - EthereumTransactionTransactionV2: { - _enum: { - Legacy: 'EthereumTransactionLegacyTransaction', - EIP2930: 'EthereumTransactionEip2930Transaction', - EIP1559: 'EthereumTransactionEip1559Transaction' - } + UpDataStructsCreateCollectionData: { + mode: 'UpDataStructsCollectionMode', + access: 'Option', + name: 'Vec', + description: 'Vec', + tokenPrefix: 'Bytes', + pendingSponsor: 'Option', + limits: 'Option', + permissions: 'Option', + tokenPropertyPermissions: 'Vec', + properties: 'Vec' }, /** - * Lookup247: ethereum::transaction::LegacyTransaction + * Lookup245: up_data_structs::AccessMode **/ - EthereumTransactionLegacyTransaction: { - nonce: 'U256', - gasPrice: 'U256', - gasLimit: 'U256', - action: 'EthereumTransactionTransactionAction', - value: 'U256', - input: 'Bytes', - signature: 'EthereumTransactionTransactionSignature' + UpDataStructsAccessMode: { + _enum: ['Normal', 'AllowList'] }, /** - * Lookup248: ethereum::transaction::TransactionAction + * Lookup247: up_data_structs::CollectionLimits **/ - EthereumTransactionTransactionAction: { + UpDataStructsCollectionLimits: { + accountTokenOwnershipLimit: 'Option', + sponsoredDataSize: 'Option', + sponsoredDataRateLimit: 'Option', + tokenLimit: 'Option', + sponsorTransferTimeout: 'Option', + sponsorApproveTimeout: 'Option', + ownerCanTransfer: 'Option', + ownerCanDestroy: 'Option', + transfersEnabled: 
'Option' + }, + /** + * Lookup249: up_data_structs::SponsoringRateLimit + **/ + UpDataStructsSponsoringRateLimit: { _enum: { - Call: 'H160', - Create: 'Null' + SponsoringDisabled: 'Null', + Blocks: 'u32' } }, /** - * Lookup249: ethereum::transaction::TransactionSignature + * Lookup252: up_data_structs::CollectionPermissions **/ - EthereumTransactionTransactionSignature: { - v: 'u64', - r: 'H256', - s: 'H256' + UpDataStructsCollectionPermissions: { + access: 'Option', + mintMode: 'Option', + nesting: 'Option' }, /** - * Lookup251: ethereum::transaction::EIP2930Transaction + * Lookup254: up_data_structs::NestingPermissions **/ - EthereumTransactionEip2930Transaction: { - chainId: 'u64', - nonce: 'U256', - gasPrice: 'U256', - gasLimit: 'U256', - action: 'EthereumTransactionTransactionAction', - value: 'U256', - input: 'Bytes', - accessList: 'Vec', - oddYParity: 'bool', - r: 'H256', - s: 'H256' + UpDataStructsNestingPermissions: { + tokenOwner: 'bool', + collectionAdmin: 'bool', + restricted: 'Option' }, /** - * Lookup253: ethereum::transaction::AccessListItem + * Lookup256: up_data_structs::OwnerRestrictedSet **/ - EthereumTransactionAccessListItem: { - address: 'H160', - storageKeys: 'Vec' - }, + UpDataStructsOwnerRestrictedSet: 'BTreeSet', /** - * Lookup254: ethereum::transaction::EIP1559Transaction + * Lookup261: up_data_structs::PropertyKeyPermission **/ - EthereumTransactionEip1559Transaction: { - chainId: 'u64', - nonce: 'U256', - maxPriorityFeePerGas: 'U256', - maxFeePerGas: 'U256', - gasLimit: 'U256', - action: 'EthereumTransactionTransactionAction', - value: 'U256', - input: 'Bytes', - accessList: 'Vec', - oddYParity: 'bool', - r: 'H256', - s: 'H256' + UpDataStructsPropertyKeyPermission: { + key: 'Bytes', + permission: 'UpDataStructsPropertyPermission' }, /** - * Lookup255: pallet_evm_migration::pallet::Call + * Lookup262: up_data_structs::PropertyPermission **/ - PalletEvmMigrationCall: { - _enum: { - begin: { - address: 'H160', - }, - set_data: { - address: 'H160', - data: 'Vec<(H256,H256)>', - }, - finish: { - address: 'H160', - code: 'Bytes' - } - } + UpDataStructsPropertyPermission: { + mutable: 'bool', + collectionAdmin: 'bool', + tokenOwner: 'bool' }, /** - * Lookup258: pallet_maintenance::pallet::Call + * Lookup265: up_data_structs::Property **/ - PalletMaintenanceCall: { - _enum: ['enable', 'disable'] + UpDataStructsProperty: { + key: 'Bytes', + value: 'Bytes' }, /** - * Lookup259: pallet_sudo::pallet::Event + * Lookup268: up_data_structs::CreateItemData **/ - PalletSudoEvent: { + UpDataStructsCreateItemData: { _enum: { - Sudid: { - sudoResult: 'Result', - }, - KeyChanged: { - oldSudoer: 'Option', - }, - SudoAsDone: { - sudoResult: 'Result' - } + NFT: 'UpDataStructsCreateNftData', + Fungible: 'UpDataStructsCreateFungibleData', + ReFungible: 'UpDataStructsCreateReFungibleData' } }, /** - * Lookup261: sp_runtime::DispatchError + * Lookup269: up_data_structs::CreateNftData **/ - SpRuntimeDispatchError: { - _enum: { - Other: 'Null', - CannotLookup: 'Null', - BadOrigin: 'Null', - Module: 'SpRuntimeModuleError', - ConsumerRemaining: 'Null', - NoProviders: 'Null', - TooManyConsumers: 'Null', - Token: 'SpRuntimeTokenError', - Arithmetic: 'SpRuntimeArithmeticError', - Transactional: 'SpRuntimeTransactionalError' - } + UpDataStructsCreateNftData: { + properties: 'Vec' }, /** - * Lookup262: sp_runtime::ModuleError + * Lookup270: up_data_structs::CreateFungibleData **/ - SpRuntimeModuleError: { - index: 'u8', - error: '[u8;4]' + UpDataStructsCreateFungibleData: { + value: 'u128' }, /** 
- * Lookup263: sp_runtime::TokenError + * Lookup271: up_data_structs::CreateReFungibleData **/ - SpRuntimeTokenError: { - _enum: ['NoFunds', 'WouldDie', 'BelowMinimum', 'CannotCreate', 'UnknownAsset', 'Frozen', 'Unsupported'] + UpDataStructsCreateReFungibleData: { + pieces: 'u128', + properties: 'Vec' }, /** - * Lookup264: sp_runtime::ArithmeticError + * Lookup274: up_data_structs::CreateItemExData> **/ - SpRuntimeArithmeticError: { - _enum: ['Underflow', 'Overflow', 'DivisionByZero'] + UpDataStructsCreateItemExData: { + _enum: { + NFT: 'Vec', + Fungible: 'BTreeMap', + RefungibleMultipleItems: 'Vec', + RefungibleMultipleOwners: 'UpDataStructsCreateRefungibleExMultipleOwners' + } }, /** - * Lookup265: sp_runtime::TransactionalError + * Lookup276: up_data_structs::CreateNftExData> **/ - SpRuntimeTransactionalError: { - _enum: ['LimitReached', 'NoLayer'] + UpDataStructsCreateNftExData: { + properties: 'Vec', + owner: 'PalletEvmAccountBasicCrossAccountIdRepr' }, /** - * Lookup266: pallet_sudo::pallet::Error + * Lookup283: up_data_structs::CreateRefungibleExSingleOwner> **/ - PalletSudoError: { - _enum: ['RequireSudo'] + UpDataStructsCreateRefungibleExSingleOwner: { + user: 'PalletEvmAccountBasicCrossAccountIdRepr', + pieces: 'u128', + properties: 'Vec' }, /** - * Lookup267: frame_system::AccountInfo> + * Lookup285: up_data_structs::CreateRefungibleExMultipleOwners> **/ - FrameSystemAccountInfo: { - nonce: 'u32', - consumers: 'u32', - providers: 'u32', - sufficients: 'u32', - data: 'PalletBalancesAccountData' + UpDataStructsCreateRefungibleExMultipleOwners: { + users: 'BTreeMap', + properties: 'Vec' }, /** - * Lookup268: frame_support::weights::PerDispatchClass + * Lookup286: pallet_unique_scheduler::pallet::Call **/ - FrameSupportWeightsPerDispatchClassU64: { - normal: 'u64', - operational: 'u64', - mandatory: 'u64' + PalletUniqueSchedulerCall: { + _enum: { + schedule_named: { + id: '[u8;16]', + when: 'u32', + maybePeriodic: 'Option<(u32,u32)>', + priority: 'Option', + call: 'FrameSupportScheduleMaybeHashed', + }, + cancel_named: { + id: '[u8;16]', + }, + schedule_named_after: { + id: '[u8;16]', + after: 'u32', + maybePeriodic: 'Option<(u32,u32)>', + priority: 'Option', + call: 'FrameSupportScheduleMaybeHashed', + }, + change_named_priority: { + id: '[u8;16]', + priority: 'u8' + } + } }, /** - * Lookup269: sp_runtime::generic::digest::Digest + * Lookup289: frame_support::traits::schedule::MaybeHashed **/ - SpRuntimeDigest: { - logs: 'Vec' + FrameSupportScheduleMaybeHashed: { + _enum: { + Value: 'Call', + Hash: 'H256' + } }, /** - * Lookup271: sp_runtime::generic::digest::DigestItem + * Lookup290: pallet_configuration::pallet::Call **/ - SpRuntimeDigestDigestItem: { + PalletConfigurationCall: { _enum: { - Other: 'Bytes', - __Unused1: 'Null', - __Unused2: 'Null', - __Unused3: 'Null', - Consensus: '([u8;4],Bytes)', - Seal: '([u8;4],Bytes)', - PreRuntime: '([u8;4],Bytes)', - __Unused7: 'Null', - RuntimeEnvironmentUpdated: 'Null' + set_weight_to_fee_coefficient_override: { + coeff: 'Option', + }, + set_min_gas_price_override: { + coeff: 'Option' + } } }, /** - * Lookup273: frame_system::EventRecord + * Lookup292: pallet_template_transaction_payment::Call **/ - FrameSystemEventRecord: { - phase: 'FrameSystemPhase', - event: 'Event', - topics: 'Vec' - }, + PalletTemplateTransactionPaymentCall: 'Null', /** - * Lookup275: frame_system::pallet::Event + * Lookup293: pallet_structure::pallet::Call **/ - FrameSystemEvent: { + PalletStructureCall: 'Null', + /** + * Lookup294: pallet_rmrk_core::pallet::Call 
+ **/ + PalletRmrkCoreCall: { _enum: { - ExtrinsicSuccess: { - dispatchInfo: 'FrameSupportWeightsDispatchInfo', + create_collection: { + metadata: 'Bytes', + max: 'Option', + symbol: 'Bytes', }, - ExtrinsicFailed: { - dispatchError: 'SpRuntimeDispatchError', - dispatchInfo: 'FrameSupportWeightsDispatchInfo', + destroy_collection: { + collectionId: 'u32', }, - CodeUpdated: 'Null', - NewAccount: { - account: 'AccountId32', + change_collection_issuer: { + collectionId: 'u32', + newIssuer: 'MultiAddress', }, - KilledAccount: { - account: 'AccountId32', + lock_collection: { + collectionId: 'u32', + }, + mint_nft: { + owner: 'Option', + collectionId: 'u32', + recipient: 'Option', + royaltyAmount: 'Option', + metadata: 'Bytes', + transferable: 'bool', + resources: 'Option>', + }, + burn_nft: { + collectionId: 'u32', + nftId: 'u32', + maxBurns: 'u32', + }, + send: { + rmrkCollectionId: 'u32', + rmrkNftId: 'u32', + newOwner: 'RmrkTraitsNftAccountIdOrCollectionNftTuple', + }, + accept_nft: { + rmrkCollectionId: 'u32', + rmrkNftId: 'u32', + newOwner: 'RmrkTraitsNftAccountIdOrCollectionNftTuple', + }, + reject_nft: { + rmrkCollectionId: 'u32', + rmrkNftId: 'u32', + }, + accept_resource: { + rmrkCollectionId: 'u32', + rmrkNftId: 'u32', + resourceId: 'u32', + }, + accept_resource_removal: { + rmrkCollectionId: 'u32', + rmrkNftId: 'u32', + resourceId: 'u32', + }, + set_property: { + rmrkCollectionId: 'Compact', + maybeNftId: 'Option', + key: 'Bytes', + value: 'Bytes', + }, + set_priority: { + rmrkCollectionId: 'u32', + rmrkNftId: 'u32', + priorities: 'Vec', + }, + add_basic_resource: { + rmrkCollectionId: 'u32', + nftId: 'u32', + resource: 'RmrkTraitsResourceBasicResource', }, - Remarked: { - _alias: { - hash_: 'hash', - }, - sender: 'AccountId32', - hash_: 'H256' - } - } - }, - /** - * Lookup276: frame_support::weights::DispatchInfo - **/ - FrameSupportWeightsDispatchInfo: { - weight: 'u64', - class: 'FrameSupportWeightsDispatchClass', - paysFee: 'FrameSupportWeightsPays' - }, - /** - * Lookup277: frame_support::weights::DispatchClass - **/ - FrameSupportWeightsDispatchClass: { - _enum: ['Normal', 'Operational', 'Mandatory'] - }, - /** - * Lookup278: frame_support::weights::Pays - **/ - FrameSupportWeightsPays: { - _enum: ['Yes', 'No'] - }, - /** - * Lookup279: orml_vesting::module::Event - **/ - OrmlVestingModuleEvent: { - _enum: { - VestingScheduleAdded: { - from: 'AccountId32', - to: 'AccountId32', - vestingSchedule: 'OrmlVestingVestingSchedule', + add_composable_resource: { + rmrkCollectionId: 'u32', + nftId: 'u32', + resource: 'RmrkTraitsResourceComposableResource', }, - Claimed: { - who: 'AccountId32', - amount: 'u128', + add_slot_resource: { + rmrkCollectionId: 'u32', + nftId: 'u32', + resource: 'RmrkTraitsResourceSlotResource', }, - VestingSchedulesUpdated: { - who: 'AccountId32' + remove_resource: { + rmrkCollectionId: 'u32', + nftId: 'u32', + resourceId: 'u32' } } }, /** - * Lookup280: cumulus_pallet_xcmp_queue::pallet::Event + * Lookup300: rmrk_traits::resource::ResourceTypes, sp_core::bounded::bounded_vec::BoundedVec> **/ - CumulusPalletXcmpQueueEvent: { + RmrkTraitsResourceResourceTypes: { _enum: { - Success: 'Option', - Fail: '(Option,XcmV2TraitsError)', - BadVersion: 'Option', - BadFormat: 'Option', - UpwardMessageSent: 'Option', - XcmpMessageSent: 'Option', - OverweightEnqueued: '(u32,u32,u64,u64)', - OverweightServiced: '(u64,u64)' + Basic: 'RmrkTraitsResourceBasicResource', + Composable: 'RmrkTraitsResourceComposableResource', + Slot: 'RmrkTraitsResourceSlotResource' } }, /** - * 
Lookup281: pallet_xcm::pallet::Event + * Lookup302: rmrk_traits::resource::BasicResource> **/ - PalletXcmEvent: { - _enum: { - Attempted: 'XcmV2TraitsOutcome', - Sent: '(XcmV1MultiLocation,XcmV1MultiLocation,XcmV2Xcm)', - UnexpectedResponse: '(XcmV1MultiLocation,u64)', - ResponseReady: '(u64,XcmV2Response)', - Notified: '(u64,u8,u8)', - NotifyOverweight: '(u64,u8,u8,u64,u64)', - NotifyDispatchError: '(u64,u8,u8)', - NotifyDecodeFailed: '(u64,u8,u8)', - InvalidResponder: '(XcmV1MultiLocation,u64,Option)', - InvalidResponderVersion: '(XcmV1MultiLocation,u64)', - ResponseTaken: 'u64', - AssetsTrapped: '(H256,XcmV1MultiLocation,XcmVersionedMultiAssets)', - VersionChangeNotified: '(XcmV1MultiLocation,u32)', - SupportedVersionChanged: '(XcmV1MultiLocation,u32)', - NotifyTargetSendFail: '(XcmV1MultiLocation,u64,XcmV2TraitsError)', - NotifyTargetMigrationFail: '(XcmVersionedMultiLocation,u64)' - } + RmrkTraitsResourceBasicResource: { + src: 'Option', + metadata: 'Option', + license: 'Option', + thumb: 'Option' }, /** - * Lookup282: xcm::v2::traits::Outcome + * Lookup304: rmrk_traits::resource::ComposableResource, sp_core::bounded::bounded_vec::BoundedVec> **/ - XcmV2TraitsOutcome: { - _enum: { - Complete: 'u64', - Incomplete: '(u64,XcmV2TraitsError)', - Error: 'XcmV2TraitsError' - } + RmrkTraitsResourceComposableResource: { + parts: 'Vec', + base: 'u32', + src: 'Option', + metadata: 'Option', + license: 'Option', + thumb: 'Option' }, /** - * Lookup284: cumulus_pallet_xcm::pallet::Event + * Lookup305: rmrk_traits::resource::SlotResource> **/ - CumulusPalletXcmEvent: { - _enum: { - InvalidFormat: '[u8;8]', - UnsupportedVersion: '[u8;8]', - ExecutedDownward: '([u8;8],XcmV2TraitsOutcome)' - } + RmrkTraitsResourceSlotResource: { + base: 'u32', + src: 'Option', + metadata: 'Option', + slot: 'u32', + license: 'Option', + thumb: 'Option' }, /** - * Lookup285: cumulus_pallet_dmp_queue::pallet::Event + * Lookup308: pallet_rmrk_equip::pallet::Call **/ - CumulusPalletDmpQueueEvent: { + PalletRmrkEquipCall: { _enum: { - InvalidFormat: { - messageId: '[u8;32]', - }, - UnsupportedVersion: { - messageId: '[u8;32]', - }, - ExecutedDownward: { - messageId: '[u8;32]', - outcome: 'XcmV2TraitsOutcome', - }, - WeightExhausted: { - messageId: '[u8;32]', - remainingWeight: 'u64', - requiredWeight: 'u64', + create_base: { + baseType: 'Bytes', + symbol: 'Bytes', + parts: 'Vec', }, - OverweightEnqueued: { - messageId: '[u8;32]', - overweightIndex: 'u64', - requiredWeight: 'u64', + theme_add: { + baseId: 'u32', + theme: 'RmrkTraitsTheme', }, - OverweightServiced: { - overweightIndex: 'u64', - weightUsed: 'u64' + equippable: { + baseId: 'u32', + slotId: 'u32', + equippables: 'RmrkTraitsPartEquippableList' } } }, /** - * Lookup286: pallet_unique::RawEvent> + * Lookup311: rmrk_traits::part::PartType, sp_core::bounded::bounded_vec::BoundedVec> **/ - PalletUniqueRawEvent: { + RmrkTraitsPartPartType: { _enum: { - CollectionSponsorRemoved: 'u32', - CollectionAdminAdded: '(u32,PalletEvmAccountBasicCrossAccountIdRepr)', - CollectionOwnedChanged: '(u32,AccountId32)', - CollectionSponsorSet: '(u32,AccountId32)', - SponsorshipConfirmed: '(u32,AccountId32)', - CollectionAdminRemoved: '(u32,PalletEvmAccountBasicCrossAccountIdRepr)', - AllowListAddressRemoved: '(u32,PalletEvmAccountBasicCrossAccountIdRepr)', - AllowListAddressAdded: '(u32,PalletEvmAccountBasicCrossAccountIdRepr)', - CollectionLimitSet: 'u32', - CollectionPermissionSet: 'u32' + FixedPart: 'RmrkTraitsPartFixedPart', + SlotPart: 'RmrkTraitsPartSlotPart' } }, /** - * 
Lookup287: pallet_unique_scheduler::pallet::Event + * Lookup313: rmrk_traits::part::FixedPart> **/ - PalletUniqueSchedulerEvent: { - _enum: { - Scheduled: { - when: 'u32', - index: 'u32', - }, - Canceled: { - when: 'u32', - index: 'u32', - }, - Dispatched: { - task: '(u32,u32)', - id: 'Option<[u8;16]>', - result: 'Result', - }, - CallLookupFailed: { - task: '(u32,u32)', - id: 'Option<[u8;16]>', - error: 'FrameSupportScheduleLookupError' - } - } + RmrkTraitsPartFixedPart: { + id: 'u32', + z: 'u32', + src: 'Bytes' }, /** - * Lookup289: frame_support::traits::schedule::LookupError + * Lookup314: rmrk_traits::part::SlotPart, sp_core::bounded::bounded_vec::BoundedVec> **/ - FrameSupportScheduleLookupError: { - _enum: ['Unknown', 'BadFormat'] + RmrkTraitsPartSlotPart: { + id: 'u32', + equippable: 'RmrkTraitsPartEquippableList', + src: 'Bytes', + z: 'u32' }, /** - * Lookup290: pallet_common::pallet::Event + * Lookup315: rmrk_traits::part::EquippableList> **/ - PalletCommonEvent: { + RmrkTraitsPartEquippableList: { _enum: { - CollectionCreated: '(u32,u8,AccountId32)', - CollectionDestroyed: 'u32', - ItemCreated: '(u32,u32,PalletEvmAccountBasicCrossAccountIdRepr,u128)', - ItemDestroyed: '(u32,u32,PalletEvmAccountBasicCrossAccountIdRepr,u128)', - Transfer: '(u32,u32,PalletEvmAccountBasicCrossAccountIdRepr,PalletEvmAccountBasicCrossAccountIdRepr,u128)', - Approved: '(u32,u32,PalletEvmAccountBasicCrossAccountIdRepr,PalletEvmAccountBasicCrossAccountIdRepr,u128)', - CollectionPropertySet: '(u32,Bytes)', - CollectionPropertyDeleted: '(u32,Bytes)', - TokenPropertySet: '(u32,u32,Bytes)', - TokenPropertyDeleted: '(u32,u32,Bytes)', - PropertyPermissionSet: '(u32,Bytes)' + All: 'Null', + Empty: 'Null', + Custom: 'Vec' } }, /** - * Lookup291: pallet_structure::pallet::Event + * Lookup317: rmrk_traits::theme::Theme, sp_core::bounded::bounded_vec::BoundedVec>, S>> **/ - PalletStructureEvent: { - _enum: { - Executed: 'Result' - } + RmrkTraitsTheme: { + name: 'Bytes', + properties: 'Vec', + inherit: 'bool' }, /** - * Lookup292: pallet_rmrk_core::pallet::Event + * Lookup319: rmrk_traits::theme::ThemeProperty> **/ - PalletRmrkCoreEvent: { - _enum: { - CollectionCreated: { - issuer: 'AccountId32', - collectionId: 'u32', - }, - CollectionDestroyed: { - issuer: 'AccountId32', - collectionId: 'u32', - }, - IssuerChanged: { - oldIssuer: 'AccountId32', - newIssuer: 'AccountId32', - collectionId: 'u32', - }, - CollectionLocked: { - issuer: 'AccountId32', - collectionId: 'u32', - }, - NftMinted: { - owner: 'AccountId32', - collectionId: 'u32', - nftId: 'u32', - }, - NFTBurned: { - owner: 'AccountId32', - nftId: 'u32', - }, - NFTSent: { - sender: 'AccountId32', - recipient: 'RmrkTraitsNftAccountIdOrCollectionNftTuple', - collectionId: 'u32', - nftId: 'u32', - approvalRequired: 'bool', - }, - NFTAccepted: { - sender: 'AccountId32', - recipient: 'RmrkTraitsNftAccountIdOrCollectionNftTuple', - collectionId: 'u32', - nftId: 'u32', - }, - NFTRejected: { - sender: 'AccountId32', - collectionId: 'u32', - nftId: 'u32', - }, - PropertySet: { - collectionId: 'u32', - maybeNftId: 'Option', - key: 'Bytes', - value: 'Bytes', + RmrkTraitsThemeThemeProperty: { + key: 'Bytes', + value: 'Bytes' + }, + /** + * Lookup321: pallet_app_promotion::pallet::Call + **/ + PalletAppPromotionCall: { + _enum: { + set_admin_address: { + admin: 'PalletEvmAccountBasicCrossAccountIdRepr', }, - ResourceAdded: { - nftId: 'u32', - resourceId: 'u32', + stake: { + amount: 'u128', }, - ResourceRemoval: { - nftId: 'u32', - resourceId: 'u32', + unstake: 'Null', + 
sponsor_collection: { + collectionId: 'u32', }, - ResourceAccepted: { - nftId: 'u32', - resourceId: 'u32', + stop_sponsoring_collection: { + collectionId: 'u32', }, - ResourceRemovalAccepted: { - nftId: 'u32', - resourceId: 'u32', + sponsor_contract: { + contractId: 'H160', }, - PrioritySet: { - collectionId: 'u32', - nftId: 'u32' + stop_sponsoring_contract: { + contractId: 'H160', + }, + payout_stakers: { + stakersNumber: 'Option' } } }, /** - * Lookup293: pallet_rmrk_equip::pallet::Event + * Lookup322: pallet_foreign_assets::module::Call **/ - PalletRmrkEquipEvent: { + PalletForeignAssetsModuleCall: { _enum: { - BaseCreated: { - issuer: 'AccountId32', - baseId: 'u32', + register_foreign_asset: { + owner: 'AccountId32', + location: 'XcmVersionedMultiLocation', + metadata: 'PalletForeignAssetsModuleAssetMetadata', }, - EquippablesUpdated: { - baseId: 'u32', - slotId: 'u32' + update_foreign_asset: { + foreignAssetId: 'u32', + location: 'XcmVersionedMultiLocation', + metadata: 'PalletForeignAssetsModuleAssetMetadata' } } }, /** - * Lookup294: pallet_evm::pallet::Event + * Lookup323: pallet_evm::pallet::Call **/ - PalletEvmEvent: { + PalletEvmCall: { _enum: { - Log: 'EthereumLog', - Created: 'H160', - CreatedFailed: 'H160', - Executed: 'H160', - ExecutedFailed: 'H160', - BalanceDeposit: '(AccountId32,H160,U256)', - BalanceWithdraw: '(AccountId32,H160,U256)' + withdraw: { + address: 'H160', + value: 'u128', + }, + call: { + source: 'H160', + target: 'H160', + input: 'Bytes', + value: 'U256', + gasLimit: 'u64', + maxFeePerGas: 'U256', + maxPriorityFeePerGas: 'Option', + nonce: 'Option', + accessList: 'Vec<(H160,Vec)>', + }, + create: { + source: 'H160', + init: 'Bytes', + value: 'U256', + gasLimit: 'u64', + maxFeePerGas: 'U256', + maxPriorityFeePerGas: 'Option', + nonce: 'Option', + accessList: 'Vec<(H160,Vec)>', + }, + create2: { + source: 'H160', + init: 'Bytes', + salt: 'H256', + value: 'U256', + gasLimit: 'u64', + maxFeePerGas: 'U256', + maxPriorityFeePerGas: 'Option', + nonce: 'Option', + accessList: 'Vec<(H160,Vec)>' + } } }, /** - * Lookup295: ethereum::log::Log - **/ - EthereumLog: { - address: 'H160', - topics: 'Vec', - data: 'Bytes' - }, - /** - * Lookup296: pallet_ethereum::pallet::Event + * Lookup327: pallet_ethereum::pallet::Call **/ - PalletEthereumEvent: { + PalletEthereumCall: { _enum: { - Executed: '(H160,H160,H256,EvmCoreErrorExitReason)' + transact: { + transaction: 'EthereumTransactionTransactionV2' + } } }, /** - * Lookup297: evm_core::error::ExitReason + * Lookup328: ethereum::transaction::TransactionV2 **/ - EvmCoreErrorExitReason: { + EthereumTransactionTransactionV2: { _enum: { - Succeed: 'EvmCoreErrorExitSucceed', - Error: 'EvmCoreErrorExitError', - Revert: 'EvmCoreErrorExitRevert', - Fatal: 'EvmCoreErrorExitFatal' + Legacy: 'EthereumTransactionLegacyTransaction', + EIP2930: 'EthereumTransactionEip2930Transaction', + EIP1559: 'EthereumTransactionEip1559Transaction' } }, /** - * Lookup298: evm_core::error::ExitSucceed + * Lookup329: ethereum::transaction::LegacyTransaction **/ - EvmCoreErrorExitSucceed: { - _enum: ['Stopped', 'Returned', 'Suicided'] + EthereumTransactionLegacyTransaction: { + nonce: 'U256', + gasPrice: 'U256', + gasLimit: 'U256', + action: 'EthereumTransactionTransactionAction', + value: 'U256', + input: 'Bytes', + signature: 'EthereumTransactionTransactionSignature' }, /** - * Lookup299: evm_core::error::ExitError + * Lookup330: ethereum::transaction::TransactionAction **/ - EvmCoreErrorExitError: { + EthereumTransactionTransactionAction: { _enum: { - 
StackUnderflow: 'Null', - StackOverflow: 'Null', - InvalidJump: 'Null', - InvalidRange: 'Null', - DesignatedInvalid: 'Null', - CallTooDeep: 'Null', - CreateCollision: 'Null', - CreateContractLimit: 'Null', - OutOfOffset: 'Null', - OutOfGas: 'Null', - OutOfFund: 'Null', - PCUnderflow: 'Null', - CreateEmpty: 'Null', - Other: 'Text', - InvalidCode: 'Null' + Call: 'H160', + Create: 'Null' } }, /** - * Lookup302: evm_core::error::ExitRevert + * Lookup331: ethereum::transaction::TransactionSignature **/ - EvmCoreErrorExitRevert: { - _enum: ['Reverted'] + EthereumTransactionTransactionSignature: { + v: 'u64', + r: 'H256', + s: 'H256' }, /** - * Lookup303: evm_core::error::ExitFatal + * Lookup333: ethereum::transaction::EIP2930Transaction **/ - EvmCoreErrorExitFatal: { - _enum: { - NotSupported: 'Null', - UnhandledInterrupt: 'Null', - CallErrorAsFatal: 'EvmCoreErrorExitError', - Other: 'Text' - } + EthereumTransactionEip2930Transaction: { + chainId: 'u64', + nonce: 'U256', + gasPrice: 'U256', + gasLimit: 'U256', + action: 'EthereumTransactionTransactionAction', + value: 'U256', + input: 'Bytes', + accessList: 'Vec', + oddYParity: 'bool', + r: 'H256', + s: 'H256' }, /** - * Lookup304: pallet_maintenance::pallet::Event + * Lookup335: ethereum::transaction::AccessListItem **/ - PalletMaintenanceEvent: { - _enum: ['MaintenanceEnabled', 'MaintenanceDisabled'] + EthereumTransactionAccessListItem: { + address: 'H160', + storageKeys: 'Vec' }, /** - * Lookup305: frame_system::Phase + * Lookup336: ethereum::transaction::EIP1559Transaction **/ - FrameSystemPhase: { - _enum: { - ApplyExtrinsic: 'u32', - Finalization: 'Null', - Initialization: 'Null' - } + EthereumTransactionEip1559Transaction: { + chainId: 'u64', + nonce: 'U256', + maxPriorityFeePerGas: 'U256', + maxFeePerGas: 'U256', + gasLimit: 'U256', + action: 'EthereumTransactionTransactionAction', + value: 'U256', + input: 'Bytes', + accessList: 'Vec', + oddYParity: 'bool', + r: 'H256', + s: 'H256' }, /** - * Lookup307: frame_system::LastRuntimeUpgradeInfo + * Lookup337: pallet_evm_migration::pallet::Call **/ - FrameSystemLastRuntimeUpgradeInfo: { - specVersion: 'Compact', - specName: 'Text' + PalletEvmMigrationCall: { + _enum: { + begin: { + address: 'H160', + }, + set_data: { + address: 'H160', + data: 'Vec<(H256,H256)>', + }, + finish: { + address: 'H160', + code: 'Bytes' + } + } }, /** - * Lookup308: frame_system::limits::BlockWeights + * Lookup340: pallet_maintenance::pallet::Call **/ - FrameSystemLimitsBlockWeights: { - baseBlock: 'u64', - maxBlock: 'u64', - perClass: 'FrameSupportWeightsPerDispatchClassWeightsPerClass' + PalletMaintenanceCall: { + _enum: ['enable', 'disable'] }, /** - * Lookup309: frame_support::weights::PerDispatchClass + * Lookup341: pallet_test_utils::pallet::Call **/ - FrameSupportWeightsPerDispatchClassWeightsPerClass: { - normal: 'FrameSystemLimitsWeightsPerClass', - operational: 'FrameSystemLimitsWeightsPerClass', - mandatory: 'FrameSystemLimitsWeightsPerClass' + PalletTestUtilsCall: { + _enum: { + enable: 'Null', + set_test_value: { + value: 'u32', + }, + set_test_value_and_rollback: { + value: 'u32', + }, + inc_test_value: 'Null', + self_canceling_inc: { + id: '[u8;16]', + maxTestValue: 'u32', + }, + just_take_fee: 'Null' + } }, /** - * Lookup310: frame_system::limits::WeightsPerClass + * Lookup342: pallet_sudo::pallet::Error **/ - FrameSystemLimitsWeightsPerClass: { - baseExtrinsic: 'u64', - maxExtrinsic: 'Option', - maxTotal: 'Option', - reserved: 'Option' + PalletSudoError: { + _enum: ['RequireSudo'] }, /** - * 
Lookup312: frame_system::limits::BlockLength + * Lookup344: orml_vesting::module::Error **/ - FrameSystemLimitsBlockLength: { - max: 'FrameSupportWeightsPerDispatchClassU32' + OrmlVestingModuleError: { + _enum: ['ZeroVestingPeriod', 'ZeroVestingPeriodCount', 'InsufficientBalanceToLock', 'TooManyVestingSchedules', 'AmountLow', 'MaxVestingSchedulesExceeded'] }, /** - * Lookup313: frame_support::weights::PerDispatchClass + * Lookup345: orml_xtokens::module::Error **/ - FrameSupportWeightsPerDispatchClassU32: { - normal: 'u32', - operational: 'u32', - mandatory: 'u32' + OrmlXtokensModuleError: { + _enum: ['AssetHasNoReserve', 'NotCrossChainTransfer', 'InvalidDest', 'NotCrossChainTransferableCurrency', 'UnweighableMessage', 'XcmExecutionFailed', 'CannotReanchor', 'InvalidAncestry', 'InvalidAsset', 'DestinationNotInvertible', 'BadVersion', 'DistinctReserveForAssetAndFee', 'ZeroFee', 'ZeroAmount', 'TooManyAssetsBeingSent', 'AssetIndexNonExistent', 'FeeNotEnough', 'NotSupportedMultiLocation', 'MinXcmFeeNotDefined'] }, /** - * Lookup314: frame_support::weights::RuntimeDbWeight + * Lookup348: orml_tokens::BalanceLock **/ - FrameSupportWeightsRuntimeDbWeight: { - read: 'u64', - write: 'u64' + OrmlTokensBalanceLock: { + id: '[u8;8]', + amount: 'u128' }, /** - * Lookup315: sp_version::RuntimeVersion + * Lookup350: orml_tokens::AccountData **/ - SpVersionRuntimeVersion: { - specName: 'Text', - implName: 'Text', - authoringVersion: 'u32', - specVersion: 'u32', - implVersion: 'u32', - apis: 'Vec<([u8;8],u32)>', - transactionVersion: 'u32', - stateVersion: 'u8' + OrmlTokensAccountData: { + free: 'u128', + reserved: 'u128', + frozen: 'u128' }, /** - * Lookup319: frame_system::pallet::Error + * Lookup352: orml_tokens::ReserveData **/ - FrameSystemError: { - _enum: ['InvalidSpecName', 'SpecVersionNeedsToIncrease', 'FailedToExtractRuntimeVersion', 'NonDefaultComposite', 'NonZeroRefCount', 'CallFiltered'] + OrmlTokensReserveData: { + id: 'Null', + amount: 'u128' }, /** - * Lookup321: orml_vesting::module::Error + * Lookup354: orml_tokens::module::Error **/ - OrmlVestingModuleError: { - _enum: ['ZeroVestingPeriod', 'ZeroVestingPeriodCount', 'InsufficientBalanceToLock', 'TooManyVestingSchedules', 'AmountLow', 'MaxVestingSchedulesExceeded'] + OrmlTokensModuleError: { + _enum: ['BalanceTooLow', 'AmountIntoBalanceFailed', 'LiquidityRestrictions', 'MaxLocksExceeded', 'KeepAlive', 'ExistentialDeposit', 'DeadAccount', 'TooManyReserves'] }, /** - * Lookup323: cumulus_pallet_xcmp_queue::InboundChannelDetails + * Lookup356: cumulus_pallet_xcmp_queue::InboundChannelDetails **/ CumulusPalletXcmpQueueInboundChannelDetails: { sender: 'u32', @@ -2562,19 +2988,19 @@ export default { messageMetadata: 'Vec<(u32,PolkadotParachainPrimitivesXcmpMessageFormat)>' }, /** - * Lookup324: cumulus_pallet_xcmp_queue::InboundState + * Lookup357: cumulus_pallet_xcmp_queue::InboundState **/ CumulusPalletXcmpQueueInboundState: { _enum: ['Ok', 'Suspended'] }, /** - * Lookup327: polkadot_parachain::primitives::XcmpMessageFormat + * Lookup360: polkadot_parachain::primitives::XcmpMessageFormat **/ PolkadotParachainPrimitivesXcmpMessageFormat: { _enum: ['ConcatenatedVersionedXcm', 'ConcatenatedEncodedBlob', 'Signals'] }, /** - * Lookup330: cumulus_pallet_xcmp_queue::OutboundChannelDetails + * Lookup363: cumulus_pallet_xcmp_queue::OutboundChannelDetails **/ CumulusPalletXcmpQueueOutboundChannelDetails: { recipient: 'u32', @@ -2584,46 +3010,46 @@ export default { lastIndex: 'u16' }, /** - * Lookup331: cumulus_pallet_xcmp_queue::OutboundState + * 
Lookup364: cumulus_pallet_xcmp_queue::OutboundState **/ CumulusPalletXcmpQueueOutboundState: { _enum: ['Ok', 'Suspended'] }, /** - * Lookup333: cumulus_pallet_xcmp_queue::QueueConfigData + * Lookup366: cumulus_pallet_xcmp_queue::QueueConfigData **/ CumulusPalletXcmpQueueQueueConfigData: { suspendThreshold: 'u32', dropThreshold: 'u32', resumeThreshold: 'u32', - thresholdWeight: 'u64', - weightRestrictDecay: 'u64', - xcmpMaxIndividualWeight: 'u64' + thresholdWeight: 'Weight', + weightRestrictDecay: 'Weight', + xcmpMaxIndividualWeight: 'Weight' }, /** - * Lookup335: cumulus_pallet_xcmp_queue::pallet::Error + * Lookup368: cumulus_pallet_xcmp_queue::pallet::Error **/ CumulusPalletXcmpQueueError: { _enum: ['FailedToSend', 'BadXcmOrigin', 'BadXcm', 'BadOverweightIndex', 'WeightOverLimit'] }, /** - * Lookup336: pallet_xcm::pallet::Error + * Lookup369: pallet_xcm::pallet::Error **/ PalletXcmError: { _enum: ['Unreachable', 'SendFailure', 'Filtered', 'UnweighableMessage', 'DestinationNotInvertible', 'Empty', 'CannotReanchor', 'TooManyAssets', 'InvalidOrigin', 'BadVersion', 'BadLocation', 'NoSubscription', 'AlreadySubscribed'] }, /** - * Lookup337: cumulus_pallet_xcm::pallet::Error + * Lookup370: cumulus_pallet_xcm::pallet::Error **/ CumulusPalletXcmError: 'Null', /** - * Lookup338: cumulus_pallet_dmp_queue::ConfigData + * Lookup371: cumulus_pallet_dmp_queue::ConfigData **/ CumulusPalletDmpQueueConfigData: { - maxIndividual: 'u64' + maxIndividual: 'Weight' }, /** - * Lookup339: cumulus_pallet_dmp_queue::PageIndexData + * Lookup372: cumulus_pallet_dmp_queue::PageIndexData **/ CumulusPalletDmpQueuePageIndexData: { beginUsed: 'u32', @@ -2631,19 +3057,19 @@ export default { overweightCount: 'u64' }, /** - * Lookup342: cumulus_pallet_dmp_queue::pallet::Error + * Lookup375: cumulus_pallet_dmp_queue::pallet::Error **/ CumulusPalletDmpQueueError: { _enum: ['Unknown', 'OverLimit'] }, /** - * Lookup346: pallet_unique::Error + * Lookup379: pallet_unique::Error **/ PalletUniqueError: { _enum: ['CollectionDecimalPointLimitExceeded', 'ConfirmUnsetSponsorFail', 'EmptyArgument', 'RepartitionCalledOnNonRefungibleCollection'] }, /** - * Lookup349: pallet_unique_scheduler::ScheduledV3, BlockNumber, opal_runtime::OriginCaller, sp_core::crypto::AccountId32> + * Lookup382: pallet_unique_scheduler::ScheduledV3, BlockNumber, opal_runtime::OriginCaller, sp_core::crypto::AccountId32> **/ PalletUniqueSchedulerScheduledV3: { maybeId: 'Option<[u8;16]>', @@ -2653,11 +3079,11 @@ export default { origin: 'OpalRuntimeOriginCaller' }, /** - * Lookup350: opal_runtime::OriginCaller + * Lookup383: opal_runtime::OriginCaller **/ OpalRuntimeOriginCaller: { _enum: { - __Unused0: 'Null', + system: 'FrameSupportDispatchRawOrigin', __Unused1: 'Null', __Unused2: 'Null', __Unused3: 'Null', @@ -2693,7 +3119,7 @@ export default { __Unused33: 'Null', __Unused34: 'Null', __Unused35: 'Null', - system: 'FrameSupportDispatchRawOrigin', + __Unused36: 'Null', __Unused37: 'Null', __Unused38: 'Null', __Unused39: 'Null', @@ -2762,7 +3188,7 @@ export default { } }, /** - * Lookup351: frame_support::dispatch::RawOrigin + * Lookup384: frame_support::dispatch::RawOrigin **/ FrameSupportDispatchRawOrigin: { _enum: { @@ -2772,7 +3198,7 @@ export default { } }, /** - * Lookup352: pallet_xcm::pallet::Origin + * Lookup385: pallet_xcm::pallet::Origin **/ PalletXcmOrigin: { _enum: { @@ -2781,7 +3207,7 @@ export default { } }, /** - * Lookup353: cumulus_pallet_xcm::pallet::Origin + * Lookup386: cumulus_pallet_xcm::pallet::Origin **/ CumulusPalletXcmOrigin: { _enum: { 
@@ -2790,7 +3216,7 @@ export default { } }, /** - * Lookup354: pallet_ethereum::RawOrigin + * Lookup387: pallet_ethereum::RawOrigin **/ PalletEthereumRawOrigin: { _enum: { @@ -2798,17 +3224,17 @@ export default { } }, /** - * Lookup355: sp_core::Void + * Lookup388: sp_core::Void **/ SpCoreVoid: 'Null', /** - * Lookup356: pallet_unique_scheduler::pallet::Error + * Lookup389: pallet_unique_scheduler::pallet::Error **/ PalletUniqueSchedulerError: { _enum: ['FailedToSchedule', 'NotFound', 'TargetBlockNumberInPast', 'RescheduleNoChange'] }, /** - * Lookup357: up_data_structs::Collection + * Lookup390: up_data_structs::Collection **/ UpDataStructsCollection: { owner: 'AccountId32', @@ -2816,15 +3242,15 @@ export default { name: 'Vec', description: 'Vec', tokenPrefix: 'Bytes', - sponsorship: 'UpDataStructsSponsorshipState', + sponsorship: 'UpDataStructsSponsorshipStateAccountId32', limits: 'UpDataStructsCollectionLimits', permissions: 'UpDataStructsCollectionPermissions', - externalCollection: 'bool' + flags: '[u8;1]' }, /** - * Lookup358: up_data_structs::SponsorshipState + * Lookup391: up_data_structs::SponsorshipState **/ - UpDataStructsSponsorshipState: { + UpDataStructsSponsorshipStateAccountId32: { _enum: { Disabled: 'Null', Unconfirmed: 'AccountId32', @@ -2832,7 +3258,7 @@ export default { } }, /** - * Lookup359: up_data_structs::Properties + * Lookup393: up_data_structs::Properties **/ UpDataStructsProperties: { map: 'UpDataStructsPropertiesMapBoundedVec', @@ -2840,15 +3266,15 @@ export default { spaceLimit: 'u32' }, /** - * Lookup360: up_data_structs::PropertiesMap> + * Lookup394: up_data_structs::PropertiesMap> **/ UpDataStructsPropertiesMapBoundedVec: 'BTreeMap', /** - * Lookup365: up_data_structs::PropertiesMap + * Lookup399: up_data_structs::PropertiesMap **/ UpDataStructsPropertiesMapPropertyPermission: 'BTreeMap', /** - * Lookup372: up_data_structs::CollectionStats + * Lookup406: up_data_structs::CollectionStats **/ UpDataStructsCollectionStats: { created: 'u32', @@ -2856,25 +3282,26 @@ export default { alive: 'u32' }, /** - * Lookup373: up_data_structs::TokenChild + * Lookup407: up_data_structs::TokenChild **/ UpDataStructsTokenChild: { token: 'u32', collection: 'u32' }, /** - * Lookup374: PhantomType::up_data_structs + * Lookup408: PhantomType::up_data_structs **/ PhantomTypeUpDataStructs: '[(UpDataStructsTokenData,UpDataStructsRpcCollection,RmrkTraitsCollectionCollectionInfo,RmrkTraitsNftNftInfo,RmrkTraitsResourceResourceInfo,RmrkTraitsPropertyPropertyInfo,RmrkTraitsBaseBaseInfo,RmrkTraitsPartPartType,RmrkTraitsTheme,RmrkTraitsNftNftChild);0]', /** - * Lookup376: up_data_structs::TokenData> + * Lookup410: up_data_structs::TokenData> **/ UpDataStructsTokenData: { properties: 'Vec', - owner: 'Option' + owner: 'Option', + pieces: 'u128' }, /** - * Lookup378: up_data_structs::RpcCollection + * Lookup412: up_data_structs::RpcCollection **/ UpDataStructsRpcCollection: { owner: 'AccountId32', @@ -2882,15 +3309,23 @@ export default { name: 'Vec', description: 'Vec', tokenPrefix: 'Bytes', - sponsorship: 'UpDataStructsSponsorshipState', + sponsorship: 'UpDataStructsSponsorshipStateAccountId32', limits: 'UpDataStructsCollectionLimits', permissions: 'UpDataStructsCollectionPermissions', tokenPropertyPermissions: 'Vec', properties: 'Vec', - readOnly: 'bool' + readOnly: 'bool', + flags: 'UpDataStructsRpcCollectionFlags' }, /** - * Lookup379: rmrk_traits::collection::CollectionInfo, frame_support::storage::bounded_vec::BoundedVec, sp_core::crypto::AccountId32> + * Lookup413: 
up_data_structs::RpcCollectionFlags + **/ + UpDataStructsRpcCollectionFlags: { + foreign: 'bool', + erc721metadata: 'bool' + }, + /** + * Lookup414: rmrk_traits::collection::CollectionInfo, sp_core::bounded::bounded_vec::BoundedVec, sp_core::crypto::AccountId32> **/ RmrkTraitsCollectionCollectionInfo: { issuer: 'AccountId32', @@ -2900,7 +3335,7 @@ export default { nftsCount: 'u32' }, /** - * Lookup380: rmrk_traits::nft::NftInfo> + * Lookup415: rmrk_traits::nft::NftInfo> **/ RmrkTraitsNftNftInfo: { owner: 'RmrkTraitsNftAccountIdOrCollectionNftTuple', @@ -2910,14 +3345,14 @@ export default { pending: 'bool' }, /** - * Lookup382: rmrk_traits::nft::RoyaltyInfo + * Lookup417: rmrk_traits::nft::RoyaltyInfo **/ RmrkTraitsNftRoyaltyInfo: { recipient: 'AccountId32', amount: 'Permill' }, /** - * Lookup383: rmrk_traits::resource::ResourceInfo, frame_support::storage::bounded_vec::BoundedVec> + * Lookup418: rmrk_traits::resource::ResourceInfo, sp_core::bounded::bounded_vec::BoundedVec> **/ RmrkTraitsResourceResourceInfo: { id: 'u32', @@ -2926,14 +3361,14 @@ export default { pendingRemoval: 'bool' }, /** - * Lookup384: rmrk_traits::property::PropertyInfo, frame_support::storage::bounded_vec::BoundedVec> + * Lookup419: rmrk_traits::property::PropertyInfo, sp_core::bounded::bounded_vec::BoundedVec> **/ RmrkTraitsPropertyPropertyInfo: { key: 'Bytes', value: 'Bytes' }, /** - * Lookup385: rmrk_traits::base::BaseInfo> + * Lookup420: rmrk_traits::base::BaseInfo> **/ RmrkTraitsBaseBaseInfo: { issuer: 'AccountId32', @@ -2941,80 +3376,92 @@ export default { symbol: 'Bytes' }, /** - * Lookup386: rmrk_traits::nft::NftChild + * Lookup421: rmrk_traits::nft::NftChild **/ RmrkTraitsNftNftChild: { collectionId: 'u32', nftId: 'u32' }, /** - * Lookup388: pallet_common::pallet::Error + * Lookup423: pallet_common::pallet::Error **/ PalletCommonError: { _enum: ['CollectionNotFound', 'MustBeTokenOwner', 'NoPermission', 'CantDestroyNotEmptyCollection', 'PublicMintingNotAllowed', 'AddressNotInAllowlist', 'CollectionNameLimitExceeded', 'CollectionDescriptionLimitExceeded', 'CollectionTokenPrefixLimitExceeded', 'TotalCollectionsLimitExceeded', 'CollectionAdminCountExceeded', 'CollectionLimitBoundsExceeded', 'OwnerPermissionsCantBeReverted', 'TransferNotAllowed', 'AccountTokenLimitExceeded', 'CollectionTokenLimitExceeded', 'MetadataFlagFrozen', 'TokenNotFound', 'TokenValueTooLow', 'ApprovedValueTooLow', 'CantApproveMoreThanOwned', 'AddressIsZero', 'UnsupportedOperation', 'NotSufficientFounds', 'UserIsNotAllowedToNest', 'SourceCollectionIsNotAllowedToNest', 'CollectionFieldSizeExceeded', 'NoSpaceForProperty', 'PropertyLimitReached', 'PropertyKeyIsTooLong', 'InvalidCharacterInPropertyKey', 'EmptyPropertyKey', 'CollectionIsExternal', 'CollectionIsInternal'] }, /** - * Lookup390: pallet_fungible::pallet::Error + * Lookup425: pallet_fungible::pallet::Error **/ PalletFungibleError: { _enum: ['NotFungibleDataUsedToMintFungibleCollectionToken', 'FungibleItemsHaveNoId', 'FungibleItemsDontHaveData', 'FungibleDisallowsNesting', 'SettingPropertiesNotAllowed'] }, /** - * Lookup391: pallet_refungible::ItemData + * Lookup426: pallet_refungible::ItemData **/ PalletRefungibleItemData: { constData: 'Bytes' }, /** - * Lookup395: pallet_refungible::pallet::Error + * Lookup431: pallet_refungible::pallet::Error **/ PalletRefungibleError: { _enum: ['NotRefungibleDataUsedToMintFungibleCollectionToken', 'WrongRefungiblePieces', 'RepartitionWhileNotOwningAllPieces', 'RefungibleDisallowsNesting', 'SettingPropertiesNotAllowed'] }, /** - * Lookup396: 
pallet_nonfungible::ItemData> + * Lookup432: pallet_nonfungible::ItemData> **/ PalletNonfungibleItemData: { owner: 'PalletEvmAccountBasicCrossAccountIdRepr' }, /** - * Lookup398: up_data_structs::PropertyScope + * Lookup434: up_data_structs::PropertyScope **/ UpDataStructsPropertyScope: { _enum: ['None', 'Rmrk'] }, /** - * Lookup400: pallet_nonfungible::pallet::Error + * Lookup436: pallet_nonfungible::pallet::Error **/ PalletNonfungibleError: { _enum: ['NotNonfungibleDataUsedToMintFungibleCollectionToken', 'NonfungibleItemsHaveNoAmount', 'CantBurnNftWithChildren'] }, /** - * Lookup401: pallet_structure::pallet::Error + * Lookup437: pallet_structure::pallet::Error **/ PalletStructureError: { _enum: ['OuroborosDetected', 'DepthLimit', 'BreadthLimit', 'TokenNotFound'] }, /** - * Lookup402: pallet_rmrk_core::pallet::Error + * Lookup438: pallet_rmrk_core::pallet::Error **/ PalletRmrkCoreError: { - _enum: ['CorruptedCollectionType', 'NftTypeEncodeError', 'RmrkPropertyKeyIsTooLong', 'RmrkPropertyValueIsTooLong', 'RmrkPropertyIsNotFound', 'UnableToDecodeRmrkData', 'CollectionNotEmpty', 'NoAvailableCollectionId', 'NoAvailableNftId', 'CollectionUnknown', 'NoPermission', 'NonTransferable', 'CollectionFullOrLocked', 'ResourceDoesntExist', 'CannotSendToDescendentOrSelf', 'CannotAcceptNonOwnedNft', 'CannotRejectNonOwnedNft', 'CannotRejectNonPendingNft', 'ResourceNotPending', 'NoAvailableResourceId'] + _enum: ['CorruptedCollectionType', 'RmrkPropertyKeyIsTooLong', 'RmrkPropertyValueIsTooLong', 'RmrkPropertyIsNotFound', 'UnableToDecodeRmrkData', 'CollectionNotEmpty', 'NoAvailableCollectionId', 'NoAvailableNftId', 'CollectionUnknown', 'NoPermission', 'NonTransferable', 'CollectionFullOrLocked', 'ResourceDoesntExist', 'CannotSendToDescendentOrSelf', 'CannotAcceptNonOwnedNft', 'CannotRejectNonOwnedNft', 'CannotRejectNonPendingNft', 'ResourceNotPending', 'NoAvailableResourceId'] }, /** - * Lookup404: pallet_rmrk_equip::pallet::Error + * Lookup440: pallet_rmrk_equip::pallet::Error **/ PalletRmrkEquipError: { _enum: ['PermissionError', 'NoAvailableBaseId', 'NoAvailablePartId', 'BaseDoesntExist', 'NeedsDefaultThemeFirst', 'PartDoesntExist', 'NoEquippableOnFixedPart'] }, /** - * Lookup407: pallet_evm::pallet::Error + * Lookup446: pallet_app_promotion::pallet::Error + **/ + PalletAppPromotionError: { + _enum: ['AdminNotSet', 'NoPermission', 'NotSufficientFunds', 'PendingForBlockOverflow', 'SponsorNotSet', 'IncorrectLockedBalanceOperation'] + }, + /** + * Lookup447: pallet_foreign_assets::module::Error + **/ + PalletForeignAssetsModuleError: { + _enum: ['BadLocation', 'MultiLocationExisted', 'AssetIdNotExists', 'AssetIdExisted'] + }, + /** + * Lookup450: pallet_evm::pallet::Error **/ PalletEvmError: { _enum: ['BalanceLow', 'FeeOverflow', 'PaymentOverflow', 'WithdrawFailed', 'GasPriceTooLow', 'InvalidNonce'] }, /** - * Lookup410: fp_rpc::TransactionStatus + * Lookup453: fp_rpc::TransactionStatus **/ FpRpcTransactionStatus: { transactionHash: 'H256', @@ -3026,11 +3473,11 @@ export default { logsBloom: 'EthbloomBloom' }, /** - * Lookup412: ethbloom::Bloom + * Lookup455: ethbloom::Bloom **/ EthbloomBloom: '[u8;256]', /** - * Lookup414: ethereum::receipt::ReceiptV3 + * Lookup457: ethereum::receipt::ReceiptV3 **/ EthereumReceiptReceiptV3: { _enum: { @@ -3040,7 +3487,7 @@ export default { } }, /** - * Lookup415: ethereum::receipt::EIP658ReceiptData + * Lookup458: ethereum::receipt::EIP658ReceiptData **/ EthereumReceiptEip658ReceiptData: { statusCode: 'u8', @@ -3049,7 +3496,7 @@ export default { logs: 'Vec' }, /** - * 
Lookup416: ethereum::block::Block + * Lookup459: ethereum::block::Block **/ EthereumBlock: { header: 'EthereumHeader', @@ -3057,7 +3504,7 @@ export default { ommers: 'Vec' }, /** - * Lookup417: ethereum::header::Header + * Lookup460: ethereum::header::Header **/ EthereumHeader: { parentHash: 'H256', @@ -3077,45 +3524,61 @@ export default { nonce: 'EthereumTypesHashH64' }, /** - * Lookup418: ethereum_types::hash::H64 + * Lookup461: ethereum_types::hash::H64 **/ EthereumTypesHashH64: '[u8;8]', /** - * Lookup423: pallet_ethereum::pallet::Error + * Lookup466: pallet_ethereum::pallet::Error **/ PalletEthereumError: { _enum: ['InvalidSignature', 'PreLogExists'] }, /** - * Lookup424: pallet_evm_coder_substrate::pallet::Error + * Lookup467: pallet_evm_coder_substrate::pallet::Error **/ PalletEvmCoderSubstrateError: { _enum: ['OutOfGas', 'OutOfFund'] }, /** - * Lookup425: pallet_evm_contract_helpers::SponsoringModeT + * Lookup468: up_data_structs::SponsorshipState> + **/ + UpDataStructsSponsorshipStateBasicCrossAccountIdRepr: { + _enum: { + Disabled: 'Null', + Unconfirmed: 'PalletEvmAccountBasicCrossAccountIdRepr', + Confirmed: 'PalletEvmAccountBasicCrossAccountIdRepr' + } + }, + /** + * Lookup469: pallet_evm_contract_helpers::SponsoringModeT **/ PalletEvmContractHelpersSponsoringModeT: { _enum: ['Disabled', 'Allowlisted', 'Generous'] }, /** - * Lookup427: pallet_evm_contract_helpers::pallet::Error + * Lookup475: pallet_evm_contract_helpers::pallet::Error **/ PalletEvmContractHelpersError: { - _enum: ['NoPermission'] + _enum: ['NoPermission', 'NoPendingSponsor', 'TooManyMethodsHaveSponsoredLimit'] }, /** - * Lookup428: pallet_evm_migration::pallet::Error + * Lookup476: pallet_evm_migration::pallet::Error **/ PalletEvmMigrationError: { _enum: ['AccountNotEmpty', 'AccountIsNotMigrating'] }, /** - * Lookup429: pallet_maintenance::pallet::Error + * Lookup477: pallet_maintenance::pallet::Error **/ PalletMaintenanceError: 'Null', /** - * Lookup431: sp_runtime::MultiSignature + * Lookup478: pallet_test_utils::pallet::Error + **/ + PalletTestUtilsError: { + _enum: ['TestPalletDisabled', 'TriggerRollback'] + }, + /** + * Lookup480: sp_runtime::MultiSignature **/ SpRuntimeMultiSignature: { _enum: { @@ -3125,47 +3588,51 @@ export default { } }, /** - * Lookup432: sp_core::ed25519::Signature + * Lookup481: sp_core::ed25519::Signature **/ SpCoreEd25519Signature: '[u8;64]', /** - * Lookup434: sp_core::sr25519::Signature + * Lookup483: sp_core::sr25519::Signature **/ SpCoreSr25519Signature: '[u8;64]', /** - * Lookup435: sp_core::ecdsa::Signature + * Lookup484: sp_core::ecdsa::Signature **/ SpCoreEcdsaSignature: '[u8;65]', /** - * Lookup438: frame_system::extensions::check_spec_version::CheckSpecVersion + * Lookup487: frame_system::extensions::check_spec_version::CheckSpecVersion **/ FrameSystemExtensionsCheckSpecVersion: 'Null', /** - * Lookup439: frame_system::extensions::check_genesis::CheckGenesis + * Lookup488: frame_system::extensions::check_tx_version::CheckTxVersion + **/ + FrameSystemExtensionsCheckTxVersion: 'Null', + /** + * Lookup489: frame_system::extensions::check_genesis::CheckGenesis **/ FrameSystemExtensionsCheckGenesis: 'Null', /** - * Lookup442: frame_system::extensions::check_nonce::CheckNonce + * Lookup492: frame_system::extensions::check_nonce::CheckNonce **/ FrameSystemExtensionsCheckNonce: 'Compact', /** - * Lookup443: frame_system::extensions::check_weight::CheckWeight + * Lookup493: frame_system::extensions::check_weight::CheckWeight **/ FrameSystemExtensionsCheckWeight: 'Null', /** - * 
Lookup444: opal_runtime::CheckMaintenance + * Lookup494: opal_runtime::runtime_common::maintenance::CheckMaintenance **/ - OpalRuntimeCheckMaintenance: 'Null', + OpalRuntimeRuntimeCommonMaintenanceCheckMaintenance: 'Null', /** - * Lookup445: pallet_template_transaction_payment::ChargeTransactionPayment + * Lookup495: pallet_template_transaction_payment::ChargeTransactionPayment **/ PalletTemplateTransactionPaymentChargeTransactionPayment: 'Compact', /** - * Lookup446: opal_runtime::Runtime + * Lookup496: opal_runtime::Runtime **/ OpalRuntimeRuntime: 'Null', /** - * Lookup447: pallet_ethereum::FakeTransactionFinalizer + * Lookup497: pallet_ethereum::FakeTransactionFinalizer **/ PalletEthereumFakeTransactionFinalizer: 'Null' }; diff --git a/tests/src/interfaces/registry.ts b/tests/src/interfaces/registry.ts index 098e4a6486..f22d0712d5 100644 --- a/tests/src/interfaces/registry.ts +++ b/tests/src/interfaces/registry.ts @@ -1,10 +1,14 @@ // Auto-generated via `yarn polkadot-types-from-defs`, do not edit /* eslint-disable */ -import type { CumulusPalletDmpQueueCall, CumulusPalletDmpQueueConfigData, CumulusPalletDmpQueueError, CumulusPalletDmpQueueEvent, CumulusPalletDmpQueuePageIndexData, CumulusPalletParachainSystemCall, CumulusPalletParachainSystemError, CumulusPalletParachainSystemEvent, CumulusPalletParachainSystemRelayStateSnapshotMessagingStateSnapshot, CumulusPalletXcmCall, CumulusPalletXcmError, CumulusPalletXcmEvent, CumulusPalletXcmOrigin, CumulusPalletXcmpQueueCall, CumulusPalletXcmpQueueError, CumulusPalletXcmpQueueEvent, CumulusPalletXcmpQueueInboundChannelDetails, CumulusPalletXcmpQueueInboundState, CumulusPalletXcmpQueueOutboundChannelDetails, CumulusPalletXcmpQueueOutboundState, CumulusPalletXcmpQueueQueueConfigData, CumulusPrimitivesParachainInherentParachainInherentData, EthbloomBloom, EthereumBlock, EthereumHeader, EthereumLog, EthereumReceiptEip658ReceiptData, EthereumReceiptReceiptV3, EthereumTransactionAccessListItem, EthereumTransactionEip1559Transaction, EthereumTransactionEip2930Transaction, EthereumTransactionLegacyTransaction, EthereumTransactionTransactionAction, EthereumTransactionTransactionSignature, EthereumTransactionTransactionV2, EthereumTypesHashH64, EvmCoreErrorExitError, EvmCoreErrorExitFatal, EvmCoreErrorExitReason, EvmCoreErrorExitRevert, EvmCoreErrorExitSucceed, FpRpcTransactionStatus, FrameSupportDispatchRawOrigin, FrameSupportPalletId, FrameSupportScheduleLookupError, FrameSupportScheduleMaybeHashed, FrameSupportTokensMiscBalanceStatus, FrameSupportWeightsDispatchClass, FrameSupportWeightsDispatchInfo, FrameSupportWeightsPays, FrameSupportWeightsPerDispatchClassU32, FrameSupportWeightsPerDispatchClassU64, FrameSupportWeightsPerDispatchClassWeightsPerClass, FrameSupportWeightsRuntimeDbWeight, FrameSystemAccountInfo, FrameSystemCall, FrameSystemError, FrameSystemEvent, FrameSystemEventRecord, FrameSystemExtensionsCheckGenesis, FrameSystemExtensionsCheckNonce, FrameSystemExtensionsCheckSpecVersion, FrameSystemExtensionsCheckWeight, FrameSystemLastRuntimeUpgradeInfo, FrameSystemLimitsBlockLength, FrameSystemLimitsBlockWeights, FrameSystemLimitsWeightsPerClass, FrameSystemPhase, OpalRuntimeCheckMaintenance, OpalRuntimeOriginCaller, OpalRuntimeRuntime, OrmlVestingModuleCall, OrmlVestingModuleError, OrmlVestingModuleEvent, OrmlVestingVestingSchedule, PalletBalancesAccountData, PalletBalancesBalanceLock, PalletBalancesCall, PalletBalancesError, PalletBalancesEvent, PalletBalancesReasons, PalletBalancesReleases, PalletBalancesReserveData, PalletCommonError, 
PalletCommonEvent, PalletEthereumCall, PalletEthereumError, PalletEthereumEvent, PalletEthereumFakeTransactionFinalizer, PalletEthereumRawOrigin, PalletEvmAccountBasicCrossAccountIdRepr, PalletEvmCall, PalletEvmCoderSubstrateError, PalletEvmContractHelpersError, PalletEvmContractHelpersSponsoringModeT, PalletEvmError, PalletEvmEvent, PalletEvmMigrationCall, PalletEvmMigrationError, PalletFungibleError, PalletInflationCall, PalletMaintenanceCall, PalletMaintenanceError, PalletMaintenanceEvent, PalletNonfungibleError, PalletNonfungibleItemData, PalletRefungibleError, PalletRefungibleItemData, PalletRmrkCoreCall, PalletRmrkCoreError, PalletRmrkCoreEvent, PalletRmrkEquipCall, PalletRmrkEquipError, PalletRmrkEquipEvent, PalletStructureCall, PalletStructureError, PalletStructureEvent, PalletSudoCall, PalletSudoError, PalletSudoEvent, PalletTemplateTransactionPaymentCall, PalletTemplateTransactionPaymentChargeTransactionPayment, PalletTimestampCall, PalletTransactionPaymentReleases, PalletTreasuryCall, PalletTreasuryError, PalletTreasuryEvent, PalletTreasuryProposal, PalletUniqueCall, PalletUniqueError, PalletUniqueRawEvent, PalletUniqueSchedulerCall, PalletUniqueSchedulerError, PalletUniqueSchedulerEvent, PalletUniqueSchedulerScheduledV3, PalletXcmCall, PalletXcmError, PalletXcmEvent, PalletXcmOrigin, PhantomTypeUpDataStructs, PolkadotCorePrimitivesInboundDownwardMessage, PolkadotCorePrimitivesInboundHrmpMessage, PolkadotCorePrimitivesOutboundHrmpMessage, PolkadotParachainPrimitivesXcmpMessageFormat, PolkadotPrimitivesV2AbridgedHostConfiguration, PolkadotPrimitivesV2AbridgedHrmpChannel, PolkadotPrimitivesV2PersistedValidationData, PolkadotPrimitivesV2UpgradeRestriction, RmrkTraitsBaseBaseInfo, RmrkTraitsCollectionCollectionInfo, RmrkTraitsNftAccountIdOrCollectionNftTuple, RmrkTraitsNftNftChild, RmrkTraitsNftNftInfo, RmrkTraitsNftRoyaltyInfo, RmrkTraitsPartEquippableList, RmrkTraitsPartFixedPart, RmrkTraitsPartPartType, RmrkTraitsPartSlotPart, RmrkTraitsPropertyPropertyInfo, RmrkTraitsResourceBasicResource, RmrkTraitsResourceComposableResource, RmrkTraitsResourceResourceInfo, RmrkTraitsResourceResourceTypes, RmrkTraitsResourceSlotResource, RmrkTraitsTheme, RmrkTraitsThemeThemeProperty, SpCoreEcdsaSignature, SpCoreEd25519Signature, SpCoreSr25519Signature, SpCoreVoid, SpRuntimeArithmeticError, SpRuntimeDigest, SpRuntimeDigestDigestItem, SpRuntimeDispatchError, SpRuntimeModuleError, SpRuntimeMultiSignature, SpRuntimeTokenError, SpRuntimeTransactionalError, SpTrieStorageProof, SpVersionRuntimeVersion, UpDataStructsAccessMode, UpDataStructsCollection, UpDataStructsCollectionLimits, UpDataStructsCollectionMode, UpDataStructsCollectionPermissions, UpDataStructsCollectionStats, UpDataStructsCreateCollectionData, UpDataStructsCreateFungibleData, UpDataStructsCreateItemData, UpDataStructsCreateItemExData, UpDataStructsCreateNftData, UpDataStructsCreateNftExData, UpDataStructsCreateReFungibleData, UpDataStructsCreateRefungibleExData, UpDataStructsNestingPermissions, UpDataStructsOwnerRestrictedSet, UpDataStructsProperties, UpDataStructsPropertiesMapBoundedVec, UpDataStructsPropertiesMapPropertyPermission, UpDataStructsProperty, UpDataStructsPropertyKeyPermission, UpDataStructsPropertyPermission, UpDataStructsPropertyScope, UpDataStructsRpcCollection, UpDataStructsSponsoringRateLimit, UpDataStructsSponsorshipState, UpDataStructsTokenChild, UpDataStructsTokenData, XcmDoubleEncoded, XcmV0Junction, XcmV0JunctionBodyId, XcmV0JunctionBodyPart, XcmV0JunctionNetworkId, XcmV0MultiAsset, XcmV0MultiLocation, 
XcmV0Order, XcmV0OriginKind, XcmV0Response, XcmV0Xcm, XcmV1Junction, XcmV1MultiAsset, XcmV1MultiLocation, XcmV1MultiassetAssetId, XcmV1MultiassetAssetInstance, XcmV1MultiassetFungibility, XcmV1MultiassetMultiAssetFilter, XcmV1MultiassetMultiAssets, XcmV1MultiassetWildFungibility, XcmV1MultiassetWildMultiAsset, XcmV1MultilocationJunctions, XcmV1Order, XcmV1Response, XcmV1Xcm, XcmV2Instruction, XcmV2Response, XcmV2TraitsError, XcmV2TraitsOutcome, XcmV2WeightLimit, XcmV2Xcm, XcmVersionedMultiAssets, XcmVersionedMultiLocation, XcmVersionedXcm } from '@polkadot/types/lookup'; +// import type lookup before we augment - in some environments +// this is required to allow for ambient/previous definitions +import '@polkadot/types/types/registry'; + +import type { CumulusPalletDmpQueueCall, CumulusPalletDmpQueueConfigData, CumulusPalletDmpQueueError, CumulusPalletDmpQueueEvent, CumulusPalletDmpQueuePageIndexData, CumulusPalletParachainSystemCall, CumulusPalletParachainSystemError, CumulusPalletParachainSystemEvent, CumulusPalletParachainSystemRelayStateSnapshotMessagingStateSnapshot, CumulusPalletXcmCall, CumulusPalletXcmError, CumulusPalletXcmEvent, CumulusPalletXcmOrigin, CumulusPalletXcmpQueueCall, CumulusPalletXcmpQueueError, CumulusPalletXcmpQueueEvent, CumulusPalletXcmpQueueInboundChannelDetails, CumulusPalletXcmpQueueInboundState, CumulusPalletXcmpQueueOutboundChannelDetails, CumulusPalletXcmpQueueOutboundState, CumulusPalletXcmpQueueQueueConfigData, CumulusPrimitivesParachainInherentParachainInherentData, EthbloomBloom, EthereumBlock, EthereumHeader, EthereumLog, EthereumReceiptEip658ReceiptData, EthereumReceiptReceiptV3, EthereumTransactionAccessListItem, EthereumTransactionEip1559Transaction, EthereumTransactionEip2930Transaction, EthereumTransactionLegacyTransaction, EthereumTransactionTransactionAction, EthereumTransactionTransactionSignature, EthereumTransactionTransactionV2, EthereumTypesHashH64, EvmCoreErrorExitError, EvmCoreErrorExitFatal, EvmCoreErrorExitReason, EvmCoreErrorExitRevert, EvmCoreErrorExitSucceed, FpRpcTransactionStatus, FrameSupportDispatchDispatchClass, FrameSupportDispatchDispatchInfo, FrameSupportDispatchPays, FrameSupportDispatchPerDispatchClassU32, FrameSupportDispatchPerDispatchClassWeight, FrameSupportDispatchPerDispatchClassWeightsPerClass, FrameSupportDispatchRawOrigin, FrameSupportPalletId, FrameSupportScheduleLookupError, FrameSupportScheduleMaybeHashed, FrameSupportTokensMiscBalanceStatus, FrameSystemAccountInfo, FrameSystemCall, FrameSystemError, FrameSystemEvent, FrameSystemEventRecord, FrameSystemExtensionsCheckGenesis, FrameSystemExtensionsCheckNonce, FrameSystemExtensionsCheckSpecVersion, FrameSystemExtensionsCheckTxVersion, FrameSystemExtensionsCheckWeight, FrameSystemLastRuntimeUpgradeInfo, FrameSystemLimitsBlockLength, FrameSystemLimitsBlockWeights, FrameSystemLimitsWeightsPerClass, FrameSystemPhase, OpalRuntimeOriginCaller, OpalRuntimeRuntime, OpalRuntimeRuntimeCommonMaintenanceCheckMaintenance, OrmlTokensAccountData, OrmlTokensBalanceLock, OrmlTokensModuleCall, OrmlTokensModuleError, OrmlTokensModuleEvent, OrmlTokensReserveData, OrmlVestingModuleCall, OrmlVestingModuleError, OrmlVestingModuleEvent, OrmlVestingVestingSchedule, OrmlXtokensModuleCall, OrmlXtokensModuleError, OrmlXtokensModuleEvent, PalletAppPromotionCall, PalletAppPromotionError, PalletAppPromotionEvent, PalletBalancesAccountData, PalletBalancesBalanceLock, PalletBalancesCall, PalletBalancesError, PalletBalancesEvent, PalletBalancesReasons, PalletBalancesReleases, 
PalletBalancesReserveData, PalletCommonError, PalletCommonEvent, PalletConfigurationCall, PalletEthereumCall, PalletEthereumError, PalletEthereumEvent, PalletEthereumFakeTransactionFinalizer, PalletEthereumRawOrigin, PalletEvmAccountBasicCrossAccountIdRepr, PalletEvmCall, PalletEvmCoderSubstrateError, PalletEvmContractHelpersError, PalletEvmContractHelpersEvent, PalletEvmContractHelpersSponsoringModeT, PalletEvmError, PalletEvmEvent, PalletEvmMigrationCall, PalletEvmMigrationError, PalletForeignAssetsAssetIds, PalletForeignAssetsModuleAssetMetadata, PalletForeignAssetsModuleCall, PalletForeignAssetsModuleError, PalletForeignAssetsModuleEvent, PalletForeignAssetsNativeCurrency, PalletFungibleError, PalletInflationCall, PalletMaintenanceCall, PalletMaintenanceError, PalletMaintenanceEvent, PalletNonfungibleError, PalletNonfungibleItemData, PalletRefungibleError, PalletRefungibleItemData, PalletRmrkCoreCall, PalletRmrkCoreError, PalletRmrkCoreEvent, PalletRmrkEquipCall, PalletRmrkEquipError, PalletRmrkEquipEvent, PalletStructureCall, PalletStructureError, PalletStructureEvent, PalletSudoCall, PalletSudoError, PalletSudoEvent, PalletTemplateTransactionPaymentCall, PalletTemplateTransactionPaymentChargeTransactionPayment, PalletTestUtilsCall, PalletTestUtilsError, PalletTestUtilsEvent, PalletTimestampCall, PalletTransactionPaymentEvent, PalletTransactionPaymentReleases, PalletTreasuryCall, PalletTreasuryError, PalletTreasuryEvent, PalletTreasuryProposal, PalletUniqueCall, PalletUniqueError, PalletUniqueRawEvent, PalletUniqueSchedulerCall, PalletUniqueSchedulerError, PalletUniqueSchedulerEvent, PalletUniqueSchedulerScheduledV3, PalletXcmCall, PalletXcmError, PalletXcmEvent, PalletXcmOrigin, PhantomTypeUpDataStructs, PolkadotCorePrimitivesInboundDownwardMessage, PolkadotCorePrimitivesInboundHrmpMessage, PolkadotCorePrimitivesOutboundHrmpMessage, PolkadotParachainPrimitivesXcmpMessageFormat, PolkadotPrimitivesV2AbridgedHostConfiguration, PolkadotPrimitivesV2AbridgedHrmpChannel, PolkadotPrimitivesV2PersistedValidationData, PolkadotPrimitivesV2UpgradeRestriction, RmrkTraitsBaseBaseInfo, RmrkTraitsCollectionCollectionInfo, RmrkTraitsNftAccountIdOrCollectionNftTuple, RmrkTraitsNftNftChild, RmrkTraitsNftNftInfo, RmrkTraitsNftRoyaltyInfo, RmrkTraitsPartEquippableList, RmrkTraitsPartFixedPart, RmrkTraitsPartPartType, RmrkTraitsPartSlotPart, RmrkTraitsPropertyPropertyInfo, RmrkTraitsResourceBasicResource, RmrkTraitsResourceComposableResource, RmrkTraitsResourceResourceInfo, RmrkTraitsResourceResourceTypes, RmrkTraitsResourceSlotResource, RmrkTraitsTheme, RmrkTraitsThemeThemeProperty, SpCoreEcdsaSignature, SpCoreEd25519Signature, SpCoreSr25519Signature, SpCoreVoid, SpRuntimeArithmeticError, SpRuntimeDigest, SpRuntimeDigestDigestItem, SpRuntimeDispatchError, SpRuntimeModuleError, SpRuntimeMultiSignature, SpRuntimeTokenError, SpRuntimeTransactionalError, SpTrieStorageProof, SpVersionRuntimeVersion, SpWeightsRuntimeDbWeight, UpDataStructsAccessMode, UpDataStructsCollection, UpDataStructsCollectionLimits, UpDataStructsCollectionMode, UpDataStructsCollectionPermissions, UpDataStructsCollectionStats, UpDataStructsCreateCollectionData, UpDataStructsCreateFungibleData, UpDataStructsCreateItemData, UpDataStructsCreateItemExData, UpDataStructsCreateNftData, UpDataStructsCreateNftExData, UpDataStructsCreateReFungibleData, UpDataStructsCreateRefungibleExMultipleOwners, UpDataStructsCreateRefungibleExSingleOwner, UpDataStructsNestingPermissions, UpDataStructsOwnerRestrictedSet, UpDataStructsProperties, 
UpDataStructsPropertiesMapBoundedVec, UpDataStructsPropertiesMapPropertyPermission, UpDataStructsProperty, UpDataStructsPropertyKeyPermission, UpDataStructsPropertyPermission, UpDataStructsPropertyScope, UpDataStructsRpcCollection, UpDataStructsRpcCollectionFlags, UpDataStructsSponsoringRateLimit, UpDataStructsSponsorshipStateAccountId32, UpDataStructsSponsorshipStateBasicCrossAccountIdRepr, UpDataStructsTokenChild, UpDataStructsTokenData, XcmDoubleEncoded, XcmV0Junction, XcmV0JunctionBodyId, XcmV0JunctionBodyPart, XcmV0JunctionNetworkId, XcmV0MultiAsset, XcmV0MultiLocation, XcmV0Order, XcmV0OriginKind, XcmV0Response, XcmV0Xcm, XcmV1Junction, XcmV1MultiAsset, XcmV1MultiLocation, XcmV1MultiassetAssetId, XcmV1MultiassetAssetInstance, XcmV1MultiassetFungibility, XcmV1MultiassetMultiAssetFilter, XcmV1MultiassetMultiAssets, XcmV1MultiassetWildFungibility, XcmV1MultiassetWildMultiAsset, XcmV1MultilocationJunctions, XcmV1Order, XcmV1Response, XcmV1Xcm, XcmV2Instruction, XcmV2Response, XcmV2TraitsError, XcmV2TraitsOutcome, XcmV2WeightLimit, XcmV2Xcm, XcmVersionedMultiAsset, XcmVersionedMultiAssets, XcmVersionedMultiLocation, XcmVersionedXcm } from '@polkadot/types/lookup'; declare module '@polkadot/types/types/registry' { - export interface InterfaceTypes { + interface InterfaceTypes { CumulusPalletDmpQueueCall: CumulusPalletDmpQueueCall; CumulusPalletDmpQueueConfigData: CumulusPalletDmpQueueConfigData; CumulusPalletDmpQueueError: CumulusPalletDmpQueueError; @@ -47,18 +51,17 @@ declare module '@polkadot/types/types/registry' { EvmCoreErrorExitRevert: EvmCoreErrorExitRevert; EvmCoreErrorExitSucceed: EvmCoreErrorExitSucceed; FpRpcTransactionStatus: FpRpcTransactionStatus; + FrameSupportDispatchDispatchClass: FrameSupportDispatchDispatchClass; + FrameSupportDispatchDispatchInfo: FrameSupportDispatchDispatchInfo; + FrameSupportDispatchPays: FrameSupportDispatchPays; + FrameSupportDispatchPerDispatchClassU32: FrameSupportDispatchPerDispatchClassU32; + FrameSupportDispatchPerDispatchClassWeight: FrameSupportDispatchPerDispatchClassWeight; + FrameSupportDispatchPerDispatchClassWeightsPerClass: FrameSupportDispatchPerDispatchClassWeightsPerClass; FrameSupportDispatchRawOrigin: FrameSupportDispatchRawOrigin; FrameSupportPalletId: FrameSupportPalletId; FrameSupportScheduleLookupError: FrameSupportScheduleLookupError; FrameSupportScheduleMaybeHashed: FrameSupportScheduleMaybeHashed; FrameSupportTokensMiscBalanceStatus: FrameSupportTokensMiscBalanceStatus; - FrameSupportWeightsDispatchClass: FrameSupportWeightsDispatchClass; - FrameSupportWeightsDispatchInfo: FrameSupportWeightsDispatchInfo; - FrameSupportWeightsPays: FrameSupportWeightsPays; - FrameSupportWeightsPerDispatchClassU32: FrameSupportWeightsPerDispatchClassU32; - FrameSupportWeightsPerDispatchClassU64: FrameSupportWeightsPerDispatchClassU64; - FrameSupportWeightsPerDispatchClassWeightsPerClass: FrameSupportWeightsPerDispatchClassWeightsPerClass; - FrameSupportWeightsRuntimeDbWeight: FrameSupportWeightsRuntimeDbWeight; FrameSystemAccountInfo: FrameSystemAccountInfo; FrameSystemCall: FrameSystemCall; FrameSystemError: FrameSystemError; @@ -67,19 +70,32 @@ declare module '@polkadot/types/types/registry' { FrameSystemExtensionsCheckGenesis: FrameSystemExtensionsCheckGenesis; FrameSystemExtensionsCheckNonce: FrameSystemExtensionsCheckNonce; FrameSystemExtensionsCheckSpecVersion: FrameSystemExtensionsCheckSpecVersion; + FrameSystemExtensionsCheckTxVersion: FrameSystemExtensionsCheckTxVersion; FrameSystemExtensionsCheckWeight: 
FrameSystemExtensionsCheckWeight; FrameSystemLastRuntimeUpgradeInfo: FrameSystemLastRuntimeUpgradeInfo; FrameSystemLimitsBlockLength: FrameSystemLimitsBlockLength; FrameSystemLimitsBlockWeights: FrameSystemLimitsBlockWeights; FrameSystemLimitsWeightsPerClass: FrameSystemLimitsWeightsPerClass; FrameSystemPhase: FrameSystemPhase; - OpalRuntimeCheckMaintenance: OpalRuntimeCheckMaintenance; OpalRuntimeOriginCaller: OpalRuntimeOriginCaller; OpalRuntimeRuntime: OpalRuntimeRuntime; + OpalRuntimeRuntimeCommonMaintenanceCheckMaintenance: OpalRuntimeRuntimeCommonMaintenanceCheckMaintenance; + OrmlTokensAccountData: OrmlTokensAccountData; + OrmlTokensBalanceLock: OrmlTokensBalanceLock; + OrmlTokensModuleCall: OrmlTokensModuleCall; + OrmlTokensModuleError: OrmlTokensModuleError; + OrmlTokensModuleEvent: OrmlTokensModuleEvent; + OrmlTokensReserveData: OrmlTokensReserveData; OrmlVestingModuleCall: OrmlVestingModuleCall; OrmlVestingModuleError: OrmlVestingModuleError; OrmlVestingModuleEvent: OrmlVestingModuleEvent; OrmlVestingVestingSchedule: OrmlVestingVestingSchedule; + OrmlXtokensModuleCall: OrmlXtokensModuleCall; + OrmlXtokensModuleError: OrmlXtokensModuleError; + OrmlXtokensModuleEvent: OrmlXtokensModuleEvent; + PalletAppPromotionCall: PalletAppPromotionCall; + PalletAppPromotionError: PalletAppPromotionError; + PalletAppPromotionEvent: PalletAppPromotionEvent; PalletBalancesAccountData: PalletBalancesAccountData; PalletBalancesBalanceLock: PalletBalancesBalanceLock; PalletBalancesCall: PalletBalancesCall; @@ -90,6 +106,7 @@ declare module '@polkadot/types/types/registry' { PalletBalancesReserveData: PalletBalancesReserveData; PalletCommonError: PalletCommonError; PalletCommonEvent: PalletCommonEvent; + PalletConfigurationCall: PalletConfigurationCall; PalletEthereumCall: PalletEthereumCall; PalletEthereumError: PalletEthereumError; PalletEthereumEvent: PalletEthereumEvent; @@ -99,11 +116,18 @@ declare module '@polkadot/types/types/registry' { PalletEvmCall: PalletEvmCall; PalletEvmCoderSubstrateError: PalletEvmCoderSubstrateError; PalletEvmContractHelpersError: PalletEvmContractHelpersError; + PalletEvmContractHelpersEvent: PalletEvmContractHelpersEvent; PalletEvmContractHelpersSponsoringModeT: PalletEvmContractHelpersSponsoringModeT; PalletEvmError: PalletEvmError; PalletEvmEvent: PalletEvmEvent; PalletEvmMigrationCall: PalletEvmMigrationCall; PalletEvmMigrationError: PalletEvmMigrationError; + PalletForeignAssetsAssetIds: PalletForeignAssetsAssetIds; + PalletForeignAssetsModuleAssetMetadata: PalletForeignAssetsModuleAssetMetadata; + PalletForeignAssetsModuleCall: PalletForeignAssetsModuleCall; + PalletForeignAssetsModuleError: PalletForeignAssetsModuleError; + PalletForeignAssetsModuleEvent: PalletForeignAssetsModuleEvent; + PalletForeignAssetsNativeCurrency: PalletForeignAssetsNativeCurrency; PalletFungibleError: PalletFungibleError; PalletInflationCall: PalletInflationCall; PalletMaintenanceCall: PalletMaintenanceCall; @@ -127,7 +151,11 @@ declare module '@polkadot/types/types/registry' { PalletSudoEvent: PalletSudoEvent; PalletTemplateTransactionPaymentCall: PalletTemplateTransactionPaymentCall; PalletTemplateTransactionPaymentChargeTransactionPayment: PalletTemplateTransactionPaymentChargeTransactionPayment; + PalletTestUtilsCall: PalletTestUtilsCall; + PalletTestUtilsError: PalletTestUtilsError; + PalletTestUtilsEvent: PalletTestUtilsEvent; PalletTimestampCall: PalletTimestampCall; + PalletTransactionPaymentEvent: PalletTransactionPaymentEvent; PalletTransactionPaymentReleases: 
PalletTransactionPaymentReleases; PalletTreasuryCall: PalletTreasuryCall; PalletTreasuryError: PalletTreasuryError; @@ -185,6 +213,7 @@ declare module '@polkadot/types/types/registry' { SpRuntimeTransactionalError: SpRuntimeTransactionalError; SpTrieStorageProof: SpTrieStorageProof; SpVersionRuntimeVersion: SpVersionRuntimeVersion; + SpWeightsRuntimeDbWeight: SpWeightsRuntimeDbWeight; UpDataStructsAccessMode: UpDataStructsAccessMode; UpDataStructsCollection: UpDataStructsCollection; UpDataStructsCollectionLimits: UpDataStructsCollectionLimits; @@ -198,7 +227,8 @@ declare module '@polkadot/types/types/registry' { UpDataStructsCreateNftData: UpDataStructsCreateNftData; UpDataStructsCreateNftExData: UpDataStructsCreateNftExData; UpDataStructsCreateReFungibleData: UpDataStructsCreateReFungibleData; - UpDataStructsCreateRefungibleExData: UpDataStructsCreateRefungibleExData; + UpDataStructsCreateRefungibleExMultipleOwners: UpDataStructsCreateRefungibleExMultipleOwners; + UpDataStructsCreateRefungibleExSingleOwner: UpDataStructsCreateRefungibleExSingleOwner; UpDataStructsNestingPermissions: UpDataStructsNestingPermissions; UpDataStructsOwnerRestrictedSet: UpDataStructsOwnerRestrictedSet; UpDataStructsProperties: UpDataStructsProperties; @@ -209,8 +239,10 @@ declare module '@polkadot/types/types/registry' { UpDataStructsPropertyPermission: UpDataStructsPropertyPermission; UpDataStructsPropertyScope: UpDataStructsPropertyScope; UpDataStructsRpcCollection: UpDataStructsRpcCollection; + UpDataStructsRpcCollectionFlags: UpDataStructsRpcCollectionFlags; UpDataStructsSponsoringRateLimit: UpDataStructsSponsoringRateLimit; - UpDataStructsSponsorshipState: UpDataStructsSponsorshipState; + UpDataStructsSponsorshipStateAccountId32: UpDataStructsSponsorshipStateAccountId32; + UpDataStructsSponsorshipStateBasicCrossAccountIdRepr: UpDataStructsSponsorshipStateBasicCrossAccountIdRepr; UpDataStructsTokenChild: UpDataStructsTokenChild; UpDataStructsTokenData: UpDataStructsTokenData; XcmDoubleEncoded: XcmDoubleEncoded; @@ -244,6 +276,7 @@ declare module '@polkadot/types/types/registry' { XcmV2TraitsOutcome: XcmV2TraitsOutcome; XcmV2WeightLimit: XcmV2WeightLimit; XcmV2Xcm: XcmV2Xcm; + XcmVersionedMultiAsset: XcmVersionedMultiAsset; XcmVersionedMultiAssets: XcmVersionedMultiAssets; XcmVersionedMultiLocation: XcmVersionedMultiLocation; XcmVersionedXcm: XcmVersionedXcm; diff --git a/tests/src/interfaces/types-lookup.ts b/tests/src/interfaces/types-lookup.ts index 3c7191e2ea..eb47894d6a 100644 --- a/tests/src/interfaces/types-lookup.ts +++ b/tests/src/interfaces/types-lookup.ts @@ -1,111 +1,170 @@ // Auto-generated via `yarn polkadot-types-from-defs`, do not edit /* eslint-disable */ -declare module '@polkadot/types/lookup' { - import type { BTreeMap, BTreeSet, Bytes, Compact, Enum, Null, Option, Result, Struct, Text, U256, U8aFixed, Vec, bool, u128, u16, u32, u64, u8 } from '@polkadot/types-codec'; - import type { ITuple } from '@polkadot/types-codec/types'; - import type { AccountId32, Call, H160, H256, MultiAddress, Perbill, Permill } from '@polkadot/types/interfaces/runtime'; - import type { Event } from '@polkadot/types/interfaces/system'; +// import type lookup before we augment - in some environments +// this is required to allow for ambient/previous definitions +import '@polkadot/types/lookup'; - /** @name PolkadotPrimitivesV2PersistedValidationData (2) */ - export interface PolkadotPrimitivesV2PersistedValidationData extends Struct { - readonly parentHead: Bytes; - readonly relayParentNumber: u32; - readonly 
relayParentStorageRoot: H256; - readonly maxPovSize: u32; - } +import type { BTreeMap, BTreeSet, Bytes, Compact, Enum, Null, Option, Result, Struct, Text, U256, U8aFixed, Vec, bool, u128, u16, u32, u64, u8 } from '@polkadot/types-codec'; +import type { ITuple } from '@polkadot/types-codec/types'; +import type { AccountId32, Call, H160, H256, MultiAddress, Perbill, Permill, Weight } from '@polkadot/types/interfaces/runtime'; +import type { Event } from '@polkadot/types/interfaces/system'; - /** @name PolkadotPrimitivesV2UpgradeRestriction (9) */ - export interface PolkadotPrimitivesV2UpgradeRestriction extends Enum { - readonly isPresent: boolean; - readonly type: 'Present'; +declare module '@polkadot/types/lookup' { + /** @name FrameSystemAccountInfo (3) */ + interface FrameSystemAccountInfo extends Struct { + readonly nonce: u32; + readonly consumers: u32; + readonly providers: u32; + readonly sufficients: u32; + readonly data: PalletBalancesAccountData; } - /** @name SpTrieStorageProof (10) */ - export interface SpTrieStorageProof extends Struct { - readonly trieNodes: BTreeSet; + /** @name PalletBalancesAccountData (5) */ + interface PalletBalancesAccountData extends Struct { + readonly free: u128; + readonly reserved: u128; + readonly miscFrozen: u128; + readonly feeFrozen: u128; } - /** @name CumulusPalletParachainSystemRelayStateSnapshotMessagingStateSnapshot (13) */ - export interface CumulusPalletParachainSystemRelayStateSnapshotMessagingStateSnapshot extends Struct { - readonly dmqMqcHead: H256; - readonly relayDispatchQueueSize: ITuple<[u32, u32]>; - readonly ingressChannels: Vec>; - readonly egressChannels: Vec>; + /** @name FrameSupportDispatchPerDispatchClassWeight (7) */ + interface FrameSupportDispatchPerDispatchClassWeight extends Struct { + readonly normal: Weight; + readonly operational: Weight; + readonly mandatory: Weight; } - /** @name PolkadotPrimitivesV2AbridgedHrmpChannel (18) */ - export interface PolkadotPrimitivesV2AbridgedHrmpChannel extends Struct { - readonly maxCapacity: u32; - readonly maxTotalSize: u32; - readonly maxMessageSize: u32; - readonly msgCount: u32; - readonly totalSize: u32; - readonly mqcHead: Option; + /** @name SpRuntimeDigest (12) */ + interface SpRuntimeDigest extends Struct { + readonly logs: Vec; } - /** @name PolkadotPrimitivesV2AbridgedHostConfiguration (20) */ - export interface PolkadotPrimitivesV2AbridgedHostConfiguration extends Struct { - readonly maxCodeSize: u32; - readonly maxHeadDataSize: u32; - readonly maxUpwardQueueCount: u32; - readonly maxUpwardQueueSize: u32; - readonly maxUpwardMessageSize: u32; - readonly maxUpwardMessageNumPerCandidate: u32; - readonly hrmpMaxMessageNumPerCandidate: u32; - readonly validationUpgradeCooldown: u32; - readonly validationUpgradeDelay: u32; + /** @name SpRuntimeDigestDigestItem (14) */ + interface SpRuntimeDigestDigestItem extends Enum { + readonly isOther: boolean; + readonly asOther: Bytes; + readonly isConsensus: boolean; + readonly asConsensus: ITuple<[U8aFixed, Bytes]>; + readonly isSeal: boolean; + readonly asSeal: ITuple<[U8aFixed, Bytes]>; + readonly isPreRuntime: boolean; + readonly asPreRuntime: ITuple<[U8aFixed, Bytes]>; + readonly isRuntimeEnvironmentUpdated: boolean; + readonly type: 'Other' | 'Consensus' | 'Seal' | 'PreRuntime' | 'RuntimeEnvironmentUpdated'; } - /** @name PolkadotCorePrimitivesOutboundHrmpMessage (26) */ - export interface PolkadotCorePrimitivesOutboundHrmpMessage extends Struct { - readonly recipient: u32; - readonly data: Bytes; + /** @name 
FrameSystemEventRecord (17) */ + interface FrameSystemEventRecord extends Struct { + readonly phase: FrameSystemPhase; + readonly event: Event; + readonly topics: Vec; } - /** @name CumulusPalletParachainSystemCall (28) */ - export interface CumulusPalletParachainSystemCall extends Enum { - readonly isSetValidationData: boolean; - readonly asSetValidationData: { - readonly data: CumulusPrimitivesParachainInherentParachainInherentData; + /** @name FrameSystemEvent (19) */ + interface FrameSystemEvent extends Enum { + readonly isExtrinsicSuccess: boolean; + readonly asExtrinsicSuccess: { + readonly dispatchInfo: FrameSupportDispatchDispatchInfo; } & Struct; - readonly isSudoSendUpwardMessage: boolean; - readonly asSudoSendUpwardMessage: { - readonly message: Bytes; + readonly isExtrinsicFailed: boolean; + readonly asExtrinsicFailed: { + readonly dispatchError: SpRuntimeDispatchError; + readonly dispatchInfo: FrameSupportDispatchDispatchInfo; } & Struct; - readonly isAuthorizeUpgrade: boolean; - readonly asAuthorizeUpgrade: { - readonly codeHash: H256; + readonly isCodeUpdated: boolean; + readonly isNewAccount: boolean; + readonly asNewAccount: { + readonly account: AccountId32; } & Struct; - readonly isEnactAuthorizedUpgrade: boolean; - readonly asEnactAuthorizedUpgrade: { - readonly code: Bytes; + readonly isKilledAccount: boolean; + readonly asKilledAccount: { + readonly account: AccountId32; } & Struct; - readonly type: 'SetValidationData' | 'SudoSendUpwardMessage' | 'AuthorizeUpgrade' | 'EnactAuthorizedUpgrade'; + readonly isRemarked: boolean; + readonly asRemarked: { + readonly sender: AccountId32; + readonly hash_: H256; + } & Struct; + readonly type: 'ExtrinsicSuccess' | 'ExtrinsicFailed' | 'CodeUpdated' | 'NewAccount' | 'KilledAccount' | 'Remarked'; } - /** @name CumulusPrimitivesParachainInherentParachainInherentData (29) */ - export interface CumulusPrimitivesParachainInherentParachainInherentData extends Struct { - readonly validationData: PolkadotPrimitivesV2PersistedValidationData; - readonly relayChainState: SpTrieStorageProof; - readonly downwardMessages: Vec; - readonly horizontalMessages: BTreeMap>; + /** @name FrameSupportDispatchDispatchInfo (20) */ + interface FrameSupportDispatchDispatchInfo extends Struct { + readonly weight: Weight; + readonly class: FrameSupportDispatchDispatchClass; + readonly paysFee: FrameSupportDispatchPays; } - /** @name PolkadotCorePrimitivesInboundDownwardMessage (31) */ - export interface PolkadotCorePrimitivesInboundDownwardMessage extends Struct { - readonly sentAt: u32; - readonly msg: Bytes; + /** @name FrameSupportDispatchDispatchClass (21) */ + interface FrameSupportDispatchDispatchClass extends Enum { + readonly isNormal: boolean; + readonly isOperational: boolean; + readonly isMandatory: boolean; + readonly type: 'Normal' | 'Operational' | 'Mandatory'; } - /** @name PolkadotCorePrimitivesInboundHrmpMessage (34) */ - export interface PolkadotCorePrimitivesInboundHrmpMessage extends Struct { - readonly sentAt: u32; - readonly data: Bytes; + /** @name FrameSupportDispatchPays (22) */ + interface FrameSupportDispatchPays extends Enum { + readonly isYes: boolean; + readonly isNo: boolean; + readonly type: 'Yes' | 'No'; + } + + /** @name SpRuntimeDispatchError (23) */ + interface SpRuntimeDispatchError extends Enum { + readonly isOther: boolean; + readonly isCannotLookup: boolean; + readonly isBadOrigin: boolean; + readonly isModule: boolean; + readonly asModule: SpRuntimeModuleError; + readonly isConsumerRemaining: boolean; + readonly 
isNoProviders: boolean; + readonly isTooManyConsumers: boolean; + readonly isToken: boolean; + readonly asToken: SpRuntimeTokenError; + readonly isArithmetic: boolean; + readonly asArithmetic: SpRuntimeArithmeticError; + readonly isTransactional: boolean; + readonly asTransactional: SpRuntimeTransactionalError; + readonly type: 'Other' | 'CannotLookup' | 'BadOrigin' | 'Module' | 'ConsumerRemaining' | 'NoProviders' | 'TooManyConsumers' | 'Token' | 'Arithmetic' | 'Transactional'; + } + + /** @name SpRuntimeModuleError (24) */ + interface SpRuntimeModuleError extends Struct { + readonly index: u8; + readonly error: U8aFixed; + } + + /** @name SpRuntimeTokenError (25) */ + interface SpRuntimeTokenError extends Enum { + readonly isNoFunds: boolean; + readonly isWouldDie: boolean; + readonly isBelowMinimum: boolean; + readonly isCannotCreate: boolean; + readonly isUnknownAsset: boolean; + readonly isFrozen: boolean; + readonly isUnsupported: boolean; + readonly type: 'NoFunds' | 'WouldDie' | 'BelowMinimum' | 'CannotCreate' | 'UnknownAsset' | 'Frozen' | 'Unsupported'; + } + + /** @name SpRuntimeArithmeticError (26) */ + interface SpRuntimeArithmeticError extends Enum { + readonly isUnderflow: boolean; + readonly isOverflow: boolean; + readonly isDivisionByZero: boolean; + readonly type: 'Underflow' | 'Overflow' | 'DivisionByZero'; + } + + /** @name SpRuntimeTransactionalError (27) */ + interface SpRuntimeTransactionalError extends Enum { + readonly isLimitReached: boolean; + readonly isNoLayer: boolean; + readonly type: 'LimitReached' | 'NoLayer'; } - /** @name CumulusPalletParachainSystemEvent (37) */ - export interface CumulusPalletParachainSystemEvent extends Enum { + /** @name CumulusPalletParachainSystemEvent (28) */ + interface CumulusPalletParachainSystemEvent extends Enum { readonly isValidationFunctionStored: boolean; readonly isValidationFunctionApplied: boolean; readonly asValidationFunctionApplied: { @@ -122,100 +181,14 @@ declare module '@polkadot/types/lookup' { } & Struct; readonly isDownwardMessagesProcessed: boolean; readonly asDownwardMessagesProcessed: { - readonly weightUsed: u64; + readonly weightUsed: Weight; readonly dmqHead: H256; } & Struct; readonly type: 'ValidationFunctionStored' | 'ValidationFunctionApplied' | 'ValidationFunctionDiscarded' | 'UpgradeAuthorized' | 'DownwardMessagesReceived' | 'DownwardMessagesProcessed'; } - /** @name CumulusPalletParachainSystemError (38) */ - export interface CumulusPalletParachainSystemError extends Enum { - readonly isOverlappingUpgrades: boolean; - readonly isProhibitedByPolkadot: boolean; - readonly isTooBig: boolean; - readonly isValidationDataNotAvailable: boolean; - readonly isHostConfigurationNotAvailable: boolean; - readonly isNotScheduled: boolean; - readonly isNothingAuthorized: boolean; - readonly isUnauthorized: boolean; - readonly type: 'OverlappingUpgrades' | 'ProhibitedByPolkadot' | 'TooBig' | 'ValidationDataNotAvailable' | 'HostConfigurationNotAvailable' | 'NotScheduled' | 'NothingAuthorized' | 'Unauthorized'; - } - - /** @name PalletBalancesAccountData (41) */ - export interface PalletBalancesAccountData extends Struct { - readonly free: u128; - readonly reserved: u128; - readonly miscFrozen: u128; - readonly feeFrozen: u128; - } - - /** @name PalletBalancesBalanceLock (43) */ - export interface PalletBalancesBalanceLock extends Struct { - readonly id: U8aFixed; - readonly amount: u128; - readonly reasons: PalletBalancesReasons; - } - - /** @name PalletBalancesReasons (45) */ - export interface PalletBalancesReasons 
extends Enum { - readonly isFee: boolean; - readonly isMisc: boolean; - readonly isAll: boolean; - readonly type: 'Fee' | 'Misc' | 'All'; - } - - /** @name PalletBalancesReserveData (48) */ - export interface PalletBalancesReserveData extends Struct { - readonly id: U8aFixed; - readonly amount: u128; - } - - /** @name PalletBalancesReleases (51) */ - export interface PalletBalancesReleases extends Enum { - readonly isV100: boolean; - readonly isV200: boolean; - readonly type: 'V100' | 'V200'; - } - - /** @name PalletBalancesCall (52) */ - export interface PalletBalancesCall extends Enum { - readonly isTransfer: boolean; - readonly asTransfer: { - readonly dest: MultiAddress; - readonly value: Compact; - } & Struct; - readonly isSetBalance: boolean; - readonly asSetBalance: { - readonly who: MultiAddress; - readonly newFree: Compact; - readonly newReserved: Compact; - } & Struct; - readonly isForceTransfer: boolean; - readonly asForceTransfer: { - readonly source: MultiAddress; - readonly dest: MultiAddress; - readonly value: Compact; - } & Struct; - readonly isTransferKeepAlive: boolean; - readonly asTransferKeepAlive: { - readonly dest: MultiAddress; - readonly value: Compact; - } & Struct; - readonly isTransferAll: boolean; - readonly asTransferAll: { - readonly dest: MultiAddress; - readonly keepAlive: bool; - } & Struct; - readonly isForceUnreserve: boolean; - readonly asForceUnreserve: { - readonly who: MultiAddress; - readonly amount: u128; - } & Struct; - readonly type: 'Transfer' | 'SetBalance' | 'ForceTransfer' | 'TransferKeepAlive' | 'TransferAll' | 'ForceUnreserve'; - } - - /** @name PalletBalancesEvent (58) */ - export interface PalletBalancesEvent extends Enum { + /** @name PalletBalancesEvent (29) */ + interface PalletBalancesEvent extends Enum { readonly isEndowed: boolean; readonly asEndowed: { readonly account: AccountId32; @@ -273,81 +246,33 @@ declare module '@polkadot/types/lookup' { readonly type: 'Endowed' | 'DustLost' | 'Transfer' | 'BalanceSet' | 'Reserved' | 'Unreserved' | 'ReserveRepatriated' | 'Deposit' | 'Withdraw' | 'Slashed'; } - /** @name FrameSupportTokensMiscBalanceStatus (59) */ - export interface FrameSupportTokensMiscBalanceStatus extends Enum { + /** @name FrameSupportTokensMiscBalanceStatus (30) */ + interface FrameSupportTokensMiscBalanceStatus extends Enum { readonly isFree: boolean; readonly isReserved: boolean; readonly type: 'Free' | 'Reserved'; } - /** @name PalletBalancesError (60) */ - export interface PalletBalancesError extends Enum { - readonly isVestingBalance: boolean; - readonly isLiquidityRestrictions: boolean; - readonly isInsufficientBalance: boolean; - readonly isExistentialDeposit: boolean; - readonly isKeepAlive: boolean; - readonly isExistingVestingSchedule: boolean; - readonly isDeadAccount: boolean; - readonly isTooManyReserves: boolean; - readonly type: 'VestingBalance' | 'LiquidityRestrictions' | 'InsufficientBalance' | 'ExistentialDeposit' | 'KeepAlive' | 'ExistingVestingSchedule' | 'DeadAccount' | 'TooManyReserves'; - } - - /** @name PalletTimestampCall (63) */ - export interface PalletTimestampCall extends Enum { - readonly isSet: boolean; - readonly asSet: { - readonly now: Compact; + /** @name PalletTransactionPaymentEvent (31) */ + interface PalletTransactionPaymentEvent extends Enum { + readonly isTransactionFeePaid: boolean; + readonly asTransactionFeePaid: { + readonly who: AccountId32; + readonly actualFee: u128; + readonly tip: u128; } & Struct; - readonly type: 'Set'; - } - - /** @name 
PalletTransactionPaymentReleases (66) */ - export interface PalletTransactionPaymentReleases extends Enum { - readonly isV1Ancient: boolean; - readonly isV2: boolean; - readonly type: 'V1Ancient' | 'V2'; - } - - /** @name PalletTreasuryProposal (67) */ - export interface PalletTreasuryProposal extends Struct { - readonly proposer: AccountId32; - readonly value: u128; - readonly beneficiary: AccountId32; - readonly bond: u128; + readonly type: 'TransactionFeePaid'; } - /** @name PalletTreasuryCall (70) */ - export interface PalletTreasuryCall extends Enum { - readonly isProposeSpend: boolean; - readonly asProposeSpend: { - readonly value: Compact; - readonly beneficiary: MultiAddress; + /** @name PalletTreasuryEvent (32) */ + interface PalletTreasuryEvent extends Enum { + readonly isProposed: boolean; + readonly asProposed: { + readonly proposalIndex: u32; } & Struct; - readonly isRejectProposal: boolean; - readonly asRejectProposal: { - readonly proposalId: Compact; - } & Struct; - readonly isApproveProposal: boolean; - readonly asApproveProposal: { - readonly proposalId: Compact; - } & Struct; - readonly isRemoveApproval: boolean; - readonly asRemoveApproval: { - readonly proposalId: Compact; - } & Struct; - readonly type: 'ProposeSpend' | 'RejectProposal' | 'ApproveProposal' | 'RemoveApproval'; - } - - /** @name PalletTreasuryEvent (72) */ - export interface PalletTreasuryEvent extends Enum { - readonly isProposed: boolean; - readonly asProposed: { - readonly proposalIndex: u32; - } & Struct; - readonly isSpending: boolean; - readonly asSpending: { - readonly budgetRemaining: u128; + readonly isSpending: boolean; + readonly asSpending: { + readonly budgetRemaining: u128; } & Struct; readonly isAwarded: boolean; readonly asAwarded: { @@ -372,246 +297,120 @@ declare module '@polkadot/types/lookup' { readonly asDeposit: { readonly value: u128; } & Struct; - readonly type: 'Proposed' | 'Spending' | 'Awarded' | 'Rejected' | 'Burnt' | 'Rollover' | 'Deposit'; - } - - /** @name FrameSupportPalletId (75) */ - export interface FrameSupportPalletId extends U8aFixed {} - - /** @name PalletTreasuryError (76) */ - export interface PalletTreasuryError extends Enum { - readonly isInsufficientProposersBalance: boolean; - readonly isInvalidIndex: boolean; - readonly isTooManyApprovals: boolean; - readonly isProposalNotApproved: boolean; - readonly type: 'InsufficientProposersBalance' | 'InvalidIndex' | 'TooManyApprovals' | 'ProposalNotApproved'; - } - - /** @name PalletSudoCall (77) */ - export interface PalletSudoCall extends Enum { - readonly isSudo: boolean; - readonly asSudo: { - readonly call: Call; - } & Struct; - readonly isSudoUncheckedWeight: boolean; - readonly asSudoUncheckedWeight: { - readonly call: Call; - readonly weight: u64; - } & Struct; - readonly isSetKey: boolean; - readonly asSetKey: { - readonly new_: MultiAddress; - } & Struct; - readonly isSudoAs: boolean; - readonly asSudoAs: { - readonly who: MultiAddress; - readonly call: Call; + readonly isSpendApproved: boolean; + readonly asSpendApproved: { + readonly proposalIndex: u32; + readonly amount: u128; + readonly beneficiary: AccountId32; } & Struct; - readonly type: 'Sudo' | 'SudoUncheckedWeight' | 'SetKey' | 'SudoAs'; + readonly type: 'Proposed' | 'Spending' | 'Awarded' | 'Rejected' | 'Burnt' | 'Rollover' | 'Deposit' | 'SpendApproved'; } - /** @name FrameSystemCall (79) */ - export interface FrameSystemCall extends Enum { - readonly isFillBlock: boolean; - readonly asFillBlock: { - readonly ratio: Perbill; - } & Struct; - 
readonly isRemark: boolean; - readonly asRemark: { - readonly remark: Bytes; - } & Struct; - readonly isSetHeapPages: boolean; - readonly asSetHeapPages: { - readonly pages: u64; - } & Struct; - readonly isSetCode: boolean; - readonly asSetCode: { - readonly code: Bytes; - } & Struct; - readonly isSetCodeWithoutChecks: boolean; - readonly asSetCodeWithoutChecks: { - readonly code: Bytes; - } & Struct; - readonly isSetStorage: boolean; - readonly asSetStorage: { - readonly items: Vec>; - } & Struct; - readonly isKillStorage: boolean; - readonly asKillStorage: { - readonly keys_: Vec; + /** @name PalletSudoEvent (33) */ + interface PalletSudoEvent extends Enum { + readonly isSudid: boolean; + readonly asSudid: { + readonly sudoResult: Result; } & Struct; - readonly isKillPrefix: boolean; - readonly asKillPrefix: { - readonly prefix: Bytes; - readonly subkeys: u32; + readonly isKeyChanged: boolean; + readonly asKeyChanged: { + readonly oldSudoer: Option; } & Struct; - readonly isRemarkWithEvent: boolean; - readonly asRemarkWithEvent: { - readonly remark: Bytes; + readonly isSudoAsDone: boolean; + readonly asSudoAsDone: { + readonly sudoResult: Result; } & Struct; - readonly type: 'FillBlock' | 'Remark' | 'SetHeapPages' | 'SetCode' | 'SetCodeWithoutChecks' | 'SetStorage' | 'KillStorage' | 'KillPrefix' | 'RemarkWithEvent'; + readonly type: 'Sudid' | 'KeyChanged' | 'SudoAsDone'; } - /** @name OrmlVestingModuleCall (83) */ - export interface OrmlVestingModuleCall extends Enum { - readonly isClaim: boolean; - readonly isVestedTransfer: boolean; - readonly asVestedTransfer: { - readonly dest: MultiAddress; - readonly schedule: OrmlVestingVestingSchedule; + /** @name OrmlVestingModuleEvent (37) */ + interface OrmlVestingModuleEvent extends Enum { + readonly isVestingScheduleAdded: boolean; + readonly asVestingScheduleAdded: { + readonly from: AccountId32; + readonly to: AccountId32; + readonly vestingSchedule: OrmlVestingVestingSchedule; } & Struct; - readonly isUpdateVestingSchedules: boolean; - readonly asUpdateVestingSchedules: { - readonly who: MultiAddress; - readonly vestingSchedules: Vec; + readonly isClaimed: boolean; + readonly asClaimed: { + readonly who: AccountId32; + readonly amount: u128; } & Struct; - readonly isClaimFor: boolean; - readonly asClaimFor: { - readonly dest: MultiAddress; + readonly isVestingSchedulesUpdated: boolean; + readonly asVestingSchedulesUpdated: { + readonly who: AccountId32; } & Struct; - readonly type: 'Claim' | 'VestedTransfer' | 'UpdateVestingSchedules' | 'ClaimFor'; + readonly type: 'VestingScheduleAdded' | 'Claimed' | 'VestingSchedulesUpdated'; } - /** @name OrmlVestingVestingSchedule (84) */ - export interface OrmlVestingVestingSchedule extends Struct { + /** @name OrmlVestingVestingSchedule (38) */ + interface OrmlVestingVestingSchedule extends Struct { readonly start: u32; readonly period: u32; readonly periodCount: u32; readonly perPeriod: Compact; } - /** @name CumulusPalletXcmpQueueCall (86) */ - export interface CumulusPalletXcmpQueueCall extends Enum { - readonly isServiceOverweight: boolean; - readonly asServiceOverweight: { - readonly index: u64; - readonly weightLimit: u64; - } & Struct; - readonly isSuspendXcmExecution: boolean; - readonly isResumeXcmExecution: boolean; - readonly isUpdateSuspendThreshold: boolean; - readonly asUpdateSuspendThreshold: { - readonly new_: u32; - } & Struct; - readonly isUpdateDropThreshold: boolean; - readonly asUpdateDropThreshold: { - readonly new_: u32; - } & Struct; - readonly isUpdateResumeThreshold: 
boolean; - readonly asUpdateResumeThreshold: { - readonly new_: u32; - } & Struct; - readonly isUpdateThresholdWeight: boolean; - readonly asUpdateThresholdWeight: { - readonly new_: u64; - } & Struct; - readonly isUpdateWeightRestrictDecay: boolean; - readonly asUpdateWeightRestrictDecay: { - readonly new_: u64; - } & Struct; - readonly isUpdateXcmpMaxIndividualWeight: boolean; - readonly asUpdateXcmpMaxIndividualWeight: { - readonly new_: u64; + /** @name OrmlXtokensModuleEvent (40) */ + interface OrmlXtokensModuleEvent extends Enum { + readonly isTransferredMultiAssets: boolean; + readonly asTransferredMultiAssets: { + readonly sender: AccountId32; + readonly assets: XcmV1MultiassetMultiAssets; + readonly fee: XcmV1MultiAsset; + readonly dest: XcmV1MultiLocation; } & Struct; - readonly type: 'ServiceOverweight' | 'SuspendXcmExecution' | 'ResumeXcmExecution' | 'UpdateSuspendThreshold' | 'UpdateDropThreshold' | 'UpdateResumeThreshold' | 'UpdateThresholdWeight' | 'UpdateWeightRestrictDecay' | 'UpdateXcmpMaxIndividualWeight'; + readonly type: 'TransferredMultiAssets'; } - /** @name PalletXcmCall (87) */ - export interface PalletXcmCall extends Enum { - readonly isSend: boolean; - readonly asSend: { - readonly dest: XcmVersionedMultiLocation; - readonly message: XcmVersionedXcm; - } & Struct; - readonly isTeleportAssets: boolean; - readonly asTeleportAssets: { - readonly dest: XcmVersionedMultiLocation; - readonly beneficiary: XcmVersionedMultiLocation; - readonly assets: XcmVersionedMultiAssets; - readonly feeAssetItem: u32; - } & Struct; - readonly isReserveTransferAssets: boolean; - readonly asReserveTransferAssets: { - readonly dest: XcmVersionedMultiLocation; - readonly beneficiary: XcmVersionedMultiLocation; - readonly assets: XcmVersionedMultiAssets; - readonly feeAssetItem: u32; - } & Struct; - readonly isExecute: boolean; - readonly asExecute: { - readonly message: XcmVersionedXcm; - readonly maxWeight: u64; - } & Struct; - readonly isForceXcmVersion: boolean; - readonly asForceXcmVersion: { - readonly location: XcmV1MultiLocation; - readonly xcmVersion: u32; - } & Struct; - readonly isForceDefaultXcmVersion: boolean; - readonly asForceDefaultXcmVersion: { - readonly maybeXcmVersion: Option; - } & Struct; - readonly isForceSubscribeVersionNotify: boolean; - readonly asForceSubscribeVersionNotify: { - readonly location: XcmVersionedMultiLocation; - } & Struct; - readonly isForceUnsubscribeVersionNotify: boolean; - readonly asForceUnsubscribeVersionNotify: { - readonly location: XcmVersionedMultiLocation; - } & Struct; - readonly isLimitedReserveTransferAssets: boolean; - readonly asLimitedReserveTransferAssets: { - readonly dest: XcmVersionedMultiLocation; - readonly beneficiary: XcmVersionedMultiLocation; - readonly assets: XcmVersionedMultiAssets; - readonly feeAssetItem: u32; - readonly weightLimit: XcmV2WeightLimit; - } & Struct; - readonly isLimitedTeleportAssets: boolean; - readonly asLimitedTeleportAssets: { - readonly dest: XcmVersionedMultiLocation; - readonly beneficiary: XcmVersionedMultiLocation; - readonly assets: XcmVersionedMultiAssets; - readonly feeAssetItem: u32; - readonly weightLimit: XcmV2WeightLimit; - } & Struct; - readonly type: 'Send' | 'TeleportAssets' | 'ReserveTransferAssets' | 'Execute' | 'ForceXcmVersion' | 'ForceDefaultXcmVersion' | 'ForceSubscribeVersionNotify' | 'ForceUnsubscribeVersionNotify' | 'LimitedReserveTransferAssets' | 'LimitedTeleportAssets'; + /** @name XcmV1MultiassetMultiAssets (41) */ + interface XcmV1MultiassetMultiAssets extends Vec {} 
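(Aside, not part of the generated diff: a minimal sketch of how the regenerated XCM v1 lookup types declared here might be exercised from the test suite. It assumes a connected ApiPromise whose metadata exposes these lookup names; the endpoint, parachain id and amount are placeholders.)

import {ApiPromise, WsProvider} from '@polkadot/api';

// Illustrative only: build an XcmV1MultiLocation / XcmV1MultiAsset pair using the
// lookup names from the regenerated definitions. Endpoint and values are placeholders.
async function buildXcmV1Asset(): Promise<void> {
  const api = await ApiPromise.create({provider: new WsProvider('ws://127.0.0.1:9944')});
  const dest = api.createType('XcmV1MultiLocation', {
    parents: 1,
    interior: {X1: {Parachain: 1000}},
  });
  const asset = api.createType('XcmV1MultiAsset', {
    id: {Concrete: dest},
    fun: {Fungible: 1_000_000_000_000n},
  });
  console.log(asset.toHuman());
  await api.disconnect();
}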
+ + /** @name XcmV1MultiAsset (43) */ + interface XcmV1MultiAsset extends Struct { + readonly id: XcmV1MultiassetAssetId; + readonly fun: XcmV1MultiassetFungibility; } - /** @name XcmVersionedMultiLocation (88) */ - export interface XcmVersionedMultiLocation extends Enum { - readonly isV0: boolean; - readonly asV0: XcmV0MultiLocation; - readonly isV1: boolean; - readonly asV1: XcmV1MultiLocation; - readonly type: 'V0' | 'V1'; + /** @name XcmV1MultiassetAssetId (44) */ + interface XcmV1MultiassetAssetId extends Enum { + readonly isConcrete: boolean; + readonly asConcrete: XcmV1MultiLocation; + readonly isAbstract: boolean; + readonly asAbstract: Bytes; + readonly type: 'Concrete' | 'Abstract'; } - /** @name XcmV0MultiLocation (89) */ - export interface XcmV0MultiLocation extends Enum { - readonly isNull: boolean; + /** @name XcmV1MultiLocation (45) */ + interface XcmV1MultiLocation extends Struct { + readonly parents: u8; + readonly interior: XcmV1MultilocationJunctions; + } + + /** @name XcmV1MultilocationJunctions (46) */ + interface XcmV1MultilocationJunctions extends Enum { + readonly isHere: boolean; readonly isX1: boolean; - readonly asX1: XcmV0Junction; + readonly asX1: XcmV1Junction; readonly isX2: boolean; - readonly asX2: ITuple<[XcmV0Junction, XcmV0Junction]>; + readonly asX2: ITuple<[XcmV1Junction, XcmV1Junction]>; readonly isX3: boolean; - readonly asX3: ITuple<[XcmV0Junction, XcmV0Junction, XcmV0Junction]>; + readonly asX3: ITuple<[XcmV1Junction, XcmV1Junction, XcmV1Junction]>; readonly isX4: boolean; - readonly asX4: ITuple<[XcmV0Junction, XcmV0Junction, XcmV0Junction, XcmV0Junction]>; + readonly asX4: ITuple<[XcmV1Junction, XcmV1Junction, XcmV1Junction, XcmV1Junction]>; readonly isX5: boolean; - readonly asX5: ITuple<[XcmV0Junction, XcmV0Junction, XcmV0Junction, XcmV0Junction, XcmV0Junction]>; + readonly asX5: ITuple<[XcmV1Junction, XcmV1Junction, XcmV1Junction, XcmV1Junction, XcmV1Junction]>; readonly isX6: boolean; - readonly asX6: ITuple<[XcmV0Junction, XcmV0Junction, XcmV0Junction, XcmV0Junction, XcmV0Junction, XcmV0Junction]>; + readonly asX6: ITuple<[XcmV1Junction, XcmV1Junction, XcmV1Junction, XcmV1Junction, XcmV1Junction, XcmV1Junction]>; readonly isX7: boolean; - readonly asX7: ITuple<[XcmV0Junction, XcmV0Junction, XcmV0Junction, XcmV0Junction, XcmV0Junction, XcmV0Junction, XcmV0Junction]>; + readonly asX7: ITuple<[XcmV1Junction, XcmV1Junction, XcmV1Junction, XcmV1Junction, XcmV1Junction, XcmV1Junction, XcmV1Junction]>; readonly isX8: boolean; - readonly asX8: ITuple<[XcmV0Junction, XcmV0Junction, XcmV0Junction, XcmV0Junction, XcmV0Junction, XcmV0Junction, XcmV0Junction, XcmV0Junction]>; - readonly type: 'Null' | 'X1' | 'X2' | 'X3' | 'X4' | 'X5' | 'X6' | 'X7' | 'X8'; + readonly asX8: ITuple<[XcmV1Junction, XcmV1Junction, XcmV1Junction, XcmV1Junction, XcmV1Junction, XcmV1Junction, XcmV1Junction, XcmV1Junction]>; + readonly type: 'Here' | 'X1' | 'X2' | 'X3' | 'X4' | 'X5' | 'X6' | 'X7' | 'X8'; } - /** @name XcmV0Junction (90) */ - export interface XcmV0Junction extends Enum { - readonly isParent: boolean; + /** @name XcmV1Junction (47) */ + interface XcmV1Junction extends Enum { readonly isParachain: boolean; readonly asParachain: Compact; readonly isAccountId32: boolean; @@ -641,11 +440,11 @@ declare module '@polkadot/types/lookup' { readonly id: XcmV0JunctionBodyId; readonly part: XcmV0JunctionBodyPart; } & Struct; - readonly type: 'Parent' | 'Parachain' | 'AccountId32' | 'AccountIndex64' | 'AccountKey20' | 'PalletInstance' | 'GeneralIndex' | 'GeneralKey' | 
'OnlyChild' | 'Plurality'; + readonly type: 'Parachain' | 'AccountId32' | 'AccountIndex64' | 'AccountKey20' | 'PalletInstance' | 'GeneralIndex' | 'GeneralKey' | 'OnlyChild' | 'Plurality'; } - /** @name XcmV0JunctionNetworkId (91) */ - export interface XcmV0JunctionNetworkId extends Enum { + /** @name XcmV0JunctionNetworkId (49) */ + interface XcmV0JunctionNetworkId extends Enum { readonly isAny: boolean; readonly isNamed: boolean; readonly asNamed: Bytes; @@ -654,8 +453,8 @@ declare module '@polkadot/types/lookup' { readonly type: 'Any' | 'Named' | 'Polkadot' | 'Kusama'; } - /** @name XcmV0JunctionBodyId (92) */ - export interface XcmV0JunctionBodyId extends Enum { + /** @name XcmV0JunctionBodyId (53) */ + interface XcmV0JunctionBodyId extends Enum { readonly isUnit: boolean; readonly isNamed: boolean; readonly asNamed: Bytes; @@ -668,8 +467,8 @@ declare module '@polkadot/types/lookup' { readonly type: 'Unit' | 'Named' | 'Index' | 'Executive' | 'Technical' | 'Legislative' | 'Judicial'; } - /** @name XcmV0JunctionBodyPart (93) */ - export interface XcmV0JunctionBodyPart extends Enum { + /** @name XcmV0JunctionBodyPart (54) */ + interface XcmV0JunctionBodyPart extends Enum { readonly isVoice: boolean; readonly isMembers: boolean; readonly asMembers: { @@ -693,116 +492,294 @@ declare module '@polkadot/types/lookup' { readonly type: 'Voice' | 'Members' | 'Fraction' | 'AtLeastProportion' | 'MoreThanProportion'; } - /** @name XcmV1MultiLocation (94) */ - export interface XcmV1MultiLocation extends Struct { - readonly parents: u8; - readonly interior: XcmV1MultilocationJunctions; + /** @name XcmV1MultiassetFungibility (55) */ + interface XcmV1MultiassetFungibility extends Enum { + readonly isFungible: boolean; + readonly asFungible: Compact; + readonly isNonFungible: boolean; + readonly asNonFungible: XcmV1MultiassetAssetInstance; + readonly type: 'Fungible' | 'NonFungible'; } - /** @name XcmV1MultilocationJunctions (95) */ - export interface XcmV1MultilocationJunctions extends Enum { - readonly isHere: boolean; - readonly isX1: boolean; - readonly asX1: XcmV1Junction; - readonly isX2: boolean; - readonly asX2: ITuple<[XcmV1Junction, XcmV1Junction]>; - readonly isX3: boolean; - readonly asX3: ITuple<[XcmV1Junction, XcmV1Junction, XcmV1Junction]>; - readonly isX4: boolean; - readonly asX4: ITuple<[XcmV1Junction, XcmV1Junction, XcmV1Junction, XcmV1Junction]>; - readonly isX5: boolean; - readonly asX5: ITuple<[XcmV1Junction, XcmV1Junction, XcmV1Junction, XcmV1Junction, XcmV1Junction]>; - readonly isX6: boolean; - readonly asX6: ITuple<[XcmV1Junction, XcmV1Junction, XcmV1Junction, XcmV1Junction, XcmV1Junction, XcmV1Junction]>; - readonly isX7: boolean; - readonly asX7: ITuple<[XcmV1Junction, XcmV1Junction, XcmV1Junction, XcmV1Junction, XcmV1Junction, XcmV1Junction, XcmV1Junction]>; - readonly isX8: boolean; - readonly asX8: ITuple<[XcmV1Junction, XcmV1Junction, XcmV1Junction, XcmV1Junction, XcmV1Junction, XcmV1Junction, XcmV1Junction, XcmV1Junction]>; - readonly type: 'Here' | 'X1' | 'X2' | 'X3' | 'X4' | 'X5' | 'X6' | 'X7' | 'X8'; + /** @name XcmV1MultiassetAssetInstance (56) */ + interface XcmV1MultiassetAssetInstance extends Enum { + readonly isUndefined: boolean; + readonly isIndex: boolean; + readonly asIndex: Compact; + readonly isArray4: boolean; + readonly asArray4: U8aFixed; + readonly isArray8: boolean; + readonly asArray8: U8aFixed; + readonly isArray16: boolean; + readonly asArray16: U8aFixed; + readonly isArray32: boolean; + readonly asArray32: U8aFixed; + readonly isBlob: boolean; + 
readonly asBlob: Bytes; + readonly type: 'Undefined' | 'Index' | 'Array4' | 'Array8' | 'Array16' | 'Array32' | 'Blob'; } - /** @name XcmV1Junction (96) */ - export interface XcmV1Junction extends Enum { - readonly isParachain: boolean; - readonly asParachain: Compact; - readonly isAccountId32: boolean; - readonly asAccountId32: { - readonly network: XcmV0JunctionNetworkId; - readonly id: U8aFixed; + /** @name OrmlTokensModuleEvent (59) */ + interface OrmlTokensModuleEvent extends Enum { + readonly isEndowed: boolean; + readonly asEndowed: { + readonly currencyId: PalletForeignAssetsAssetIds; + readonly who: AccountId32; + readonly amount: u128; } & Struct; - readonly isAccountIndex64: boolean; - readonly asAccountIndex64: { - readonly network: XcmV0JunctionNetworkId; - readonly index: Compact; + readonly isDustLost: boolean; + readonly asDustLost: { + readonly currencyId: PalletForeignAssetsAssetIds; + readonly who: AccountId32; + readonly amount: u128; } & Struct; - readonly isAccountKey20: boolean; - readonly asAccountKey20: { - readonly network: XcmV0JunctionNetworkId; - readonly key: U8aFixed; + readonly isTransfer: boolean; + readonly asTransfer: { + readonly currencyId: PalletForeignAssetsAssetIds; + readonly from: AccountId32; + readonly to: AccountId32; + readonly amount: u128; } & Struct; - readonly isPalletInstance: boolean; - readonly asPalletInstance: u8; - readonly isGeneralIndex: boolean; - readonly asGeneralIndex: Compact; - readonly isGeneralKey: boolean; - readonly asGeneralKey: Bytes; - readonly isOnlyChild: boolean; - readonly isPlurality: boolean; - readonly asPlurality: { - readonly id: XcmV0JunctionBodyId; - readonly part: XcmV0JunctionBodyPart; + readonly isReserved: boolean; + readonly asReserved: { + readonly currencyId: PalletForeignAssetsAssetIds; + readonly who: AccountId32; + readonly amount: u128; } & Struct; - readonly type: 'Parachain' | 'AccountId32' | 'AccountIndex64' | 'AccountKey20' | 'PalletInstance' | 'GeneralIndex' | 'GeneralKey' | 'OnlyChild' | 'Plurality'; + readonly isUnreserved: boolean; + readonly asUnreserved: { + readonly currencyId: PalletForeignAssetsAssetIds; + readonly who: AccountId32; + readonly amount: u128; + } & Struct; + readonly isReserveRepatriated: boolean; + readonly asReserveRepatriated: { + readonly currencyId: PalletForeignAssetsAssetIds; + readonly from: AccountId32; + readonly to: AccountId32; + readonly amount: u128; + readonly status: FrameSupportTokensMiscBalanceStatus; + } & Struct; + readonly isBalanceSet: boolean; + readonly asBalanceSet: { + readonly currencyId: PalletForeignAssetsAssetIds; + readonly who: AccountId32; + readonly free: u128; + readonly reserved: u128; + } & Struct; + readonly isTotalIssuanceSet: boolean; + readonly asTotalIssuanceSet: { + readonly currencyId: PalletForeignAssetsAssetIds; + readonly amount: u128; + } & Struct; + readonly isWithdrawn: boolean; + readonly asWithdrawn: { + readonly currencyId: PalletForeignAssetsAssetIds; + readonly who: AccountId32; + readonly amount: u128; + } & Struct; + readonly isSlashed: boolean; + readonly asSlashed: { + readonly currencyId: PalletForeignAssetsAssetIds; + readonly who: AccountId32; + readonly freeAmount: u128; + readonly reservedAmount: u128; + } & Struct; + readonly isDeposited: boolean; + readonly asDeposited: { + readonly currencyId: PalletForeignAssetsAssetIds; + readonly who: AccountId32; + readonly amount: u128; + } & Struct; + readonly isLockSet: boolean; + readonly asLockSet: { + readonly lockId: U8aFixed; + readonly currencyId: 
PalletForeignAssetsAssetIds; + readonly who: AccountId32; + readonly amount: u128; + } & Struct; + readonly isLockRemoved: boolean; + readonly asLockRemoved: { + readonly lockId: U8aFixed; + readonly currencyId: PalletForeignAssetsAssetIds; + readonly who: AccountId32; + } & Struct; + readonly type: 'Endowed' | 'DustLost' | 'Transfer' | 'Reserved' | 'Unreserved' | 'ReserveRepatriated' | 'BalanceSet' | 'TotalIssuanceSet' | 'Withdrawn' | 'Slashed' | 'Deposited' | 'LockSet' | 'LockRemoved'; } - /** @name XcmVersionedXcm (97) */ - export interface XcmVersionedXcm extends Enum { - readonly isV0: boolean; - readonly asV0: XcmV0Xcm; - readonly isV1: boolean; - readonly asV1: XcmV1Xcm; - readonly isV2: boolean; - readonly asV2: XcmV2Xcm; - readonly type: 'V0' | 'V1' | 'V2'; + /** @name PalletForeignAssetsAssetIds (60) */ + interface PalletForeignAssetsAssetIds extends Enum { + readonly isForeignAssetId: boolean; + readonly asForeignAssetId: u32; + readonly isNativeAssetId: boolean; + readonly asNativeAssetId: PalletForeignAssetsNativeCurrency; + readonly type: 'ForeignAssetId' | 'NativeAssetId'; } - /** @name XcmV0Xcm (98) */ - export interface XcmV0Xcm extends Enum { - readonly isWithdrawAsset: boolean; - readonly asWithdrawAsset: { - readonly assets: Vec; - readonly effects: Vec; + /** @name PalletForeignAssetsNativeCurrency (61) */ + interface PalletForeignAssetsNativeCurrency extends Enum { + readonly isHere: boolean; + readonly isParent: boolean; + readonly type: 'Here' | 'Parent'; + } + + /** @name CumulusPalletXcmpQueueEvent (62) */ + interface CumulusPalletXcmpQueueEvent extends Enum { + readonly isSuccess: boolean; + readonly asSuccess: { + readonly messageHash: Option; + readonly weight: Weight; } & Struct; - readonly isReserveAssetDeposit: boolean; - readonly asReserveAssetDeposit: { - readonly assets: Vec; - readonly effects: Vec; + readonly isFail: boolean; + readonly asFail: { + readonly messageHash: Option; + readonly error: XcmV2TraitsError; + readonly weight: Weight; } & Struct; - readonly isTeleportAsset: boolean; - readonly asTeleportAsset: { - readonly assets: Vec; - readonly effects: Vec; + readonly isBadVersion: boolean; + readonly asBadVersion: { + readonly messageHash: Option; + } & Struct; + readonly isBadFormat: boolean; + readonly asBadFormat: { + readonly messageHash: Option; + } & Struct; + readonly isUpwardMessageSent: boolean; + readonly asUpwardMessageSent: { + readonly messageHash: Option; + } & Struct; + readonly isXcmpMessageSent: boolean; + readonly asXcmpMessageSent: { + readonly messageHash: Option; + } & Struct; + readonly isOverweightEnqueued: boolean; + readonly asOverweightEnqueued: { + readonly sender: u32; + readonly sentAt: u32; + readonly index: u64; + readonly required: Weight; + } & Struct; + readonly isOverweightServiced: boolean; + readonly asOverweightServiced: { + readonly index: u64; + readonly used: Weight; } & Struct; + readonly type: 'Success' | 'Fail' | 'BadVersion' | 'BadFormat' | 'UpwardMessageSent' | 'XcmpMessageSent' | 'OverweightEnqueued' | 'OverweightServiced'; + } + + /** @name XcmV2TraitsError (64) */ + interface XcmV2TraitsError extends Enum { + readonly isOverflow: boolean; + readonly isUnimplemented: boolean; + readonly isUntrustedReserveLocation: boolean; + readonly isUntrustedTeleportLocation: boolean; + readonly isMultiLocationFull: boolean; + readonly isMultiLocationNotInvertible: boolean; + readonly isBadOrigin: boolean; + readonly isInvalidLocation: boolean; + readonly isAssetNotFound: boolean; + readonly 
isFailedToTransactAsset: boolean; + readonly isNotWithdrawable: boolean; + readonly isLocationCannotHold: boolean; + readonly isExceedsMaxMessageSize: boolean; + readonly isDestinationUnsupported: boolean; + readonly isTransport: boolean; + readonly isUnroutable: boolean; + readonly isUnknownClaim: boolean; + readonly isFailedToDecode: boolean; + readonly isMaxWeightInvalid: boolean; + readonly isNotHoldingFees: boolean; + readonly isTooExpensive: boolean; + readonly isTrap: boolean; + readonly asTrap: u64; + readonly isUnhandledXcmVersion: boolean; + readonly isWeightLimitReached: boolean; + readonly asWeightLimitReached: u64; + readonly isBarrier: boolean; + readonly isWeightNotComputable: boolean; + readonly type: 'Overflow' | 'Unimplemented' | 'UntrustedReserveLocation' | 'UntrustedTeleportLocation' | 'MultiLocationFull' | 'MultiLocationNotInvertible' | 'BadOrigin' | 'InvalidLocation' | 'AssetNotFound' | 'FailedToTransactAsset' | 'NotWithdrawable' | 'LocationCannotHold' | 'ExceedsMaxMessageSize' | 'DestinationUnsupported' | 'Transport' | 'Unroutable' | 'UnknownClaim' | 'FailedToDecode' | 'MaxWeightInvalid' | 'NotHoldingFees' | 'TooExpensive' | 'Trap' | 'UnhandledXcmVersion' | 'WeightLimitReached' | 'Barrier' | 'WeightNotComputable'; + } + + /** @name PalletXcmEvent (66) */ + interface PalletXcmEvent extends Enum { + readonly isAttempted: boolean; + readonly asAttempted: XcmV2TraitsOutcome; + readonly isSent: boolean; + readonly asSent: ITuple<[XcmV1MultiLocation, XcmV1MultiLocation, XcmV2Xcm]>; + readonly isUnexpectedResponse: boolean; + readonly asUnexpectedResponse: ITuple<[XcmV1MultiLocation, u64]>; + readonly isResponseReady: boolean; + readonly asResponseReady: ITuple<[u64, XcmV2Response]>; + readonly isNotified: boolean; + readonly asNotified: ITuple<[u64, u8, u8]>; + readonly isNotifyOverweight: boolean; + readonly asNotifyOverweight: ITuple<[u64, u8, u8, Weight, Weight]>; + readonly isNotifyDispatchError: boolean; + readonly asNotifyDispatchError: ITuple<[u64, u8, u8]>; + readonly isNotifyDecodeFailed: boolean; + readonly asNotifyDecodeFailed: ITuple<[u64, u8, u8]>; + readonly isInvalidResponder: boolean; + readonly asInvalidResponder: ITuple<[XcmV1MultiLocation, u64, Option]>; + readonly isInvalidResponderVersion: boolean; + readonly asInvalidResponderVersion: ITuple<[XcmV1MultiLocation, u64]>; + readonly isResponseTaken: boolean; + readonly asResponseTaken: u64; + readonly isAssetsTrapped: boolean; + readonly asAssetsTrapped: ITuple<[H256, XcmV1MultiLocation, XcmVersionedMultiAssets]>; + readonly isVersionChangeNotified: boolean; + readonly asVersionChangeNotified: ITuple<[XcmV1MultiLocation, u32]>; + readonly isSupportedVersionChanged: boolean; + readonly asSupportedVersionChanged: ITuple<[XcmV1MultiLocation, u32]>; + readonly isNotifyTargetSendFail: boolean; + readonly asNotifyTargetSendFail: ITuple<[XcmV1MultiLocation, u64, XcmV2TraitsError]>; + readonly isNotifyTargetMigrationFail: boolean; + readonly asNotifyTargetMigrationFail: ITuple<[XcmVersionedMultiLocation, u64]>; + readonly type: 'Attempted' | 'Sent' | 'UnexpectedResponse' | 'ResponseReady' | 'Notified' | 'NotifyOverweight' | 'NotifyDispatchError' | 'NotifyDecodeFailed' | 'InvalidResponder' | 'InvalidResponderVersion' | 'ResponseTaken' | 'AssetsTrapped' | 'VersionChangeNotified' | 'SupportedVersionChanged' | 'NotifyTargetSendFail' | 'NotifyTargetMigrationFail'; + } + + /** @name XcmV2TraitsOutcome (67) */ + interface XcmV2TraitsOutcome extends Enum { + readonly isComplete: boolean; + readonly asComplete: u64; + 
readonly isIncomplete: boolean; + readonly asIncomplete: ITuple<[u64, XcmV2TraitsError]>; + readonly isError: boolean; + readonly asError: XcmV2TraitsError; + readonly type: 'Complete' | 'Incomplete' | 'Error'; + } + + /** @name XcmV2Xcm (68) */ + interface XcmV2Xcm extends Vec {} + + /** @name XcmV2Instruction (70) */ + interface XcmV2Instruction extends Enum { + readonly isWithdrawAsset: boolean; + readonly asWithdrawAsset: XcmV1MultiassetMultiAssets; + readonly isReserveAssetDeposited: boolean; + readonly asReserveAssetDeposited: XcmV1MultiassetMultiAssets; + readonly isReceiveTeleportedAsset: boolean; + readonly asReceiveTeleportedAsset: XcmV1MultiassetMultiAssets; readonly isQueryResponse: boolean; readonly asQueryResponse: { readonly queryId: Compact; - readonly response: XcmV0Response; + readonly response: XcmV2Response; + readonly maxWeight: Compact; } & Struct; readonly isTransferAsset: boolean; readonly asTransferAsset: { - readonly assets: Vec; - readonly dest: XcmV0MultiLocation; + readonly assets: XcmV1MultiassetMultiAssets; + readonly beneficiary: XcmV1MultiLocation; } & Struct; readonly isTransferReserveAsset: boolean; readonly asTransferReserveAsset: { - readonly assets: Vec; - readonly dest: XcmV0MultiLocation; - readonly effects: Vec; + readonly assets: XcmV1MultiassetMultiAssets; + readonly dest: XcmV1MultiLocation; + readonly xcm: XcmV2Xcm; } & Struct; readonly isTransact: boolean; readonly asTransact: { readonly originType: XcmV0OriginKind; - readonly requireWeightAtMost: u64; + readonly requireWeightAtMost: Compact; readonly call: XcmDoubleEncoded; } & Struct; readonly isHrmpNewChannelOpenRequest: boolean; @@ -821,16 +798,151 @@ declare module '@polkadot/types/lookup' { readonly sender: Compact; readonly recipient: Compact; } & Struct; - readonly isRelayedFrom: boolean; - readonly asRelayedFrom: { - readonly who: XcmV0MultiLocation; - readonly message: XcmV0Xcm; + readonly isClearOrigin: boolean; + readonly isDescendOrigin: boolean; + readonly asDescendOrigin: XcmV1MultilocationJunctions; + readonly isReportError: boolean; + readonly asReportError: { + readonly queryId: Compact; + readonly dest: XcmV1MultiLocation; + readonly maxResponseWeight: Compact; } & Struct; - readonly type: 'WithdrawAsset' | 'ReserveAssetDeposit' | 'TeleportAsset' | 'QueryResponse' | 'TransferAsset' | 'TransferReserveAsset' | 'Transact' | 'HrmpNewChannelOpenRequest' | 'HrmpChannelAccepted' | 'HrmpChannelClosing' | 'RelayedFrom'; - } + readonly isDepositAsset: boolean; + readonly asDepositAsset: { + readonly assets: XcmV1MultiassetMultiAssetFilter; + readonly maxAssets: Compact; + readonly beneficiary: XcmV1MultiLocation; + } & Struct; + readonly isDepositReserveAsset: boolean; + readonly asDepositReserveAsset: { + readonly assets: XcmV1MultiassetMultiAssetFilter; + readonly maxAssets: Compact; + readonly dest: XcmV1MultiLocation; + readonly xcm: XcmV2Xcm; + } & Struct; + readonly isExchangeAsset: boolean; + readonly asExchangeAsset: { + readonly give: XcmV1MultiassetMultiAssetFilter; + readonly receive: XcmV1MultiassetMultiAssets; + } & Struct; + readonly isInitiateReserveWithdraw: boolean; + readonly asInitiateReserveWithdraw: { + readonly assets: XcmV1MultiassetMultiAssetFilter; + readonly reserve: XcmV1MultiLocation; + readonly xcm: XcmV2Xcm; + } & Struct; + readonly isInitiateTeleport: boolean; + readonly asInitiateTeleport: { + readonly assets: XcmV1MultiassetMultiAssetFilter; + readonly dest: XcmV1MultiLocation; + readonly xcm: XcmV2Xcm; + } & Struct; + readonly isQueryHolding: boolean; 
+ readonly asQueryHolding: { + readonly queryId: Compact; + readonly dest: XcmV1MultiLocation; + readonly assets: XcmV1MultiassetMultiAssetFilter; + readonly maxResponseWeight: Compact; + } & Struct; + readonly isBuyExecution: boolean; + readonly asBuyExecution: { + readonly fees: XcmV1MultiAsset; + readonly weightLimit: XcmV2WeightLimit; + } & Struct; + readonly isRefundSurplus: boolean; + readonly isSetErrorHandler: boolean; + readonly asSetErrorHandler: XcmV2Xcm; + readonly isSetAppendix: boolean; + readonly asSetAppendix: XcmV2Xcm; + readonly isClearError: boolean; + readonly isClaimAsset: boolean; + readonly asClaimAsset: { + readonly assets: XcmV1MultiassetMultiAssets; + readonly ticket: XcmV1MultiLocation; + } & Struct; + readonly isTrap: boolean; + readonly asTrap: Compact; + readonly isSubscribeVersion: boolean; + readonly asSubscribeVersion: { + readonly queryId: Compact; + readonly maxResponseWeight: Compact; + } & Struct; + readonly isUnsubscribeVersion: boolean; + readonly type: 'WithdrawAsset' | 'ReserveAssetDeposited' | 'ReceiveTeleportedAsset' | 'QueryResponse' | 'TransferAsset' | 'TransferReserveAsset' | 'Transact' | 'HrmpNewChannelOpenRequest' | 'HrmpChannelAccepted' | 'HrmpChannelClosing' | 'ClearOrigin' | 'DescendOrigin' | 'ReportError' | 'DepositAsset' | 'DepositReserveAsset' | 'ExchangeAsset' | 'InitiateReserveWithdraw' | 'InitiateTeleport' | 'QueryHolding' | 'BuyExecution' | 'RefundSurplus' | 'SetErrorHandler' | 'SetAppendix' | 'ClearError' | 'ClaimAsset' | 'Trap' | 'SubscribeVersion' | 'UnsubscribeVersion'; + } + + /** @name XcmV2Response (71) */ + interface XcmV2Response extends Enum { + readonly isNull: boolean; + readonly isAssets: boolean; + readonly asAssets: XcmV1MultiassetMultiAssets; + readonly isExecutionResult: boolean; + readonly asExecutionResult: Option>; + readonly isVersion: boolean; + readonly asVersion: u32; + readonly type: 'Null' | 'Assets' | 'ExecutionResult' | 'Version'; + } + + /** @name XcmV0OriginKind (74) */ + interface XcmV0OriginKind extends Enum { + readonly isNative: boolean; + readonly isSovereignAccount: boolean; + readonly isSuperuser: boolean; + readonly isXcm: boolean; + readonly type: 'Native' | 'SovereignAccount' | 'Superuser' | 'Xcm'; + } + + /** @name XcmDoubleEncoded (75) */ + interface XcmDoubleEncoded extends Struct { + readonly encoded: Bytes; + } + + /** @name XcmV1MultiassetMultiAssetFilter (76) */ + interface XcmV1MultiassetMultiAssetFilter extends Enum { + readonly isDefinite: boolean; + readonly asDefinite: XcmV1MultiassetMultiAssets; + readonly isWild: boolean; + readonly asWild: XcmV1MultiassetWildMultiAsset; + readonly type: 'Definite' | 'Wild'; + } + + /** @name XcmV1MultiassetWildMultiAsset (77) */ + interface XcmV1MultiassetWildMultiAsset extends Enum { + readonly isAll: boolean; + readonly isAllOf: boolean; + readonly asAllOf: { + readonly id: XcmV1MultiassetAssetId; + readonly fun: XcmV1MultiassetWildFungibility; + } & Struct; + readonly type: 'All' | 'AllOf'; + } + + /** @name XcmV1MultiassetWildFungibility (78) */ + interface XcmV1MultiassetWildFungibility extends Enum { + readonly isFungible: boolean; + readonly isNonFungible: boolean; + readonly type: 'Fungible' | 'NonFungible'; + } + + /** @name XcmV2WeightLimit (79) */ + interface XcmV2WeightLimit extends Enum { + readonly isUnlimited: boolean; + readonly isLimited: boolean; + readonly asLimited: Compact; + readonly type: 'Unlimited' | 'Limited'; + } + + /** @name XcmVersionedMultiAssets (81) */ + interface XcmVersionedMultiAssets extends Enum { + readonly 
isV0: boolean; + readonly asV0: Vec; + readonly isV1: boolean; + readonly asV1: XcmV1MultiassetMultiAssets; + readonly type: 'V0' | 'V1'; + } - /** @name XcmV0MultiAsset (100) */ - export interface XcmV0MultiAsset extends Enum { + /** @name XcmV0MultiAsset (83) */ + interface XcmV0MultiAsset extends Enum { readonly isNone: boolean; readonly isAll: boolean; readonly isAllFungible: boolean; @@ -874,1883 +986,2199 @@ declare module '@polkadot/types/lookup' { readonly type: 'None' | 'All' | 'AllFungible' | 'AllNonFungible' | 'AllAbstractFungible' | 'AllAbstractNonFungible' | 'AllConcreteFungible' | 'AllConcreteNonFungible' | 'AbstractFungible' | 'AbstractNonFungible' | 'ConcreteFungible' | 'ConcreteNonFungible'; } - /** @name XcmV1MultiassetAssetInstance (101) */ - export interface XcmV1MultiassetAssetInstance extends Enum { - readonly isUndefined: boolean; - readonly isIndex: boolean; - readonly asIndex: Compact; - readonly isArray4: boolean; - readonly asArray4: U8aFixed; - readonly isArray8: boolean; - readonly asArray8: U8aFixed; - readonly isArray16: boolean; - readonly asArray16: U8aFixed; - readonly isArray32: boolean; - readonly asArray32: U8aFixed; - readonly isBlob: boolean; - readonly asBlob: Bytes; - readonly type: 'Undefined' | 'Index' | 'Array4' | 'Array8' | 'Array16' | 'Array32' | 'Blob'; + /** @name XcmV0MultiLocation (84) */ + interface XcmV0MultiLocation extends Enum { + readonly isNull: boolean; + readonly isX1: boolean; + readonly asX1: XcmV0Junction; + readonly isX2: boolean; + readonly asX2: ITuple<[XcmV0Junction, XcmV0Junction]>; + readonly isX3: boolean; + readonly asX3: ITuple<[XcmV0Junction, XcmV0Junction, XcmV0Junction]>; + readonly isX4: boolean; + readonly asX4: ITuple<[XcmV0Junction, XcmV0Junction, XcmV0Junction, XcmV0Junction]>; + readonly isX5: boolean; + readonly asX5: ITuple<[XcmV0Junction, XcmV0Junction, XcmV0Junction, XcmV0Junction, XcmV0Junction]>; + readonly isX6: boolean; + readonly asX6: ITuple<[XcmV0Junction, XcmV0Junction, XcmV0Junction, XcmV0Junction, XcmV0Junction, XcmV0Junction]>; + readonly isX7: boolean; + readonly asX7: ITuple<[XcmV0Junction, XcmV0Junction, XcmV0Junction, XcmV0Junction, XcmV0Junction, XcmV0Junction, XcmV0Junction]>; + readonly isX8: boolean; + readonly asX8: ITuple<[XcmV0Junction, XcmV0Junction, XcmV0Junction, XcmV0Junction, XcmV0Junction, XcmV0Junction, XcmV0Junction, XcmV0Junction]>; + readonly type: 'Null' | 'X1' | 'X2' | 'X3' | 'X4' | 'X5' | 'X6' | 'X7' | 'X8'; } - /** @name XcmV0Order (104) */ - export interface XcmV0Order extends Enum { - readonly isNull: boolean; - readonly isDepositAsset: boolean; - readonly asDepositAsset: { - readonly assets: Vec; - readonly dest: XcmV0MultiLocation; - } & Struct; - readonly isDepositReserveAsset: boolean; - readonly asDepositReserveAsset: { - readonly assets: Vec; - readonly dest: XcmV0MultiLocation; - readonly effects: Vec; - } & Struct; - readonly isExchangeAsset: boolean; - readonly asExchangeAsset: { - readonly give: Vec; - readonly receive: Vec; - } & Struct; - readonly isInitiateReserveWithdraw: boolean; - readonly asInitiateReserveWithdraw: { - readonly assets: Vec; - readonly reserve: XcmV0MultiLocation; - readonly effects: Vec; + /** @name XcmV0Junction (85) */ + interface XcmV0Junction extends Enum { + readonly isParent: boolean; + readonly isParachain: boolean; + readonly asParachain: Compact; + readonly isAccountId32: boolean; + readonly asAccountId32: { + readonly network: XcmV0JunctionNetworkId; + readonly id: U8aFixed; } & Struct; - readonly isInitiateTeleport: boolean; 
- readonly asInitiateTeleport: { - readonly assets: Vec; - readonly dest: XcmV0MultiLocation; - readonly effects: Vec; + readonly isAccountIndex64: boolean; + readonly asAccountIndex64: { + readonly network: XcmV0JunctionNetworkId; + readonly index: Compact; } & Struct; - readonly isQueryHolding: boolean; - readonly asQueryHolding: { - readonly queryId: Compact; - readonly dest: XcmV0MultiLocation; - readonly assets: Vec; + readonly isAccountKey20: boolean; + readonly asAccountKey20: { + readonly network: XcmV0JunctionNetworkId; + readonly key: U8aFixed; } & Struct; - readonly isBuyExecution: boolean; - readonly asBuyExecution: { - readonly fees: XcmV0MultiAsset; - readonly weight: u64; - readonly debt: u64; - readonly haltOnError: bool; - readonly xcm: Vec; + readonly isPalletInstance: boolean; + readonly asPalletInstance: u8; + readonly isGeneralIndex: boolean; + readonly asGeneralIndex: Compact; + readonly isGeneralKey: boolean; + readonly asGeneralKey: Bytes; + readonly isOnlyChild: boolean; + readonly isPlurality: boolean; + readonly asPlurality: { + readonly id: XcmV0JunctionBodyId; + readonly part: XcmV0JunctionBodyPart; } & Struct; - readonly type: 'Null' | 'DepositAsset' | 'DepositReserveAsset' | 'ExchangeAsset' | 'InitiateReserveWithdraw' | 'InitiateTeleport' | 'QueryHolding' | 'BuyExecution'; - } - - /** @name XcmV0Response (106) */ - export interface XcmV0Response extends Enum { - readonly isAssets: boolean; - readonly asAssets: Vec; - readonly type: 'Assets'; + readonly type: 'Parent' | 'Parachain' | 'AccountId32' | 'AccountIndex64' | 'AccountKey20' | 'PalletInstance' | 'GeneralIndex' | 'GeneralKey' | 'OnlyChild' | 'Plurality'; } - /** @name XcmV0OriginKind (107) */ - export interface XcmV0OriginKind extends Enum { - readonly isNative: boolean; - readonly isSovereignAccount: boolean; - readonly isSuperuser: boolean; - readonly isXcm: boolean; - readonly type: 'Native' | 'SovereignAccount' | 'Superuser' | 'Xcm'; + /** @name XcmVersionedMultiLocation (86) */ + interface XcmVersionedMultiLocation extends Enum { + readonly isV0: boolean; + readonly asV0: XcmV0MultiLocation; + readonly isV1: boolean; + readonly asV1: XcmV1MultiLocation; + readonly type: 'V0' | 'V1'; } - /** @name XcmDoubleEncoded (108) */ - export interface XcmDoubleEncoded extends Struct { - readonly encoded: Bytes; + /** @name CumulusPalletXcmEvent (87) */ + interface CumulusPalletXcmEvent extends Enum { + readonly isInvalidFormat: boolean; + readonly asInvalidFormat: U8aFixed; + readonly isUnsupportedVersion: boolean; + readonly asUnsupportedVersion: U8aFixed; + readonly isExecutedDownward: boolean; + readonly asExecutedDownward: ITuple<[U8aFixed, XcmV2TraitsOutcome]>; + readonly type: 'InvalidFormat' | 'UnsupportedVersion' | 'ExecutedDownward'; } - /** @name XcmV1Xcm (109) */ - export interface XcmV1Xcm extends Enum { - readonly isWithdrawAsset: boolean; - readonly asWithdrawAsset: { - readonly assets: XcmV1MultiassetMultiAssets; - readonly effects: Vec; + /** @name CumulusPalletDmpQueueEvent (88) */ + interface CumulusPalletDmpQueueEvent extends Enum { + readonly isInvalidFormat: boolean; + readonly asInvalidFormat: { + readonly messageId: U8aFixed; } & Struct; - readonly isReserveAssetDeposited: boolean; - readonly asReserveAssetDeposited: { - readonly assets: XcmV1MultiassetMultiAssets; - readonly effects: Vec; + readonly isUnsupportedVersion: boolean; + readonly asUnsupportedVersion: { + readonly messageId: U8aFixed; } & Struct; - readonly isReceiveTeleportedAsset: boolean; - readonly 
asReceiveTeleportedAsset: { - readonly assets: XcmV1MultiassetMultiAssets; - readonly effects: Vec; - } & Struct; - readonly isQueryResponse: boolean; - readonly asQueryResponse: { - readonly queryId: Compact; - readonly response: XcmV1Response; - } & Struct; - readonly isTransferAsset: boolean; - readonly asTransferAsset: { - readonly assets: XcmV1MultiassetMultiAssets; - readonly beneficiary: XcmV1MultiLocation; - } & Struct; - readonly isTransferReserveAsset: boolean; - readonly asTransferReserveAsset: { - readonly assets: XcmV1MultiassetMultiAssets; - readonly dest: XcmV1MultiLocation; - readonly effects: Vec; - } & Struct; - readonly isTransact: boolean; - readonly asTransact: { - readonly originType: XcmV0OriginKind; - readonly requireWeightAtMost: u64; - readonly call: XcmDoubleEncoded; - } & Struct; - readonly isHrmpNewChannelOpenRequest: boolean; - readonly asHrmpNewChannelOpenRequest: { - readonly sender: Compact; - readonly maxMessageSize: Compact; - readonly maxCapacity: Compact; - } & Struct; - readonly isHrmpChannelAccepted: boolean; - readonly asHrmpChannelAccepted: { - readonly recipient: Compact; + readonly isExecutedDownward: boolean; + readonly asExecutedDownward: { + readonly messageId: U8aFixed; + readonly outcome: XcmV2TraitsOutcome; } & Struct; - readonly isHrmpChannelClosing: boolean; - readonly asHrmpChannelClosing: { - readonly initiator: Compact; - readonly sender: Compact; - readonly recipient: Compact; + readonly isWeightExhausted: boolean; + readonly asWeightExhausted: { + readonly messageId: U8aFixed; + readonly remainingWeight: Weight; + readonly requiredWeight: Weight; } & Struct; - readonly isRelayedFrom: boolean; - readonly asRelayedFrom: { - readonly who: XcmV1MultilocationJunctions; - readonly message: XcmV1Xcm; + readonly isOverweightEnqueued: boolean; + readonly asOverweightEnqueued: { + readonly messageId: U8aFixed; + readonly overweightIndex: u64; + readonly requiredWeight: Weight; } & Struct; - readonly isSubscribeVersion: boolean; - readonly asSubscribeVersion: { - readonly queryId: Compact; - readonly maxResponseWeight: Compact; + readonly isOverweightServiced: boolean; + readonly asOverweightServiced: { + readonly overweightIndex: u64; + readonly weightUsed: Weight; } & Struct; - readonly isUnsubscribeVersion: boolean; - readonly type: 'WithdrawAsset' | 'ReserveAssetDeposited' | 'ReceiveTeleportedAsset' | 'QueryResponse' | 'TransferAsset' | 'TransferReserveAsset' | 'Transact' | 'HrmpNewChannelOpenRequest' | 'HrmpChannelAccepted' | 'HrmpChannelClosing' | 'RelayedFrom' | 'SubscribeVersion' | 'UnsubscribeVersion'; - } - - /** @name XcmV1MultiassetMultiAssets (110) */ - export interface XcmV1MultiassetMultiAssets extends Vec {} - - /** @name XcmV1MultiAsset (112) */ - export interface XcmV1MultiAsset extends Struct { - readonly id: XcmV1MultiassetAssetId; - readonly fun: XcmV1MultiassetFungibility; + readonly type: 'InvalidFormat' | 'UnsupportedVersion' | 'ExecutedDownward' | 'WeightExhausted' | 'OverweightEnqueued' | 'OverweightServiced'; } - /** @name XcmV1MultiassetAssetId (113) */ - export interface XcmV1MultiassetAssetId extends Enum { - readonly isConcrete: boolean; - readonly asConcrete: XcmV1MultiLocation; - readonly isAbstract: boolean; - readonly asAbstract: Bytes; - readonly type: 'Concrete' | 'Abstract'; + /** @name PalletUniqueRawEvent (89) */ + interface PalletUniqueRawEvent extends Enum { + readonly isCollectionSponsorRemoved: boolean; + readonly asCollectionSponsorRemoved: u32; + readonly isCollectionAdminAdded: boolean; + readonly 
asCollectionAdminAdded: ITuple<[u32, PalletEvmAccountBasicCrossAccountIdRepr]>; + readonly isCollectionOwnedChanged: boolean; + readonly asCollectionOwnedChanged: ITuple<[u32, AccountId32]>; + readonly isCollectionSponsorSet: boolean; + readonly asCollectionSponsorSet: ITuple<[u32, AccountId32]>; + readonly isSponsorshipConfirmed: boolean; + readonly asSponsorshipConfirmed: ITuple<[u32, AccountId32]>; + readonly isCollectionAdminRemoved: boolean; + readonly asCollectionAdminRemoved: ITuple<[u32, PalletEvmAccountBasicCrossAccountIdRepr]>; + readonly isAllowListAddressRemoved: boolean; + readonly asAllowListAddressRemoved: ITuple<[u32, PalletEvmAccountBasicCrossAccountIdRepr]>; + readonly isAllowListAddressAdded: boolean; + readonly asAllowListAddressAdded: ITuple<[u32, PalletEvmAccountBasicCrossAccountIdRepr]>; + readonly isCollectionLimitSet: boolean; + readonly asCollectionLimitSet: u32; + readonly isCollectionPermissionSet: boolean; + readonly asCollectionPermissionSet: u32; + readonly type: 'CollectionSponsorRemoved' | 'CollectionAdminAdded' | 'CollectionOwnedChanged' | 'CollectionSponsorSet' | 'SponsorshipConfirmed' | 'CollectionAdminRemoved' | 'AllowListAddressRemoved' | 'AllowListAddressAdded' | 'CollectionLimitSet' | 'CollectionPermissionSet'; } - /** @name XcmV1MultiassetFungibility (114) */ - export interface XcmV1MultiassetFungibility extends Enum { - readonly isFungible: boolean; - readonly asFungible: Compact; - readonly isNonFungible: boolean; - readonly asNonFungible: XcmV1MultiassetAssetInstance; - readonly type: 'Fungible' | 'NonFungible'; + /** @name PalletEvmAccountBasicCrossAccountIdRepr (90) */ + interface PalletEvmAccountBasicCrossAccountIdRepr extends Enum { + readonly isSubstrate: boolean; + readonly asSubstrate: AccountId32; + readonly isEthereum: boolean; + readonly asEthereum: H160; + readonly type: 'Substrate' | 'Ethereum'; } - /** @name XcmV1Order (116) */ - export interface XcmV1Order extends Enum { - readonly isNoop: boolean; - readonly isDepositAsset: boolean; - readonly asDepositAsset: { - readonly assets: XcmV1MultiassetMultiAssetFilter; - readonly maxAssets: u32; - readonly beneficiary: XcmV1MultiLocation; - } & Struct; - readonly isDepositReserveAsset: boolean; - readonly asDepositReserveAsset: { - readonly assets: XcmV1MultiassetMultiAssetFilter; - readonly maxAssets: u32; - readonly dest: XcmV1MultiLocation; - readonly effects: Vec; - } & Struct; - readonly isExchangeAsset: boolean; - readonly asExchangeAsset: { - readonly give: XcmV1MultiassetMultiAssetFilter; - readonly receive: XcmV1MultiassetMultiAssets; + /** @name PalletUniqueSchedulerEvent (93) */ + interface PalletUniqueSchedulerEvent extends Enum { + readonly isScheduled: boolean; + readonly asScheduled: { + readonly when: u32; + readonly index: u32; } & Struct; - readonly isInitiateReserveWithdraw: boolean; - readonly asInitiateReserveWithdraw: { - readonly assets: XcmV1MultiassetMultiAssetFilter; - readonly reserve: XcmV1MultiLocation; - readonly effects: Vec; + readonly isCanceled: boolean; + readonly asCanceled: { + readonly when: u32; + readonly index: u32; } & Struct; - readonly isInitiateTeleport: boolean; - readonly asInitiateTeleport: { - readonly assets: XcmV1MultiassetMultiAssetFilter; - readonly dest: XcmV1MultiLocation; - readonly effects: Vec; + readonly isPriorityChanged: boolean; + readonly asPriorityChanged: { + readonly when: u32; + readonly index: u32; + readonly priority: u8; } & Struct; - readonly isQueryHolding: boolean; - readonly asQueryHolding: { - readonly queryId: 
Compact; - readonly dest: XcmV1MultiLocation; - readonly assets: XcmV1MultiassetMultiAssetFilter; + readonly isDispatched: boolean; + readonly asDispatched: { + readonly task: ITuple<[u32, u32]>; + readonly id: Option; + readonly result: Result; } & Struct; - readonly isBuyExecution: boolean; - readonly asBuyExecution: { - readonly fees: XcmV1MultiAsset; - readonly weight: u64; - readonly debt: u64; - readonly haltOnError: bool; - readonly instructions: Vec; + readonly isCallLookupFailed: boolean; + readonly asCallLookupFailed: { + readonly task: ITuple<[u32, u32]>; + readonly id: Option; + readonly error: FrameSupportScheduleLookupError; } & Struct; - readonly type: 'Noop' | 'DepositAsset' | 'DepositReserveAsset' | 'ExchangeAsset' | 'InitiateReserveWithdraw' | 'InitiateTeleport' | 'QueryHolding' | 'BuyExecution'; - } - - /** @name XcmV1MultiassetMultiAssetFilter (117) */ - export interface XcmV1MultiassetMultiAssetFilter extends Enum { - readonly isDefinite: boolean; - readonly asDefinite: XcmV1MultiassetMultiAssets; - readonly isWild: boolean; - readonly asWild: XcmV1MultiassetWildMultiAsset; - readonly type: 'Definite' | 'Wild'; + readonly type: 'Scheduled' | 'Canceled' | 'PriorityChanged' | 'Dispatched' | 'CallLookupFailed'; } - /** @name XcmV1MultiassetWildMultiAsset (118) */ - export interface XcmV1MultiassetWildMultiAsset extends Enum { - readonly isAll: boolean; - readonly isAllOf: boolean; - readonly asAllOf: { - readonly id: XcmV1MultiassetAssetId; - readonly fun: XcmV1MultiassetWildFungibility; - } & Struct; - readonly type: 'All' | 'AllOf'; + /** @name FrameSupportScheduleLookupError (96) */ + interface FrameSupportScheduleLookupError extends Enum { + readonly isUnknown: boolean; + readonly isBadFormat: boolean; + readonly type: 'Unknown' | 'BadFormat'; } - /** @name XcmV1MultiassetWildFungibility (119) */ - export interface XcmV1MultiassetWildFungibility extends Enum { - readonly isFungible: boolean; - readonly isNonFungible: boolean; - readonly type: 'Fungible' | 'NonFungible'; + /** @name PalletCommonEvent (97) */ + interface PalletCommonEvent extends Enum { + readonly isCollectionCreated: boolean; + readonly asCollectionCreated: ITuple<[u32, u8, AccountId32]>; + readonly isCollectionDestroyed: boolean; + readonly asCollectionDestroyed: u32; + readonly isItemCreated: boolean; + readonly asItemCreated: ITuple<[u32, u32, PalletEvmAccountBasicCrossAccountIdRepr, u128]>; + readonly isItemDestroyed: boolean; + readonly asItemDestroyed: ITuple<[u32, u32, PalletEvmAccountBasicCrossAccountIdRepr, u128]>; + readonly isTransfer: boolean; + readonly asTransfer: ITuple<[u32, u32, PalletEvmAccountBasicCrossAccountIdRepr, PalletEvmAccountBasicCrossAccountIdRepr, u128]>; + readonly isApproved: boolean; + readonly asApproved: ITuple<[u32, u32, PalletEvmAccountBasicCrossAccountIdRepr, PalletEvmAccountBasicCrossAccountIdRepr, u128]>; + readonly isCollectionPropertySet: boolean; + readonly asCollectionPropertySet: ITuple<[u32, Bytes]>; + readonly isCollectionPropertyDeleted: boolean; + readonly asCollectionPropertyDeleted: ITuple<[u32, Bytes]>; + readonly isTokenPropertySet: boolean; + readonly asTokenPropertySet: ITuple<[u32, u32, Bytes]>; + readonly isTokenPropertyDeleted: boolean; + readonly asTokenPropertyDeleted: ITuple<[u32, u32, Bytes]>; + readonly isPropertyPermissionSet: boolean; + readonly asPropertyPermissionSet: ITuple<[u32, Bytes]>; + readonly type: 'CollectionCreated' | 'CollectionDestroyed' | 'ItemCreated' | 'ItemDestroyed' | 'Transfer' | 'Approved' | 'CollectionPropertySet' | 
'CollectionPropertyDeleted' | 'TokenPropertySet' | 'TokenPropertyDeleted' | 'PropertyPermissionSet'; } - /** @name XcmV1Response (121) */ - export interface XcmV1Response extends Enum { - readonly isAssets: boolean; - readonly asAssets: XcmV1MultiassetMultiAssets; - readonly isVersion: boolean; - readonly asVersion: u32; - readonly type: 'Assets' | 'Version'; + /** @name PalletStructureEvent (100) */ + interface PalletStructureEvent extends Enum { + readonly isExecuted: boolean; + readonly asExecuted: Result; + readonly type: 'Executed'; } - /** @name XcmV2Xcm (122) */ - export interface XcmV2Xcm extends Vec {} - - /** @name XcmV2Instruction (124) */ - export interface XcmV2Instruction extends Enum { - readonly isWithdrawAsset: boolean; - readonly asWithdrawAsset: XcmV1MultiassetMultiAssets; - readonly isReserveAssetDeposited: boolean; - readonly asReserveAssetDeposited: XcmV1MultiassetMultiAssets; - readonly isReceiveTeleportedAsset: boolean; - readonly asReceiveTeleportedAsset: XcmV1MultiassetMultiAssets; - readonly isQueryResponse: boolean; - readonly asQueryResponse: { - readonly queryId: Compact; - readonly response: XcmV2Response; - readonly maxWeight: Compact; + /** @name PalletRmrkCoreEvent (101) */ + interface PalletRmrkCoreEvent extends Enum { + readonly isCollectionCreated: boolean; + readonly asCollectionCreated: { + readonly issuer: AccountId32; + readonly collectionId: u32; } & Struct; - readonly isTransferAsset: boolean; - readonly asTransferAsset: { - readonly assets: XcmV1MultiassetMultiAssets; - readonly beneficiary: XcmV1MultiLocation; + readonly isCollectionDestroyed: boolean; + readonly asCollectionDestroyed: { + readonly issuer: AccountId32; + readonly collectionId: u32; } & Struct; - readonly isTransferReserveAsset: boolean; - readonly asTransferReserveAsset: { - readonly assets: XcmV1MultiassetMultiAssets; - readonly dest: XcmV1MultiLocation; - readonly xcm: XcmV2Xcm; + readonly isIssuerChanged: boolean; + readonly asIssuerChanged: { + readonly oldIssuer: AccountId32; + readonly newIssuer: AccountId32; + readonly collectionId: u32; } & Struct; - readonly isTransact: boolean; - readonly asTransact: { - readonly originType: XcmV0OriginKind; - readonly requireWeightAtMost: Compact; - readonly call: XcmDoubleEncoded; + readonly isCollectionLocked: boolean; + readonly asCollectionLocked: { + readonly issuer: AccountId32; + readonly collectionId: u32; } & Struct; - readonly isHrmpNewChannelOpenRequest: boolean; - readonly asHrmpNewChannelOpenRequest: { - readonly sender: Compact; - readonly maxMessageSize: Compact; - readonly maxCapacity: Compact; + readonly isNftMinted: boolean; + readonly asNftMinted: { + readonly owner: AccountId32; + readonly collectionId: u32; + readonly nftId: u32; } & Struct; - readonly isHrmpChannelAccepted: boolean; - readonly asHrmpChannelAccepted: { - readonly recipient: Compact; + readonly isNftBurned: boolean; + readonly asNftBurned: { + readonly owner: AccountId32; + readonly nftId: u32; } & Struct; - readonly isHrmpChannelClosing: boolean; - readonly asHrmpChannelClosing: { - readonly initiator: Compact; - readonly sender: Compact; - readonly recipient: Compact; + readonly isNftSent: boolean; + readonly asNftSent: { + readonly sender: AccountId32; + readonly recipient: RmrkTraitsNftAccountIdOrCollectionNftTuple; + readonly collectionId: u32; + readonly nftId: u32; + readonly approvalRequired: bool; } & Struct; - readonly isClearOrigin: boolean; - readonly isDescendOrigin: boolean; - readonly asDescendOrigin: XcmV1MultilocationJunctions; - 
readonly isReportError: boolean; - readonly asReportError: { - readonly queryId: Compact; - readonly dest: XcmV1MultiLocation; - readonly maxResponseWeight: Compact; + readonly isNftAccepted: boolean; + readonly asNftAccepted: { + readonly sender: AccountId32; + readonly recipient: RmrkTraitsNftAccountIdOrCollectionNftTuple; + readonly collectionId: u32; + readonly nftId: u32; } & Struct; - readonly isDepositAsset: boolean; - readonly asDepositAsset: { - readonly assets: XcmV1MultiassetMultiAssetFilter; - readonly maxAssets: Compact; - readonly beneficiary: XcmV1MultiLocation; + readonly isNftRejected: boolean; + readonly asNftRejected: { + readonly sender: AccountId32; + readonly collectionId: u32; + readonly nftId: u32; } & Struct; - readonly isDepositReserveAsset: boolean; - readonly asDepositReserveAsset: { - readonly assets: XcmV1MultiassetMultiAssetFilter; - readonly maxAssets: Compact; - readonly dest: XcmV1MultiLocation; - readonly xcm: XcmV2Xcm; + readonly isPropertySet: boolean; + readonly asPropertySet: { + readonly collectionId: u32; + readonly maybeNftId: Option; + readonly key: Bytes; + readonly value: Bytes; } & Struct; - readonly isExchangeAsset: boolean; - readonly asExchangeAsset: { - readonly give: XcmV1MultiassetMultiAssetFilter; - readonly receive: XcmV1MultiassetMultiAssets; + readonly isResourceAdded: boolean; + readonly asResourceAdded: { + readonly nftId: u32; + readonly resourceId: u32; } & Struct; - readonly isInitiateReserveWithdraw: boolean; - readonly asInitiateReserveWithdraw: { - readonly assets: XcmV1MultiassetMultiAssetFilter; - readonly reserve: XcmV1MultiLocation; - readonly xcm: XcmV2Xcm; + readonly isResourceRemoval: boolean; + readonly asResourceRemoval: { + readonly nftId: u32; + readonly resourceId: u32; } & Struct; - readonly isInitiateTeleport: boolean; - readonly asInitiateTeleport: { - readonly assets: XcmV1MultiassetMultiAssetFilter; - readonly dest: XcmV1MultiLocation; - readonly xcm: XcmV2Xcm; + readonly isResourceAccepted: boolean; + readonly asResourceAccepted: { + readonly nftId: u32; + readonly resourceId: u32; } & Struct; - readonly isQueryHolding: boolean; - readonly asQueryHolding: { - readonly queryId: Compact; - readonly dest: XcmV1MultiLocation; - readonly assets: XcmV1MultiassetMultiAssetFilter; - readonly maxResponseWeight: Compact; + readonly isResourceRemovalAccepted: boolean; + readonly asResourceRemovalAccepted: { + readonly nftId: u32; + readonly resourceId: u32; } & Struct; - readonly isBuyExecution: boolean; - readonly asBuyExecution: { - readonly fees: XcmV1MultiAsset; - readonly weightLimit: XcmV2WeightLimit; + readonly isPrioritySet: boolean; + readonly asPrioritySet: { + readonly collectionId: u32; + readonly nftId: u32; } & Struct; - readonly isRefundSurplus: boolean; - readonly isSetErrorHandler: boolean; - readonly asSetErrorHandler: XcmV2Xcm; - readonly isSetAppendix: boolean; - readonly asSetAppendix: XcmV2Xcm; - readonly isClearError: boolean; - readonly isClaimAsset: boolean; - readonly asClaimAsset: { - readonly assets: XcmV1MultiassetMultiAssets; - readonly ticket: XcmV1MultiLocation; + readonly type: 'CollectionCreated' | 'CollectionDestroyed' | 'IssuerChanged' | 'CollectionLocked' | 'NftMinted' | 'NftBurned' | 'NftSent' | 'NftAccepted' | 'NftRejected' | 'PropertySet' | 'ResourceAdded' | 'ResourceRemoval' | 'ResourceAccepted' | 'ResourceRemovalAccepted' | 'PrioritySet'; + } + + /** @name RmrkTraitsNftAccountIdOrCollectionNftTuple (102) */ + interface RmrkTraitsNftAccountIdOrCollectionNftTuple extends Enum { + 
readonly isAccountId: boolean; + readonly asAccountId: AccountId32; + readonly isCollectionAndNftTuple: boolean; + readonly asCollectionAndNftTuple: ITuple<[u32, u32]>; + readonly type: 'AccountId' | 'CollectionAndNftTuple'; + } + + /** @name PalletRmrkEquipEvent (107) */ + interface PalletRmrkEquipEvent extends Enum { + readonly isBaseCreated: boolean; + readonly asBaseCreated: { + readonly issuer: AccountId32; + readonly baseId: u32; } & Struct; - readonly isTrap: boolean; - readonly asTrap: Compact; - readonly isSubscribeVersion: boolean; - readonly asSubscribeVersion: { - readonly queryId: Compact; - readonly maxResponseWeight: Compact; + readonly isEquippablesUpdated: boolean; + readonly asEquippablesUpdated: { + readonly baseId: u32; + readonly slotId: u32; } & Struct; - readonly isUnsubscribeVersion: boolean; - readonly type: 'WithdrawAsset' | 'ReserveAssetDeposited' | 'ReceiveTeleportedAsset' | 'QueryResponse' | 'TransferAsset' | 'TransferReserveAsset' | 'Transact' | 'HrmpNewChannelOpenRequest' | 'HrmpChannelAccepted' | 'HrmpChannelClosing' | 'ClearOrigin' | 'DescendOrigin' | 'ReportError' | 'DepositAsset' | 'DepositReserveAsset' | 'ExchangeAsset' | 'InitiateReserveWithdraw' | 'InitiateTeleport' | 'QueryHolding' | 'BuyExecution' | 'RefundSurplus' | 'SetErrorHandler' | 'SetAppendix' | 'ClearError' | 'ClaimAsset' | 'Trap' | 'SubscribeVersion' | 'UnsubscribeVersion'; + readonly type: 'BaseCreated' | 'EquippablesUpdated'; } - /** @name XcmV2Response (125) */ - export interface XcmV2Response extends Enum { - readonly isNull: boolean; - readonly isAssets: boolean; - readonly asAssets: XcmV1MultiassetMultiAssets; - readonly isExecutionResult: boolean; - readonly asExecutionResult: Option>; - readonly isVersion: boolean; - readonly asVersion: u32; - readonly type: 'Null' | 'Assets' | 'ExecutionResult' | 'Version'; + /** @name PalletAppPromotionEvent (108) */ + interface PalletAppPromotionEvent extends Enum { + readonly isStakingRecalculation: boolean; + readonly asStakingRecalculation: ITuple<[AccountId32, u128, u128]>; + readonly isStake: boolean; + readonly asStake: ITuple<[AccountId32, u128]>; + readonly isUnstake: boolean; + readonly asUnstake: ITuple<[AccountId32, u128]>; + readonly isSetAdmin: boolean; + readonly asSetAdmin: AccountId32; + readonly type: 'StakingRecalculation' | 'Stake' | 'Unstake' | 'SetAdmin'; + } + + /** @name PalletForeignAssetsModuleEvent (109) */ + interface PalletForeignAssetsModuleEvent extends Enum { + readonly isForeignAssetRegistered: boolean; + readonly asForeignAssetRegistered: { + readonly assetId: u32; + readonly assetAddress: XcmV1MultiLocation; + readonly metadata: PalletForeignAssetsModuleAssetMetadata; + } & Struct; + readonly isForeignAssetUpdated: boolean; + readonly asForeignAssetUpdated: { + readonly assetId: u32; + readonly assetAddress: XcmV1MultiLocation; + readonly metadata: PalletForeignAssetsModuleAssetMetadata; + } & Struct; + readonly isAssetRegistered: boolean; + readonly asAssetRegistered: { + readonly assetId: PalletForeignAssetsAssetIds; + readonly metadata: PalletForeignAssetsModuleAssetMetadata; + } & Struct; + readonly isAssetUpdated: boolean; + readonly asAssetUpdated: { + readonly assetId: PalletForeignAssetsAssetIds; + readonly metadata: PalletForeignAssetsModuleAssetMetadata; + } & Struct; + readonly type: 'ForeignAssetRegistered' | 'ForeignAssetUpdated' | 'AssetRegistered' | 'AssetUpdated'; + } + + /** @name PalletForeignAssetsModuleAssetMetadata (110) */ + interface PalletForeignAssetsModuleAssetMetadata extends Struct { + 
readonly name: Bytes; + readonly symbol: Bytes; + readonly decimals: u8; + readonly minimalBalance: u128; } - /** @name XcmV2TraitsError (128) */ - export interface XcmV2TraitsError extends Enum { - readonly isOverflow: boolean; - readonly isUnimplemented: boolean; - readonly isUntrustedReserveLocation: boolean; - readonly isUntrustedTeleportLocation: boolean; - readonly isMultiLocationFull: boolean; - readonly isMultiLocationNotInvertible: boolean; - readonly isBadOrigin: boolean; - readonly isInvalidLocation: boolean; - readonly isAssetNotFound: boolean; - readonly isFailedToTransactAsset: boolean; - readonly isNotWithdrawable: boolean; - readonly isLocationCannotHold: boolean; - readonly isExceedsMaxMessageSize: boolean; - readonly isDestinationUnsupported: boolean; - readonly isTransport: boolean; - readonly isUnroutable: boolean; - readonly isUnknownClaim: boolean; - readonly isFailedToDecode: boolean; - readonly isMaxWeightInvalid: boolean; - readonly isNotHoldingFees: boolean; - readonly isTooExpensive: boolean; - readonly isTrap: boolean; - readonly asTrap: u64; - readonly isUnhandledXcmVersion: boolean; - readonly isWeightLimitReached: boolean; - readonly asWeightLimitReached: u64; - readonly isBarrier: boolean; - readonly isWeightNotComputable: boolean; - readonly type: 'Overflow' | 'Unimplemented' | 'UntrustedReserveLocation' | 'UntrustedTeleportLocation' | 'MultiLocationFull' | 'MultiLocationNotInvertible' | 'BadOrigin' | 'InvalidLocation' | 'AssetNotFound' | 'FailedToTransactAsset' | 'NotWithdrawable' | 'LocationCannotHold' | 'ExceedsMaxMessageSize' | 'DestinationUnsupported' | 'Transport' | 'Unroutable' | 'UnknownClaim' | 'FailedToDecode' | 'MaxWeightInvalid' | 'NotHoldingFees' | 'TooExpensive' | 'Trap' | 'UnhandledXcmVersion' | 'WeightLimitReached' | 'Barrier' | 'WeightNotComputable'; + /** @name PalletEvmEvent (111) */ + interface PalletEvmEvent extends Enum { + readonly isLog: boolean; + readonly asLog: EthereumLog; + readonly isCreated: boolean; + readonly asCreated: H160; + readonly isCreatedFailed: boolean; + readonly asCreatedFailed: H160; + readonly isExecuted: boolean; + readonly asExecuted: H160; + readonly isExecutedFailed: boolean; + readonly asExecutedFailed: H160; + readonly isBalanceDeposit: boolean; + readonly asBalanceDeposit: ITuple<[AccountId32, H160, U256]>; + readonly isBalanceWithdraw: boolean; + readonly asBalanceWithdraw: ITuple<[AccountId32, H160, U256]>; + readonly type: 'Log' | 'Created' | 'CreatedFailed' | 'Executed' | 'ExecutedFailed' | 'BalanceDeposit' | 'BalanceWithdraw'; } - /** @name XcmV2WeightLimit (129) */ - export interface XcmV2WeightLimit extends Enum { - readonly isUnlimited: boolean; - readonly isLimited: boolean; - readonly asLimited: Compact; - readonly type: 'Unlimited' | 'Limited'; + /** @name EthereumLog (112) */ + interface EthereumLog extends Struct { + readonly address: H160; + readonly topics: Vec; + readonly data: Bytes; } - /** @name XcmVersionedMultiAssets (130) */ - export interface XcmVersionedMultiAssets extends Enum { - readonly isV0: boolean; - readonly asV0: Vec; - readonly isV1: boolean; - readonly asV1: XcmV1MultiassetMultiAssets; - readonly type: 'V0' | 'V1'; + /** @name PalletEthereumEvent (116) */ + interface PalletEthereumEvent extends Enum { + readonly isExecuted: boolean; + readonly asExecuted: ITuple<[H160, H160, H256, EvmCoreErrorExitReason]>; + readonly type: 'Executed'; } - /** @name CumulusPalletXcmCall (145) */ - export type CumulusPalletXcmCall = Null; - - /** @name CumulusPalletDmpQueueCall (146) */ - 
export interface CumulusPalletDmpQueueCall extends Enum { - readonly isServiceOverweight: boolean; - readonly asServiceOverweight: { - readonly index: u64; - readonly weightLimit: u64; - } & Struct; - readonly type: 'ServiceOverweight'; + /** @name EvmCoreErrorExitReason (117) */ + interface EvmCoreErrorExitReason extends Enum { + readonly isSucceed: boolean; + readonly asSucceed: EvmCoreErrorExitSucceed; + readonly isError: boolean; + readonly asError: EvmCoreErrorExitError; + readonly isRevert: boolean; + readonly asRevert: EvmCoreErrorExitRevert; + readonly isFatal: boolean; + readonly asFatal: EvmCoreErrorExitFatal; + readonly type: 'Succeed' | 'Error' | 'Revert' | 'Fatal'; } - /** @name PalletInflationCall (147) */ - export interface PalletInflationCall extends Enum { - readonly isStartInflation: boolean; - readonly asStartInflation: { - readonly inflationStartRelayBlock: u32; - } & Struct; - readonly type: 'StartInflation'; + /** @name EvmCoreErrorExitSucceed (118) */ + interface EvmCoreErrorExitSucceed extends Enum { + readonly isStopped: boolean; + readonly isReturned: boolean; + readonly isSuicided: boolean; + readonly type: 'Stopped' | 'Returned' | 'Suicided'; } - /** @name PalletUniqueCall (148) */ - export interface PalletUniqueCall extends Enum { - readonly isCreateCollection: boolean; - readonly asCreateCollection: { - readonly collectionName: Vec; - readonly collectionDescription: Vec; - readonly tokenPrefix: Bytes; - readonly mode: UpDataStructsCollectionMode; - } & Struct; - readonly isCreateCollectionEx: boolean; - readonly asCreateCollectionEx: { - readonly data: UpDataStructsCreateCollectionData; - } & Struct; - readonly isDestroyCollection: boolean; - readonly asDestroyCollection: { - readonly collectionId: u32; - } & Struct; - readonly isAddToAllowList: boolean; - readonly asAddToAllowList: { - readonly collectionId: u32; - readonly address: PalletEvmAccountBasicCrossAccountIdRepr; - } & Struct; - readonly isRemoveFromAllowList: boolean; - readonly asRemoveFromAllowList: { - readonly collectionId: u32; - readonly address: PalletEvmAccountBasicCrossAccountIdRepr; - } & Struct; - readonly isChangeCollectionOwner: boolean; - readonly asChangeCollectionOwner: { - readonly collectionId: u32; - readonly newOwner: AccountId32; - } & Struct; - readonly isAddCollectionAdmin: boolean; - readonly asAddCollectionAdmin: { - readonly collectionId: u32; - readonly newAdminId: PalletEvmAccountBasicCrossAccountIdRepr; - } & Struct; - readonly isRemoveCollectionAdmin: boolean; - readonly asRemoveCollectionAdmin: { - readonly collectionId: u32; - readonly accountId: PalletEvmAccountBasicCrossAccountIdRepr; - } & Struct; - readonly isSetCollectionSponsor: boolean; - readonly asSetCollectionSponsor: { - readonly collectionId: u32; - readonly newSponsor: AccountId32; - } & Struct; - readonly isConfirmSponsorship: boolean; - readonly asConfirmSponsorship: { - readonly collectionId: u32; - } & Struct; - readonly isRemoveCollectionSponsor: boolean; - readonly asRemoveCollectionSponsor: { - readonly collectionId: u32; - } & Struct; - readonly isCreateItem: boolean; - readonly asCreateItem: { - readonly collectionId: u32; - readonly owner: PalletEvmAccountBasicCrossAccountIdRepr; - readonly data: UpDataStructsCreateItemData; - } & Struct; - readonly isCreateMultipleItems: boolean; - readonly asCreateMultipleItems: { - readonly collectionId: u32; - readonly owner: PalletEvmAccountBasicCrossAccountIdRepr; - readonly itemsData: Vec; - } & Struct; - readonly isSetCollectionProperties: boolean; - 
readonly asSetCollectionProperties: { - readonly collectionId: u32; - readonly properties: Vec; - } & Struct; - readonly isDeleteCollectionProperties: boolean; - readonly asDeleteCollectionProperties: { - readonly collectionId: u32; - readonly propertyKeys: Vec; - } & Struct; - readonly isSetTokenProperties: boolean; - readonly asSetTokenProperties: { - readonly collectionId: u32; - readonly tokenId: u32; - readonly properties: Vec; - } & Struct; - readonly isDeleteTokenProperties: boolean; - readonly asDeleteTokenProperties: { - readonly collectionId: u32; - readonly tokenId: u32; - readonly propertyKeys: Vec; - } & Struct; - readonly isSetTokenPropertyPermissions: boolean; - readonly asSetTokenPropertyPermissions: { - readonly collectionId: u32; - readonly propertyPermissions: Vec; - } & Struct; - readonly isCreateMultipleItemsEx: boolean; - readonly asCreateMultipleItemsEx: { - readonly collectionId: u32; - readonly data: UpDataStructsCreateItemExData; - } & Struct; - readonly isSetTransfersEnabledFlag: boolean; - readonly asSetTransfersEnabledFlag: { - readonly collectionId: u32; - readonly value: bool; + /** @name EvmCoreErrorExitError (119) */ + interface EvmCoreErrorExitError extends Enum { + readonly isStackUnderflow: boolean; + readonly isStackOverflow: boolean; + readonly isInvalidJump: boolean; + readonly isInvalidRange: boolean; + readonly isDesignatedInvalid: boolean; + readonly isCallTooDeep: boolean; + readonly isCreateCollision: boolean; + readonly isCreateContractLimit: boolean; + readonly isOutOfOffset: boolean; + readonly isOutOfGas: boolean; + readonly isOutOfFund: boolean; + readonly isPcUnderflow: boolean; + readonly isCreateEmpty: boolean; + readonly isOther: boolean; + readonly asOther: Text; + readonly isInvalidCode: boolean; + readonly type: 'StackUnderflow' | 'StackOverflow' | 'InvalidJump' | 'InvalidRange' | 'DesignatedInvalid' | 'CallTooDeep' | 'CreateCollision' | 'CreateContractLimit' | 'OutOfOffset' | 'OutOfGas' | 'OutOfFund' | 'PcUnderflow' | 'CreateEmpty' | 'Other' | 'InvalidCode'; + } + + /** @name EvmCoreErrorExitRevert (122) */ + interface EvmCoreErrorExitRevert extends Enum { + readonly isReverted: boolean; + readonly type: 'Reverted'; + } + + /** @name EvmCoreErrorExitFatal (123) */ + interface EvmCoreErrorExitFatal extends Enum { + readonly isNotSupported: boolean; + readonly isUnhandledInterrupt: boolean; + readonly isCallErrorAsFatal: boolean; + readonly asCallErrorAsFatal: EvmCoreErrorExitError; + readonly isOther: boolean; + readonly asOther: Text; + readonly type: 'NotSupported' | 'UnhandledInterrupt' | 'CallErrorAsFatal' | 'Other'; + } + + /** @name PalletEvmContractHelpersEvent (124) */ + interface PalletEvmContractHelpersEvent extends Enum { + readonly isContractSponsorSet: boolean; + readonly asContractSponsorSet: ITuple<[H160, AccountId32]>; + readonly isContractSponsorshipConfirmed: boolean; + readonly asContractSponsorshipConfirmed: ITuple<[H160, AccountId32]>; + readonly isContractSponsorRemoved: boolean; + readonly asContractSponsorRemoved: H160; + readonly type: 'ContractSponsorSet' | 'ContractSponsorshipConfirmed' | 'ContractSponsorRemoved'; + } + + /** @name PalletMaintenanceEvent (125) */ + interface PalletMaintenanceEvent extends Enum { + readonly isMaintenanceEnabled: boolean; + readonly isMaintenanceDisabled: boolean; + readonly type: 'MaintenanceEnabled' | 'MaintenanceDisabled'; + } + + /** @name PalletTestUtilsEvent (126) */ + interface PalletTestUtilsEvent extends Enum { + readonly isValueIsSet: boolean; + readonly 
isShouldRollback: boolean; + readonly type: 'ValueIsSet' | 'ShouldRollback'; + } + + /** @name FrameSystemPhase (127) */ + interface FrameSystemPhase extends Enum { + readonly isApplyExtrinsic: boolean; + readonly asApplyExtrinsic: u32; + readonly isFinalization: boolean; + readonly isInitialization: boolean; + readonly type: 'ApplyExtrinsic' | 'Finalization' | 'Initialization'; + } + + /** @name FrameSystemLastRuntimeUpgradeInfo (129) */ + interface FrameSystemLastRuntimeUpgradeInfo extends Struct { + readonly specVersion: Compact; + readonly specName: Text; + } + + /** @name FrameSystemCall (130) */ + interface FrameSystemCall extends Enum { + readonly isFillBlock: boolean; + readonly asFillBlock: { + readonly ratio: Perbill; } & Struct; - readonly isBurnItem: boolean; - readonly asBurnItem: { - readonly collectionId: u32; - readonly itemId: u32; - readonly value: u128; + readonly isRemark: boolean; + readonly asRemark: { + readonly remark: Bytes; } & Struct; - readonly isBurnFrom: boolean; - readonly asBurnFrom: { - readonly collectionId: u32; - readonly from: PalletEvmAccountBasicCrossAccountIdRepr; - readonly itemId: u32; - readonly value: u128; + readonly isSetHeapPages: boolean; + readonly asSetHeapPages: { + readonly pages: u64; } & Struct; - readonly isTransfer: boolean; - readonly asTransfer: { - readonly recipient: PalletEvmAccountBasicCrossAccountIdRepr; - readonly collectionId: u32; - readonly itemId: u32; - readonly value: u128; + readonly isSetCode: boolean; + readonly asSetCode: { + readonly code: Bytes; } & Struct; - readonly isApprove: boolean; - readonly asApprove: { - readonly spender: PalletEvmAccountBasicCrossAccountIdRepr; - readonly collectionId: u32; - readonly itemId: u32; - readonly amount: u128; + readonly isSetCodeWithoutChecks: boolean; + readonly asSetCodeWithoutChecks: { + readonly code: Bytes; } & Struct; - readonly isTransferFrom: boolean; - readonly asTransferFrom: { - readonly from: PalletEvmAccountBasicCrossAccountIdRepr; - readonly recipient: PalletEvmAccountBasicCrossAccountIdRepr; - readonly collectionId: u32; - readonly itemId: u32; - readonly value: u128; + readonly isSetStorage: boolean; + readonly asSetStorage: { + readonly items: Vec>; } & Struct; - readonly isSetCollectionLimits: boolean; - readonly asSetCollectionLimits: { - readonly collectionId: u32; - readonly newLimit: UpDataStructsCollectionLimits; + readonly isKillStorage: boolean; + readonly asKillStorage: { + readonly keys_: Vec; } & Struct; - readonly isSetCollectionPermissions: boolean; - readonly asSetCollectionPermissions: { - readonly collectionId: u32; - readonly newLimit: UpDataStructsCollectionPermissions; + readonly isKillPrefix: boolean; + readonly asKillPrefix: { + readonly prefix: Bytes; + readonly subkeys: u32; } & Struct; - readonly isRepartition: boolean; - readonly asRepartition: { - readonly collectionId: u32; - readonly token: u32; - readonly amount: u128; + readonly isRemarkWithEvent: boolean; + readonly asRemarkWithEvent: { + readonly remark: Bytes; } & Struct; - readonly type: 'CreateCollection' | 'CreateCollectionEx' | 'DestroyCollection' | 'AddToAllowList' | 'RemoveFromAllowList' | 'ChangeCollectionOwner' | 'AddCollectionAdmin' | 'RemoveCollectionAdmin' | 'SetCollectionSponsor' | 'ConfirmSponsorship' | 'RemoveCollectionSponsor' | 'CreateItem' | 'CreateMultipleItems' | 'SetCollectionProperties' | 'DeleteCollectionProperties' | 'SetTokenProperties' | 'DeleteTokenProperties' | 'SetTokenPropertyPermissions' | 'CreateMultipleItemsEx' | 'SetTransfersEnabledFlag' | 
'BurnItem' | 'BurnFrom' | 'Transfer' | 'Approve' | 'TransferFrom' | 'SetCollectionLimits' | 'SetCollectionPermissions' | 'Repartition'; + readonly type: 'FillBlock' | 'Remark' | 'SetHeapPages' | 'SetCode' | 'SetCodeWithoutChecks' | 'SetStorage' | 'KillStorage' | 'KillPrefix' | 'RemarkWithEvent'; } - /** @name UpDataStructsCollectionMode (154) */ - export interface UpDataStructsCollectionMode extends Enum { - readonly isNft: boolean; - readonly isFungible: boolean; - readonly asFungible: u8; - readonly isReFungible: boolean; - readonly type: 'Nft' | 'Fungible' | 'ReFungible'; + /** @name FrameSystemLimitsBlockWeights (135) */ + interface FrameSystemLimitsBlockWeights extends Struct { + readonly baseBlock: Weight; + readonly maxBlock: Weight; + readonly perClass: FrameSupportDispatchPerDispatchClassWeightsPerClass; } - /** @name UpDataStructsCreateCollectionData (155) */ - export interface UpDataStructsCreateCollectionData extends Struct { - readonly mode: UpDataStructsCollectionMode; - readonly access: Option; - readonly name: Vec; - readonly description: Vec; - readonly tokenPrefix: Bytes; - readonly pendingSponsor: Option; - readonly limits: Option; - readonly permissions: Option; - readonly tokenPropertyPermissions: Vec; - readonly properties: Vec; + /** @name FrameSupportDispatchPerDispatchClassWeightsPerClass (136) */ + interface FrameSupportDispatchPerDispatchClassWeightsPerClass extends Struct { + readonly normal: FrameSystemLimitsWeightsPerClass; + readonly operational: FrameSystemLimitsWeightsPerClass; + readonly mandatory: FrameSystemLimitsWeightsPerClass; } - /** @name UpDataStructsAccessMode (157) */ - export interface UpDataStructsAccessMode extends Enum { - readonly isNormal: boolean; - readonly isAllowList: boolean; - readonly type: 'Normal' | 'AllowList'; + /** @name FrameSystemLimitsWeightsPerClass (137) */ + interface FrameSystemLimitsWeightsPerClass extends Struct { + readonly baseExtrinsic: Weight; + readonly maxExtrinsic: Option; + readonly maxTotal: Option; + readonly reserved: Option; } - /** @name UpDataStructsCollectionLimits (160) */ - export interface UpDataStructsCollectionLimits extends Struct { - readonly accountTokenOwnershipLimit: Option; - readonly sponsoredDataSize: Option; - readonly sponsoredDataRateLimit: Option; - readonly tokenLimit: Option; - readonly sponsorTransferTimeout: Option; - readonly sponsorApproveTimeout: Option; - readonly ownerCanTransfer: Option; - readonly ownerCanDestroy: Option; - readonly transfersEnabled: Option; + /** @name FrameSystemLimitsBlockLength (139) */ + interface FrameSystemLimitsBlockLength extends Struct { + readonly max: FrameSupportDispatchPerDispatchClassU32; } - /** @name UpDataStructsSponsoringRateLimit (162) */ - export interface UpDataStructsSponsoringRateLimit extends Enum { - readonly isSponsoringDisabled: boolean; - readonly isBlocks: boolean; - readonly asBlocks: u32; - readonly type: 'SponsoringDisabled' | 'Blocks'; + /** @name FrameSupportDispatchPerDispatchClassU32 (140) */ + interface FrameSupportDispatchPerDispatchClassU32 extends Struct { + readonly normal: u32; + readonly operational: u32; + readonly mandatory: u32; } - /** @name UpDataStructsCollectionPermissions (165) */ - export interface UpDataStructsCollectionPermissions extends Struct { - readonly access: Option; - readonly mintMode: Option; - readonly nesting: Option; + /** @name SpWeightsRuntimeDbWeight (141) */ + interface SpWeightsRuntimeDbWeight extends Struct { + readonly read: u64; + readonly write: u64; } - /** @name 
UpDataStructsNestingPermissions (167) */
-  export interface UpDataStructsNestingPermissions extends Struct {
-    readonly tokenOwner: bool;
-    readonly collectionAdmin: bool;
-    readonly restricted: Option<UpDataStructsOwnerRestrictedSet>;
+  /** @name SpVersionRuntimeVersion (142) */
+  interface SpVersionRuntimeVersion extends Struct {
+    readonly specName: Text;
+    readonly implName: Text;
+    readonly authoringVersion: u32;
+    readonly specVersion: u32;
+    readonly implVersion: u32;
+    readonly apis: Vec<ITuple<[U8aFixed, u32]>>;
+    readonly transactionVersion: u32;
+    readonly stateVersion: u8;
   }

-  /** @name UpDataStructsOwnerRestrictedSet (169) */
-  export interface UpDataStructsOwnerRestrictedSet extends BTreeSet<u32> {}
+  /** @name FrameSystemError (147) */
+  interface FrameSystemError extends Enum {
+    readonly isInvalidSpecName: boolean;
+    readonly isSpecVersionNeedsToIncrease: boolean;
+    readonly isFailedToExtractRuntimeVersion: boolean;
+    readonly isNonDefaultComposite: boolean;
+    readonly isNonZeroRefCount: boolean;
+    readonly isCallFiltered: boolean;
+    readonly type: 'InvalidSpecName' | 'SpecVersionNeedsToIncrease' | 'FailedToExtractRuntimeVersion' | 'NonDefaultComposite' | 'NonZeroRefCount' | 'CallFiltered';
+  }

-  /** @name UpDataStructsPropertyKeyPermission (175) */
-  export interface UpDataStructsPropertyKeyPermission extends Struct {
-    readonly key: Bytes;
-    readonly permission: UpDataStructsPropertyPermission;
+  /** @name PolkadotPrimitivesV2PersistedValidationData (148) */
+  interface PolkadotPrimitivesV2PersistedValidationData extends Struct {
+    readonly parentHead: Bytes;
+    readonly relayParentNumber: u32;
+    readonly relayParentStorageRoot: H256;
+    readonly maxPovSize: u32;
   }

-  /** @name UpDataStructsPropertyPermission (177) */
-  export interface UpDataStructsPropertyPermission extends Struct {
-    readonly mutable: bool;
-    readonly collectionAdmin: bool;
-    readonly tokenOwner: bool;
+  /** @name PolkadotPrimitivesV2UpgradeRestriction (151) */
+  interface PolkadotPrimitivesV2UpgradeRestriction extends Enum {
+    readonly isPresent: boolean;
+    readonly type: 'Present';
   }

-  /** @name UpDataStructsProperty (180) */
-  export interface UpDataStructsProperty extends Struct {
-    readonly key: Bytes;
-    readonly value: Bytes;
+  /** @name SpTrieStorageProof (152) */
+  interface SpTrieStorageProof extends Struct {
+    readonly trieNodes: BTreeSet<Bytes>;
   }

-  /** @name PalletEvmAccountBasicCrossAccountIdRepr (183) */
-  export interface PalletEvmAccountBasicCrossAccountIdRepr extends Enum {
-    readonly isSubstrate: boolean;
-    readonly asSubstrate: AccountId32;
-    readonly isEthereum: boolean;
-    readonly asEthereum: H160;
-    readonly type: 'Substrate' | 'Ethereum';
+  /** @name CumulusPalletParachainSystemRelayStateSnapshotMessagingStateSnapshot (154) */
+  interface CumulusPalletParachainSystemRelayStateSnapshotMessagingStateSnapshot extends Struct {
+    readonly dmqMqcHead: H256;
+    readonly relayDispatchQueueSize: ITuple<[u32, u32]>;
+    readonly ingressChannels: Vec<ITuple<[u32, PolkadotPrimitivesV2AbridgedHrmpChannel]>>;
+    readonly egressChannels: Vec<ITuple<[u32, PolkadotPrimitivesV2AbridgedHrmpChannel]>>;
   }

-  /** @name UpDataStructsCreateItemData (185) */
-  export interface UpDataStructsCreateItemData extends Enum {
-    readonly isNft: boolean;
-    readonly asNft: UpDataStructsCreateNftData;
-    readonly isFungible: boolean;
-    readonly asFungible: UpDataStructsCreateFungibleData;
-    readonly isReFungible: boolean;
-    readonly asReFungible: UpDataStructsCreateReFungibleData;
-    readonly type: 'Nft' | 'Fungible' | 'ReFungible';
+  /** @name PolkadotPrimitivesV2AbridgedHrmpChannel (157) */
+  interface PolkadotPrimitivesV2AbridgedHrmpChannel extends Struct {
+    readonly maxCapacity: u32;
+    readonly maxTotalSize: u32;
+    readonly maxMessageSize: u32;
+    readonly msgCount: u32;
+    readonly totalSize: u32;
+    readonly mqcHead: Option<H256>;
   }

-  /** @name UpDataStructsCreateNftData (186) */
-  export interface UpDataStructsCreateNftData extends Struct {
-    readonly properties: Vec<UpDataStructsProperty>;
+  /** @name PolkadotPrimitivesV2AbridgedHostConfiguration (158) */
+  interface PolkadotPrimitivesV2AbridgedHostConfiguration extends Struct {
+    readonly maxCodeSize: u32;
+    readonly maxHeadDataSize: u32;
+    readonly maxUpwardQueueCount: u32;
+    readonly maxUpwardQueueSize: u32;
+    readonly maxUpwardMessageSize: u32;
+    readonly maxUpwardMessageNumPerCandidate: u32;
+    readonly hrmpMaxMessageNumPerCandidate: u32;
+    readonly validationUpgradeCooldown: u32;
+    readonly validationUpgradeDelay: u32;
   }

-  /** @name UpDataStructsCreateFungibleData (187) */
-  export interface UpDataStructsCreateFungibleData extends Struct {
-    readonly value: u128;
+  /** @name PolkadotCorePrimitivesOutboundHrmpMessage (164) */
+  interface PolkadotCorePrimitivesOutboundHrmpMessage extends Struct {
+    readonly recipient: u32;
+    readonly data: Bytes;
   }

-  /** @name UpDataStructsCreateReFungibleData (188) */
-  export interface UpDataStructsCreateReFungibleData extends Struct {
-    readonly constData: Bytes;
-    readonly pieces: u128;
+  /** @name CumulusPalletParachainSystemCall (165) */
+  interface CumulusPalletParachainSystemCall extends Enum {
+    readonly isSetValidationData: boolean;
+    readonly asSetValidationData: {
+      readonly data: CumulusPrimitivesParachainInherentParachainInherentData;
+    } & Struct;
+    readonly isSudoSendUpwardMessage: boolean;
+    readonly asSudoSendUpwardMessage: {
+      readonly message: Bytes;
+    } & Struct;
+    readonly isAuthorizeUpgrade: boolean;
+    readonly asAuthorizeUpgrade: {
+      readonly codeHash: H256;
+    } & Struct;
+    readonly isEnactAuthorizedUpgrade: boolean;
+    readonly asEnactAuthorizedUpgrade: {
+      readonly code: Bytes;
+    } & Struct;
+    readonly type: 'SetValidationData' | 'SudoSendUpwardMessage' | 'AuthorizeUpgrade' | 'EnactAuthorizedUpgrade';
   }

-  /** @name UpDataStructsCreateItemExData (193) */
-  export interface UpDataStructsCreateItemExData extends Enum {
-    readonly isNft: boolean;
-    readonly asNft: Vec<UpDataStructsCreateNftExData>;
-    readonly isFungible: boolean;
-    readonly asFungible: BTreeMap<PalletEvmAccountBasicCrossAccountIdRepr, u128>;
-    readonly isRefungibleMultipleItems: boolean;
-    readonly asRefungibleMultipleItems: Vec<UpDataStructsCreateRefungibleExData>;
-    readonly isRefungibleMultipleOwners: boolean;
-    readonly asRefungibleMultipleOwners: UpDataStructsCreateRefungibleExData;
-    readonly type: 'Nft' | 'Fungible' | 'RefungibleMultipleItems' | 'RefungibleMultipleOwners';
+  /** @name CumulusPrimitivesParachainInherentParachainInherentData (166) */
+  interface CumulusPrimitivesParachainInherentParachainInherentData extends Struct {
+    readonly validationData: PolkadotPrimitivesV2PersistedValidationData;
+    readonly relayChainState: SpTrieStorageProof;
+    readonly downwardMessages: Vec<PolkadotCorePrimitivesInboundDownwardMessage>;
+    readonly horizontalMessages: BTreeMap<u32, Vec<PolkadotCorePrimitivesInboundHrmpMessage>>;
   }

-  /** @name UpDataStructsCreateNftExData (195) */
-  export interface UpDataStructsCreateNftExData extends Struct {
-    readonly properties: Vec<UpDataStructsProperty>;
-    readonly owner: PalletEvmAccountBasicCrossAccountIdRepr;
+  /** @name PolkadotCorePrimitivesInboundDownwardMessage (168) */
+  interface PolkadotCorePrimitivesInboundDownwardMessage extends Struct {
+    readonly sentAt: u32;
+    readonly msg: Bytes;
   }

-  /** @name UpDataStructsCreateRefungibleExData (202) */
-  export interface UpDataStructsCreateRefungibleExData extends Struct {
-    readonly constData: Bytes;
-    readonly users: BTreeMap<PalletEvmAccountBasicCrossAccountIdRepr, u128>;
+  /** @name
PolkadotCorePrimitivesInboundHrmpMessage (171) */ + interface PolkadotCorePrimitivesInboundHrmpMessage extends Struct { + readonly sentAt: u32; + readonly data: Bytes; } - /** @name PalletUniqueSchedulerCall (204) */ - export interface PalletUniqueSchedulerCall extends Enum { - readonly isScheduleNamed: boolean; - readonly asScheduleNamed: { - readonly id: U8aFixed; - readonly when: u32; - readonly maybePeriodic: Option>; - readonly priority: u8; - readonly call: FrameSupportScheduleMaybeHashed; - } & Struct; - readonly isCancelNamed: boolean; - readonly asCancelNamed: { - readonly id: U8aFixed; - } & Struct; - readonly isScheduleNamedAfter: boolean; - readonly asScheduleNamedAfter: { - readonly id: U8aFixed; - readonly after: u32; - readonly maybePeriodic: Option>; - readonly priority: u8; - readonly call: FrameSupportScheduleMaybeHashed; - } & Struct; - readonly type: 'ScheduleNamed' | 'CancelNamed' | 'ScheduleNamedAfter'; + /** @name CumulusPalletParachainSystemError (174) */ + interface CumulusPalletParachainSystemError extends Enum { + readonly isOverlappingUpgrades: boolean; + readonly isProhibitedByPolkadot: boolean; + readonly isTooBig: boolean; + readonly isValidationDataNotAvailable: boolean; + readonly isHostConfigurationNotAvailable: boolean; + readonly isNotScheduled: boolean; + readonly isNothingAuthorized: boolean; + readonly isUnauthorized: boolean; + readonly type: 'OverlappingUpgrades' | 'ProhibitedByPolkadot' | 'TooBig' | 'ValidationDataNotAvailable' | 'HostConfigurationNotAvailable' | 'NotScheduled' | 'NothingAuthorized' | 'Unauthorized'; } - /** @name FrameSupportScheduleMaybeHashed (206) */ - export interface FrameSupportScheduleMaybeHashed extends Enum { - readonly isValue: boolean; - readonly asValue: Call; - readonly isHash: boolean; - readonly asHash: H256; - readonly type: 'Value' | 'Hash'; + /** @name PalletBalancesBalanceLock (176) */ + interface PalletBalancesBalanceLock extends Struct { + readonly id: U8aFixed; + readonly amount: u128; + readonly reasons: PalletBalancesReasons; + } + + /** @name PalletBalancesReasons (177) */ + interface PalletBalancesReasons extends Enum { + readonly isFee: boolean; + readonly isMisc: boolean; + readonly isAll: boolean; + readonly type: 'Fee' | 'Misc' | 'All'; } - /** @name PalletTemplateTransactionPaymentCall (207) */ - export type PalletTemplateTransactionPaymentCall = Null; + /** @name PalletBalancesReserveData (180) */ + interface PalletBalancesReserveData extends Struct { + readonly id: U8aFixed; + readonly amount: u128; + } - /** @name PalletStructureCall (208) */ - export type PalletStructureCall = Null; + /** @name PalletBalancesReleases (182) */ + interface PalletBalancesReleases extends Enum { + readonly isV100: boolean; + readonly isV200: boolean; + readonly type: 'V100' | 'V200'; + } - /** @name PalletRmrkCoreCall (209) */ - export interface PalletRmrkCoreCall extends Enum { - readonly isCreateCollection: boolean; - readonly asCreateCollection: { - readonly metadata: Bytes; - readonly max: Option; - readonly symbol: Bytes; + /** @name PalletBalancesCall (183) */ + interface PalletBalancesCall extends Enum { + readonly isTransfer: boolean; + readonly asTransfer: { + readonly dest: MultiAddress; + readonly value: Compact; } & Struct; - readonly isDestroyCollection: boolean; - readonly asDestroyCollection: { - readonly collectionId: u32; + readonly isSetBalance: boolean; + readonly asSetBalance: { + readonly who: MultiAddress; + readonly newFree: Compact; + readonly newReserved: Compact; } & Struct; - readonly 
isChangeCollectionIssuer: boolean; - readonly asChangeCollectionIssuer: { - readonly collectionId: u32; - readonly newIssuer: MultiAddress; + readonly isForceTransfer: boolean; + readonly asForceTransfer: { + readonly source: MultiAddress; + readonly dest: MultiAddress; + readonly value: Compact; } & Struct; - readonly isLockCollection: boolean; - readonly asLockCollection: { - readonly collectionId: u32; + readonly isTransferKeepAlive: boolean; + readonly asTransferKeepAlive: { + readonly dest: MultiAddress; + readonly value: Compact; } & Struct; - readonly isMintNft: boolean; - readonly asMintNft: { - readonly owner: Option; - readonly collectionId: u32; - readonly recipient: Option; - readonly royaltyAmount: Option; - readonly metadata: Bytes; - readonly transferable: bool; - readonly resources: Option>; + readonly isTransferAll: boolean; + readonly asTransferAll: { + readonly dest: MultiAddress; + readonly keepAlive: bool; } & Struct; - readonly isBurnNft: boolean; - readonly asBurnNft: { - readonly collectionId: u32; - readonly nftId: u32; - readonly maxBurns: u32; + readonly isForceUnreserve: boolean; + readonly asForceUnreserve: { + readonly who: MultiAddress; + readonly amount: u128; } & Struct; - readonly isSend: boolean; - readonly asSend: { - readonly rmrkCollectionId: u32; - readonly rmrkNftId: u32; - readonly newOwner: RmrkTraitsNftAccountIdOrCollectionNftTuple; + readonly type: 'Transfer' | 'SetBalance' | 'ForceTransfer' | 'TransferKeepAlive' | 'TransferAll' | 'ForceUnreserve'; + } + + /** @name PalletBalancesError (186) */ + interface PalletBalancesError extends Enum { + readonly isVestingBalance: boolean; + readonly isLiquidityRestrictions: boolean; + readonly isInsufficientBalance: boolean; + readonly isExistentialDeposit: boolean; + readonly isKeepAlive: boolean; + readonly isExistingVestingSchedule: boolean; + readonly isDeadAccount: boolean; + readonly isTooManyReserves: boolean; + readonly type: 'VestingBalance' | 'LiquidityRestrictions' | 'InsufficientBalance' | 'ExistentialDeposit' | 'KeepAlive' | 'ExistingVestingSchedule' | 'DeadAccount' | 'TooManyReserves'; + } + + /** @name PalletTimestampCall (188) */ + interface PalletTimestampCall extends Enum { + readonly isSet: boolean; + readonly asSet: { + readonly now: Compact; } & Struct; - readonly isAcceptNft: boolean; - readonly asAcceptNft: { - readonly rmrkCollectionId: u32; - readonly rmrkNftId: u32; - readonly newOwner: RmrkTraitsNftAccountIdOrCollectionNftTuple; + readonly type: 'Set'; + } + + /** @name PalletTransactionPaymentReleases (190) */ + interface PalletTransactionPaymentReleases extends Enum { + readonly isV1Ancient: boolean; + readonly isV2: boolean; + readonly type: 'V1Ancient' | 'V2'; + } + + /** @name PalletTreasuryProposal (191) */ + interface PalletTreasuryProposal extends Struct { + readonly proposer: AccountId32; + readonly value: u128; + readonly beneficiary: AccountId32; + readonly bond: u128; + } + + /** @name PalletTreasuryCall (194) */ + interface PalletTreasuryCall extends Enum { + readonly isProposeSpend: boolean; + readonly asProposeSpend: { + readonly value: Compact; + readonly beneficiary: MultiAddress; } & Struct; - readonly isRejectNft: boolean; - readonly asRejectNft: { - readonly rmrkCollectionId: u32; - readonly rmrkNftId: u32; - } & Struct; - readonly isAcceptResource: boolean; - readonly asAcceptResource: { - readonly rmrkCollectionId: u32; - readonly rmrkNftId: u32; - readonly resourceId: u32; - } & Struct; - readonly isAcceptResourceRemoval: boolean; - readonly 
asAcceptResourceRemoval: { - readonly rmrkCollectionId: u32; - readonly rmrkNftId: u32; - readonly resourceId: u32; - } & Struct; - readonly isSetProperty: boolean; - readonly asSetProperty: { - readonly rmrkCollectionId: Compact; - readonly maybeNftId: Option; - readonly key: Bytes; - readonly value: Bytes; - } & Struct; - readonly isSetPriority: boolean; - readonly asSetPriority: { - readonly rmrkCollectionId: u32; - readonly rmrkNftId: u32; - readonly priorities: Vec; - } & Struct; - readonly isAddBasicResource: boolean; - readonly asAddBasicResource: { - readonly rmrkCollectionId: u32; - readonly nftId: u32; - readonly resource: RmrkTraitsResourceBasicResource; + readonly isRejectProposal: boolean; + readonly asRejectProposal: { + readonly proposalId: Compact; } & Struct; - readonly isAddComposableResource: boolean; - readonly asAddComposableResource: { - readonly rmrkCollectionId: u32; - readonly nftId: u32; - readonly resource: RmrkTraitsResourceComposableResource; + readonly isApproveProposal: boolean; + readonly asApproveProposal: { + readonly proposalId: Compact; } & Struct; - readonly isAddSlotResource: boolean; - readonly asAddSlotResource: { - readonly rmrkCollectionId: u32; - readonly nftId: u32; - readonly resource: RmrkTraitsResourceSlotResource; + readonly isSpend: boolean; + readonly asSpend: { + readonly amount: Compact; + readonly beneficiary: MultiAddress; } & Struct; - readonly isRemoveResource: boolean; - readonly asRemoveResource: { - readonly rmrkCollectionId: u32; - readonly nftId: u32; - readonly resourceId: u32; + readonly isRemoveApproval: boolean; + readonly asRemoveApproval: { + readonly proposalId: Compact; } & Struct; - readonly type: 'CreateCollection' | 'DestroyCollection' | 'ChangeCollectionIssuer' | 'LockCollection' | 'MintNft' | 'BurnNft' | 'Send' | 'AcceptNft' | 'RejectNft' | 'AcceptResource' | 'AcceptResourceRemoval' | 'SetProperty' | 'SetPriority' | 'AddBasicResource' | 'AddComposableResource' | 'AddSlotResource' | 'RemoveResource'; - } - - /** @name RmrkTraitsResourceResourceTypes (215) */ - export interface RmrkTraitsResourceResourceTypes extends Enum { - readonly isBasic: boolean; - readonly asBasic: RmrkTraitsResourceBasicResource; - readonly isComposable: boolean; - readonly asComposable: RmrkTraitsResourceComposableResource; - readonly isSlot: boolean; - readonly asSlot: RmrkTraitsResourceSlotResource; - readonly type: 'Basic' | 'Composable' | 'Slot'; - } - - /** @name RmrkTraitsResourceBasicResource (217) */ - export interface RmrkTraitsResourceBasicResource extends Struct { - readonly src: Option; - readonly metadata: Option; - readonly license: Option; - readonly thumb: Option; - } - - /** @name RmrkTraitsResourceComposableResource (219) */ - export interface RmrkTraitsResourceComposableResource extends Struct { - readonly parts: Vec; - readonly base: u32; - readonly src: Option; - readonly metadata: Option; - readonly license: Option; - readonly thumb: Option; + readonly type: 'ProposeSpend' | 'RejectProposal' | 'ApproveProposal' | 'Spend' | 'RemoveApproval'; } - /** @name RmrkTraitsResourceSlotResource (220) */ - export interface RmrkTraitsResourceSlotResource extends Struct { - readonly base: u32; - readonly src: Option; - readonly metadata: Option; - readonly slot: u32; - readonly license: Option; - readonly thumb: Option; - } + /** @name FrameSupportPalletId (197) */ + interface FrameSupportPalletId extends U8aFixed {} - /** @name RmrkTraitsNftAccountIdOrCollectionNftTuple (222) */ - export interface 
RmrkTraitsNftAccountIdOrCollectionNftTuple extends Enum { - readonly isAccountId: boolean; - readonly asAccountId: AccountId32; - readonly isCollectionAndNftTuple: boolean; - readonly asCollectionAndNftTuple: ITuple<[u32, u32]>; - readonly type: 'AccountId' | 'CollectionAndNftTuple'; + /** @name PalletTreasuryError (198) */ + interface PalletTreasuryError extends Enum { + readonly isInsufficientProposersBalance: boolean; + readonly isInvalidIndex: boolean; + readonly isTooManyApprovals: boolean; + readonly isInsufficientPermission: boolean; + readonly isProposalNotApproved: boolean; + readonly type: 'InsufficientProposersBalance' | 'InvalidIndex' | 'TooManyApprovals' | 'InsufficientPermission' | 'ProposalNotApproved'; } - /** @name PalletRmrkEquipCall (226) */ - export interface PalletRmrkEquipCall extends Enum { - readonly isCreateBase: boolean; - readonly asCreateBase: { - readonly baseType: Bytes; - readonly symbol: Bytes; - readonly parts: Vec; + /** @name PalletSudoCall (199) */ + interface PalletSudoCall extends Enum { + readonly isSudo: boolean; + readonly asSudo: { + readonly call: Call; } & Struct; - readonly isThemeAdd: boolean; - readonly asThemeAdd: { - readonly baseId: u32; - readonly theme: RmrkTraitsTheme; + readonly isSudoUncheckedWeight: boolean; + readonly asSudoUncheckedWeight: { + readonly call: Call; + readonly weight: Weight; } & Struct; - readonly isEquippable: boolean; - readonly asEquippable: { - readonly baseId: u32; - readonly slotId: u32; - readonly equippables: RmrkTraitsPartEquippableList; + readonly isSetKey: boolean; + readonly asSetKey: { + readonly new_: MultiAddress; } & Struct; - readonly type: 'CreateBase' | 'ThemeAdd' | 'Equippable'; - } - - /** @name RmrkTraitsPartPartType (229) */ - export interface RmrkTraitsPartPartType extends Enum { - readonly isFixedPart: boolean; - readonly asFixedPart: RmrkTraitsPartFixedPart; - readonly isSlotPart: boolean; - readonly asSlotPart: RmrkTraitsPartSlotPart; - readonly type: 'FixedPart' | 'SlotPart'; - } - - /** @name RmrkTraitsPartFixedPart (231) */ - export interface RmrkTraitsPartFixedPart extends Struct { - readonly id: u32; - readonly z: u32; - readonly src: Bytes; - } - - /** @name RmrkTraitsPartSlotPart (232) */ - export interface RmrkTraitsPartSlotPart extends Struct { - readonly id: u32; - readonly equippable: RmrkTraitsPartEquippableList; - readonly src: Bytes; - readonly z: u32; + readonly isSudoAs: boolean; + readonly asSudoAs: { + readonly who: MultiAddress; + readonly call: Call; + } & Struct; + readonly type: 'Sudo' | 'SudoUncheckedWeight' | 'SetKey' | 'SudoAs'; } - /** @name RmrkTraitsPartEquippableList (233) */ - export interface RmrkTraitsPartEquippableList extends Enum { - readonly isAll: boolean; - readonly isEmpty: boolean; - readonly isCustom: boolean; - readonly asCustom: Vec; - readonly type: 'All' | 'Empty' | 'Custom'; + /** @name OrmlVestingModuleCall (201) */ + interface OrmlVestingModuleCall extends Enum { + readonly isClaim: boolean; + readonly isVestedTransfer: boolean; + readonly asVestedTransfer: { + readonly dest: MultiAddress; + readonly schedule: OrmlVestingVestingSchedule; + } & Struct; + readonly isUpdateVestingSchedules: boolean; + readonly asUpdateVestingSchedules: { + readonly who: MultiAddress; + readonly vestingSchedules: Vec; + } & Struct; + readonly isClaimFor: boolean; + readonly asClaimFor: { + readonly dest: MultiAddress; + } & Struct; + readonly type: 'Claim' | 'VestedTransfer' | 'UpdateVestingSchedules' | 'ClaimFor'; } - /** @name RmrkTraitsTheme (235) */ - export 
interface RmrkTraitsTheme extends Struct { - readonly name: Bytes; - readonly properties: Vec; - readonly inherit: bool; + /** @name OrmlXtokensModuleCall (203) */ + interface OrmlXtokensModuleCall extends Enum { + readonly isTransfer: boolean; + readonly asTransfer: { + readonly currencyId: PalletForeignAssetsAssetIds; + readonly amount: u128; + readonly dest: XcmVersionedMultiLocation; + readonly destWeight: u64; + } & Struct; + readonly isTransferMultiasset: boolean; + readonly asTransferMultiasset: { + readonly asset: XcmVersionedMultiAsset; + readonly dest: XcmVersionedMultiLocation; + readonly destWeight: u64; + } & Struct; + readonly isTransferWithFee: boolean; + readonly asTransferWithFee: { + readonly currencyId: PalletForeignAssetsAssetIds; + readonly amount: u128; + readonly fee: u128; + readonly dest: XcmVersionedMultiLocation; + readonly destWeight: u64; + } & Struct; + readonly isTransferMultiassetWithFee: boolean; + readonly asTransferMultiassetWithFee: { + readonly asset: XcmVersionedMultiAsset; + readonly fee: XcmVersionedMultiAsset; + readonly dest: XcmVersionedMultiLocation; + readonly destWeight: u64; + } & Struct; + readonly isTransferMulticurrencies: boolean; + readonly asTransferMulticurrencies: { + readonly currencies: Vec>; + readonly feeItem: u32; + readonly dest: XcmVersionedMultiLocation; + readonly destWeight: u64; + } & Struct; + readonly isTransferMultiassets: boolean; + readonly asTransferMultiassets: { + readonly assets: XcmVersionedMultiAssets; + readonly feeItem: u32; + readonly dest: XcmVersionedMultiLocation; + readonly destWeight: u64; + } & Struct; + readonly type: 'Transfer' | 'TransferMultiasset' | 'TransferWithFee' | 'TransferMultiassetWithFee' | 'TransferMulticurrencies' | 'TransferMultiassets'; } - /** @name RmrkTraitsThemeThemeProperty (237) */ - export interface RmrkTraitsThemeThemeProperty extends Struct { - readonly key: Bytes; - readonly value: Bytes; + /** @name XcmVersionedMultiAsset (204) */ + interface XcmVersionedMultiAsset extends Enum { + readonly isV0: boolean; + readonly asV0: XcmV0MultiAsset; + readonly isV1: boolean; + readonly asV1: XcmV1MultiAsset; + readonly type: 'V0' | 'V1'; } - /** @name PalletEvmCall (239) */ - export interface PalletEvmCall extends Enum { - readonly isWithdraw: boolean; - readonly asWithdraw: { - readonly address: H160; - readonly value: u128; + /** @name OrmlTokensModuleCall (207) */ + interface OrmlTokensModuleCall extends Enum { + readonly isTransfer: boolean; + readonly asTransfer: { + readonly dest: MultiAddress; + readonly currencyId: PalletForeignAssetsAssetIds; + readonly amount: Compact; } & Struct; - readonly isCall: boolean; - readonly asCall: { - readonly source: H160; - readonly target: H160; - readonly input: Bytes; - readonly value: U256; - readonly gasLimit: u64; - readonly maxFeePerGas: U256; - readonly maxPriorityFeePerGas: Option; - readonly nonce: Option; - readonly accessList: Vec]>>; + readonly isTransferAll: boolean; + readonly asTransferAll: { + readonly dest: MultiAddress; + readonly currencyId: PalletForeignAssetsAssetIds; + readonly keepAlive: bool; } & Struct; - readonly isCreate: boolean; - readonly asCreate: { - readonly source: H160; - readonly init: Bytes; - readonly value: U256; - readonly gasLimit: u64; - readonly maxFeePerGas: U256; - readonly maxPriorityFeePerGas: Option; - readonly nonce: Option; - readonly accessList: Vec]>>; + readonly isTransferKeepAlive: boolean; + readonly asTransferKeepAlive: { + readonly dest: MultiAddress; + readonly currencyId: 
PalletForeignAssetsAssetIds; + readonly amount: Compact; } & Struct; - readonly isCreate2: boolean; - readonly asCreate2: { - readonly source: H160; - readonly init: Bytes; - readonly salt: H256; - readonly value: U256; - readonly gasLimit: u64; - readonly maxFeePerGas: U256; - readonly maxPriorityFeePerGas: Option; - readonly nonce: Option; - readonly accessList: Vec]>>; + readonly isForceTransfer: boolean; + readonly asForceTransfer: { + readonly source: MultiAddress; + readonly dest: MultiAddress; + readonly currencyId: PalletForeignAssetsAssetIds; + readonly amount: Compact; } & Struct; - readonly type: 'Withdraw' | 'Call' | 'Create' | 'Create2'; + readonly isSetBalance: boolean; + readonly asSetBalance: { + readonly who: MultiAddress; + readonly currencyId: PalletForeignAssetsAssetIds; + readonly newFree: Compact; + readonly newReserved: Compact; + } & Struct; + readonly type: 'Transfer' | 'TransferAll' | 'TransferKeepAlive' | 'ForceTransfer' | 'SetBalance'; } - /** @name PalletEthereumCall (245) */ - export interface PalletEthereumCall extends Enum { - readonly isTransact: boolean; - readonly asTransact: { - readonly transaction: EthereumTransactionTransactionV2; - } & Struct; - readonly type: 'Transact'; - } - - /** @name EthereumTransactionTransactionV2 (246) */ - export interface EthereumTransactionTransactionV2 extends Enum { - readonly isLegacy: boolean; - readonly asLegacy: EthereumTransactionLegacyTransaction; - readonly isEip2930: boolean; - readonly asEip2930: EthereumTransactionEip2930Transaction; - readonly isEip1559: boolean; - readonly asEip1559: EthereumTransactionEip1559Transaction; - readonly type: 'Legacy' | 'Eip2930' | 'Eip1559'; + /** @name CumulusPalletXcmpQueueCall (208) */ + interface CumulusPalletXcmpQueueCall extends Enum { + readonly isServiceOverweight: boolean; + readonly asServiceOverweight: { + readonly index: u64; + readonly weightLimit: Weight; + } & Struct; + readonly isSuspendXcmExecution: boolean; + readonly isResumeXcmExecution: boolean; + readonly isUpdateSuspendThreshold: boolean; + readonly asUpdateSuspendThreshold: { + readonly new_: u32; + } & Struct; + readonly isUpdateDropThreshold: boolean; + readonly asUpdateDropThreshold: { + readonly new_: u32; + } & Struct; + readonly isUpdateResumeThreshold: boolean; + readonly asUpdateResumeThreshold: { + readonly new_: u32; + } & Struct; + readonly isUpdateThresholdWeight: boolean; + readonly asUpdateThresholdWeight: { + readonly new_: Weight; + } & Struct; + readonly isUpdateWeightRestrictDecay: boolean; + readonly asUpdateWeightRestrictDecay: { + readonly new_: Weight; + } & Struct; + readonly isUpdateXcmpMaxIndividualWeight: boolean; + readonly asUpdateXcmpMaxIndividualWeight: { + readonly new_: Weight; + } & Struct; + readonly type: 'ServiceOverweight' | 'SuspendXcmExecution' | 'ResumeXcmExecution' | 'UpdateSuspendThreshold' | 'UpdateDropThreshold' | 'UpdateResumeThreshold' | 'UpdateThresholdWeight' | 'UpdateWeightRestrictDecay' | 'UpdateXcmpMaxIndividualWeight'; } - /** @name EthereumTransactionLegacyTransaction (247) */ - export interface EthereumTransactionLegacyTransaction extends Struct { - readonly nonce: U256; - readonly gasPrice: U256; - readonly gasLimit: U256; - readonly action: EthereumTransactionTransactionAction; - readonly value: U256; - readonly input: Bytes; - readonly signature: EthereumTransactionTransactionSignature; + /** @name PalletXcmCall (209) */ + interface PalletXcmCall extends Enum { + readonly isSend: boolean; + readonly asSend: { + readonly dest: 
XcmVersionedMultiLocation; + readonly message: XcmVersionedXcm; + } & Struct; + readonly isTeleportAssets: boolean; + readonly asTeleportAssets: { + readonly dest: XcmVersionedMultiLocation; + readonly beneficiary: XcmVersionedMultiLocation; + readonly assets: XcmVersionedMultiAssets; + readonly feeAssetItem: u32; + } & Struct; + readonly isReserveTransferAssets: boolean; + readonly asReserveTransferAssets: { + readonly dest: XcmVersionedMultiLocation; + readonly beneficiary: XcmVersionedMultiLocation; + readonly assets: XcmVersionedMultiAssets; + readonly feeAssetItem: u32; + } & Struct; + readonly isExecute: boolean; + readonly asExecute: { + readonly message: XcmVersionedXcm; + readonly maxWeight: Weight; + } & Struct; + readonly isForceXcmVersion: boolean; + readonly asForceXcmVersion: { + readonly location: XcmV1MultiLocation; + readonly xcmVersion: u32; + } & Struct; + readonly isForceDefaultXcmVersion: boolean; + readonly asForceDefaultXcmVersion: { + readonly maybeXcmVersion: Option; + } & Struct; + readonly isForceSubscribeVersionNotify: boolean; + readonly asForceSubscribeVersionNotify: { + readonly location: XcmVersionedMultiLocation; + } & Struct; + readonly isForceUnsubscribeVersionNotify: boolean; + readonly asForceUnsubscribeVersionNotify: { + readonly location: XcmVersionedMultiLocation; + } & Struct; + readonly isLimitedReserveTransferAssets: boolean; + readonly asLimitedReserveTransferAssets: { + readonly dest: XcmVersionedMultiLocation; + readonly beneficiary: XcmVersionedMultiLocation; + readonly assets: XcmVersionedMultiAssets; + readonly feeAssetItem: u32; + readonly weightLimit: XcmV2WeightLimit; + } & Struct; + readonly isLimitedTeleportAssets: boolean; + readonly asLimitedTeleportAssets: { + readonly dest: XcmVersionedMultiLocation; + readonly beneficiary: XcmVersionedMultiLocation; + readonly assets: XcmVersionedMultiAssets; + readonly feeAssetItem: u32; + readonly weightLimit: XcmV2WeightLimit; + } & Struct; + readonly type: 'Send' | 'TeleportAssets' | 'ReserveTransferAssets' | 'Execute' | 'ForceXcmVersion' | 'ForceDefaultXcmVersion' | 'ForceSubscribeVersionNotify' | 'ForceUnsubscribeVersionNotify' | 'LimitedReserveTransferAssets' | 'LimitedTeleportAssets'; } - /** @name EthereumTransactionTransactionAction (248) */ - export interface EthereumTransactionTransactionAction extends Enum { - readonly isCall: boolean; - readonly asCall: H160; - readonly isCreate: boolean; - readonly type: 'Call' | 'Create'; + /** @name XcmVersionedXcm (210) */ + interface XcmVersionedXcm extends Enum { + readonly isV0: boolean; + readonly asV0: XcmV0Xcm; + readonly isV1: boolean; + readonly asV1: XcmV1Xcm; + readonly isV2: boolean; + readonly asV2: XcmV2Xcm; + readonly type: 'V0' | 'V1' | 'V2'; } - /** @name EthereumTransactionTransactionSignature (249) */ - export interface EthereumTransactionTransactionSignature extends Struct { - readonly v: u64; - readonly r: H256; - readonly s: H256; + /** @name XcmV0Xcm (211) */ + interface XcmV0Xcm extends Enum { + readonly isWithdrawAsset: boolean; + readonly asWithdrawAsset: { + readonly assets: Vec; + readonly effects: Vec; + } & Struct; + readonly isReserveAssetDeposit: boolean; + readonly asReserveAssetDeposit: { + readonly assets: Vec; + readonly effects: Vec; + } & Struct; + readonly isTeleportAsset: boolean; + readonly asTeleportAsset: { + readonly assets: Vec; + readonly effects: Vec; + } & Struct; + readonly isQueryResponse: boolean; + readonly asQueryResponse: { + readonly queryId: Compact; + readonly response: XcmV0Response; + } & 
Struct; + readonly isTransferAsset: boolean; + readonly asTransferAsset: { + readonly assets: Vec; + readonly dest: XcmV0MultiLocation; + } & Struct; + readonly isTransferReserveAsset: boolean; + readonly asTransferReserveAsset: { + readonly assets: Vec; + readonly dest: XcmV0MultiLocation; + readonly effects: Vec; + } & Struct; + readonly isTransact: boolean; + readonly asTransact: { + readonly originType: XcmV0OriginKind; + readonly requireWeightAtMost: u64; + readonly call: XcmDoubleEncoded; + } & Struct; + readonly isHrmpNewChannelOpenRequest: boolean; + readonly asHrmpNewChannelOpenRequest: { + readonly sender: Compact; + readonly maxMessageSize: Compact; + readonly maxCapacity: Compact; + } & Struct; + readonly isHrmpChannelAccepted: boolean; + readonly asHrmpChannelAccepted: { + readonly recipient: Compact; + } & Struct; + readonly isHrmpChannelClosing: boolean; + readonly asHrmpChannelClosing: { + readonly initiator: Compact; + readonly sender: Compact; + readonly recipient: Compact; + } & Struct; + readonly isRelayedFrom: boolean; + readonly asRelayedFrom: { + readonly who: XcmV0MultiLocation; + readonly message: XcmV0Xcm; + } & Struct; + readonly type: 'WithdrawAsset' | 'ReserveAssetDeposit' | 'TeleportAsset' | 'QueryResponse' | 'TransferAsset' | 'TransferReserveAsset' | 'Transact' | 'HrmpNewChannelOpenRequest' | 'HrmpChannelAccepted' | 'HrmpChannelClosing' | 'RelayedFrom'; } - /** @name EthereumTransactionEip2930Transaction (251) */ - export interface EthereumTransactionEip2930Transaction extends Struct { - readonly chainId: u64; - readonly nonce: U256; - readonly gasPrice: U256; - readonly gasLimit: U256; - readonly action: EthereumTransactionTransactionAction; - readonly value: U256; - readonly input: Bytes; - readonly accessList: Vec; - readonly oddYParity: bool; - readonly r: H256; - readonly s: H256; + /** @name XcmV0Order (213) */ + interface XcmV0Order extends Enum { + readonly isNull: boolean; + readonly isDepositAsset: boolean; + readonly asDepositAsset: { + readonly assets: Vec; + readonly dest: XcmV0MultiLocation; + } & Struct; + readonly isDepositReserveAsset: boolean; + readonly asDepositReserveAsset: { + readonly assets: Vec; + readonly dest: XcmV0MultiLocation; + readonly effects: Vec; + } & Struct; + readonly isExchangeAsset: boolean; + readonly asExchangeAsset: { + readonly give: Vec; + readonly receive: Vec; + } & Struct; + readonly isInitiateReserveWithdraw: boolean; + readonly asInitiateReserveWithdraw: { + readonly assets: Vec; + readonly reserve: XcmV0MultiLocation; + readonly effects: Vec; + } & Struct; + readonly isInitiateTeleport: boolean; + readonly asInitiateTeleport: { + readonly assets: Vec; + readonly dest: XcmV0MultiLocation; + readonly effects: Vec; + } & Struct; + readonly isQueryHolding: boolean; + readonly asQueryHolding: { + readonly queryId: Compact; + readonly dest: XcmV0MultiLocation; + readonly assets: Vec; + } & Struct; + readonly isBuyExecution: boolean; + readonly asBuyExecution: { + readonly fees: XcmV0MultiAsset; + readonly weight: u64; + readonly debt: u64; + readonly haltOnError: bool; + readonly xcm: Vec; + } & Struct; + readonly type: 'Null' | 'DepositAsset' | 'DepositReserveAsset' | 'ExchangeAsset' | 'InitiateReserveWithdraw' | 'InitiateTeleport' | 'QueryHolding' | 'BuyExecution'; } - /** @name EthereumTransactionAccessListItem (253) */ - export interface EthereumTransactionAccessListItem extends Struct { - readonly address: H160; - readonly storageKeys: Vec; + /** @name XcmV0Response (215) */ + interface XcmV0Response extends 
Enum { + readonly isAssets: boolean; + readonly asAssets: Vec; + readonly type: 'Assets'; } - /** @name EthereumTransactionEip1559Transaction (254) */ - export interface EthereumTransactionEip1559Transaction extends Struct { - readonly chainId: u64; - readonly nonce: U256; - readonly maxPriorityFeePerGas: U256; - readonly maxFeePerGas: U256; - readonly gasLimit: U256; - readonly action: EthereumTransactionTransactionAction; - readonly value: U256; - readonly input: Bytes; - readonly accessList: Vec; - readonly oddYParity: bool; - readonly r: H256; - readonly s: H256; + /** @name XcmV1Xcm (216) */ + interface XcmV1Xcm extends Enum { + readonly isWithdrawAsset: boolean; + readonly asWithdrawAsset: { + readonly assets: XcmV1MultiassetMultiAssets; + readonly effects: Vec; + } & Struct; + readonly isReserveAssetDeposited: boolean; + readonly asReserveAssetDeposited: { + readonly assets: XcmV1MultiassetMultiAssets; + readonly effects: Vec; + } & Struct; + readonly isReceiveTeleportedAsset: boolean; + readonly asReceiveTeleportedAsset: { + readonly assets: XcmV1MultiassetMultiAssets; + readonly effects: Vec; + } & Struct; + readonly isQueryResponse: boolean; + readonly asQueryResponse: { + readonly queryId: Compact; + readonly response: XcmV1Response; + } & Struct; + readonly isTransferAsset: boolean; + readonly asTransferAsset: { + readonly assets: XcmV1MultiassetMultiAssets; + readonly beneficiary: XcmV1MultiLocation; + } & Struct; + readonly isTransferReserveAsset: boolean; + readonly asTransferReserveAsset: { + readonly assets: XcmV1MultiassetMultiAssets; + readonly dest: XcmV1MultiLocation; + readonly effects: Vec; + } & Struct; + readonly isTransact: boolean; + readonly asTransact: { + readonly originType: XcmV0OriginKind; + readonly requireWeightAtMost: u64; + readonly call: XcmDoubleEncoded; + } & Struct; + readonly isHrmpNewChannelOpenRequest: boolean; + readonly asHrmpNewChannelOpenRequest: { + readonly sender: Compact; + readonly maxMessageSize: Compact; + readonly maxCapacity: Compact; + } & Struct; + readonly isHrmpChannelAccepted: boolean; + readonly asHrmpChannelAccepted: { + readonly recipient: Compact; + } & Struct; + readonly isHrmpChannelClosing: boolean; + readonly asHrmpChannelClosing: { + readonly initiator: Compact; + readonly sender: Compact; + readonly recipient: Compact; + } & Struct; + readonly isRelayedFrom: boolean; + readonly asRelayedFrom: { + readonly who: XcmV1MultilocationJunctions; + readonly message: XcmV1Xcm; + } & Struct; + readonly isSubscribeVersion: boolean; + readonly asSubscribeVersion: { + readonly queryId: Compact; + readonly maxResponseWeight: Compact; + } & Struct; + readonly isUnsubscribeVersion: boolean; + readonly type: 'WithdrawAsset' | 'ReserveAssetDeposited' | 'ReceiveTeleportedAsset' | 'QueryResponse' | 'TransferAsset' | 'TransferReserveAsset' | 'Transact' | 'HrmpNewChannelOpenRequest' | 'HrmpChannelAccepted' | 'HrmpChannelClosing' | 'RelayedFrom' | 'SubscribeVersion' | 'UnsubscribeVersion'; } - /** @name PalletEvmMigrationCall (255) */ - export interface PalletEvmMigrationCall extends Enum { - readonly isBegin: boolean; - readonly asBegin: { - readonly address: H160; + /** @name XcmV1Order (218) */ + interface XcmV1Order extends Enum { + readonly isNoop: boolean; + readonly isDepositAsset: boolean; + readonly asDepositAsset: { + readonly assets: XcmV1MultiassetMultiAssetFilter; + readonly maxAssets: u32; + readonly beneficiary: XcmV1MultiLocation; } & Struct; - readonly isSetData: boolean; - readonly asSetData: { - readonly address: H160; - 
readonly data: Vec>; + readonly isDepositReserveAsset: boolean; + readonly asDepositReserveAsset: { + readonly assets: XcmV1MultiassetMultiAssetFilter; + readonly maxAssets: u32; + readonly dest: XcmV1MultiLocation; + readonly effects: Vec; + } & Struct; + readonly isExchangeAsset: boolean; + readonly asExchangeAsset: { + readonly give: XcmV1MultiassetMultiAssetFilter; + readonly receive: XcmV1MultiassetMultiAssets; + } & Struct; + readonly isInitiateReserveWithdraw: boolean; + readonly asInitiateReserveWithdraw: { + readonly assets: XcmV1MultiassetMultiAssetFilter; + readonly reserve: XcmV1MultiLocation; + readonly effects: Vec; + } & Struct; + readonly isInitiateTeleport: boolean; + readonly asInitiateTeleport: { + readonly assets: XcmV1MultiassetMultiAssetFilter; + readonly dest: XcmV1MultiLocation; + readonly effects: Vec; } & Struct; - readonly isFinish: boolean; - readonly asFinish: { - readonly address: H160; - readonly code: Bytes; + readonly isQueryHolding: boolean; + readonly asQueryHolding: { + readonly queryId: Compact; + readonly dest: XcmV1MultiLocation; + readonly assets: XcmV1MultiassetMultiAssetFilter; } & Struct; - readonly type: 'Begin' | 'SetData' | 'Finish'; + readonly isBuyExecution: boolean; + readonly asBuyExecution: { + readonly fees: XcmV1MultiAsset; + readonly weight: u64; + readonly debt: u64; + readonly haltOnError: bool; + readonly instructions: Vec; + } & Struct; + readonly type: 'Noop' | 'DepositAsset' | 'DepositReserveAsset' | 'ExchangeAsset' | 'InitiateReserveWithdraw' | 'InitiateTeleport' | 'QueryHolding' | 'BuyExecution'; } - /** @name PalletMaintenanceCall (258) */ - export interface PalletMaintenanceCall extends Enum { - readonly isEnable: boolean; - readonly isDisable: boolean; - readonly type: 'Enable' | 'Disable'; + /** @name XcmV1Response (220) */ + interface XcmV1Response extends Enum { + readonly isAssets: boolean; + readonly asAssets: XcmV1MultiassetMultiAssets; + readonly isVersion: boolean; + readonly asVersion: u32; + readonly type: 'Assets' | 'Version'; } - /** @name PalletSudoEvent (259) */ - export interface PalletSudoEvent extends Enum { - readonly isSudid: boolean; - readonly asSudid: { - readonly sudoResult: Result; - } & Struct; - readonly isKeyChanged: boolean; - readonly asKeyChanged: { - readonly oldSudoer: Option; - } & Struct; - readonly isSudoAsDone: boolean; - readonly asSudoAsDone: { - readonly sudoResult: Result; - } & Struct; - readonly type: 'Sudid' | 'KeyChanged' | 'SudoAsDone'; - } + /** @name CumulusPalletXcmCall (234) */ + type CumulusPalletXcmCall = Null; - /** @name SpRuntimeDispatchError (261) */ - export interface SpRuntimeDispatchError extends Enum { - readonly isOther: boolean; - readonly isCannotLookup: boolean; - readonly isBadOrigin: boolean; - readonly isModule: boolean; - readonly asModule: SpRuntimeModuleError; - readonly isConsumerRemaining: boolean; - readonly isNoProviders: boolean; - readonly isTooManyConsumers: boolean; - readonly isToken: boolean; - readonly asToken: SpRuntimeTokenError; - readonly isArithmetic: boolean; - readonly asArithmetic: SpRuntimeArithmeticError; - readonly isTransactional: boolean; - readonly asTransactional: SpRuntimeTransactionalError; - readonly type: 'Other' | 'CannotLookup' | 'BadOrigin' | 'Module' | 'ConsumerRemaining' | 'NoProviders' | 'TooManyConsumers' | 'Token' | 'Arithmetic' | 'Transactional'; + /** @name CumulusPalletDmpQueueCall (235) */ + interface CumulusPalletDmpQueueCall extends Enum { + readonly isServiceOverweight: boolean; + readonly asServiceOverweight: { + 
readonly index: u64; + readonly weightLimit: Weight; + } & Struct; + readonly type: 'ServiceOverweight'; } - /** @name SpRuntimeModuleError (262) */ - export interface SpRuntimeModuleError extends Struct { - readonly index: u8; - readonly error: U8aFixed; + /** @name PalletInflationCall (236) */ + interface PalletInflationCall extends Enum { + readonly isStartInflation: boolean; + readonly asStartInflation: { + readonly inflationStartRelayBlock: u32; + } & Struct; + readonly type: 'StartInflation'; } - /** @name SpRuntimeTokenError (263) */ - export interface SpRuntimeTokenError extends Enum { - readonly isNoFunds: boolean; - readonly isWouldDie: boolean; - readonly isBelowMinimum: boolean; - readonly isCannotCreate: boolean; - readonly isUnknownAsset: boolean; - readonly isFrozen: boolean; - readonly isUnsupported: boolean; - readonly type: 'NoFunds' | 'WouldDie' | 'BelowMinimum' | 'CannotCreate' | 'UnknownAsset' | 'Frozen' | 'Unsupported'; + /** @name PalletUniqueCall (237) */ + interface PalletUniqueCall extends Enum { + readonly isCreateCollection: boolean; + readonly asCreateCollection: { + readonly collectionName: Vec; + readonly collectionDescription: Vec; + readonly tokenPrefix: Bytes; + readonly mode: UpDataStructsCollectionMode; + } & Struct; + readonly isCreateCollectionEx: boolean; + readonly asCreateCollectionEx: { + readonly data: UpDataStructsCreateCollectionData; + } & Struct; + readonly isDestroyCollection: boolean; + readonly asDestroyCollection: { + readonly collectionId: u32; + } & Struct; + readonly isAddToAllowList: boolean; + readonly asAddToAllowList: { + readonly collectionId: u32; + readonly address: PalletEvmAccountBasicCrossAccountIdRepr; + } & Struct; + readonly isRemoveFromAllowList: boolean; + readonly asRemoveFromAllowList: { + readonly collectionId: u32; + readonly address: PalletEvmAccountBasicCrossAccountIdRepr; + } & Struct; + readonly isChangeCollectionOwner: boolean; + readonly asChangeCollectionOwner: { + readonly collectionId: u32; + readonly newOwner: AccountId32; + } & Struct; + readonly isAddCollectionAdmin: boolean; + readonly asAddCollectionAdmin: { + readonly collectionId: u32; + readonly newAdminId: PalletEvmAccountBasicCrossAccountIdRepr; + } & Struct; + readonly isRemoveCollectionAdmin: boolean; + readonly asRemoveCollectionAdmin: { + readonly collectionId: u32; + readonly accountId: PalletEvmAccountBasicCrossAccountIdRepr; + } & Struct; + readonly isSetCollectionSponsor: boolean; + readonly asSetCollectionSponsor: { + readonly collectionId: u32; + readonly newSponsor: AccountId32; + } & Struct; + readonly isConfirmSponsorship: boolean; + readonly asConfirmSponsorship: { + readonly collectionId: u32; + } & Struct; + readonly isRemoveCollectionSponsor: boolean; + readonly asRemoveCollectionSponsor: { + readonly collectionId: u32; + } & Struct; + readonly isCreateItem: boolean; + readonly asCreateItem: { + readonly collectionId: u32; + readonly owner: PalletEvmAccountBasicCrossAccountIdRepr; + readonly data: UpDataStructsCreateItemData; + } & Struct; + readonly isCreateMultipleItems: boolean; + readonly asCreateMultipleItems: { + readonly collectionId: u32; + readonly owner: PalletEvmAccountBasicCrossAccountIdRepr; + readonly itemsData: Vec; + } & Struct; + readonly isSetCollectionProperties: boolean; + readonly asSetCollectionProperties: { + readonly collectionId: u32; + readonly properties: Vec; + } & Struct; + readonly isDeleteCollectionProperties: boolean; + readonly asDeleteCollectionProperties: { + readonly collectionId: u32; + readonly 
propertyKeys: Vec; + } & Struct; + readonly isSetTokenProperties: boolean; + readonly asSetTokenProperties: { + readonly collectionId: u32; + readonly tokenId: u32; + readonly properties: Vec; + } & Struct; + readonly isDeleteTokenProperties: boolean; + readonly asDeleteTokenProperties: { + readonly collectionId: u32; + readonly tokenId: u32; + readonly propertyKeys: Vec; + } & Struct; + readonly isSetTokenPropertyPermissions: boolean; + readonly asSetTokenPropertyPermissions: { + readonly collectionId: u32; + readonly propertyPermissions: Vec; + } & Struct; + readonly isCreateMultipleItemsEx: boolean; + readonly asCreateMultipleItemsEx: { + readonly collectionId: u32; + readonly data: UpDataStructsCreateItemExData; + } & Struct; + readonly isSetTransfersEnabledFlag: boolean; + readonly asSetTransfersEnabledFlag: { + readonly collectionId: u32; + readonly value: bool; + } & Struct; + readonly isBurnItem: boolean; + readonly asBurnItem: { + readonly collectionId: u32; + readonly itemId: u32; + readonly value: u128; + } & Struct; + readonly isBurnFrom: boolean; + readonly asBurnFrom: { + readonly collectionId: u32; + readonly from: PalletEvmAccountBasicCrossAccountIdRepr; + readonly itemId: u32; + readonly value: u128; + } & Struct; + readonly isTransfer: boolean; + readonly asTransfer: { + readonly recipient: PalletEvmAccountBasicCrossAccountIdRepr; + readonly collectionId: u32; + readonly itemId: u32; + readonly value: u128; + } & Struct; + readonly isApprove: boolean; + readonly asApprove: { + readonly spender: PalletEvmAccountBasicCrossAccountIdRepr; + readonly collectionId: u32; + readonly itemId: u32; + readonly amount: u128; + } & Struct; + readonly isTransferFrom: boolean; + readonly asTransferFrom: { + readonly from: PalletEvmAccountBasicCrossAccountIdRepr; + readonly recipient: PalletEvmAccountBasicCrossAccountIdRepr; + readonly collectionId: u32; + readonly itemId: u32; + readonly value: u128; + } & Struct; + readonly isSetCollectionLimits: boolean; + readonly asSetCollectionLimits: { + readonly collectionId: u32; + readonly newLimit: UpDataStructsCollectionLimits; + } & Struct; + readonly isSetCollectionPermissions: boolean; + readonly asSetCollectionPermissions: { + readonly collectionId: u32; + readonly newPermission: UpDataStructsCollectionPermissions; + } & Struct; + readonly isRepartition: boolean; + readonly asRepartition: { + readonly collectionId: u32; + readonly tokenId: u32; + readonly amount: u128; + } & Struct; + readonly type: 'CreateCollection' | 'CreateCollectionEx' | 'DestroyCollection' | 'AddToAllowList' | 'RemoveFromAllowList' | 'ChangeCollectionOwner' | 'AddCollectionAdmin' | 'RemoveCollectionAdmin' | 'SetCollectionSponsor' | 'ConfirmSponsorship' | 'RemoveCollectionSponsor' | 'CreateItem' | 'CreateMultipleItems' | 'SetCollectionProperties' | 'DeleteCollectionProperties' | 'SetTokenProperties' | 'DeleteTokenProperties' | 'SetTokenPropertyPermissions' | 'CreateMultipleItemsEx' | 'SetTransfersEnabledFlag' | 'BurnItem' | 'BurnFrom' | 'Transfer' | 'Approve' | 'TransferFrom' | 'SetCollectionLimits' | 'SetCollectionPermissions' | 'Repartition'; } - /** @name SpRuntimeArithmeticError (264) */ - export interface SpRuntimeArithmeticError extends Enum { - readonly isUnderflow: boolean; - readonly isOverflow: boolean; - readonly isDivisionByZero: boolean; - readonly type: 'Underflow' | 'Overflow' | 'DivisionByZero'; + /** @name UpDataStructsCollectionMode (242) */ + interface UpDataStructsCollectionMode extends Enum { + readonly isNft: boolean; + readonly isFungible: 
boolean; + readonly asFungible: u8; + readonly isReFungible: boolean; + readonly type: 'Nft' | 'Fungible' | 'ReFungible'; } - /** @name SpRuntimeTransactionalError (265) */ - export interface SpRuntimeTransactionalError extends Enum { - readonly isLimitReached: boolean; - readonly isNoLayer: boolean; - readonly type: 'LimitReached' | 'NoLayer'; + /** @name UpDataStructsCreateCollectionData (243) */ + interface UpDataStructsCreateCollectionData extends Struct { + readonly mode: UpDataStructsCollectionMode; + readonly access: Option; + readonly name: Vec; + readonly description: Vec; + readonly tokenPrefix: Bytes; + readonly pendingSponsor: Option; + readonly limits: Option; + readonly permissions: Option; + readonly tokenPropertyPermissions: Vec; + readonly properties: Vec; } - /** @name PalletSudoError (266) */ - export interface PalletSudoError extends Enum { - readonly isRequireSudo: boolean; - readonly type: 'RequireSudo'; + /** @name UpDataStructsAccessMode (245) */ + interface UpDataStructsAccessMode extends Enum { + readonly isNormal: boolean; + readonly isAllowList: boolean; + readonly type: 'Normal' | 'AllowList'; } - /** @name FrameSystemAccountInfo (267) */ - export interface FrameSystemAccountInfo extends Struct { - readonly nonce: u32; - readonly consumers: u32; - readonly providers: u32; - readonly sufficients: u32; - readonly data: PalletBalancesAccountData; + /** @name UpDataStructsCollectionLimits (247) */ + interface UpDataStructsCollectionLimits extends Struct { + readonly accountTokenOwnershipLimit: Option; + readonly sponsoredDataSize: Option; + readonly sponsoredDataRateLimit: Option; + readonly tokenLimit: Option; + readonly sponsorTransferTimeout: Option; + readonly sponsorApproveTimeout: Option; + readonly ownerCanTransfer: Option; + readonly ownerCanDestroy: Option; + readonly transfersEnabled: Option; } - /** @name FrameSupportWeightsPerDispatchClassU64 (268) */ - export interface FrameSupportWeightsPerDispatchClassU64 extends Struct { - readonly normal: u64; - readonly operational: u64; - readonly mandatory: u64; + /** @name UpDataStructsSponsoringRateLimit (249) */ + interface UpDataStructsSponsoringRateLimit extends Enum { + readonly isSponsoringDisabled: boolean; + readonly isBlocks: boolean; + readonly asBlocks: u32; + readonly type: 'SponsoringDisabled' | 'Blocks'; } - /** @name SpRuntimeDigest (269) */ - export interface SpRuntimeDigest extends Struct { - readonly logs: Vec; + /** @name UpDataStructsCollectionPermissions (252) */ + interface UpDataStructsCollectionPermissions extends Struct { + readonly access: Option; + readonly mintMode: Option; + readonly nesting: Option; } - /** @name SpRuntimeDigestDigestItem (271) */ - export interface SpRuntimeDigestDigestItem extends Enum { - readonly isOther: boolean; - readonly asOther: Bytes; - readonly isConsensus: boolean; - readonly asConsensus: ITuple<[U8aFixed, Bytes]>; - readonly isSeal: boolean; - readonly asSeal: ITuple<[U8aFixed, Bytes]>; - readonly isPreRuntime: boolean; - readonly asPreRuntime: ITuple<[U8aFixed, Bytes]>; - readonly isRuntimeEnvironmentUpdated: boolean; - readonly type: 'Other' | 'Consensus' | 'Seal' | 'PreRuntime' | 'RuntimeEnvironmentUpdated'; + /** @name UpDataStructsNestingPermissions (254) */ + interface UpDataStructsNestingPermissions extends Struct { + readonly tokenOwner: bool; + readonly collectionAdmin: bool; + readonly restricted: Option; } - /** @name FrameSystemEventRecord (273) */ - export interface FrameSystemEventRecord extends Struct { - readonly phase: 
FrameSystemPhase; - readonly event: Event; - readonly topics: Vec; - } + /** @name UpDataStructsOwnerRestrictedSet (256) */ + interface UpDataStructsOwnerRestrictedSet extends BTreeSet {} - /** @name FrameSystemEvent (275) */ - export interface FrameSystemEvent extends Enum { - readonly isExtrinsicSuccess: boolean; - readonly asExtrinsicSuccess: { - readonly dispatchInfo: FrameSupportWeightsDispatchInfo; - } & Struct; - readonly isExtrinsicFailed: boolean; - readonly asExtrinsicFailed: { - readonly dispatchError: SpRuntimeDispatchError; - readonly dispatchInfo: FrameSupportWeightsDispatchInfo; - } & Struct; - readonly isCodeUpdated: boolean; - readonly isNewAccount: boolean; - readonly asNewAccount: { - readonly account: AccountId32; - } & Struct; - readonly isKilledAccount: boolean; - readonly asKilledAccount: { - readonly account: AccountId32; - } & Struct; - readonly isRemarked: boolean; - readonly asRemarked: { - readonly sender: AccountId32; - readonly hash_: H256; - } & Struct; - readonly type: 'ExtrinsicSuccess' | 'ExtrinsicFailed' | 'CodeUpdated' | 'NewAccount' | 'KilledAccount' | 'Remarked'; + /** @name UpDataStructsPropertyKeyPermission (261) */ + interface UpDataStructsPropertyKeyPermission extends Struct { + readonly key: Bytes; + readonly permission: UpDataStructsPropertyPermission; } - /** @name FrameSupportWeightsDispatchInfo (276) */ - export interface FrameSupportWeightsDispatchInfo extends Struct { - readonly weight: u64; - readonly class: FrameSupportWeightsDispatchClass; - readonly paysFee: FrameSupportWeightsPays; + /** @name UpDataStructsPropertyPermission (262) */ + interface UpDataStructsPropertyPermission extends Struct { + readonly mutable: bool; + readonly collectionAdmin: bool; + readonly tokenOwner: bool; } - /** @name FrameSupportWeightsDispatchClass (277) */ - export interface FrameSupportWeightsDispatchClass extends Enum { - readonly isNormal: boolean; - readonly isOperational: boolean; - readonly isMandatory: boolean; - readonly type: 'Normal' | 'Operational' | 'Mandatory'; + /** @name UpDataStructsProperty (265) */ + interface UpDataStructsProperty extends Struct { + readonly key: Bytes; + readonly value: Bytes; } - /** @name FrameSupportWeightsPays (278) */ - export interface FrameSupportWeightsPays extends Enum { - readonly isYes: boolean; - readonly isNo: boolean; - readonly type: 'Yes' | 'No'; + /** @name UpDataStructsCreateItemData (268) */ + interface UpDataStructsCreateItemData extends Enum { + readonly isNft: boolean; + readonly asNft: UpDataStructsCreateNftData; + readonly isFungible: boolean; + readonly asFungible: UpDataStructsCreateFungibleData; + readonly isReFungible: boolean; + readonly asReFungible: UpDataStructsCreateReFungibleData; + readonly type: 'Nft' | 'Fungible' | 'ReFungible'; } - /** @name OrmlVestingModuleEvent (279) */ - export interface OrmlVestingModuleEvent extends Enum { - readonly isVestingScheduleAdded: boolean; - readonly asVestingScheduleAdded: { - readonly from: AccountId32; - readonly to: AccountId32; - readonly vestingSchedule: OrmlVestingVestingSchedule; - } & Struct; - readonly isClaimed: boolean; - readonly asClaimed: { - readonly who: AccountId32; - readonly amount: u128; - } & Struct; - readonly isVestingSchedulesUpdated: boolean; - readonly asVestingSchedulesUpdated: { - readonly who: AccountId32; - } & Struct; - readonly type: 'VestingScheduleAdded' | 'Claimed' | 'VestingSchedulesUpdated'; + /** @name UpDataStructsCreateNftData (269) */ + interface UpDataStructsCreateNftData extends Struct { + readonly 
properties: Vec; } - /** @name CumulusPalletXcmpQueueEvent (280) */ - export interface CumulusPalletXcmpQueueEvent extends Enum { - readonly isSuccess: boolean; - readonly asSuccess: Option; - readonly isFail: boolean; - readonly asFail: ITuple<[Option, XcmV2TraitsError]>; - readonly isBadVersion: boolean; - readonly asBadVersion: Option; - readonly isBadFormat: boolean; - readonly asBadFormat: Option; - readonly isUpwardMessageSent: boolean; - readonly asUpwardMessageSent: Option; - readonly isXcmpMessageSent: boolean; - readonly asXcmpMessageSent: Option; - readonly isOverweightEnqueued: boolean; - readonly asOverweightEnqueued: ITuple<[u32, u32, u64, u64]>; - readonly isOverweightServiced: boolean; - readonly asOverweightServiced: ITuple<[u64, u64]>; - readonly type: 'Success' | 'Fail' | 'BadVersion' | 'BadFormat' | 'UpwardMessageSent' | 'XcmpMessageSent' | 'OverweightEnqueued' | 'OverweightServiced'; + /** @name UpDataStructsCreateFungibleData (270) */ + interface UpDataStructsCreateFungibleData extends Struct { + readonly value: u128; } - /** @name PalletXcmEvent (281) */ - export interface PalletXcmEvent extends Enum { - readonly isAttempted: boolean; - readonly asAttempted: XcmV2TraitsOutcome; - readonly isSent: boolean; - readonly asSent: ITuple<[XcmV1MultiLocation, XcmV1MultiLocation, XcmV2Xcm]>; - readonly isUnexpectedResponse: boolean; - readonly asUnexpectedResponse: ITuple<[XcmV1MultiLocation, u64]>; - readonly isResponseReady: boolean; - readonly asResponseReady: ITuple<[u64, XcmV2Response]>; - readonly isNotified: boolean; - readonly asNotified: ITuple<[u64, u8, u8]>; - readonly isNotifyOverweight: boolean; - readonly asNotifyOverweight: ITuple<[u64, u8, u8, u64, u64]>; - readonly isNotifyDispatchError: boolean; - readonly asNotifyDispatchError: ITuple<[u64, u8, u8]>; - readonly isNotifyDecodeFailed: boolean; - readonly asNotifyDecodeFailed: ITuple<[u64, u8, u8]>; - readonly isInvalidResponder: boolean; - readonly asInvalidResponder: ITuple<[XcmV1MultiLocation, u64, Option]>; - readonly isInvalidResponderVersion: boolean; - readonly asInvalidResponderVersion: ITuple<[XcmV1MultiLocation, u64]>; - readonly isResponseTaken: boolean; - readonly asResponseTaken: u64; - readonly isAssetsTrapped: boolean; - readonly asAssetsTrapped: ITuple<[H256, XcmV1MultiLocation, XcmVersionedMultiAssets]>; - readonly isVersionChangeNotified: boolean; - readonly asVersionChangeNotified: ITuple<[XcmV1MultiLocation, u32]>; - readonly isSupportedVersionChanged: boolean; - readonly asSupportedVersionChanged: ITuple<[XcmV1MultiLocation, u32]>; - readonly isNotifyTargetSendFail: boolean; - readonly asNotifyTargetSendFail: ITuple<[XcmV1MultiLocation, u64, XcmV2TraitsError]>; - readonly isNotifyTargetMigrationFail: boolean; - readonly asNotifyTargetMigrationFail: ITuple<[XcmVersionedMultiLocation, u64]>; - readonly type: 'Attempted' | 'Sent' | 'UnexpectedResponse' | 'ResponseReady' | 'Notified' | 'NotifyOverweight' | 'NotifyDispatchError' | 'NotifyDecodeFailed' | 'InvalidResponder' | 'InvalidResponderVersion' | 'ResponseTaken' | 'AssetsTrapped' | 'VersionChangeNotified' | 'SupportedVersionChanged' | 'NotifyTargetSendFail' | 'NotifyTargetMigrationFail'; + /** @name UpDataStructsCreateReFungibleData (271) */ + interface UpDataStructsCreateReFungibleData extends Struct { + readonly pieces: u128; + readonly properties: Vec; } - /** @name XcmV2TraitsOutcome (282) */ - export interface XcmV2TraitsOutcome extends Enum { - readonly isComplete: boolean; - readonly asComplete: u64; - readonly isIncomplete: 
boolean; - readonly asIncomplete: ITuple<[u64, XcmV2TraitsError]>; - readonly isError: boolean; - readonly asError: XcmV2TraitsError; - readonly type: 'Complete' | 'Incomplete' | 'Error'; + /** @name UpDataStructsCreateItemExData (274) */ + interface UpDataStructsCreateItemExData extends Enum { + readonly isNft: boolean; + readonly asNft: Vec; + readonly isFungible: boolean; + readonly asFungible: BTreeMap; + readonly isRefungibleMultipleItems: boolean; + readonly asRefungibleMultipleItems: Vec; + readonly isRefungibleMultipleOwners: boolean; + readonly asRefungibleMultipleOwners: UpDataStructsCreateRefungibleExMultipleOwners; + readonly type: 'Nft' | 'Fungible' | 'RefungibleMultipleItems' | 'RefungibleMultipleOwners'; } - /** @name CumulusPalletXcmEvent (284) */ - export interface CumulusPalletXcmEvent extends Enum { - readonly isInvalidFormat: boolean; - readonly asInvalidFormat: U8aFixed; - readonly isUnsupportedVersion: boolean; - readonly asUnsupportedVersion: U8aFixed; - readonly isExecutedDownward: boolean; - readonly asExecutedDownward: ITuple<[U8aFixed, XcmV2TraitsOutcome]>; - readonly type: 'InvalidFormat' | 'UnsupportedVersion' | 'ExecutedDownward'; + /** @name UpDataStructsCreateNftExData (276) */ + interface UpDataStructsCreateNftExData extends Struct { + readonly properties: Vec; + readonly owner: PalletEvmAccountBasicCrossAccountIdRepr; } - /** @name CumulusPalletDmpQueueEvent (285) */ - export interface CumulusPalletDmpQueueEvent extends Enum { - readonly isInvalidFormat: boolean; - readonly asInvalidFormat: { - readonly messageId: U8aFixed; - } & Struct; - readonly isUnsupportedVersion: boolean; - readonly asUnsupportedVersion: { - readonly messageId: U8aFixed; - } & Struct; - readonly isExecutedDownward: boolean; - readonly asExecutedDownward: { - readonly messageId: U8aFixed; - readonly outcome: XcmV2TraitsOutcome; - } & Struct; - readonly isWeightExhausted: boolean; - readonly asWeightExhausted: { - readonly messageId: U8aFixed; - readonly remainingWeight: u64; - readonly requiredWeight: u64; - } & Struct; - readonly isOverweightEnqueued: boolean; - readonly asOverweightEnqueued: { - readonly messageId: U8aFixed; - readonly overweightIndex: u64; - readonly requiredWeight: u64; - } & Struct; - readonly isOverweightServiced: boolean; - readonly asOverweightServiced: { - readonly overweightIndex: u64; - readonly weightUsed: u64; - } & Struct; - readonly type: 'InvalidFormat' | 'UnsupportedVersion' | 'ExecutedDownward' | 'WeightExhausted' | 'OverweightEnqueued' | 'OverweightServiced'; + /** @name UpDataStructsCreateRefungibleExSingleOwner (283) */ + interface UpDataStructsCreateRefungibleExSingleOwner extends Struct { + readonly user: PalletEvmAccountBasicCrossAccountIdRepr; + readonly pieces: u128; + readonly properties: Vec; } - /** @name PalletUniqueRawEvent (286) */ - export interface PalletUniqueRawEvent extends Enum { - readonly isCollectionSponsorRemoved: boolean; - readonly asCollectionSponsorRemoved: u32; - readonly isCollectionAdminAdded: boolean; - readonly asCollectionAdminAdded: ITuple<[u32, PalletEvmAccountBasicCrossAccountIdRepr]>; - readonly isCollectionOwnedChanged: boolean; - readonly asCollectionOwnedChanged: ITuple<[u32, AccountId32]>; - readonly isCollectionSponsorSet: boolean; - readonly asCollectionSponsorSet: ITuple<[u32, AccountId32]>; - readonly isSponsorshipConfirmed: boolean; - readonly asSponsorshipConfirmed: ITuple<[u32, AccountId32]>; - readonly isCollectionAdminRemoved: boolean; - readonly asCollectionAdminRemoved: ITuple<[u32, 
PalletEvmAccountBasicCrossAccountIdRepr]>; - readonly isAllowListAddressRemoved: boolean; - readonly asAllowListAddressRemoved: ITuple<[u32, PalletEvmAccountBasicCrossAccountIdRepr]>; - readonly isAllowListAddressAdded: boolean; - readonly asAllowListAddressAdded: ITuple<[u32, PalletEvmAccountBasicCrossAccountIdRepr]>; - readonly isCollectionLimitSet: boolean; - readonly asCollectionLimitSet: u32; - readonly isCollectionPermissionSet: boolean; - readonly asCollectionPermissionSet: u32; - readonly type: 'CollectionSponsorRemoved' | 'CollectionAdminAdded' | 'CollectionOwnedChanged' | 'CollectionSponsorSet' | 'SponsorshipConfirmed' | 'CollectionAdminRemoved' | 'AllowListAddressRemoved' | 'AllowListAddressAdded' | 'CollectionLimitSet' | 'CollectionPermissionSet'; + /** @name UpDataStructsCreateRefungibleExMultipleOwners (285) */ + interface UpDataStructsCreateRefungibleExMultipleOwners extends Struct { + readonly users: BTreeMap; + readonly properties: Vec; } - /** @name PalletUniqueSchedulerEvent (287) */ - export interface PalletUniqueSchedulerEvent extends Enum { - readonly isScheduled: boolean; - readonly asScheduled: { - readonly when: u32; - readonly index: u32; - } & Struct; - readonly isCanceled: boolean; - readonly asCanceled: { + /** @name PalletUniqueSchedulerCall (286) */ + interface PalletUniqueSchedulerCall extends Enum { + readonly isScheduleNamed: boolean; + readonly asScheduleNamed: { + readonly id: U8aFixed; readonly when: u32; - readonly index: u32; + readonly maybePeriodic: Option>; + readonly priority: Option; + readonly call: FrameSupportScheduleMaybeHashed; } & Struct; - readonly isDispatched: boolean; - readonly asDispatched: { - readonly task: ITuple<[u32, u32]>; - readonly id: Option; - readonly result: Result; + readonly isCancelNamed: boolean; + readonly asCancelNamed: { + readonly id: U8aFixed; } & Struct; - readonly isCallLookupFailed: boolean; - readonly asCallLookupFailed: { - readonly task: ITuple<[u32, u32]>; - readonly id: Option; - readonly error: FrameSupportScheduleLookupError; + readonly isScheduleNamedAfter: boolean; + readonly asScheduleNamedAfter: { + readonly id: U8aFixed; + readonly after: u32; + readonly maybePeriodic: Option>; + readonly priority: Option; + readonly call: FrameSupportScheduleMaybeHashed; } & Struct; - readonly type: 'Scheduled' | 'Canceled' | 'Dispatched' | 'CallLookupFailed'; - } - - /** @name FrameSupportScheduleLookupError (289) */ - export interface FrameSupportScheduleLookupError extends Enum { - readonly isUnknown: boolean; - readonly isBadFormat: boolean; - readonly type: 'Unknown' | 'BadFormat'; + readonly isChangeNamedPriority: boolean; + readonly asChangeNamedPriority: { + readonly id: U8aFixed; + readonly priority: u8; + } & Struct; + readonly type: 'ScheduleNamed' | 'CancelNamed' | 'ScheduleNamedAfter' | 'ChangeNamedPriority'; } - /** @name PalletCommonEvent (290) */ - export interface PalletCommonEvent extends Enum { - readonly isCollectionCreated: boolean; - readonly asCollectionCreated: ITuple<[u32, u8, AccountId32]>; - readonly isCollectionDestroyed: boolean; - readonly asCollectionDestroyed: u32; - readonly isItemCreated: boolean; - readonly asItemCreated: ITuple<[u32, u32, PalletEvmAccountBasicCrossAccountIdRepr, u128]>; - readonly isItemDestroyed: boolean; - readonly asItemDestroyed: ITuple<[u32, u32, PalletEvmAccountBasicCrossAccountIdRepr, u128]>; - readonly isTransfer: boolean; - readonly asTransfer: ITuple<[u32, u32, PalletEvmAccountBasicCrossAccountIdRepr, PalletEvmAccountBasicCrossAccountIdRepr, u128]>; - 
readonly isApproved: boolean; - readonly asApproved: ITuple<[u32, u32, PalletEvmAccountBasicCrossAccountIdRepr, PalletEvmAccountBasicCrossAccountIdRepr, u128]>; - readonly isCollectionPropertySet: boolean; - readonly asCollectionPropertySet: ITuple<[u32, Bytes]>; - readonly isCollectionPropertyDeleted: boolean; - readonly asCollectionPropertyDeleted: ITuple<[u32, Bytes]>; - readonly isTokenPropertySet: boolean; - readonly asTokenPropertySet: ITuple<[u32, u32, Bytes]>; - readonly isTokenPropertyDeleted: boolean; - readonly asTokenPropertyDeleted: ITuple<[u32, u32, Bytes]>; - readonly isPropertyPermissionSet: boolean; - readonly asPropertyPermissionSet: ITuple<[u32, Bytes]>; - readonly type: 'CollectionCreated' | 'CollectionDestroyed' | 'ItemCreated' | 'ItemDestroyed' | 'Transfer' | 'Approved' | 'CollectionPropertySet' | 'CollectionPropertyDeleted' | 'TokenPropertySet' | 'TokenPropertyDeleted' | 'PropertyPermissionSet'; + /** @name FrameSupportScheduleMaybeHashed (289) */ + interface FrameSupportScheduleMaybeHashed extends Enum { + readonly isValue: boolean; + readonly asValue: Call; + readonly isHash: boolean; + readonly asHash: H256; + readonly type: 'Value' | 'Hash'; } - /** @name PalletStructureEvent (291) */ - export interface PalletStructureEvent extends Enum { - readonly isExecuted: boolean; - readonly asExecuted: Result; - readonly type: 'Executed'; + /** @name PalletConfigurationCall (290) */ + interface PalletConfigurationCall extends Enum { + readonly isSetWeightToFeeCoefficientOverride: boolean; + readonly asSetWeightToFeeCoefficientOverride: { + readonly coeff: Option; + } & Struct; + readonly isSetMinGasPriceOverride: boolean; + readonly asSetMinGasPriceOverride: { + readonly coeff: Option; + } & Struct; + readonly type: 'SetWeightToFeeCoefficientOverride' | 'SetMinGasPriceOverride'; } - /** @name PalletRmrkCoreEvent (292) */ - export interface PalletRmrkCoreEvent extends Enum { - readonly isCollectionCreated: boolean; - readonly asCollectionCreated: { - readonly issuer: AccountId32; + /** @name PalletTemplateTransactionPaymentCall (292) */ + type PalletTemplateTransactionPaymentCall = Null; + + /** @name PalletStructureCall (293) */ + type PalletStructureCall = Null; + + /** @name PalletRmrkCoreCall (294) */ + interface PalletRmrkCoreCall extends Enum { + readonly isCreateCollection: boolean; + readonly asCreateCollection: { + readonly metadata: Bytes; + readonly max: Option; + readonly symbol: Bytes; + } & Struct; + readonly isDestroyCollection: boolean; + readonly asDestroyCollection: { readonly collectionId: u32; } & Struct; - readonly isCollectionDestroyed: boolean; - readonly asCollectionDestroyed: { - readonly issuer: AccountId32; + readonly isChangeCollectionIssuer: boolean; + readonly asChangeCollectionIssuer: { readonly collectionId: u32; + readonly newIssuer: MultiAddress; } & Struct; - readonly isIssuerChanged: boolean; - readonly asIssuerChanged: { - readonly oldIssuer: AccountId32; - readonly newIssuer: AccountId32; + readonly isLockCollection: boolean; + readonly asLockCollection: { readonly collectionId: u32; } & Struct; - readonly isCollectionLocked: boolean; - readonly asCollectionLocked: { - readonly issuer: AccountId32; + readonly isMintNft: boolean; + readonly asMintNft: { + readonly owner: Option; readonly collectionId: u32; + readonly recipient: Option; + readonly royaltyAmount: Option; + readonly metadata: Bytes; + readonly transferable: bool; + readonly resources: Option>; } & Struct; - readonly isNftMinted: boolean; - readonly asNftMinted: { - readonly 
owner: AccountId32; + readonly isBurnNft: boolean; + readonly asBurnNft: { readonly collectionId: u32; readonly nftId: u32; + readonly maxBurns: u32; } & Struct; - readonly isNftBurned: boolean; - readonly asNftBurned: { - readonly owner: AccountId32; - readonly nftId: u32; + readonly isSend: boolean; + readonly asSend: { + readonly rmrkCollectionId: u32; + readonly rmrkNftId: u32; + readonly newOwner: RmrkTraitsNftAccountIdOrCollectionNftTuple; } & Struct; - readonly isNftSent: boolean; - readonly asNftSent: { - readonly sender: AccountId32; - readonly recipient: RmrkTraitsNftAccountIdOrCollectionNftTuple; - readonly collectionId: u32; - readonly nftId: u32; - readonly approvalRequired: bool; + readonly isAcceptNft: boolean; + readonly asAcceptNft: { + readonly rmrkCollectionId: u32; + readonly rmrkNftId: u32; + readonly newOwner: RmrkTraitsNftAccountIdOrCollectionNftTuple; } & Struct; - readonly isNftAccepted: boolean; - readonly asNftAccepted: { - readonly sender: AccountId32; - readonly recipient: RmrkTraitsNftAccountIdOrCollectionNftTuple; - readonly collectionId: u32; - readonly nftId: u32; + readonly isRejectNft: boolean; + readonly asRejectNft: { + readonly rmrkCollectionId: u32; + readonly rmrkNftId: u32; } & Struct; - readonly isNftRejected: boolean; - readonly asNftRejected: { - readonly sender: AccountId32; - readonly collectionId: u32; - readonly nftId: u32; + readonly isAcceptResource: boolean; + readonly asAcceptResource: { + readonly rmrkCollectionId: u32; + readonly rmrkNftId: u32; + readonly resourceId: u32; } & Struct; - readonly isPropertySet: boolean; - readonly asPropertySet: { - readonly collectionId: u32; + readonly isAcceptResourceRemoval: boolean; + readonly asAcceptResourceRemoval: { + readonly rmrkCollectionId: u32; + readonly rmrkNftId: u32; + readonly resourceId: u32; + } & Struct; + readonly isSetProperty: boolean; + readonly asSetProperty: { + readonly rmrkCollectionId: Compact; readonly maybeNftId: Option; readonly key: Bytes; readonly value: Bytes; } & Struct; - readonly isResourceAdded: boolean; - readonly asResourceAdded: { - readonly nftId: u32; - readonly resourceId: u32; + readonly isSetPriority: boolean; + readonly asSetPriority: { + readonly rmrkCollectionId: u32; + readonly rmrkNftId: u32; + readonly priorities: Vec; } & Struct; - readonly isResourceRemoval: boolean; - readonly asResourceRemoval: { + readonly isAddBasicResource: boolean; + readonly asAddBasicResource: { + readonly rmrkCollectionId: u32; readonly nftId: u32; - readonly resourceId: u32; + readonly resource: RmrkTraitsResourceBasicResource; } & Struct; - readonly isResourceAccepted: boolean; - readonly asResourceAccepted: { + readonly isAddComposableResource: boolean; + readonly asAddComposableResource: { + readonly rmrkCollectionId: u32; readonly nftId: u32; - readonly resourceId: u32; + readonly resource: RmrkTraitsResourceComposableResource; } & Struct; - readonly isResourceRemovalAccepted: boolean; - readonly asResourceRemovalAccepted: { + readonly isAddSlotResource: boolean; + readonly asAddSlotResource: { + readonly rmrkCollectionId: u32; readonly nftId: u32; - readonly resourceId: u32; + readonly resource: RmrkTraitsResourceSlotResource; } & Struct; - readonly isPrioritySet: boolean; - readonly asPrioritySet: { - readonly collectionId: u32; + readonly isRemoveResource: boolean; + readonly asRemoveResource: { + readonly rmrkCollectionId: u32; readonly nftId: u32; + readonly resourceId: u32; } & Struct; - readonly type: 'CollectionCreated' | 'CollectionDestroyed' | 
'IssuerChanged' | 'CollectionLocked' | 'NftMinted' | 'NftBurned' | 'NftSent' | 'NftAccepted' | 'NftRejected' | 'PropertySet' | 'ResourceAdded' | 'ResourceRemoval' | 'ResourceAccepted' | 'ResourceRemovalAccepted' | 'PrioritySet'; + readonly type: 'CreateCollection' | 'DestroyCollection' | 'ChangeCollectionIssuer' | 'LockCollection' | 'MintNft' | 'BurnNft' | 'Send' | 'AcceptNft' | 'RejectNft' | 'AcceptResource' | 'AcceptResourceRemoval' | 'SetProperty' | 'SetPriority' | 'AddBasicResource' | 'AddComposableResource' | 'AddSlotResource' | 'RemoveResource'; } - /** @name PalletRmrkEquipEvent (293) */ - export interface PalletRmrkEquipEvent extends Enum { - readonly isBaseCreated: boolean; - readonly asBaseCreated: { - readonly issuer: AccountId32; + /** @name RmrkTraitsResourceResourceTypes (300) */ + interface RmrkTraitsResourceResourceTypes extends Enum { + readonly isBasic: boolean; + readonly asBasic: RmrkTraitsResourceBasicResource; + readonly isComposable: boolean; + readonly asComposable: RmrkTraitsResourceComposableResource; + readonly isSlot: boolean; + readonly asSlot: RmrkTraitsResourceSlotResource; + readonly type: 'Basic' | 'Composable' | 'Slot'; + } + + /** @name RmrkTraitsResourceBasicResource (302) */ + interface RmrkTraitsResourceBasicResource extends Struct { + readonly src: Option; + readonly metadata: Option; + readonly license: Option; + readonly thumb: Option; + } + + /** @name RmrkTraitsResourceComposableResource (304) */ + interface RmrkTraitsResourceComposableResource extends Struct { + readonly parts: Vec; + readonly base: u32; + readonly src: Option; + readonly metadata: Option; + readonly license: Option; + readonly thumb: Option; + } + + /** @name RmrkTraitsResourceSlotResource (305) */ + interface RmrkTraitsResourceSlotResource extends Struct { + readonly base: u32; + readonly src: Option; + readonly metadata: Option; + readonly slot: u32; + readonly license: Option; + readonly thumb: Option; + } + + /** @name PalletRmrkEquipCall (308) */ + interface PalletRmrkEquipCall extends Enum { + readonly isCreateBase: boolean; + readonly asCreateBase: { + readonly baseType: Bytes; + readonly symbol: Bytes; + readonly parts: Vec; + } & Struct; + readonly isThemeAdd: boolean; + readonly asThemeAdd: { readonly baseId: u32; + readonly theme: RmrkTraitsTheme; } & Struct; - readonly isEquippablesUpdated: boolean; - readonly asEquippablesUpdated: { + readonly isEquippable: boolean; + readonly asEquippable: { readonly baseId: u32; readonly slotId: u32; + readonly equippables: RmrkTraitsPartEquippableList; } & Struct; - readonly type: 'BaseCreated' | 'EquippablesUpdated'; + readonly type: 'CreateBase' | 'ThemeAdd' | 'Equippable'; } - /** @name PalletEvmEvent (294) */ - export interface PalletEvmEvent extends Enum { - readonly isLog: boolean; - readonly asLog: EthereumLog; - readonly isCreated: boolean; - readonly asCreated: H160; - readonly isCreatedFailed: boolean; - readonly asCreatedFailed: H160; - readonly isExecuted: boolean; - readonly asExecuted: H160; - readonly isExecutedFailed: boolean; - readonly asExecutedFailed: H160; - readonly isBalanceDeposit: boolean; - readonly asBalanceDeposit: ITuple<[AccountId32, H160, U256]>; - readonly isBalanceWithdraw: boolean; - readonly asBalanceWithdraw: ITuple<[AccountId32, H160, U256]>; - readonly type: 'Log' | 'Created' | 'CreatedFailed' | 'Executed' | 'ExecutedFailed' | 'BalanceDeposit' | 'BalanceWithdraw'; + /** @name RmrkTraitsPartPartType (311) */ + interface RmrkTraitsPartPartType extends Enum { + readonly isFixedPart: boolean; + 
readonly asFixedPart: RmrkTraitsPartFixedPart; + readonly isSlotPart: boolean; + readonly asSlotPart: RmrkTraitsPartSlotPart; + readonly type: 'FixedPart' | 'SlotPart'; } - /** @name EthereumLog (295) */ - export interface EthereumLog extends Struct { - readonly address: H160; - readonly topics: Vec; - readonly data: Bytes; + /** @name RmrkTraitsPartFixedPart (313) */ + interface RmrkTraitsPartFixedPart extends Struct { + readonly id: u32; + readonly z: u32; + readonly src: Bytes; + } + + /** @name RmrkTraitsPartSlotPart (314) */ + interface RmrkTraitsPartSlotPart extends Struct { + readonly id: u32; + readonly equippable: RmrkTraitsPartEquippableList; + readonly src: Bytes; + readonly z: u32; + } + + /** @name RmrkTraitsPartEquippableList (315) */ + interface RmrkTraitsPartEquippableList extends Enum { + readonly isAll: boolean; + readonly isEmpty: boolean; + readonly isCustom: boolean; + readonly asCustom: Vec; + readonly type: 'All' | 'Empty' | 'Custom'; } - /** @name PalletEthereumEvent (296) */ - export interface PalletEthereumEvent extends Enum { - readonly isExecuted: boolean; - readonly asExecuted: ITuple<[H160, H160, H256, EvmCoreErrorExitReason]>; - readonly type: 'Executed'; + /** @name RmrkTraitsTheme (317) */ + interface RmrkTraitsTheme extends Struct { + readonly name: Bytes; + readonly properties: Vec; + readonly inherit: bool; } - /** @name EvmCoreErrorExitReason (297) */ - export interface EvmCoreErrorExitReason extends Enum { - readonly isSucceed: boolean; - readonly asSucceed: EvmCoreErrorExitSucceed; - readonly isError: boolean; - readonly asError: EvmCoreErrorExitError; - readonly isRevert: boolean; - readonly asRevert: EvmCoreErrorExitRevert; - readonly isFatal: boolean; - readonly asFatal: EvmCoreErrorExitFatal; - readonly type: 'Succeed' | 'Error' | 'Revert' | 'Fatal'; + /** @name RmrkTraitsThemeThemeProperty (319) */ + interface RmrkTraitsThemeThemeProperty extends Struct { + readonly key: Bytes; + readonly value: Bytes; } - /** @name EvmCoreErrorExitSucceed (298) */ - export interface EvmCoreErrorExitSucceed extends Enum { - readonly isStopped: boolean; - readonly isReturned: boolean; - readonly isSuicided: boolean; - readonly type: 'Stopped' | 'Returned' | 'Suicided'; + /** @name PalletAppPromotionCall (321) */ + interface PalletAppPromotionCall extends Enum { + readonly isSetAdminAddress: boolean; + readonly asSetAdminAddress: { + readonly admin: PalletEvmAccountBasicCrossAccountIdRepr; + } & Struct; + readonly isStake: boolean; + readonly asStake: { + readonly amount: u128; + } & Struct; + readonly isUnstake: boolean; + readonly isSponsorCollection: boolean; + readonly asSponsorCollection: { + readonly collectionId: u32; + } & Struct; + readonly isStopSponsoringCollection: boolean; + readonly asStopSponsoringCollection: { + readonly collectionId: u32; + } & Struct; + readonly isSponsorContract: boolean; + readonly asSponsorContract: { + readonly contractId: H160; + } & Struct; + readonly isStopSponsoringContract: boolean; + readonly asStopSponsoringContract: { + readonly contractId: H160; + } & Struct; + readonly isPayoutStakers: boolean; + readonly asPayoutStakers: { + readonly stakersNumber: Option; + } & Struct; + readonly type: 'SetAdminAddress' | 'Stake' | 'Unstake' | 'SponsorCollection' | 'StopSponsoringCollection' | 'SponsorContract' | 'StopSponsoringContract' | 'PayoutStakers'; } - /** @name EvmCoreErrorExitError (299) */ - export interface EvmCoreErrorExitError extends Enum { - readonly isStackUnderflow: boolean; - readonly isStackOverflow: boolean; - 
readonly isInvalidJump: boolean; - readonly isInvalidRange: boolean; - readonly isDesignatedInvalid: boolean; - readonly isCallTooDeep: boolean; - readonly isCreateCollision: boolean; - readonly isCreateContractLimit: boolean; - readonly isOutOfOffset: boolean; - readonly isOutOfGas: boolean; - readonly isOutOfFund: boolean; - readonly isPcUnderflow: boolean; - readonly isCreateEmpty: boolean; - readonly isOther: boolean; - readonly asOther: Text; - readonly isInvalidCode: boolean; - readonly type: 'StackUnderflow' | 'StackOverflow' | 'InvalidJump' | 'InvalidRange' | 'DesignatedInvalid' | 'CallTooDeep' | 'CreateCollision' | 'CreateContractLimit' | 'OutOfOffset' | 'OutOfGas' | 'OutOfFund' | 'PcUnderflow' | 'CreateEmpty' | 'Other' | 'InvalidCode'; + /** @name PalletForeignAssetsModuleCall (322) */ + interface PalletForeignAssetsModuleCall extends Enum { + readonly isRegisterForeignAsset: boolean; + readonly asRegisterForeignAsset: { + readonly owner: AccountId32; + readonly location: XcmVersionedMultiLocation; + readonly metadata: PalletForeignAssetsModuleAssetMetadata; + } & Struct; + readonly isUpdateForeignAsset: boolean; + readonly asUpdateForeignAsset: { + readonly foreignAssetId: u32; + readonly location: XcmVersionedMultiLocation; + readonly metadata: PalletForeignAssetsModuleAssetMetadata; + } & Struct; + readonly type: 'RegisterForeignAsset' | 'UpdateForeignAsset'; } - /** @name EvmCoreErrorExitRevert (302) */ - export interface EvmCoreErrorExitRevert extends Enum { - readonly isReverted: boolean; - readonly type: 'Reverted'; + /** @name PalletEvmCall (323) */ + interface PalletEvmCall extends Enum { + readonly isWithdraw: boolean; + readonly asWithdraw: { + readonly address: H160; + readonly value: u128; + } & Struct; + readonly isCall: boolean; + readonly asCall: { + readonly source: H160; + readonly target: H160; + readonly input: Bytes; + readonly value: U256; + readonly gasLimit: u64; + readonly maxFeePerGas: U256; + readonly maxPriorityFeePerGas: Option; + readonly nonce: Option; + readonly accessList: Vec]>>; + } & Struct; + readonly isCreate: boolean; + readonly asCreate: { + readonly source: H160; + readonly init: Bytes; + readonly value: U256; + readonly gasLimit: u64; + readonly maxFeePerGas: U256; + readonly maxPriorityFeePerGas: Option; + readonly nonce: Option; + readonly accessList: Vec]>>; + } & Struct; + readonly isCreate2: boolean; + readonly asCreate2: { + readonly source: H160; + readonly init: Bytes; + readonly salt: H256; + readonly value: U256; + readonly gasLimit: u64; + readonly maxFeePerGas: U256; + readonly maxPriorityFeePerGas: Option; + readonly nonce: Option; + readonly accessList: Vec]>>; + } & Struct; + readonly type: 'Withdraw' | 'Call' | 'Create' | 'Create2'; } - /** @name EvmCoreErrorExitFatal (303) */ - export interface EvmCoreErrorExitFatal extends Enum { - readonly isNotSupported: boolean; - readonly isUnhandledInterrupt: boolean; - readonly isCallErrorAsFatal: boolean; - readonly asCallErrorAsFatal: EvmCoreErrorExitError; - readonly isOther: boolean; - readonly asOther: Text; - readonly type: 'NotSupported' | 'UnhandledInterrupt' | 'CallErrorAsFatal' | 'Other'; + /** @name PalletEthereumCall (327) */ + interface PalletEthereumCall extends Enum { + readonly isTransact: boolean; + readonly asTransact: { + readonly transaction: EthereumTransactionTransactionV2; + } & Struct; + readonly type: 'Transact'; } - /** @name PalletMaintenanceEvent (304) */ - export interface PalletMaintenanceEvent extends Enum { - readonly isMaintenanceEnabled: boolean; - 
readonly isMaintenanceDisabled: boolean; - readonly type: 'MaintenanceEnabled' | 'MaintenanceDisabled'; + /** @name EthereumTransactionTransactionV2 (328) */ + interface EthereumTransactionTransactionV2 extends Enum { + readonly isLegacy: boolean; + readonly asLegacy: EthereumTransactionLegacyTransaction; + readonly isEip2930: boolean; + readonly asEip2930: EthereumTransactionEip2930Transaction; + readonly isEip1559: boolean; + readonly asEip1559: EthereumTransactionEip1559Transaction; + readonly type: 'Legacy' | 'Eip2930' | 'Eip1559'; } - /** @name FrameSystemPhase (305) */ - export interface FrameSystemPhase extends Enum { - readonly isApplyExtrinsic: boolean; - readonly asApplyExtrinsic: u32; - readonly isFinalization: boolean; - readonly isInitialization: boolean; - readonly type: 'ApplyExtrinsic' | 'Finalization' | 'Initialization'; + /** @name EthereumTransactionLegacyTransaction (329) */ + interface EthereumTransactionLegacyTransaction extends Struct { + readonly nonce: U256; + readonly gasPrice: U256; + readonly gasLimit: U256; + readonly action: EthereumTransactionTransactionAction; + readonly value: U256; + readonly input: Bytes; + readonly signature: EthereumTransactionTransactionSignature; } - /** @name FrameSystemLastRuntimeUpgradeInfo (307) */ - export interface FrameSystemLastRuntimeUpgradeInfo extends Struct { - readonly specVersion: Compact; - readonly specName: Text; + /** @name EthereumTransactionTransactionAction (330) */ + interface EthereumTransactionTransactionAction extends Enum { + readonly isCall: boolean; + readonly asCall: H160; + readonly isCreate: boolean; + readonly type: 'Call' | 'Create'; } - /** @name FrameSystemLimitsBlockWeights (308) */ - export interface FrameSystemLimitsBlockWeights extends Struct { - readonly baseBlock: u64; - readonly maxBlock: u64; - readonly perClass: FrameSupportWeightsPerDispatchClassWeightsPerClass; + /** @name EthereumTransactionTransactionSignature (331) */ + interface EthereumTransactionTransactionSignature extends Struct { + readonly v: u64; + readonly r: H256; + readonly s: H256; } - /** @name FrameSupportWeightsPerDispatchClassWeightsPerClass (309) */ - export interface FrameSupportWeightsPerDispatchClassWeightsPerClass extends Struct { - readonly normal: FrameSystemLimitsWeightsPerClass; - readonly operational: FrameSystemLimitsWeightsPerClass; - readonly mandatory: FrameSystemLimitsWeightsPerClass; + /** @name EthereumTransactionEip2930Transaction (333) */ + interface EthereumTransactionEip2930Transaction extends Struct { + readonly chainId: u64; + readonly nonce: U256; + readonly gasPrice: U256; + readonly gasLimit: U256; + readonly action: EthereumTransactionTransactionAction; + readonly value: U256; + readonly input: Bytes; + readonly accessList: Vec; + readonly oddYParity: bool; + readonly r: H256; + readonly s: H256; } - /** @name FrameSystemLimitsWeightsPerClass (310) */ - export interface FrameSystemLimitsWeightsPerClass extends Struct { - readonly baseExtrinsic: u64; - readonly maxExtrinsic: Option; - readonly maxTotal: Option; - readonly reserved: Option; + /** @name EthereumTransactionAccessListItem (335) */ + interface EthereumTransactionAccessListItem extends Struct { + readonly address: H160; + readonly storageKeys: Vec; } - /** @name FrameSystemLimitsBlockLength (312) */ - export interface FrameSystemLimitsBlockLength extends Struct { - readonly max: FrameSupportWeightsPerDispatchClassU32; + /** @name EthereumTransactionEip1559Transaction (336) */ + interface EthereumTransactionEip1559Transaction extends 
Struct { + readonly chainId: u64; + readonly nonce: U256; + readonly maxPriorityFeePerGas: U256; + readonly maxFeePerGas: U256; + readonly gasLimit: U256; + readonly action: EthereumTransactionTransactionAction; + readonly value: U256; + readonly input: Bytes; + readonly accessList: Vec; + readonly oddYParity: bool; + readonly r: H256; + readonly s: H256; } - /** @name FrameSupportWeightsPerDispatchClassU32 (313) */ - export interface FrameSupportWeightsPerDispatchClassU32 extends Struct { - readonly normal: u32; - readonly operational: u32; - readonly mandatory: u32; + /** @name PalletEvmMigrationCall (337) */ + interface PalletEvmMigrationCall extends Enum { + readonly isBegin: boolean; + readonly asBegin: { + readonly address: H160; + } & Struct; + readonly isSetData: boolean; + readonly asSetData: { + readonly address: H160; + readonly data: Vec>; + } & Struct; + readonly isFinish: boolean; + readonly asFinish: { + readonly address: H160; + readonly code: Bytes; + } & Struct; + readonly type: 'Begin' | 'SetData' | 'Finish'; } - /** @name FrameSupportWeightsRuntimeDbWeight (314) */ - export interface FrameSupportWeightsRuntimeDbWeight extends Struct { - readonly read: u64; - readonly write: u64; + /** @name PalletMaintenanceCall (340) */ + interface PalletMaintenanceCall extends Enum { + readonly isEnable: boolean; + readonly isDisable: boolean; + readonly type: 'Enable' | 'Disable'; } - /** @name SpVersionRuntimeVersion (315) */ - export interface SpVersionRuntimeVersion extends Struct { - readonly specName: Text; - readonly implName: Text; - readonly authoringVersion: u32; - readonly specVersion: u32; - readonly implVersion: u32; - readonly apis: Vec>; - readonly transactionVersion: u32; - readonly stateVersion: u8; + /** @name PalletTestUtilsCall (341) */ + interface PalletTestUtilsCall extends Enum { + readonly isEnable: boolean; + readonly isSetTestValue: boolean; + readonly asSetTestValue: { + readonly value: u32; + } & Struct; + readonly isSetTestValueAndRollback: boolean; + readonly asSetTestValueAndRollback: { + readonly value: u32; + } & Struct; + readonly isIncTestValue: boolean; + readonly isSelfCancelingInc: boolean; + readonly asSelfCancelingInc: { + readonly id: U8aFixed; + readonly maxTestValue: u32; + } & Struct; + readonly isJustTakeFee: boolean; + readonly type: 'Enable' | 'SetTestValue' | 'SetTestValueAndRollback' | 'IncTestValue' | 'SelfCancelingInc' | 'JustTakeFee'; } - /** @name FrameSystemError (319) */ - export interface FrameSystemError extends Enum { - readonly isInvalidSpecName: boolean; - readonly isSpecVersionNeedsToIncrease: boolean; - readonly isFailedToExtractRuntimeVersion: boolean; - readonly isNonDefaultComposite: boolean; - readonly isNonZeroRefCount: boolean; - readonly isCallFiltered: boolean; - readonly type: 'InvalidSpecName' | 'SpecVersionNeedsToIncrease' | 'FailedToExtractRuntimeVersion' | 'NonDefaultComposite' | 'NonZeroRefCount' | 'CallFiltered'; + /** @name PalletSudoError (342) */ + interface PalletSudoError extends Enum { + readonly isRequireSudo: boolean; + readonly type: 'RequireSudo'; } - /** @name OrmlVestingModuleError (321) */ - export interface OrmlVestingModuleError extends Enum { + /** @name OrmlVestingModuleError (344) */ + interface OrmlVestingModuleError extends Enum { readonly isZeroVestingPeriod: boolean; readonly isZeroVestingPeriodCount: boolean; readonly isInsufficientBalanceToLock: boolean; @@ -2760,30 +3188,86 @@ declare module '@polkadot/types/lookup' { readonly type: 'ZeroVestingPeriod' | 'ZeroVestingPeriodCount' | 
'InsufficientBalanceToLock' | 'TooManyVestingSchedules' | 'AmountLow' | 'MaxVestingSchedulesExceeded'; } - /** @name CumulusPalletXcmpQueueInboundChannelDetails (323) */ - export interface CumulusPalletXcmpQueueInboundChannelDetails extends Struct { + /** @name OrmlXtokensModuleError (345) */ + interface OrmlXtokensModuleError extends Enum { + readonly isAssetHasNoReserve: boolean; + readonly isNotCrossChainTransfer: boolean; + readonly isInvalidDest: boolean; + readonly isNotCrossChainTransferableCurrency: boolean; + readonly isUnweighableMessage: boolean; + readonly isXcmExecutionFailed: boolean; + readonly isCannotReanchor: boolean; + readonly isInvalidAncestry: boolean; + readonly isInvalidAsset: boolean; + readonly isDestinationNotInvertible: boolean; + readonly isBadVersion: boolean; + readonly isDistinctReserveForAssetAndFee: boolean; + readonly isZeroFee: boolean; + readonly isZeroAmount: boolean; + readonly isTooManyAssetsBeingSent: boolean; + readonly isAssetIndexNonExistent: boolean; + readonly isFeeNotEnough: boolean; + readonly isNotSupportedMultiLocation: boolean; + readonly isMinXcmFeeNotDefined: boolean; + readonly type: 'AssetHasNoReserve' | 'NotCrossChainTransfer' | 'InvalidDest' | 'NotCrossChainTransferableCurrency' | 'UnweighableMessage' | 'XcmExecutionFailed' | 'CannotReanchor' | 'InvalidAncestry' | 'InvalidAsset' | 'DestinationNotInvertible' | 'BadVersion' | 'DistinctReserveForAssetAndFee' | 'ZeroFee' | 'ZeroAmount' | 'TooManyAssetsBeingSent' | 'AssetIndexNonExistent' | 'FeeNotEnough' | 'NotSupportedMultiLocation' | 'MinXcmFeeNotDefined'; + } + + /** @name OrmlTokensBalanceLock (348) */ + interface OrmlTokensBalanceLock extends Struct { + readonly id: U8aFixed; + readonly amount: u128; + } + + /** @name OrmlTokensAccountData (350) */ + interface OrmlTokensAccountData extends Struct { + readonly free: u128; + readonly reserved: u128; + readonly frozen: u128; + } + + /** @name OrmlTokensReserveData (352) */ + interface OrmlTokensReserveData extends Struct { + readonly id: Null; + readonly amount: u128; + } + + /** @name OrmlTokensModuleError (354) */ + interface OrmlTokensModuleError extends Enum { + readonly isBalanceTooLow: boolean; + readonly isAmountIntoBalanceFailed: boolean; + readonly isLiquidityRestrictions: boolean; + readonly isMaxLocksExceeded: boolean; + readonly isKeepAlive: boolean; + readonly isExistentialDeposit: boolean; + readonly isDeadAccount: boolean; + readonly isTooManyReserves: boolean; + readonly type: 'BalanceTooLow' | 'AmountIntoBalanceFailed' | 'LiquidityRestrictions' | 'MaxLocksExceeded' | 'KeepAlive' | 'ExistentialDeposit' | 'DeadAccount' | 'TooManyReserves'; + } + + /** @name CumulusPalletXcmpQueueInboundChannelDetails (356) */ + interface CumulusPalletXcmpQueueInboundChannelDetails extends Struct { readonly sender: u32; readonly state: CumulusPalletXcmpQueueInboundState; readonly messageMetadata: Vec>; } - /** @name CumulusPalletXcmpQueueInboundState (324) */ - export interface CumulusPalletXcmpQueueInboundState extends Enum { + /** @name CumulusPalletXcmpQueueInboundState (357) */ + interface CumulusPalletXcmpQueueInboundState extends Enum { readonly isOk: boolean; readonly isSuspended: boolean; readonly type: 'Ok' | 'Suspended'; } - /** @name PolkadotParachainPrimitivesXcmpMessageFormat (327) */ - export interface PolkadotParachainPrimitivesXcmpMessageFormat extends Enum { + /** @name PolkadotParachainPrimitivesXcmpMessageFormat (360) */ + interface PolkadotParachainPrimitivesXcmpMessageFormat extends Enum { readonly 
isConcatenatedVersionedXcm: boolean; readonly isConcatenatedEncodedBlob: boolean; readonly isSignals: boolean; readonly type: 'ConcatenatedVersionedXcm' | 'ConcatenatedEncodedBlob' | 'Signals'; } - /** @name CumulusPalletXcmpQueueOutboundChannelDetails (330) */ - export interface CumulusPalletXcmpQueueOutboundChannelDetails extends Struct { + /** @name CumulusPalletXcmpQueueOutboundChannelDetails (363) */ + interface CumulusPalletXcmpQueueOutboundChannelDetails extends Struct { readonly recipient: u32; readonly state: CumulusPalletXcmpQueueOutboundState; readonly signalsExist: bool; @@ -2791,25 +3275,25 @@ declare module '@polkadot/types/lookup' { readonly lastIndex: u16; } - /** @name CumulusPalletXcmpQueueOutboundState (331) */ - export interface CumulusPalletXcmpQueueOutboundState extends Enum { + /** @name CumulusPalletXcmpQueueOutboundState (364) */ + interface CumulusPalletXcmpQueueOutboundState extends Enum { readonly isOk: boolean; readonly isSuspended: boolean; readonly type: 'Ok' | 'Suspended'; } - /** @name CumulusPalletXcmpQueueQueueConfigData (333) */ - export interface CumulusPalletXcmpQueueQueueConfigData extends Struct { + /** @name CumulusPalletXcmpQueueQueueConfigData (366) */ + interface CumulusPalletXcmpQueueQueueConfigData extends Struct { readonly suspendThreshold: u32; readonly dropThreshold: u32; readonly resumeThreshold: u32; - readonly thresholdWeight: u64; - readonly weightRestrictDecay: u64; - readonly xcmpMaxIndividualWeight: u64; + readonly thresholdWeight: Weight; + readonly weightRestrictDecay: Weight; + readonly xcmpMaxIndividualWeight: Weight; } - /** @name CumulusPalletXcmpQueueError (335) */ - export interface CumulusPalletXcmpQueueError extends Enum { + /** @name CumulusPalletXcmpQueueError (368) */ + interface CumulusPalletXcmpQueueError extends Enum { readonly isFailedToSend: boolean; readonly isBadXcmOrigin: boolean; readonly isBadXcm: boolean; @@ -2818,8 +3302,8 @@ declare module '@polkadot/types/lookup' { readonly type: 'FailedToSend' | 'BadXcmOrigin' | 'BadXcm' | 'BadOverweightIndex' | 'WeightOverLimit'; } - /** @name PalletXcmError (336) */ - export interface PalletXcmError extends Enum { + /** @name PalletXcmError (369) */ + interface PalletXcmError extends Enum { readonly isUnreachable: boolean; readonly isSendFailure: boolean; readonly isFiltered: boolean; @@ -2836,30 +3320,30 @@ declare module '@polkadot/types/lookup' { readonly type: 'Unreachable' | 'SendFailure' | 'Filtered' | 'UnweighableMessage' | 'DestinationNotInvertible' | 'Empty' | 'CannotReanchor' | 'TooManyAssets' | 'InvalidOrigin' | 'BadVersion' | 'BadLocation' | 'NoSubscription' | 'AlreadySubscribed'; } - /** @name CumulusPalletXcmError (337) */ - export type CumulusPalletXcmError = Null; + /** @name CumulusPalletXcmError (370) */ + type CumulusPalletXcmError = Null; - /** @name CumulusPalletDmpQueueConfigData (338) */ - export interface CumulusPalletDmpQueueConfigData extends Struct { - readonly maxIndividual: u64; + /** @name CumulusPalletDmpQueueConfigData (371) */ + interface CumulusPalletDmpQueueConfigData extends Struct { + readonly maxIndividual: Weight; } - /** @name CumulusPalletDmpQueuePageIndexData (339) */ - export interface CumulusPalletDmpQueuePageIndexData extends Struct { + /** @name CumulusPalletDmpQueuePageIndexData (372) */ + interface CumulusPalletDmpQueuePageIndexData extends Struct { readonly beginUsed: u32; readonly endUsed: u32; readonly overweightCount: u64; } - /** @name CumulusPalletDmpQueueError (342) */ - export interface CumulusPalletDmpQueueError 
extends Enum { + /** @name CumulusPalletDmpQueueError (375) */ + interface CumulusPalletDmpQueueError extends Enum { readonly isUnknown: boolean; readonly isOverLimit: boolean; readonly type: 'Unknown' | 'OverLimit'; } - /** @name PalletUniqueError (346) */ - export interface PalletUniqueError extends Enum { + /** @name PalletUniqueError (379) */ + interface PalletUniqueError extends Enum { readonly isCollectionDecimalPointLimitExceeded: boolean; readonly isConfirmUnsetSponsorFail: boolean; readonly isEmptyArgument: boolean; @@ -2867,8 +3351,8 @@ declare module '@polkadot/types/lookup' { readonly type: 'CollectionDecimalPointLimitExceeded' | 'ConfirmUnsetSponsorFail' | 'EmptyArgument' | 'RepartitionCalledOnNonRefungibleCollection'; } - /** @name PalletUniqueSchedulerScheduledV3 (349) */ - export interface PalletUniqueSchedulerScheduledV3 extends Struct { + /** @name PalletUniqueSchedulerScheduledV3 (382) */ + interface PalletUniqueSchedulerScheduledV3 extends Struct { readonly maybeId: Option; readonly priority: u8; readonly call: FrameSupportScheduleMaybeHashed; @@ -2876,22 +3360,22 @@ declare module '@polkadot/types/lookup' { readonly origin: OpalRuntimeOriginCaller; } - /** @name OpalRuntimeOriginCaller (350) */ - export interface OpalRuntimeOriginCaller extends Enum { - readonly isVoid: boolean; + /** @name OpalRuntimeOriginCaller (383) */ + interface OpalRuntimeOriginCaller extends Enum { readonly isSystem: boolean; readonly asSystem: FrameSupportDispatchRawOrigin; + readonly isVoid: boolean; readonly isPolkadotXcm: boolean; readonly asPolkadotXcm: PalletXcmOrigin; readonly isCumulusXcm: boolean; readonly asCumulusXcm: CumulusPalletXcmOrigin; readonly isEthereum: boolean; readonly asEthereum: PalletEthereumRawOrigin; - readonly type: 'Void' | 'System' | 'PolkadotXcm' | 'CumulusXcm' | 'Ethereum'; + readonly type: 'System' | 'Void' | 'PolkadotXcm' | 'CumulusXcm' | 'Ethereum'; } - /** @name FrameSupportDispatchRawOrigin (351) */ - export interface FrameSupportDispatchRawOrigin extends Enum { + /** @name FrameSupportDispatchRawOrigin (384) */ + interface FrameSupportDispatchRawOrigin extends Enum { readonly isRoot: boolean; readonly isSigned: boolean; readonly asSigned: AccountId32; @@ -2899,8 +3383,8 @@ declare module '@polkadot/types/lookup' { readonly type: 'Root' | 'Signed' | 'None'; } - /** @name PalletXcmOrigin (352) */ - export interface PalletXcmOrigin extends Enum { + /** @name PalletXcmOrigin (385) */ + interface PalletXcmOrigin extends Enum { readonly isXcm: boolean; readonly asXcm: XcmV1MultiLocation; readonly isResponse: boolean; @@ -2908,26 +3392,26 @@ declare module '@polkadot/types/lookup' { readonly type: 'Xcm' | 'Response'; } - /** @name CumulusPalletXcmOrigin (353) */ - export interface CumulusPalletXcmOrigin extends Enum { + /** @name CumulusPalletXcmOrigin (386) */ + interface CumulusPalletXcmOrigin extends Enum { readonly isRelay: boolean; readonly isSiblingParachain: boolean; readonly asSiblingParachain: u32; readonly type: 'Relay' | 'SiblingParachain'; } - /** @name PalletEthereumRawOrigin (354) */ - export interface PalletEthereumRawOrigin extends Enum { + /** @name PalletEthereumRawOrigin (387) */ + interface PalletEthereumRawOrigin extends Enum { readonly isEthereumTransaction: boolean; readonly asEthereumTransaction: H160; readonly type: 'EthereumTransaction'; } - /** @name SpCoreVoid (355) */ - export type SpCoreVoid = Null; + /** @name SpCoreVoid (388) */ + type SpCoreVoid = Null; - /** @name PalletUniqueSchedulerError (356) */ - export interface 
PalletUniqueSchedulerError extends Enum { + /** @name PalletUniqueSchedulerError (389) */ + interface PalletUniqueSchedulerError extends Enum { readonly isFailedToSchedule: boolean; readonly isNotFound: boolean; readonly isTargetBlockNumberInPast: boolean; @@ -2935,21 +3419,21 @@ declare module '@polkadot/types/lookup' { readonly type: 'FailedToSchedule' | 'NotFound' | 'TargetBlockNumberInPast' | 'RescheduleNoChange'; } - /** @name UpDataStructsCollection (357) */ - export interface UpDataStructsCollection extends Struct { + /** @name UpDataStructsCollection (390) */ + interface UpDataStructsCollection extends Struct { readonly owner: AccountId32; readonly mode: UpDataStructsCollectionMode; readonly name: Vec; readonly description: Vec; readonly tokenPrefix: Bytes; - readonly sponsorship: UpDataStructsSponsorshipState; + readonly sponsorship: UpDataStructsSponsorshipStateAccountId32; readonly limits: UpDataStructsCollectionLimits; readonly permissions: UpDataStructsCollectionPermissions; - readonly externalCollection: bool; + readonly flags: U8aFixed; } - /** @name UpDataStructsSponsorshipState (358) */ - export interface UpDataStructsSponsorshipState extends Enum { + /** @name UpDataStructsSponsorshipStateAccountId32 (391) */ + interface UpDataStructsSponsorshipStateAccountId32 extends Enum { readonly isDisabled: boolean; readonly isUnconfirmed: boolean; readonly asUnconfirmed: AccountId32; @@ -2958,58 +3442,66 @@ declare module '@polkadot/types/lookup' { readonly type: 'Disabled' | 'Unconfirmed' | 'Confirmed'; } - /** @name UpDataStructsProperties (359) */ - export interface UpDataStructsProperties extends Struct { + /** @name UpDataStructsProperties (393) */ + interface UpDataStructsProperties extends Struct { readonly map: UpDataStructsPropertiesMapBoundedVec; readonly consumedSpace: u32; readonly spaceLimit: u32; } - /** @name UpDataStructsPropertiesMapBoundedVec (360) */ - export interface UpDataStructsPropertiesMapBoundedVec extends BTreeMap {} + /** @name UpDataStructsPropertiesMapBoundedVec (394) */ + interface UpDataStructsPropertiesMapBoundedVec extends BTreeMap {} - /** @name UpDataStructsPropertiesMapPropertyPermission (365) */ - export interface UpDataStructsPropertiesMapPropertyPermission extends BTreeMap {} + /** @name UpDataStructsPropertiesMapPropertyPermission (399) */ + interface UpDataStructsPropertiesMapPropertyPermission extends BTreeMap {} - /** @name UpDataStructsCollectionStats (372) */ - export interface UpDataStructsCollectionStats extends Struct { + /** @name UpDataStructsCollectionStats (406) */ + interface UpDataStructsCollectionStats extends Struct { readonly created: u32; readonly destroyed: u32; readonly alive: u32; } - /** @name UpDataStructsTokenChild (373) */ - export interface UpDataStructsTokenChild extends Struct { + /** @name UpDataStructsTokenChild (407) */ + interface UpDataStructsTokenChild extends Struct { readonly token: u32; readonly collection: u32; } - /** @name PhantomTypeUpDataStructs (374) */ - export interface PhantomTypeUpDataStructs extends Vec> {} + /** @name PhantomTypeUpDataStructs (408) */ + interface PhantomTypeUpDataStructs extends Vec> {} - /** @name UpDataStructsTokenData (376) */ - export interface UpDataStructsTokenData extends Struct { + /** @name UpDataStructsTokenData (410) */ + interface UpDataStructsTokenData extends Struct { readonly properties: Vec; readonly owner: Option; + readonly pieces: u128; } - /** @name UpDataStructsRpcCollection (378) */ - export interface UpDataStructsRpcCollection extends Struct { + /** 
@name UpDataStructsRpcCollection (412) */ + interface UpDataStructsRpcCollection extends Struct { readonly owner: AccountId32; readonly mode: UpDataStructsCollectionMode; readonly name: Vec; readonly description: Vec; readonly tokenPrefix: Bytes; - readonly sponsorship: UpDataStructsSponsorshipState; + readonly sponsorship: UpDataStructsSponsorshipStateAccountId32; readonly limits: UpDataStructsCollectionLimits; readonly permissions: UpDataStructsCollectionPermissions; readonly tokenPropertyPermissions: Vec; readonly properties: Vec; readonly readOnly: bool; + readonly flags: UpDataStructsRpcCollectionFlags; + } + + /** @name UpDataStructsRpcCollectionFlags (413) */ + interface UpDataStructsRpcCollectionFlags extends Struct { + readonly foreign: bool; + readonly erc721metadata: bool; } - /** @name RmrkTraitsCollectionCollectionInfo (379) */ - export interface RmrkTraitsCollectionCollectionInfo extends Struct { + /** @name RmrkTraitsCollectionCollectionInfo (414) */ + interface RmrkTraitsCollectionCollectionInfo extends Struct { readonly issuer: AccountId32; readonly metadata: Bytes; readonly max: Option; @@ -3017,8 +3509,8 @@ declare module '@polkadot/types/lookup' { readonly nftsCount: u32; } - /** @name RmrkTraitsNftNftInfo (380) */ - export interface RmrkTraitsNftNftInfo extends Struct { + /** @name RmrkTraitsNftNftInfo (415) */ + interface RmrkTraitsNftNftInfo extends Struct { readonly owner: RmrkTraitsNftAccountIdOrCollectionNftTuple; readonly royalty: Option; readonly metadata: Bytes; @@ -3026,41 +3518,41 @@ declare module '@polkadot/types/lookup' { readonly pending: bool; } - /** @name RmrkTraitsNftRoyaltyInfo (382) */ - export interface RmrkTraitsNftRoyaltyInfo extends Struct { + /** @name RmrkTraitsNftRoyaltyInfo (417) */ + interface RmrkTraitsNftRoyaltyInfo extends Struct { readonly recipient: AccountId32; readonly amount: Permill; } - /** @name RmrkTraitsResourceResourceInfo (383) */ - export interface RmrkTraitsResourceResourceInfo extends Struct { + /** @name RmrkTraitsResourceResourceInfo (418) */ + interface RmrkTraitsResourceResourceInfo extends Struct { readonly id: u32; readonly resource: RmrkTraitsResourceResourceTypes; readonly pending: bool; readonly pendingRemoval: bool; } - /** @name RmrkTraitsPropertyPropertyInfo (384) */ - export interface RmrkTraitsPropertyPropertyInfo extends Struct { + /** @name RmrkTraitsPropertyPropertyInfo (419) */ + interface RmrkTraitsPropertyPropertyInfo extends Struct { readonly key: Bytes; readonly value: Bytes; } - /** @name RmrkTraitsBaseBaseInfo (385) */ - export interface RmrkTraitsBaseBaseInfo extends Struct { + /** @name RmrkTraitsBaseBaseInfo (420) */ + interface RmrkTraitsBaseBaseInfo extends Struct { readonly issuer: AccountId32; readonly baseType: Bytes; readonly symbol: Bytes; } - /** @name RmrkTraitsNftNftChild (386) */ - export interface RmrkTraitsNftNftChild extends Struct { + /** @name RmrkTraitsNftNftChild (421) */ + interface RmrkTraitsNftNftChild extends Struct { readonly collectionId: u32; readonly nftId: u32; } - /** @name PalletCommonError (388) */ - export interface PalletCommonError extends Enum { + /** @name PalletCommonError (423) */ + interface PalletCommonError extends Enum { readonly isCollectionNotFound: boolean; readonly isMustBeTokenOwner: boolean; readonly isNoPermission: boolean; @@ -3098,8 +3590,8 @@ declare module '@polkadot/types/lookup' { readonly type: 'CollectionNotFound' | 'MustBeTokenOwner' | 'NoPermission' | 'CantDestroyNotEmptyCollection' | 'PublicMintingNotAllowed' | 'AddressNotInAllowlist' | 
'CollectionNameLimitExceeded' | 'CollectionDescriptionLimitExceeded' | 'CollectionTokenPrefixLimitExceeded' | 'TotalCollectionsLimitExceeded' | 'CollectionAdminCountExceeded' | 'CollectionLimitBoundsExceeded' | 'OwnerPermissionsCantBeReverted' | 'TransferNotAllowed' | 'AccountTokenLimitExceeded' | 'CollectionTokenLimitExceeded' | 'MetadataFlagFrozen' | 'TokenNotFound' | 'TokenValueTooLow' | 'ApprovedValueTooLow' | 'CantApproveMoreThanOwned' | 'AddressIsZero' | 'UnsupportedOperation' | 'NotSufficientFounds' | 'UserIsNotAllowedToNest' | 'SourceCollectionIsNotAllowedToNest' | 'CollectionFieldSizeExceeded' | 'NoSpaceForProperty' | 'PropertyLimitReached' | 'PropertyKeyIsTooLong' | 'InvalidCharacterInPropertyKey' | 'EmptyPropertyKey' | 'CollectionIsExternal' | 'CollectionIsInternal'; } - /** @name PalletFungibleError (390) */ - export interface PalletFungibleError extends Enum { + /** @name PalletFungibleError (425) */ + interface PalletFungibleError extends Enum { readonly isNotFungibleDataUsedToMintFungibleCollectionToken: boolean; readonly isFungibleItemsHaveNoId: boolean; readonly isFungibleItemsDontHaveData: boolean; @@ -3108,13 +3600,13 @@ declare module '@polkadot/types/lookup' { readonly type: 'NotFungibleDataUsedToMintFungibleCollectionToken' | 'FungibleItemsHaveNoId' | 'FungibleItemsDontHaveData' | 'FungibleDisallowsNesting' | 'SettingPropertiesNotAllowed'; } - /** @name PalletRefungibleItemData (391) */ - export interface PalletRefungibleItemData extends Struct { + /** @name PalletRefungibleItemData (426) */ + interface PalletRefungibleItemData extends Struct { readonly constData: Bytes; } - /** @name PalletRefungibleError (395) */ - export interface PalletRefungibleError extends Enum { + /** @name PalletRefungibleError (431) */ + interface PalletRefungibleError extends Enum { readonly isNotRefungibleDataUsedToMintFungibleCollectionToken: boolean; readonly isWrongRefungiblePieces: boolean; readonly isRepartitionWhileNotOwningAllPieces: boolean; @@ -3123,28 +3615,28 @@ declare module '@polkadot/types/lookup' { readonly type: 'NotRefungibleDataUsedToMintFungibleCollectionToken' | 'WrongRefungiblePieces' | 'RepartitionWhileNotOwningAllPieces' | 'RefungibleDisallowsNesting' | 'SettingPropertiesNotAllowed'; } - /** @name PalletNonfungibleItemData (396) */ - export interface PalletNonfungibleItemData extends Struct { + /** @name PalletNonfungibleItemData (432) */ + interface PalletNonfungibleItemData extends Struct { readonly owner: PalletEvmAccountBasicCrossAccountIdRepr; } - /** @name UpDataStructsPropertyScope (398) */ - export interface UpDataStructsPropertyScope extends Enum { + /** @name UpDataStructsPropertyScope (434) */ + interface UpDataStructsPropertyScope extends Enum { readonly isNone: boolean; readonly isRmrk: boolean; readonly type: 'None' | 'Rmrk'; } - /** @name PalletNonfungibleError (400) */ - export interface PalletNonfungibleError extends Enum { + /** @name PalletNonfungibleError (436) */ + interface PalletNonfungibleError extends Enum { readonly isNotNonfungibleDataUsedToMintFungibleCollectionToken: boolean; readonly isNonfungibleItemsHaveNoAmount: boolean; readonly isCantBurnNftWithChildren: boolean; readonly type: 'NotNonfungibleDataUsedToMintFungibleCollectionToken' | 'NonfungibleItemsHaveNoAmount' | 'CantBurnNftWithChildren'; } - /** @name PalletStructureError (401) */ - export interface PalletStructureError extends Enum { + /** @name PalletStructureError (437) */ + interface PalletStructureError extends Enum { readonly isOuroborosDetected: boolean; readonly 
isDepthLimit: boolean; readonly isBreadthLimit: boolean; @@ -3152,10 +3644,9 @@ declare module '@polkadot/types/lookup' { readonly type: 'OuroborosDetected' | 'DepthLimit' | 'BreadthLimit' | 'TokenNotFound'; } - /** @name PalletRmrkCoreError (402) */ - export interface PalletRmrkCoreError extends Enum { + /** @name PalletRmrkCoreError (438) */ + interface PalletRmrkCoreError extends Enum { readonly isCorruptedCollectionType: boolean; - readonly isNftTypeEncodeError: boolean; readonly isRmrkPropertyKeyIsTooLong: boolean; readonly isRmrkPropertyValueIsTooLong: boolean; readonly isRmrkPropertyIsNotFound: boolean; @@ -3174,11 +3665,11 @@ declare module '@polkadot/types/lookup' { readonly isCannotRejectNonPendingNft: boolean; readonly isResourceNotPending: boolean; readonly isNoAvailableResourceId: boolean; - readonly type: 'CorruptedCollectionType' | 'NftTypeEncodeError' | 'RmrkPropertyKeyIsTooLong' | 'RmrkPropertyValueIsTooLong' | 'RmrkPropertyIsNotFound' | 'UnableToDecodeRmrkData' | 'CollectionNotEmpty' | 'NoAvailableCollectionId' | 'NoAvailableNftId' | 'CollectionUnknown' | 'NoPermission' | 'NonTransferable' | 'CollectionFullOrLocked' | 'ResourceDoesntExist' | 'CannotSendToDescendentOrSelf' | 'CannotAcceptNonOwnedNft' | 'CannotRejectNonOwnedNft' | 'CannotRejectNonPendingNft' | 'ResourceNotPending' | 'NoAvailableResourceId'; + readonly type: 'CorruptedCollectionType' | 'RmrkPropertyKeyIsTooLong' | 'RmrkPropertyValueIsTooLong' | 'RmrkPropertyIsNotFound' | 'UnableToDecodeRmrkData' | 'CollectionNotEmpty' | 'NoAvailableCollectionId' | 'NoAvailableNftId' | 'CollectionUnknown' | 'NoPermission' | 'NonTransferable' | 'CollectionFullOrLocked' | 'ResourceDoesntExist' | 'CannotSendToDescendentOrSelf' | 'CannotAcceptNonOwnedNft' | 'CannotRejectNonOwnedNft' | 'CannotRejectNonPendingNft' | 'ResourceNotPending' | 'NoAvailableResourceId'; } - /** @name PalletRmrkEquipError (404) */ - export interface PalletRmrkEquipError extends Enum { + /** @name PalletRmrkEquipError (440) */ + interface PalletRmrkEquipError extends Enum { readonly isPermissionError: boolean; readonly isNoAvailableBaseId: boolean; readonly isNoAvailablePartId: boolean; @@ -3189,8 +3680,28 @@ declare module '@polkadot/types/lookup' { readonly type: 'PermissionError' | 'NoAvailableBaseId' | 'NoAvailablePartId' | 'BaseDoesntExist' | 'NeedsDefaultThemeFirst' | 'PartDoesntExist' | 'NoEquippableOnFixedPart'; } - /** @name PalletEvmError (407) */ - export interface PalletEvmError extends Enum { + /** @name PalletAppPromotionError (446) */ + interface PalletAppPromotionError extends Enum { + readonly isAdminNotSet: boolean; + readonly isNoPermission: boolean; + readonly isNotSufficientFunds: boolean; + readonly isPendingForBlockOverflow: boolean; + readonly isSponsorNotSet: boolean; + readonly isIncorrectLockedBalanceOperation: boolean; + readonly type: 'AdminNotSet' | 'NoPermission' | 'NotSufficientFunds' | 'PendingForBlockOverflow' | 'SponsorNotSet' | 'IncorrectLockedBalanceOperation'; + } + + /** @name PalletForeignAssetsModuleError (447) */ + interface PalletForeignAssetsModuleError extends Enum { + readonly isBadLocation: boolean; + readonly isMultiLocationExisted: boolean; + readonly isAssetIdNotExists: boolean; + readonly isAssetIdExisted: boolean; + readonly type: 'BadLocation' | 'MultiLocationExisted' | 'AssetIdNotExists' | 'AssetIdExisted'; + } + + /** @name PalletEvmError (450) */ + interface PalletEvmError extends Enum { readonly isBalanceLow: boolean; readonly isFeeOverflow: boolean; readonly isPaymentOverflow: boolean; @@ -3200,8 
+3711,8 @@ declare module '@polkadot/types/lookup' { readonly type: 'BalanceLow' | 'FeeOverflow' | 'PaymentOverflow' | 'WithdrawFailed' | 'GasPriceTooLow' | 'InvalidNonce'; } - /** @name FpRpcTransactionStatus (410) */ - export interface FpRpcTransactionStatus extends Struct { + /** @name FpRpcTransactionStatus (453) */ + interface FpRpcTransactionStatus extends Struct { readonly transactionHash: H256; readonly transactionIndex: u32; readonly from: H160; @@ -3211,11 +3722,11 @@ declare module '@polkadot/types/lookup' { readonly logsBloom: EthbloomBloom; } - /** @name EthbloomBloom (412) */ - export interface EthbloomBloom extends U8aFixed {} + /** @name EthbloomBloom (455) */ + interface EthbloomBloom extends U8aFixed {} - /** @name EthereumReceiptReceiptV3 (414) */ - export interface EthereumReceiptReceiptV3 extends Enum { + /** @name EthereumReceiptReceiptV3 (457) */ + interface EthereumReceiptReceiptV3 extends Enum { readonly isLegacy: boolean; readonly asLegacy: EthereumReceiptEip658ReceiptData; readonly isEip2930: boolean; @@ -3225,23 +3736,23 @@ declare module '@polkadot/types/lookup' { readonly type: 'Legacy' | 'Eip2930' | 'Eip1559'; } - /** @name EthereumReceiptEip658ReceiptData (415) */ - export interface EthereumReceiptEip658ReceiptData extends Struct { + /** @name EthereumReceiptEip658ReceiptData (458) */ + interface EthereumReceiptEip658ReceiptData extends Struct { readonly statusCode: u8; readonly usedGas: U256; readonly logsBloom: EthbloomBloom; readonly logs: Vec; } - /** @name EthereumBlock (416) */ - export interface EthereumBlock extends Struct { + /** @name EthereumBlock (459) */ + interface EthereumBlock extends Struct { readonly header: EthereumHeader; readonly transactions: Vec; readonly ommers: Vec; } - /** @name EthereumHeader (417) */ - export interface EthereumHeader extends Struct { + /** @name EthereumHeader (460) */ + interface EthereumHeader extends Struct { readonly parentHash: H256; readonly ommersHash: H256; readonly beneficiary: H160; @@ -3259,49 +3770,68 @@ declare module '@polkadot/types/lookup' { readonly nonce: EthereumTypesHashH64; } - /** @name EthereumTypesHashH64 (418) */ - export interface EthereumTypesHashH64 extends U8aFixed {} + /** @name EthereumTypesHashH64 (461) */ + interface EthereumTypesHashH64 extends U8aFixed {} - /** @name PalletEthereumError (423) */ - export interface PalletEthereumError extends Enum { + /** @name PalletEthereumError (466) */ + interface PalletEthereumError extends Enum { readonly isInvalidSignature: boolean; readonly isPreLogExists: boolean; readonly type: 'InvalidSignature' | 'PreLogExists'; } - /** @name PalletEvmCoderSubstrateError (424) */ - export interface PalletEvmCoderSubstrateError extends Enum { + /** @name PalletEvmCoderSubstrateError (467) */ + interface PalletEvmCoderSubstrateError extends Enum { readonly isOutOfGas: boolean; readonly isOutOfFund: boolean; readonly type: 'OutOfGas' | 'OutOfFund'; } - /** @name PalletEvmContractHelpersSponsoringModeT (425) */ - export interface PalletEvmContractHelpersSponsoringModeT extends Enum { + /** @name UpDataStructsSponsorshipStateBasicCrossAccountIdRepr (468) */ + interface UpDataStructsSponsorshipStateBasicCrossAccountIdRepr extends Enum { + readonly isDisabled: boolean; + readonly isUnconfirmed: boolean; + readonly asUnconfirmed: PalletEvmAccountBasicCrossAccountIdRepr; + readonly isConfirmed: boolean; + readonly asConfirmed: PalletEvmAccountBasicCrossAccountIdRepr; + readonly type: 'Disabled' | 'Unconfirmed' | 'Confirmed'; + } + + /** @name 
PalletEvmContractHelpersSponsoringModeT (469) */ + interface PalletEvmContractHelpersSponsoringModeT extends Enum { readonly isDisabled: boolean; readonly isAllowlisted: boolean; readonly isGenerous: boolean; readonly type: 'Disabled' | 'Allowlisted' | 'Generous'; } - /** @name PalletEvmContractHelpersError (427) */ - export interface PalletEvmContractHelpersError extends Enum { + /** @name PalletEvmContractHelpersError (475) */ + interface PalletEvmContractHelpersError extends Enum { readonly isNoPermission: boolean; - readonly type: 'NoPermission'; + readonly isNoPendingSponsor: boolean; + readonly isTooManyMethodsHaveSponsoredLimit: boolean; + readonly type: 'NoPermission' | 'NoPendingSponsor' | 'TooManyMethodsHaveSponsoredLimit'; } - /** @name PalletEvmMigrationError (428) */ - export interface PalletEvmMigrationError extends Enum { + /** @name PalletEvmMigrationError (476) */ + interface PalletEvmMigrationError extends Enum { readonly isAccountNotEmpty: boolean; readonly isAccountIsNotMigrating: boolean; readonly type: 'AccountNotEmpty' | 'AccountIsNotMigrating'; } - /** @name PalletMaintenanceError (429) */ - export type PalletMaintenanceError = Null; + /** @name PalletMaintenanceError (477) */ + type PalletMaintenanceError = Null; + + /** @name PalletTestUtilsError (478) */ + interface PalletTestUtilsError extends Enum { + readonly isTestPalletDisabled: boolean; + readonly isTriggerRollback: boolean; + readonly type: 'TestPalletDisabled' | 'TriggerRollback'; + } - /** @name SpRuntimeMultiSignature (431) */ - export interface SpRuntimeMultiSignature extends Enum { + /** @name SpRuntimeMultiSignature (480) */ + interface SpRuntimeMultiSignature extends Enum { readonly isEd25519: boolean; readonly asEd25519: SpCoreEd25519Signature; readonly isSr25519: boolean; @@ -3311,37 +3841,40 @@ declare module '@polkadot/types/lookup' { readonly type: 'Ed25519' | 'Sr25519' | 'Ecdsa'; } - /** @name SpCoreEd25519Signature (432) */ - export interface SpCoreEd25519Signature extends U8aFixed {} + /** @name SpCoreEd25519Signature (481) */ + interface SpCoreEd25519Signature extends U8aFixed {} + + /** @name SpCoreSr25519Signature (483) */ + interface SpCoreSr25519Signature extends U8aFixed {} - /** @name SpCoreSr25519Signature (434) */ - export interface SpCoreSr25519Signature extends U8aFixed {} + /** @name SpCoreEcdsaSignature (484) */ + interface SpCoreEcdsaSignature extends U8aFixed {} - /** @name SpCoreEcdsaSignature (435) */ - export interface SpCoreEcdsaSignature extends U8aFixed {} + /** @name FrameSystemExtensionsCheckSpecVersion (487) */ + type FrameSystemExtensionsCheckSpecVersion = Null; - /** @name FrameSystemExtensionsCheckSpecVersion (438) */ - export type FrameSystemExtensionsCheckSpecVersion = Null; + /** @name FrameSystemExtensionsCheckTxVersion (488) */ + type FrameSystemExtensionsCheckTxVersion = Null; - /** @name FrameSystemExtensionsCheckGenesis (439) */ - export type FrameSystemExtensionsCheckGenesis = Null; + /** @name FrameSystemExtensionsCheckGenesis (489) */ + type FrameSystemExtensionsCheckGenesis = Null; - /** @name FrameSystemExtensionsCheckNonce (442) */ - export interface FrameSystemExtensionsCheckNonce extends Compact {} + /** @name FrameSystemExtensionsCheckNonce (492) */ + interface FrameSystemExtensionsCheckNonce extends Compact {} - /** @name FrameSystemExtensionsCheckWeight (443) */ - export type FrameSystemExtensionsCheckWeight = Null; + /** @name FrameSystemExtensionsCheckWeight (493) */ + type FrameSystemExtensionsCheckWeight = Null; - /** @name 
OpalRuntimeCheckMaintenance (444) */ - export type OpalRuntimeCheckMaintenance = Null; + /** @name OpalRuntimeRuntimeCommonMaintenanceCheckMaintenance (494) */ + type OpalRuntimeRuntimeCommonMaintenanceCheckMaintenance = Null; - /** @name PalletTemplateTransactionPaymentChargeTransactionPayment (445) */ - export interface PalletTemplateTransactionPaymentChargeTransactionPayment extends Compact {} + /** @name PalletTemplateTransactionPaymentChargeTransactionPayment (495) */ + interface PalletTemplateTransactionPaymentChargeTransactionPayment extends Compact {} - /** @name OpalRuntimeRuntime (446) */ - export type OpalRuntimeRuntime = Null; + /** @name OpalRuntimeRuntime (496) */ + type OpalRuntimeRuntime = Null; - /** @name PalletEthereumFakeTransactionFinalizer (447) */ - export type PalletEthereumFakeTransactionFinalizer = Null; + /** @name PalletEthereumFakeTransactionFinalizer (497) */ + type PalletEthereumFakeTransactionFinalizer = Null; } // declare module diff --git a/tests/src/interfaces/types.ts b/tests/src/interfaces/types.ts index 7db708bdf0..17cdd49c05 100644 --- a/tests/src/interfaces/types.ts +++ b/tests/src/interfaces/types.ts @@ -2,5 +2,6 @@ /* eslint-disable */ export * from './unique/types'; +export * from './appPromotion/types'; export * from './rmrk/types'; export * from './default/types'; diff --git a/tests/src/interfaces/unique/definitions.ts b/tests/src/interfaces/unique/definitions.ts index 9229e6eb07..303e5d2813 100644 --- a/tests/src/interfaces/unique/definitions.ts +++ b/tests/src/interfaces/unique/definitions.ts @@ -24,7 +24,7 @@ const CROSS_ACCOUNT_ID_TYPE = 'PalletEvmAccountBasicCrossAccountIdRepr'; const collectionParam = {name: 'collection', type: 'u32'}; const tokenParam = {name: 'tokenId', type: 'u32'}; -const propertyKeysParam = {name: 'propertyKeys', type: 'Vec', isOptional: true}; +const propertyKeysParam = {name: 'propertyKeys', type: 'Option>', isOptional: true}; const crossAccountParam = (name = 'account') => ({name, type: CROSS_ACCOUNT_ID_TYPE}); const atParam = {name: 'at', type: 'Hash', isOptional: true}; @@ -37,47 +37,143 @@ const fun = (description: string, params: RpcParam[], type: string) => ({ export default { types: {}, rpc: { - adminlist: fun('Get admin list', [collectionParam], 'Vec'), - allowlist: fun('Get allowlist', [collectionParam], 'Vec'), - - accountTokens: fun('Get tokens owned by account', [collectionParam, crossAccountParam()], 'Vec'), - collectionTokens: fun('Get tokens contained in collection', [collectionParam], 'Vec'), - - lastTokenId: fun('Get last token id', [collectionParam], 'u32'), - totalSupply: fun('Get amount of unique collection tokens', [collectionParam], 'u32'), - accountBalance: fun('Get amount of different user tokens', [collectionParam, crossAccountParam()], 'u32'), - balance: fun('Get amount of specific account token', [collectionParam, crossAccountParam(), tokenParam], 'u128'), - allowance: fun('Get allowed amount', [collectionParam, crossAccountParam('sender'), crossAccountParam('spender'), tokenParam], 'u128'), - tokenOwner: fun('Get token owner', [collectionParam, tokenParam], `Option<${CROSS_ACCOUNT_ID_TYPE}>`), - topmostTokenOwner: fun('Get token owner, in case of nested token - find parent recursive', [collectionParam, tokenParam], `Option<${CROSS_ACCOUNT_ID_TYPE}>`), - tokenChildren: fun('Get tokens nested directly into the token', [collectionParam, tokenParam], 'Vec'), - constMetadata: fun('Get token constant metadata', [collectionParam, tokenParam], 'Vec'), - variableMetadata: fun('Get token variable 
metadata', [collectionParam, tokenParam], 'Vec'), + accountTokens: fun( + 'Get tokens owned by an account in a collection', + [collectionParam, crossAccountParam()], + 'Vec', + ), + collectionTokens: fun( + 'Get tokens contained within a collection', + [collectionParam], + 'Vec', + ), + tokenExists: fun( + 'Check if the token exists', + [collectionParam, tokenParam], + 'bool', + ), + + tokenOwner: fun( + 'Get the token owner', + [collectionParam, tokenParam], + `Option<${CROSS_ACCOUNT_ID_TYPE}>`, + ), + topmostTokenOwner: fun( + 'Get the topmost token owner in the hierarchy of a possibly nested token', + [collectionParam, tokenParam], + `Option<${CROSS_ACCOUNT_ID_TYPE}>`, + ), + tokenOwners: fun( + 'Returns 10 tokens owners in no particular order', + [collectionParam, tokenParam], + `Vec<${CROSS_ACCOUNT_ID_TYPE}>`, + ), + tokenChildren: fun( + 'Get tokens nested directly into the token', + [collectionParam, tokenParam], + 'Vec', + ), + collectionProperties: fun( - 'Get collection properties', + 'Get collection properties, optionally limited to the provided keys', [collectionParam, propertyKeysParam], 'Vec', ), tokenProperties: fun( - 'Get token properties', + 'Get token properties, optionally limited to the provided keys', [collectionParam, tokenParam, propertyKeysParam], 'Vec', ), propertyPermissions: fun( - 'Get property permissions', + 'Get property permissions, optionally limited to the provided keys', [collectionParam, propertyKeysParam], 'Vec', ), + + constMetadata: fun( + 'Get token constant metadata', + [collectionParam, tokenParam], + 'Vec', + ), + variableMetadata: fun( + 'Get token variable metadata', + [collectionParam, tokenParam], + 'Vec', + ), + tokenData: fun( - 'Get token data', + 'Get token data, including properties, optionally limited to the provided keys, and total pieces for an RFT', [collectionParam, tokenParam, propertyKeysParam], 'UpDataStructsTokenData', ), - tokenExists: fun('Check if token exists', [collectionParam, tokenParam], 'bool'), - collectionById: fun('Get collection by specified id', [collectionParam], 'Option'), - collectionStats: fun('Get collection stats', [], 'UpDataStructsCollectionStats'), - allowed: fun('Check if user is allowed to use collection', [collectionParam, crossAccountParam()], 'bool'), - nextSponsored: fun('Get number of blocks when sponsored transaction is available', [collectionParam, crossAccountParam(), tokenParam], 'Option'), - effectiveCollectionLimits: fun('Get effective collection limits', [collectionParam], 'Option'), + totalSupply: fun( + 'Get the amount of distinctive tokens present in a collection', + [collectionParam], + 'u32', + ), + + accountBalance: fun( + 'Get the amount of any user tokens owned by an account', + [collectionParam, crossAccountParam()], + 'u32', + ), + balance: fun( + 'Get the amount of a specific token owned by an account', + [collectionParam, crossAccountParam(), tokenParam], + 'u128', + ), + allowance: fun( + 'Get the amount of currently possible sponsored transactions on a token for the fee to be taken off a sponsor', + [collectionParam, crossAccountParam('sender'), crossAccountParam('spender'), tokenParam], + 'u128', + ), + + adminlist: fun( + 'Get the list of admin accounts of a collection', + [collectionParam], + 'Vec', + ), + allowlist: fun( + 'Get the list of accounts allowed to operate within a collection', + [collectionParam], + 'Vec', + ), + allowed: fun( + 'Check if a user is allowed to operate within a collection', + [collectionParam, crossAccountParam()], + 'bool', + ), + + lastTokenId: 
fun( + 'Get the last token ID created in a collection', + [collectionParam], + 'u32', + ), + collectionById: fun( + 'Get a collection by the specified ID', + [collectionParam], + 'Option', + ), + collectionStats: fun( + 'Get chain stats about collections', + [], + 'UpDataStructsCollectionStats', + ), + + nextSponsored: fun( + 'Get the number of blocks until sponsoring a transaction is available', + [collectionParam, crossAccountParam(), tokenParam], + 'Option', + ), + effectiveCollectionLimits: fun( + 'Get effective collection limits', + [collectionParam], + 'Option', + ), + totalPieces: fun( + 'Get the total amount of pieces of an RFT', + [collectionParam, tokenParam], + 'Option', + ), }, }; diff --git a/tests/src/limits.test.ts b/tests/src/limits.test.ts index 5df99145db..a7a756443f 100644 --- a/tests/src/limits.test.ts +++ b/tests/src/limits.test.ts @@ -15,89 +15,85 @@ // along with Unique Network. If not, see . import {IKeyringPair} from '@polkadot/types/types'; -import usingApi from './substrate/substrate-api'; -import { - createCollectionExpectSuccess, - destroyCollectionExpectSuccess, - setCollectionLimitsExpectSuccess, - setCollectionSponsorExpectSuccess, - confirmSponsorshipExpectSuccess, - createItemExpectSuccess, - createItemExpectFailure, - transferExpectSuccess, - getFreeBalance, - waitNewBlocks, burnItemExpectSuccess, -} from './util/helpers'; -import {expect} from 'chai'; +import {expect, itSub, Pallets, requirePalletsOrSkip, usingPlaygrounds} from './util'; describe('Number of tokens per address (NFT)', () => { let alice: IKeyringPair; before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice] = await helper.arrange.createAccounts([10n], donor); }); }); - it.skip('Collection limits allow greater number than chain limits, chain limits are enforced', async () => { - - const collectionId = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await setCollectionLimitsExpectSuccess(alice, collectionId, {accountTokenOwnershipLimit: 20}); + itSub.skip('Collection limits allow greater number than chain limits, chain limits are enforced', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {}); + await collection.setLimits(alice, {accountTokenOwnershipLimit: 20}); + for(let i = 0; i < 10; i++){ - await createItemExpectSuccess(alice, collectionId, 'NFT'); + await expect(collection.mintToken(alice)).to.be.not.rejected; } - await createItemExpectFailure(alice, collectionId, 'NFT'); + await expect(collection.mintToken(alice)).to.be.rejectedWith(/common\.AccountTokenLimitExceeded/); for(let i = 1; i < 11; i++) { - await burnItemExpectSuccess(alice, collectionId, i); + await expect(collection.burnToken(alice, i)).to.be.not.rejected; } - await destroyCollectionExpectSuccess(collectionId); + await collection.burn(alice); }); - - it('Collection limits allow lower number than chain limits, collection limits are enforced', async () => { - - const collectionId = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await setCollectionLimitsExpectSuccess(alice, collectionId, {accountTokenOwnershipLimit: 1}); - await createItemExpectSuccess(alice, collectionId, 'NFT'); - await createItemExpectFailure(alice, collectionId, 'NFT'); - await burnItemExpectSuccess(alice, collectionId, 1); - await destroyCollectionExpectSuccess(collectionId); + + itSub('Collection limits allow 
lower number than chain limits, collection limits are enforced', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {}); + await collection.setLimits(alice, {accountTokenOwnershipLimit: 1}); + + await collection.mintToken(alice); + await expect(collection.mintToken(alice)).to.be.rejectedWith(/common\.AccountTokenLimitExceeded/); + + await collection.burnToken(alice, 1); + await expect(collection.burn(alice)).to.be.not.rejected; }); }); describe('Number of tokens per address (ReFungible)', () => { let alice: IKeyringPair; - before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); + before(async function() { + await usingPlaygrounds(async (helper, privateKey) => { + requirePalletsOrSkip(this, helper, [Pallets.ReFungible]); + + const donor = await privateKey({filename: __filename}); + [alice] = await helper.arrange.createAccounts([10n], donor); }); }); - it.skip('Collection limits allow greater number than chain limits, chain limits are enforced', async () => { - const collectionId = await createCollectionExpectSuccess({mode: {type: 'ReFungible'}}); - await setCollectionLimitsExpectSuccess(alice, collectionId, {accountTokenOwnershipLimit: 20}); + itSub.skip('Collection limits allow greater number than chain limits, chain limits are enforced', async ({helper}) => { + const collection = await helper.rft.mintCollection(alice, {}); + await collection.setLimits(alice, {accountTokenOwnershipLimit: 20}); + for(let i = 0; i < 10; i++){ - await createItemExpectSuccess(alice, collectionId, 'ReFungible'); + await expect(collection.mintToken(alice, 10n)).to.be.not.rejected; } - await createItemExpectFailure(alice, collectionId, 'ReFungible'); + await expect(collection.mintToken(alice, 10n)).to.be.rejectedWith(/common\.AccountTokenLimitExceeded/); for(let i = 1; i < 11; i++) { - await burnItemExpectSuccess(alice, collectionId, i, 100); + await expect(collection.burnToken(alice, i, 10n)).to.be.not.rejected; } - await destroyCollectionExpectSuccess(collectionId); + await collection.burn(alice); }); - it('Collection limits allow lower number than chain limits, collection limits are enforced', async () => { - const collectionId = await createCollectionExpectSuccess({mode: {type: 'ReFungible'}}); - await setCollectionLimitsExpectSuccess(alice, collectionId, {accountTokenOwnershipLimit: 1}); - await createItemExpectSuccess(alice, collectionId, 'ReFungible'); - await createItemExpectFailure(alice, collectionId, 'ReFungible'); - await burnItemExpectSuccess(alice, collectionId, 1, 100); - await destroyCollectionExpectSuccess(collectionId); + itSub('Collection limits allow lower number than chain limits, collection limits are enforced', async ({helper}) => { + const collection = await helper.rft.mintCollection(alice, {}); + await collection.setLimits(alice, {accountTokenOwnershipLimit: 1}); + + await collection.mintToken(alice); + await expect(collection.mintToken(alice)).to.be.rejectedWith(/common\.AccountTokenLimitExceeded/); + + await collection.burnToken(alice, 1); + await expect(collection.burn(alice)).to.be.not.rejected; }); }); +// todo:playgrounds skipped ~ postponed describe.skip('Sponsor timeout (NFT) (only for special chain limits test)', () => { - let alice: IKeyringPair; + /*let alice: IKeyringPair; let bob: IKeyringPair; let charlie: IKeyringPair; @@ -109,7 +105,7 @@ describe.skip('Sponsor timeout (NFT) (only for special chain limits test)', () = }); }); - it.skip('Collection limits have greater timeout value than 
chain limits, collection limits are enforced', async () => { + itSub.skip('Collection limits have greater timeout value than chain limits, collection limits are enforced', async ({helper}) => { const collectionId = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); await setCollectionLimitsExpectSuccess(alice, collectionId, {sponsorTransferTimeout: 7}); const tokenId = await createItemExpectSuccess(alice, collectionId, 'NFT'); @@ -133,7 +129,7 @@ describe.skip('Sponsor timeout (NFT) (only for special chain limits test)', () = await destroyCollectionExpectSuccess(collectionId); }); - it('Collection limits have lower timeout value than chain limits, chain limits are enforced', async () => { + itSub('Collection limits have lower timeout value than chain limits, chain limits are enforced', async ({helper}) => { const collectionId = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); await setCollectionLimitsExpectSuccess(alice, collectionId, {sponsorTransferTimeout: 1}); @@ -172,7 +168,7 @@ describe.skip('Sponsor timeout (Fungible) (only for special chain limits test)', }); }); - it('Collection limits have greater timeout value than chain limits, collection limits are enforced', async () => { + itSub('Collection limits have greater timeout value than chain limits, collection limits are enforced', async ({helper}) => { const collectionId = await createCollectionExpectSuccess({mode: {type: 'Fungible', decimalPoints: 0}}); await setCollectionLimitsExpectSuccess(alice, collectionId, {sponsorTransferTimeout: 7}); const tokenId = await createItemExpectSuccess(alice, collectionId, 'Fungible'); @@ -198,7 +194,7 @@ describe.skip('Sponsor timeout (Fungible) (only for special chain limits test)', await destroyCollectionExpectSuccess(collectionId); }); - it('Collection limits have lower timeout value than chain limits, chain limits are enforced', async () => { + itSub('Collection limits have lower timeout value than chain limits, chain limits are enforced', async ({helper}) => { const collectionId = await createCollectionExpectSuccess({mode: {type: 'Fungible', decimalPoints: 0}}); await setCollectionLimitsExpectSuccess(alice, collectionId, {sponsorTransferTimeout: 1}); @@ -239,7 +235,7 @@ describe.skip('Sponsor timeout (ReFungible) (only for special chain limits test) }); }); - it('Collection limits have greater timeout value than chain limits, collection limits are enforced', async () => { + itSub('Collection limits have greater timeout value than chain limits, collection limits are enforced', async ({helper}) => { const collectionId = await createCollectionExpectSuccess({mode: {type: 'ReFungible'}}); await setCollectionLimitsExpectSuccess(alice, collectionId, {sponsorTransferTimeout: 7}); const tokenId = await createItemExpectSuccess(alice, collectionId, 'ReFungible'); @@ -263,7 +259,7 @@ describe.skip('Sponsor timeout (ReFungible) (only for special chain limits test) await destroyCollectionExpectSuccess(collectionId); }); - it('Collection limits have lower timeout value than chain limits, chain limits are enforced', async () => { + itSub('Collection limits have lower timeout value than chain limits, chain limits are enforced', async ({helper}) => { const collectionId = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); await setCollectionLimitsExpectSuccess(alice, collectionId, {sponsorTransferTimeout: 1}); @@ -286,7 +282,7 @@ describe.skip('Sponsor timeout (ReFungible) (only for special chain limits test) expect(aliceBalanceAfterSponsoredTransaction < 
aliceBalanceBefore).to.be.true; //expect(aliceBalanceAfterSponsoredTransaction).to.be.lessThan(aliceBalanceBefore); await destroyCollectionExpectSuccess(collectionId); - }); + });*/ }); describe('Collection zero limits (NFT)', () => { @@ -295,38 +291,38 @@ describe('Collection zero limits (NFT)', () => { let charlie: IKeyringPair; before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - charlie = privateKeyWrapper('//Charlie'); + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob, charlie] = await helper.arrange.createAccounts([10n, 10n, 10n], donor); }); }); - it.skip('Limits have 0 in tokens per address field, the chain limits are applied', async () => { - const collectionId = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await setCollectionLimitsExpectSuccess(alice, collectionId, {accountTokenOwnershipLimit: 0}); + itSub.skip('Limits have 0 in tokens per address field, the chain limits are applied', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {}); + await collection.setLimits(alice, {accountTokenOwnershipLimit: 0}); + for(let i = 0; i < 10; i++){ - await createItemExpectSuccess(alice, collectionId, 'NFT'); + await collection.mintToken(alice); } - await createItemExpectFailure(alice, collectionId, 'NFT'); + await expect(collection.mintToken(alice)).to.be.rejectedWith(/common\.AccountTokenLimitExceeded/); }); - it('Limits have 0 in sponsor timeout, no limits are applied', async () => { + itSub('Limits have 0 in sponsor timeout, no limits are applied', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {}); + await collection.setLimits(alice, {sponsorTransferTimeout: 0}); + const token = await collection.mintToken(alice); - const collectionId = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await setCollectionLimitsExpectSuccess(alice, collectionId, {sponsorTransferTimeout: 0}); - const tokenId = await createItemExpectSuccess(alice, collectionId, 'NFT'); - await setCollectionSponsorExpectSuccess(collectionId, alice.address); - await confirmSponsorshipExpectSuccess(collectionId, '//Alice'); - await transferExpectSuccess(collectionId, tokenId, alice, bob); - const aliceBalanceBefore = await getFreeBalance(alice); + await collection.setSponsor(alice, alice.address); + await collection.confirmSponsorship(alice); + + await token.transfer(alice, {Substrate: bob.address}); + const aliceBalanceBefore = await helper.balance.getSubstrate(alice.address); // check setting SponsorTimeout = 0, success with next block - await waitNewBlocks(1); - await transferExpectSuccess(collectionId, tokenId, bob, charlie); - const aliceBalanceAfterSponsoredTransaction1 = await getFreeBalance(alice); + await helper.wait.newBlocks(1); + await token.transfer(bob, {Substrate: charlie.address}); + const aliceBalanceAfterSponsoredTransaction1 = await helper.balance.getSubstrate(alice.address); expect(aliceBalanceAfterSponsoredTransaction1 < aliceBalanceBefore).to.be.true; - //expect(aliceBalanceAfterSponsoredTransaction1).to.be.lessThan(aliceBalanceBefore); }); }); @@ -336,29 +332,28 @@ describe('Collection zero limits (Fungible)', () => { let charlie: IKeyringPair; before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - charlie = privateKeyWrapper('//Charlie'); 
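
Note on the test migration applied throughout this file: the old `usingApi`/`privateKeyWrapper` setup with hard-coded `//Alice`/`//Bob` keys is replaced by `usingPlaygrounds` with a per-file donor account, and `it` becomes `itSub`. A condensed sketch of that setup pattern, assuming the `./util` playgrounds exports used elsewhere in this patch (account amounts are illustrative only), looks like this:

```typescript
import {IKeyringPair} from '@polkadot/types/types';
import {expect, itSub, usingPlaygrounds} from './util';

describe('Example: donor-funded test accounts (sketch)', () => {
  let alice: IKeyringPair;

  before(async () => {
    await usingPlaygrounds(async (helper, privateKey) => {
      // Each test file draws funds from its own donor account instead of //Alice.
      const donor = await privateKey({filename: __filename});
      // The 10n balance is an illustrative assumption, not a value mandated by the patch.
      [alice] = await helper.arrange.createAccounts([10n], donor);
    });
  });

  itSub('mints into a fresh collection', async ({helper}) => {
    const collection = await helper.nft.mintCollection(alice, {});
    await expect(collection.mintToken(alice)).to.be.not.rejected;
  });
});
```

The suites below follow this shape exactly, funding as many accounts as their sponsored-transfer checks need.
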
+ await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob, charlie] = await helper.arrange.createAccounts([10n, 10n, 10n], donor); }); }); - it('Limits have 0 in sponsor timeout, no limits are applied', async () => { - const collectionId = await createCollectionExpectSuccess({mode: {type: 'Fungible', decimalPoints: 0}}); - await setCollectionLimitsExpectSuccess(alice, collectionId, {sponsorTransferTimeout: 0}); - const tokenId = await createItemExpectSuccess(alice, collectionId, 'Fungible'); - await setCollectionSponsorExpectSuccess(collectionId, alice.address); - await confirmSponsorshipExpectSuccess(collectionId, '//Alice'); - await transferExpectSuccess(collectionId, tokenId, alice, bob, 10, 'Fungible'); - const aliceBalanceBefore = await getFreeBalance(alice); - await transferExpectSuccess(collectionId, tokenId, bob, charlie, 2, 'Fungible'); + itSub('Limits have 0 in sponsor timeout, no limits are applied', async ({helper}) => { + const collection = await helper.ft.mintCollection(alice, {}); + await collection.setLimits(alice, {sponsorTransferTimeout: 0}); + await collection.mint(alice, 3n); + + await collection.setSponsor(alice, alice.address); + await collection.confirmSponsorship(alice); + + await collection.transfer(alice, {Substrate: bob.address}, 2n); + const aliceBalanceBefore = await helper.balance.getSubstrate(alice.address); // check setting SponsorTimeout = 0, success with next block - await waitNewBlocks(1); - await transferExpectSuccess(collectionId, tokenId, bob, charlie, 2, 'Fungible'); - const aliceBalanceAfterSponsoredTransaction1 = await getFreeBalance(alice); + await helper.wait.newBlocks(1); + await collection.transfer(bob, {Substrate: charlie.address}); + const aliceBalanceAfterSponsoredTransaction1 = await helper.balance.getSubstrate(alice.address); expect(aliceBalanceAfterSponsoredTransaction1 < aliceBalanceBefore).to.be.true; - //expect(aliceBalanceAfterSponsoredTransaction1).to.be.lessThan(aliceBalanceBefore); }); }); @@ -367,109 +362,113 @@ describe('Collection zero limits (ReFungible)', () => { let bob: IKeyringPair; let charlie: IKeyringPair; - before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - charlie = privateKeyWrapper('//Charlie'); + before(async function() { + await usingPlaygrounds(async (helper, privateKey) => { + requirePalletsOrSkip(this, helper, [Pallets.ReFungible]); + + const donor = await privateKey({filename: __filename}); + [alice, bob, charlie] = await helper.arrange.createAccounts([10n, 10n, 10n], donor); }); }); - it.skip('Limits have 0 in tokens per address field, the chain limits are applied', async () => { - const collectionId = await createCollectionExpectSuccess({mode: {type: 'ReFungible'}}); - await setCollectionLimitsExpectSuccess(alice, collectionId, {accountTokenOwnershipLimit: 0}); + itSub.skip('Limits have 0 in tokens per address field, the chain limits are applied', async ({helper}) => { + const collection = await helper.rft.mintCollection(alice, {}); + await collection.setLimits(alice, {accountTokenOwnershipLimit: 0}); for(let i = 0; i < 10; i++){ - await createItemExpectSuccess(alice, collectionId, 'ReFungible'); + await collection.mintToken(alice); } - await createItemExpectFailure(alice, collectionId, 'ReFungible'); + await expect(collection.mintToken(alice)).to.be.rejectedWith(/common\.AccountTokenLimitExceeded/); }); - it('Limits have 0 in sponsor 
timeout, no limits are applied', async () => { + itSub('Limits have 0 in sponsor timeout, no limits are applied', async ({helper}) => { + const collection = await helper.rft.mintCollection(alice, {}); + await collection.setLimits(alice, {sponsorTransferTimeout: 0}); + const token = await collection.mintToken(alice, 3n); - const collectionId = await createCollectionExpectSuccess({mode: {type: 'ReFungible'}}); - await setCollectionLimitsExpectSuccess(alice, collectionId, {sponsorTransferTimeout: 0}); - const tokenId = await createItemExpectSuccess(alice, collectionId, 'ReFungible'); - await setCollectionSponsorExpectSuccess(collectionId, alice.address); - await confirmSponsorshipExpectSuccess(collectionId, '//Alice'); - await transferExpectSuccess(collectionId, tokenId, alice, bob, 100, 'ReFungible'); - await transferExpectSuccess(collectionId, tokenId, bob, charlie, 20, 'ReFungible'); - const aliceBalanceBefore = await getFreeBalance(alice); + await collection.setSponsor(alice, alice.address); + await collection.confirmSponsorship(alice); + + await token.transfer(alice, {Substrate: bob.address}, 2n); + const aliceBalanceBefore = await helper.balance.getSubstrate(alice.address); // check setting SponsorTimeout = 0, success with next block - await waitNewBlocks(1); - await transferExpectSuccess(collectionId, tokenId, bob, charlie, 20, 'ReFungible'); - const aliceBalanceAfterSponsoredTransaction1 = await getFreeBalance(alice); + await helper.wait.newBlocks(1); + await token.transfer(bob, {Substrate: charlie.address}); + const aliceBalanceAfterSponsoredTransaction1 = await helper.balance.getSubstrate(alice.address); expect(aliceBalanceAfterSponsoredTransaction1 < aliceBalanceBefore).to.be.true; - //expect(aliceBalanceAfterSponsoredTransaction1).to.be.lessThan(aliceBalanceBefore); }); - - it('Effective collection limits', async () => { - await usingApi(async (api) => { - const collectionId = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await setCollectionLimitsExpectSuccess(alice, collectionId, {ownerCanTransfer: true}); - - { // Check that limits is undefined - const collection = await api.rpc.unique.collectionById(collectionId); - expect(collection.isSome).to.be.true; - const limits = collection.unwrap().limits; - expect(limits).to.be.any; - - expect(limits.accountTokenOwnershipLimit.toHuman()).to.be.null; - expect(limits.sponsoredDataSize.toHuman()).to.be.null; - expect(limits.sponsoredDataRateLimit.toHuman()).to.be.null; - expect(limits.tokenLimit.toHuman()).to.be.null; - expect(limits.sponsorTransferTimeout.toHuman()).to.be.null; - expect(limits.sponsorApproveTimeout.toHuman()).to.be.null; - expect(limits.ownerCanTransfer.toHuman()).to.be.true; - expect(limits.ownerCanDestroy.toHuman()).to.be.null; - expect(limits.transfersEnabled.toHuman()).to.be.null; - } - - { // Check that limits is undefined for non-existent collection - const limits = await api.rpc.unique.effectiveCollectionLimits(11111); - expect(limits.toHuman()).to.be.null; - } - - { // Check that default values defined for collection limits - const limitsOpt = await api.rpc.unique.effectiveCollectionLimits(collectionId); - expect(limitsOpt.isNone).to.be.false; - const limits = limitsOpt.unwrap(); - - expect(limits.accountTokenOwnershipLimit.toHuman()).to.be.eq('100,000'); - expect(limits.sponsoredDataSize.toHuman()).to.be.eq('2,048'); - expect(limits.sponsoredDataRateLimit.toHuman()).to.be.eq('SponsoringDisabled'); - expect(limits.tokenLimit.toHuman()).to.be.eq('4,294,967,295'); - 
expect(limits.sponsorTransferTimeout.toHuman()).to.be.eq('5'); - expect(limits.sponsorApproveTimeout.toHuman()).to.be.eq('5'); - expect(limits.ownerCanTransfer.toHuman()).to.be.true; - expect(limits.ownerCanDestroy.toHuman()).to.be.true; - expect(limits.transfersEnabled.toHuman()).to.be.true; - } - - { //Check the values for collection limits - await setCollectionLimitsExpectSuccess(alice, collectionId, { - accountTokenOwnershipLimit: 99_999, - sponsoredDataSize: 1024, - tokenLimit: 123, - transfersEnabled: false, - }); - - const limitsOpt = await api.rpc.unique.effectiveCollectionLimits(collectionId); - expect(limitsOpt.isNone).to.be.false; - const limits = limitsOpt.unwrap(); - - expect(limits.accountTokenOwnershipLimit.toHuman()).to.be.eq('99,999'); - expect(limits.sponsoredDataSize.toHuman()).to.be.eq('1,024'); - expect(limits.sponsoredDataRateLimit.toHuman()).to.be.eq('SponsoringDisabled'); - expect(limits.tokenLimit.toHuman()).to.be.eq('123'); - expect(limits.sponsorTransferTimeout.toHuman()).to.be.eq('5'); - expect(limits.sponsorApproveTimeout.toHuman()).to.be.eq('5'); - expect(limits.ownerCanTransfer.toHuman()).to.be.true; - expect(limits.ownerCanDestroy.toHuman()).to.be.true; - expect(limits.transfersEnabled.toHuman()).to.be.false; - } +}); + +describe('Effective collection limits (NFT)', () => { + let alice: IKeyringPair; + + before(async () => { + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice] = await helper.arrange.createAccounts([10n], donor); }); }); -}); + + itSub('Effective collection limits', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {}); + await collection.setLimits(alice, {ownerCanTransfer: true}); + + { + // Check that limits are undefined + const collectionInfo = await collection.getData(); + const limits = collectionInfo?.raw.limits; + expect(limits).to.be.any; + + expect(limits.accountTokenOwnershipLimit).to.be.null; + expect(limits.sponsoredDataSize).to.be.null; + expect(limits.sponsoredDataRateLimit).to.be.null; + expect(limits.tokenLimit).to.be.null; + expect(limits.sponsorTransferTimeout).to.be.null; + expect(limits.sponsorApproveTimeout).to.be.null; + expect(limits.ownerCanTransfer).to.be.true; + expect(limits.ownerCanDestroy).to.be.null; + expect(limits.transfersEnabled).to.be.null; + } + + { // Check that limits is undefined for non-existent collection + const limits = await helper.collection.getEffectiveLimits(999999); + expect(limits).to.be.null; + } + { // Check that default values defined for collection limits + const limits = await collection.getEffectiveLimits(); + + expect(limits.accountTokenOwnershipLimit).to.be.eq(100000); + expect(limits.sponsoredDataSize).to.be.eq(2048); + expect(limits.sponsoredDataRateLimit).to.be.deep.eq({sponsoringDisabled: null}); + expect(limits.tokenLimit).to.be.eq(4294967295); + expect(limits.sponsorTransferTimeout).to.be.eq(5); + expect(limits.sponsorApproveTimeout).to.be.eq(5); + expect(limits.ownerCanTransfer).to.be.true; + expect(limits.ownerCanDestroy).to.be.true; + expect(limits.transfersEnabled).to.be.true; + } + { + // Check the values for collection limits + await collection.setLimits(alice, { + accountTokenOwnershipLimit: 99_999, + sponsoredDataSize: 1024, + tokenLimit: 123, + transfersEnabled: false, + }); + + const limits = await collection.getEffectiveLimits(); + + expect(limits.accountTokenOwnershipLimit).to.be.eq(99999); + expect(limits.sponsoredDataSize).to.be.eq(1024); + 
expect(limits.sponsoredDataRateLimit).to.be.deep.eq({sponsoringDisabled: null}); + expect(limits.tokenLimit).to.be.eq(123); + expect(limits.sponsorTransferTimeout).to.be.eq(5); + expect(limits.sponsorApproveTimeout).to.be.eq(5); + expect(limits.ownerCanTransfer).to.be.true; + expect(limits.ownerCanDestroy).to.be.true; + expect(limits.transfersEnabled).to.be.false; + } + }); +}); diff --git a/tests/src/maintenanceMode.seqtest.ts b/tests/src/maintenanceMode.seqtest.ts new file mode 100644 index 0000000000..bd0a3175d7 --- /dev/null +++ b/tests/src/maintenanceMode.seqtest.ts @@ -0,0 +1,266 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +import {IKeyringPair} from '@polkadot/types/types'; +import {ApiPromise} from '@polkadot/api'; +import {expect, itSub, Pallets, usingPlaygrounds} from './util'; +import {itEth} from './eth/util'; + +async function maintenanceEnabled(api: ApiPromise): Promise { + return (await api.query.maintenance.enabled()).toJSON() as boolean; +} + +describe('Integration Test: Maintenance Mode', () => { + let superuser: IKeyringPair; + let donor: IKeyringPair; + let bob: IKeyringPair; + + before(async () => { + await usingPlaygrounds(async (helper, privateKey) => { + superuser = await privateKey('//Alice'); + donor = await privateKey({filename: __filename}); + [bob] = await helper.arrange.createAccounts([100n], donor); + + if (await maintenanceEnabled(helper.getApi())) { + console.warn('\tMaintenance mode was left enabled BEFORE the test suite! 
Disabling it now.'); + await expect(helper.getSudo().executeExtrinsic(superuser, 'api.tx.maintenance.disable', [])).to.be.fulfilled; + } + }); + }); + + itSub('Allows superuser to enable and disable maintenance mode - and disallows anyone else', async ({helper}) => { + // Make sure non-sudo can't enable maintenance mode + await expect(helper.executeExtrinsic(superuser, 'api.tx.maintenance.enable', []), 'on commoner enabling MM') + .to.be.rejectedWith(/BadOrigin/); + + // Set maintenance mode + await helper.getSudo().executeExtrinsic(superuser, 'api.tx.maintenance.enable', []); + expect(await maintenanceEnabled(helper.getApi()), 'MM is OFF when it should be ON').to.be.true; + + // Make sure non-sudo can't disable maintenance mode + await expect(helper.executeExtrinsic(bob, 'api.tx.maintenance.disable', []), 'on commoner disabling MM') + .to.be.rejectedWith(/BadOrigin/); + + // Disable maintenance mode + await helper.getSudo().executeExtrinsic(superuser, 'api.tx.maintenance.disable', []); + expect(await maintenanceEnabled(helper.getApi()), 'MM is ON when it should be OFF').to.be.false; + }); + + itSub('MM blocks unique pallet calls', async ({helper}) => { + // Can create an NFT collection before enabling the MM + const nftCollection = await helper.nft.mintCollection(bob, { + tokenPropertyPermissions: [{key: 'test', permission: { + collectionAdmin: true, + tokenOwner: true, + mutable: true, + }}], + }); + + // Can mint an NFT before enabling the MM + const nft = await nftCollection.mintToken(bob); + + // Can create an FT collection before enabling the MM + const ftCollection = await helper.ft.mintCollection(superuser); + + // Can mint an FT before enabling the MM + await expect(ftCollection.mint(superuser)).to.be.fulfilled; + + await helper.getSudo().executeExtrinsic(superuser, 'api.tx.maintenance.enable', []); + expect(await maintenanceEnabled(helper.getApi()), 'MM is OFF when it should be ON').to.be.true; + + // Unable to create a collection when the MM is enabled + await expect(helper.nft.mintCollection(superuser), 'cudo forbidden stuff') + .to.be.rejectedWith(/Invalid Transaction: Transaction call is not expected/); + + // Unable to set token properties when the MM is enabled + await expect(nft.setProperties( + bob, + [{key: 'test', value: 'test-val'}], + )).to.be.rejectedWith(/Invalid Transaction: Transaction call is not expected/); + + // Unable to mint an NFT when the MM is enabled + await expect(nftCollection.mintToken(superuser)) + .to.be.rejectedWith(/Invalid Transaction: Transaction call is not expected/); + + // Unable to mint an FT when the MM is enabled + await expect(ftCollection.mint(superuser)) + .to.be.rejectedWith(/Invalid Transaction: Transaction call is not expected/); + + await helper.getSudo().executeExtrinsic(superuser, 'api.tx.maintenance.disable', []); + expect(await maintenanceEnabled(helper.getApi()), 'MM is ON when it should be OFF').to.be.false; + + // Can create a collection after disabling the MM + await expect(helper.nft.mintCollection(bob), 'MM is disabled, the collection should be created').to.be.fulfilled; + + // Can set token properties after disabling the MM + await nft.setProperties(bob, [{key: 'test', value: 'test-val'}]); + + // Can mint an NFT after disabling the MM + await nftCollection.mintToken(bob); + + // Can mint an FT after disabling the MM + await ftCollection.mint(superuser); + }); + + itSub.ifWithPallets('MM blocks unique pallet calls (Re-Fungible)', [Pallets.ReFungible], async ({helper}) => { + // Can create an RFT collection before 
enabling the MM + const rftCollection = await helper.rft.mintCollection(superuser); + + // Can mint an RFT before enabling the MM + await expect(rftCollection.mintToken(superuser)).to.be.fulfilled; + + await helper.getSudo().executeExtrinsic(superuser, 'api.tx.maintenance.enable', []); + expect(await maintenanceEnabled(helper.getApi()), 'MM is OFF when it should be ON').to.be.true; + + // Unable to mint an RFT when the MM is enabled + await expect(rftCollection.mintToken(superuser)) + .to.be.rejectedWith(/Invalid Transaction: Transaction call is not expected/); + + await helper.getSudo().executeExtrinsic(superuser, 'api.tx.maintenance.disable', []); + expect(await maintenanceEnabled(helper.getApi()), 'MM is ON when it should be OFF').to.be.false; + + // Can mint an RFT after disabling the MM + await rftCollection.mintToken(superuser); + }); + + itSub('MM allows native token transfers and RPC calls', async ({helper}) => { + // We can use RPC before the MM is enabled + const totalCount = await helper.collection.getTotalCount(); + + // We can transfer funds before the MM is enabled + await expect(helper.balance.transferToSubstrate(superuser, bob.address, 2n)).to.be.fulfilled; + + await helper.getSudo().executeExtrinsic(superuser, 'api.tx.maintenance.enable', []); + expect(await maintenanceEnabled(helper.getApi()), 'MM is OFF when it should be ON').to.be.true; + + // RPCs work while in maintenance + expect(await helper.collection.getTotalCount()).to.be.deep.equal(totalCount); + + // We still able to transfer funds + await expect(helper.balance.transferToSubstrate(bob, superuser.address, 1n)).to.be.fulfilled; + + await helper.getSudo().executeExtrinsic(superuser, 'api.tx.maintenance.disable', []); + expect(await maintenanceEnabled(helper.getApi()), 'MM is ON when it should be OFF').to.be.false; + + // RPCs work after maintenance + expect(await helper.collection.getTotalCount()).to.be.deep.equal(totalCount); + + // Transfers work after maintenance + await expect(helper.balance.transferToSubstrate(bob, superuser.address, 1n)).to.be.fulfilled; + }); + + itSub.ifWithPallets('MM blocks scheduled calls and the scheduler itself', [Pallets.Scheduler], async ({helper}) => { + const collection = await helper.nft.mintCollection(bob); + + const nftBeforeMM = await collection.mintToken(bob); + const nftDuringMM = await collection.mintToken(bob); + const nftAfterMM = await collection.mintToken(bob); + + const scheduledIdBeforeMM = '0x' + '0'.repeat(31) + '0'; + const scheduledIdDuringMM = '0x' + '0'.repeat(31) + '1'; + const scheduledIdBunkerThroughMM = '0x' + '0'.repeat(31) + '2'; + const scheduledIdAttemptDuringMM = '0x' + '0'.repeat(31) + '3'; + const scheduledIdAfterMM = '0x' + '0'.repeat(31) + '4'; + + const blocksToWait = 6; + + // Scheduling works before the maintenance + await nftBeforeMM.scheduleAfter(scheduledIdBeforeMM, blocksToWait) + .transfer(bob, {Substrate: superuser.address}); + + await helper.wait.newBlocks(blocksToWait + 1); + expect(await nftBeforeMM.getOwner()).to.be.deep.equal({Substrate: superuser.address}); + + // Schedule a transaction that should occur *during* the maintenance + await nftDuringMM.scheduleAfter(scheduledIdDuringMM, blocksToWait) + .transfer(bob, {Substrate: superuser.address}); + + // Schedule a transaction that should occur *after* the maintenance + await nftDuringMM.scheduleAfter(scheduledIdBunkerThroughMM, blocksToWait * 2) + .transfer(bob, {Substrate: superuser.address}); + + await helper.getSudo().executeExtrinsic(superuser, 'api.tx.maintenance.enable', []); + 
expect(await maintenanceEnabled(helper.getApi()), 'MM is OFF when it should be ON').to.be.true; + + await helper.wait.newBlocks(blocksToWait + 1); + // The owner should NOT change since the scheduled transaction should be rejected + expect(await nftDuringMM.getOwner()).to.be.deep.equal({Substrate: bob.address}); + + // Any attempts to schedule a tx during the MM should be rejected + await expect(nftDuringMM.scheduleAfter(scheduledIdAttemptDuringMM, blocksToWait) + .transfer(bob, {Substrate: superuser.address})) + .to.be.rejectedWith(/Invalid Transaction: Transaction call is not expected/); + + await helper.getSudo().executeExtrinsic(superuser, 'api.tx.maintenance.disable', []); + expect(await maintenanceEnabled(helper.getApi()), 'MM is ON when it should be OFF').to.be.false; + + // Scheduling works after the maintenance + await nftAfterMM.scheduleAfter(scheduledIdAfterMM, blocksToWait) + .transfer(bob, {Substrate: superuser.address}); + + await helper.wait.newBlocks(blocksToWait + 1); + + expect(await nftAfterMM.getOwner()).to.be.deep.equal({Substrate: superuser.address}); + // The owner of the token scheduled for transaction *before* maintenance should now change *after* maintenance + expect(await nftDuringMM.getOwner()).to.be.deep.equal({Substrate: superuser.address}); + }); + + itEth('Disallows Ethereum transactions to execute while in maintenance', async ({helper}) => { + const owner = await helper.eth.createAccountWithBalance(donor); + const receiver = helper.eth.createAccount(); + + const {collectionAddress} = await helper.eth.createERC721MetadataCompatibleNFTCollection(owner, 'A', 'B', 'C', ''); + + // Set maintenance mode + await helper.getSudo().executeExtrinsic(superuser, 'api.tx.maintenance.enable', []); + expect(await maintenanceEnabled(helper.getApi()), 'MM is OFF when it should be ON').to.be.true; + + const contract = helper.ethNativeContract.collection(collectionAddress, 'nft', owner); + const tokenId = await contract.methods.nextTokenId().call(); + expect(tokenId).to.be.equal('1'); + + await expect(contract.methods.mintWithTokenURI(receiver, 'Test URI').send()) + .to.be.rejectedWith(/submit transaction to pool failed: Pool\(InvalidTransaction\(InvalidTransaction::Call\)\)/); + + await expect(contract.methods.ownerOf(tokenId).call()).rejectedWith(/token not found/); + + // Disable maintenance mode + await helper.getSudo().executeExtrinsic(superuser, 'api.tx.maintenance.disable', []); + expect(await maintenanceEnabled(helper.getApi()), 'MM is ON when it should be OFF').to.be.false; + }); + + itSub('Allows to enable and disable MM repeatedly', async ({helper}) => { + // Set maintenance mode + await helper.getSudo().executeExtrinsic(superuser, 'api.tx.maintenance.enable', []); + await helper.getSudo().executeExtrinsic(superuser, 'api.tx.maintenance.enable', []); + expect(await maintenanceEnabled(helper.getApi()), 'MM is OFF when it should be ON').to.be.true; + + // Disable maintenance mode + await helper.getSudo().executeExtrinsic(superuser, 'api.tx.maintenance.disable', []); + await helper.getSudo().executeExtrinsic(superuser, 'api.tx.maintenance.disable', []); + expect(await maintenanceEnabled(helper.getApi()), 'MM is ON when it should be OFF').to.be.false; + }); + + afterEach(async () => { + await usingPlaygrounds(async helper => { + if (await maintenanceEnabled(helper.getApi())) { + console.warn('\tMaintenance mode was left enabled AFTER a test has finished! Be careful. 
Disabling it now.'); + await helper.getSudo().executeExtrinsic(superuser, 'api.tx.maintenance.disable', []); + } + expect(await maintenanceEnabled(helper.getApi()), 'Disastrous! Exited the test suite with maintenance mode on.').to.be.false; + }); + }); +}); diff --git a/tests/src/mintModes.test.ts b/tests/src/mintModes.test.ts deleted file mode 100644 index e93d91a998..0000000000 --- a/tests/src/mintModes.test.ts +++ /dev/null @@ -1,148 +0,0 @@ -// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. -// This file is part of Unique Network. - -// Unique Network is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Unique Network is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Unique Network. If not, see . - -import {IKeyringPair} from '@polkadot/types/types'; -import usingApi from './substrate/substrate-api'; -import { - addToAllowListExpectSuccess, - createCollectionExpectSuccess, - createItemExpectFailure, - createItemExpectSuccess, - enableAllowListExpectSuccess, - setMintPermissionExpectSuccess, - addCollectionAdminExpectSuccess, - disableAllowListExpectSuccess, -} from './util/helpers'; - -describe('Integration Test public minting', () => { - let alice: IKeyringPair; - let bob: IKeyringPair; - - before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - }); - }); - - it('If the AllowList mode is enabled, then the address added to the allowlist and not the owner or administrator can create tokens', async () => { - await usingApi(async () => { - const collectionId = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await enableAllowListExpectSuccess(alice, collectionId); - await setMintPermissionExpectSuccess(alice, collectionId, true); - await addToAllowListExpectSuccess(alice, collectionId, bob.address); - - await createItemExpectSuccess(bob, collectionId, 'NFT'); - }); - }); - - it('If the AllowList mode is enabled, address not included in allowlist that is regular user cannot create tokens', async () => { - await usingApi(async () => { - const collectionId = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await enableAllowListExpectSuccess(alice, collectionId); - await setMintPermissionExpectSuccess(alice, collectionId, true); - await createItemExpectFailure(bob, collectionId, 'NFT'); - }); - }); - - it('If the AllowList mode is enabled, address not included in allowlist that is admin can create tokens', async () => { - await usingApi(async () => { - const collectionId = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await enableAllowListExpectSuccess(alice, collectionId); - await setMintPermissionExpectSuccess(alice, collectionId, true); - await addCollectionAdminExpectSuccess(alice, collectionId, bob.address); - await createItemExpectSuccess(bob, collectionId, 'NFT'); - }); - }); - - it('If the AllowList mode is enabled, address not included in allowlist that is owner can create tokens', async () => { - await usingApi(async () => { - const collectionId = await createCollectionExpectSuccess({mode: 
{type: 'NFT'}}); - await enableAllowListExpectSuccess(alice, collectionId); - await setMintPermissionExpectSuccess(alice, collectionId, true); - await createItemExpectSuccess(alice, collectionId, 'NFT'); - }); - }); - - it('If the AllowList mode is disabled, owner can create tokens', async () => { - await usingApi(async () => { - const collectionId = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await disableAllowListExpectSuccess(alice, collectionId); - await setMintPermissionExpectSuccess(alice, collectionId, true); - await createItemExpectSuccess(alice, collectionId, 'NFT'); - }); - }); - - it('If the AllowList mode is disabled, collection admin can create tokens', async () => { - await usingApi(async () => { - const collectionId = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await disableAllowListExpectSuccess(alice, collectionId); - await setMintPermissionExpectSuccess(alice, collectionId, true); - await addCollectionAdminExpectSuccess(alice, collectionId, bob.address); - await createItemExpectSuccess(bob, collectionId, 'NFT'); - }); - }); - - it('If the AllowList mode is disabled, regular user can`t create tokens', async () => { - await usingApi(async () => { - const collectionId = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await disableAllowListExpectSuccess(alice, collectionId); - await setMintPermissionExpectSuccess(alice, collectionId, true); - await createItemExpectFailure(bob, collectionId, 'NFT'); - }); - }); -}); - -describe('Integration Test private minting', () => { - let alice: IKeyringPair; - let bob: IKeyringPair; - - before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - }); - }); - - it('Address that is the not owner or not admin cannot create tokens', async () => { - await usingApi(async () => { - const collectionId = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await enableAllowListExpectSuccess(alice, collectionId); - await setMintPermissionExpectSuccess(alice, collectionId, false); - await addToAllowListExpectSuccess(alice, collectionId, bob.address); - await createItemExpectFailure(bob, collectionId, 'NFT'); - }); - }); - - it('Address that is collection owner can create tokens', async () => { - await usingApi(async () => { - const collectionId = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await disableAllowListExpectSuccess(alice, collectionId); - await setMintPermissionExpectSuccess(alice, collectionId, false); - await createItemExpectSuccess(alice, collectionId, 'NFT'); - }); - }); - - it('Address that is admin can create tokens', async () => { - await usingApi(async () => { - const collectionId = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await disableAllowListExpectSuccess(alice, collectionId); - await setMintPermissionExpectSuccess(alice, collectionId, false); - await addCollectionAdminExpectSuccess(alice, collectionId, bob.address); - await createItemExpectSuccess(bob, collectionId, 'NFT'); - }); - }); -}); diff --git a/tests/src/nesting/collectionProperties.test.ts b/tests/src/nesting/collectionProperties.test.ts new file mode 100644 index 0000000000..1ad1361c71 --- /dev/null +++ b/tests/src/nesting/collectionProperties.test.ts @@ -0,0 +1,261 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. 
+ +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +import {IKeyringPair} from '@polkadot/types/types'; +import {itSub, Pallets, usingPlaygrounds, expect} from '../util'; +import {UniqueBaseCollection} from '../util/playgrounds/unique'; + +describe('Integration Test: Collection Properties', () => { + let alice: IKeyringPair; + let bob: IKeyringPair; + + before(async () => { + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob] = await helper.arrange.createAccounts([50n, 10n], donor); + }); + }); + + itSub('Properties are initially empty', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice); + expect(await collection.getProperties()).to.be.empty; + }); + + async function testSetsPropertiesForCollection(collection: UniqueBaseCollection) { + // As owner + await expect(collection.setProperties(alice, [{key: 'electron', value: 'come bond'}])).to.be.fulfilled; + + await collection.addAdmin(alice, {Substrate: bob.address}); + + // As administrator + await expect(collection.setProperties(bob, [{key: 'black_hole'}])).to.be.fulfilled; + + const properties = await collection.getProperties(); + expect(properties).to.include.deep.members([ + {key: 'electron', value: 'come bond'}, + {key: 'black_hole', value: ''}, + ]); + } + + itSub('Sets properties for a NFT collection', async ({helper}) => { + await testSetsPropertiesForCollection(await helper.nft.mintCollection(alice)); + }); + + itSub.ifWithPallets('Sets properties for a ReFungible collection', [Pallets.ReFungible], async ({helper}) => { + await testSetsPropertiesForCollection(await helper.rft.mintCollection(alice)); + }); + + async function testCheckValidNames(collection: UniqueBaseCollection) { + // alpha symbols + await expect(collection.setProperties(alice, [{key: 'answer'}])).to.be.fulfilled; + + // numeric symbols + await expect(collection.setProperties(alice, [{key: '451'}])).to.be.fulfilled; + + // underscore symbol + await expect(collection.setProperties(alice, [{key: 'black_hole'}])).to.be.fulfilled; + + // dash symbol + await expect(collection.setProperties(alice, [{key: '-'}])).to.be.fulfilled; + + // dot symbol + await expect(collection.setProperties(alice, [{key: 'once.in.a.long.long.while...', value: 'you get a little lost'}])).to.be.fulfilled; + + const properties = await collection.getProperties(); + expect(properties).to.include.deep.members([ + {key: 'answer', value: ''}, + {key: '451', value: ''}, + {key: 'black_hole', value: ''}, + {key: '-', value: ''}, + {key: 'once.in.a.long.long.while...', value: 'you get a little lost'}, + ]); + } + + itSub('Check valid names for NFT collection properties keys', async ({helper}) => { + await testCheckValidNames(await helper.nft.mintCollection(alice)); + }); + + itSub.ifWithPallets('Check valid names for ReFungible collection properties keys', [Pallets.ReFungible], async ({helper}) => { + await 
testCheckValidNames(await helper.rft.mintCollection(alice)); + }); + + async function testChangesProperties(collection: UniqueBaseCollection) { + await expect(collection.setProperties(alice, [{key: 'electron', value: 'come bond'}, {key: 'black_hole', value: ''}])).to.be.fulfilled; + + // Mutate the properties + await expect(collection.setProperties(alice, [{key: 'black_hole', value: 'LIGO'}])).to.be.fulfilled; + + const properties = await collection.getProperties(); + expect(properties).to.include.deep.members([ + {key: 'electron', value: 'come bond'}, + {key: 'black_hole', value: 'LIGO'}, + ]); + } + + itSub('Changes properties of a NFT collection', async ({helper}) => { + await testChangesProperties(await helper.nft.mintCollection(alice)); + }); + + itSub.ifWithPallets('Changes properties of a ReFungible collection', [Pallets.ReFungible], async ({helper}) => { + await testChangesProperties(await helper.rft.mintCollection(alice)); + }); + + async function testDeleteProperties(collection: UniqueBaseCollection) { + await expect(collection.setProperties(alice, [{key: 'electron', value: 'come bond'}, {key: 'black_hole', value: 'LIGO'}])).to.be.fulfilled; + + await expect(collection.deleteProperties(alice, ['electron'])).to.be.fulfilled; + + const properties = await collection.getProperties(['black_hole', 'electron']); + expect(properties).to.be.deep.equal([ + {key: 'black_hole', value: 'LIGO'}, + ]); + } + + itSub('Deletes properties of a NFT collection', async ({helper}) => { + await testDeleteProperties(await helper.nft.mintCollection(alice)); + }); + + itSub.ifWithPallets('Deletes properties of a ReFungible collection', [Pallets.ReFungible], async ({helper}) => { + await testDeleteProperties(await helper.rft.mintCollection(alice)); + }); +}); + +describe('Negative Integration Test: Collection Properties', () => { + let alice: IKeyringPair; + let bob: IKeyringPair; + + before(async () => { + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob] = await helper.arrange.createAccounts([100n, 10n], donor); + }); + }); + + async function testFailsSetPropertiesIfNotOwnerOrAdmin(collection: UniqueBaseCollection) { + await expect(collection.setProperties(bob, [{key: 'electron', value: 'come bond'}, {key: 'black_hole', value: 'LIGO'}])) + .to.be.rejectedWith(/common\.NoPermission/); + + expect(await collection.getProperties()).to.be.empty; + } + + itSub('Fails to set properties in a NFT collection if not its owner/administrator', async ({helper}) => { + await testFailsSetPropertiesIfNotOwnerOrAdmin(await helper.nft.mintCollection(alice)); + }); + + itSub.ifWithPallets('Fails to set properties in a ReFungible collection if not its owner/administrator', [Pallets.ReFungible], async ({helper}) => { + await testFailsSetPropertiesIfNotOwnerOrAdmin(await helper.rft.mintCollection(alice)); + }); + + async function testFailsSetPropertiesThatExceedLimits(collection: UniqueBaseCollection) { + const spaceLimit = (await (collection.helper!.api! as any).query.common.collectionProperties(collection.collectionId)).spaceLimit.toNumber(); + + // Mute the general tx parsing error, too many bytes to process + { + console.error = () => {}; + await expect(collection.setProperties(alice, [ + {key: 'electron', value: 'low high '.repeat(Math.ceil(spaceLimit! 
/ 9))}, + ])).to.be.rejected; + } + + expect(await collection.getProperties(['electron'])).to.be.empty; + + await expect(collection.setProperties(alice, [ + {key: 'electron', value: 'low high '.repeat(Math.ceil(spaceLimit! / 18))}, + {key: 'black_hole', value: '0'.repeat(Math.ceil(spaceLimit! / 2))}, + ])).to.be.rejectedWith(/common\.NoSpaceForProperty/); + + expect(await collection.getProperties(['electron', 'black_hole'])).to.be.empty; + } + + itSub('Fails to set properties that exceed the limits (NFT)', async ({helper}) => { + await testFailsSetPropertiesThatExceedLimits(await helper.nft.mintCollection(alice)); + }); + + itSub.ifWithPallets('Fails to set properties that exceed the limits (ReFungible)', [Pallets.ReFungible], async ({helper}) => { + await testFailsSetPropertiesThatExceedLimits(await helper.rft.mintCollection(alice)); + }); + + async function testFailsSetMorePropertiesThanAllowed(collection: UniqueBaseCollection) { + const propertiesToBeSet = []; + for (let i = 0; i < 65; i++) { + propertiesToBeSet.push({ + key: 'electron_' + i, + value: Math.random() > 0.5 ? 'high' : 'low', + }); + } + + await expect(collection.setProperties(alice, propertiesToBeSet)). + to.be.rejectedWith(/common\.PropertyLimitReached/); + + expect(await collection.getProperties()).to.be.empty; + } + + itSub('Fails to set more properties than allowed (NFT)', async ({helper}) => { + await testFailsSetMorePropertiesThanAllowed(await helper.nft.mintCollection(alice)); + }); + + itSub.ifWithPallets('Fails to set more properties than allowed (ReFungible)', [Pallets.ReFungible], async ({helper}) => { + await testFailsSetMorePropertiesThanAllowed(await helper.rft.mintCollection(alice)); + }); + + async function testFailsSetPropertiesWithInvalidNames(collection: UniqueBaseCollection) { + const invalidProperties = [ + [{key: 'electron', value: 'negative'}, {key: 'string theory', value: 'understandable'}], + [{key: 'Mr/Sandman', value: 'Bring me a gene'}], + [{key: 'déjà vu', value: 'hmm...'}], + ]; + + for (let i = 0; i < invalidProperties.length; i++) { + await expect( + collection.setProperties(alice, invalidProperties[i]), + `on rejecting the new badly-named property #${i}`, + ).to.be.rejectedWith(/common\.InvalidCharacterInPropertyKey/); + } + + await expect( + collection.setProperties(alice, [{key: '', value: 'nothing must not exist'}]), + 'on rejecting an unnamed property', + ).to.be.rejectedWith(/common\.EmptyPropertyKey/); + + await expect( + collection.setProperties(alice, [{key: 'CRISPR-Cas9', value: 'rewriting nature!'}]), + 'on setting the correctly-but-still-badly-named property', + ).to.be.fulfilled; + + const keys = invalidProperties.flatMap(propertySet => propertySet.map(property => property.key)).concat('CRISPR-Cas9').concat(''); + + const properties = await collection.getProperties(keys); + expect(properties).to.be.deep.equal([ + {key: 'CRISPR-Cas9', value: 'rewriting nature!'}, + ]); + + for (let i = 0; i < invalidProperties.length; i++) { + await expect( + collection.deleteProperties(alice, invalidProperties[i].map(property => property.key)), + `on trying to delete the non-existent badly-named property #${i}`, + ).to.be.rejectedWith(/common\.InvalidCharacterInPropertyKey/); + } + } + + itSub('Fails to set properties with invalid names (NFT)', async ({helper}) => { + await testFailsSetPropertiesWithInvalidNames(await helper.nft.mintCollection(alice)); + }); + + itSub.ifWithPallets('Fails to set properties with invalid names (ReFungible)', [Pallets.ReFungible], async ({helper}) 
=> { + await testFailsSetPropertiesWithInvalidNames(await helper.rft.mintCollection(alice)); + }); +}); + \ No newline at end of file diff --git a/tests/src/nesting/graphs.test.ts b/tests/src/nesting/graphs.test.ts index d384dddd00..b6c3ad7538 100644 --- a/tests/src/nesting/graphs.test.ts +++ b/tests/src/nesting/graphs.test.ts @@ -1,9 +1,22 @@ -import {ApiPromise} from '@polkadot/api'; +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + import {IKeyringPair} from '@polkadot/types/types'; -import {expect} from 'chai'; -import {tokenIdToCross} from '../eth/util/helpers'; -import usingApi, {executeTransaction} from '../substrate/substrate-api'; -import {getCreateCollectionResult, transferExpectSuccess, setCollectionLimitsExpectSuccess} from '../util/helpers'; +import {expect, itSub, usingPlaygrounds} from '../util'; +import {UniqueHelper, UniqueNFToken} from '../util/playgrounds/unique'; /** * ```dot @@ -12,46 +25,47 @@ import {getCreateCollectionResult, transferExpectSuccess, setCollectionLimitsExp * 8 -> 5 * ``` */ -async function buildComplexObjectGraph(api: ApiPromise, sender: IKeyringPair): Promise { - const events = await executeTransaction(api, sender, api.tx.unique.createCollectionEx({mode: 'NFT', permissions: {nesting: {tokenOwner: true}}})); - const {collectionId} = getCreateCollectionResult(events); - - await executeTransaction(api, sender, api.tx.unique.createMultipleItemsEx(collectionId, {NFT: Array(8).fill({owner: {Substrate: sender.address}})})); - - await transferExpectSuccess(collectionId, 8, sender, tokenIdToCross(collectionId, 5)); +async function buildComplexObjectGraph(helper: UniqueHelper, sender: IKeyringPair): Promise { + const collection = await helper.nft.mintCollection(sender, {permissions: {nesting: {tokenOwner: true}}}); + const tokens = await collection.mintMultipleTokens(sender, Array(8).fill({owner: {Substrate: sender.address}})); - await transferExpectSuccess(collectionId, 7, sender, tokenIdToCross(collectionId, 6)); - await transferExpectSuccess(collectionId, 6, sender, tokenIdToCross(collectionId, 5)); - await transferExpectSuccess(collectionId, 5, sender, tokenIdToCross(collectionId, 2)); + await tokens[7].nest(sender, tokens[4]); + await tokens[6].nest(sender, tokens[5]); + await tokens[5].nest(sender, tokens[4]); + await tokens[4].nest(sender, tokens[1]); + await tokens[3].nest(sender, tokens[2]); + await tokens[2].nest(sender, tokens[1]); + await tokens[1].nest(sender, tokens[0]); - await transferExpectSuccess(collectionId, 4, sender, tokenIdToCross(collectionId, 3)); - await transferExpectSuccess(collectionId, 3, sender, tokenIdToCross(collectionId, 2)); - await transferExpectSuccess(collectionId, 2, sender, tokenIdToCross(collectionId, 1)); - - return collectionId; + return tokens; } describe('Graphs', () => { - it('Ouroboros can\'t be created in a complex graph', async () => { - await 
usingApi(async (api, privateKeyWrapper) => { - const alice = privateKeyWrapper('//Alice'); - const collection = await buildComplexObjectGraph(api, alice); - const tokenTwoParent = tokenIdToCross(collection, 1); - - // to self - await expect( - executeTransaction(api, alice, api.tx.unique.transfer(tokenIdToCross(collection, 1), collection, 1, 1)), - 'first transaction', - ).to.be.rejectedWith(/structure\.OuroborosDetected/); - // to nested part of graph - await expect( - executeTransaction(api, alice, api.tx.unique.transfer(tokenIdToCross(collection, 5), collection, 1, 1)), - 'second transaction', - ).to.be.rejectedWith(/structure\.OuroborosDetected/); - await expect( - executeTransaction(api, alice, api.tx.unique.transferFrom(tokenTwoParent, tokenIdToCross(collection, 8), collection, 2, 1)), - 'third transaction', - ).to.be.rejectedWith(/structure\.OuroborosDetected/); + let alice: IKeyringPair; + + before(async () => { + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice] = await helper.arrange.createAccounts([10n], donor); }); }); + + itSub('Ouroboros can\'t be created in a complex graph', async ({helper}) => { + const tokens = await buildComplexObjectGraph(helper, alice); + + // to self + await expect( + tokens[0].nest(alice, tokens[0]), + 'first transaction', + ).to.be.rejectedWith(/structure\.OuroborosDetected/); + // to nested part of graph + await expect( + tokens[0].nest(alice, tokens[4]), + 'second transaction', + ).to.be.rejectedWith(/structure\.OuroborosDetected/); + await expect( + tokens[1].transferFrom(alice, tokens[0].nestingAccount(), tokens[7].nestingAccount()), + 'third transaction', + ).to.be.rejectedWith(/structure\.OuroborosDetected/); + }); }); diff --git a/tests/src/nesting/migration-check.test.ts b/tests/src/nesting/migration-check.test.ts deleted file mode 100644 index 19c212ef91..0000000000 --- a/tests/src/nesting/migration-check.test.ts +++ /dev/null @@ -1,172 +0,0 @@ -import {expect} from 'chai'; -import usingApi, {executeTransaction, submitTransactionAsync} from '../substrate/substrate-api'; -import {getCreateCollectionResult, getCreateItemResult, normalizeAccountId} from '../util/helpers'; -import {IKeyringPair} from '@polkadot/types/types'; -import {strToUTF16} from '../util/util'; -import waitNewBlocks from '../substrate/wait-new-blocks'; -// Used for polkadot-launch signalling -import find from 'find-process'; - -// todo un-skip for migrations -describe.skip('Migration testing', () => { - let alice: IKeyringPair; - - before(async() => { - await usingApi(async (_, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - }); - }); - - it('Preserves collection settings after migration', async () => { - let oldVersion: number; - let collectionId: number; - let collectionOld: any; - let nftId: number; - let nftOld: any; - - await usingApi(async api => { - // ----------- Collection pre-upgrade ------------ - const txCollection = api.tx.unique.createCollectionEx({ - mode: 'NFT', - access: 'AllowList', - name: strToUTF16('Mojave Pictures'), - description: strToUTF16('$2.2 billion power plant!'), - tokenPrefix: '0x0002030', - offchainSchema: '0x111111', - schemaVersion: 'Unique', - limits: { - accountTokenOwnershipLimit: 3, - }, - constOnChainSchema: '0x333333', - variableOnChainSchema: '0x22222', - }); - const events0 = await submitTransactionAsync(alice, txCollection); - const result0 = getCreateCollectionResult(events0); - collectionId = result0.collectionId; - - // Get the 
pre-upgrade collection info - collectionOld = (await api.query.common.collectionById(collectionId)).toJSON(); - - // ---------- NFT pre-upgrade ------------ - const txNft = api.tx.unique.createItem( - collectionId, - normalizeAccountId(alice), - { - NFT: { - owner: {substrate: alice.address}, - constData: '0x0000', - variableData: '0x1111', - }, - }, - ); - const events1 = await executeTransaction(api, alice, txNft); - const result1 = getCreateItemResult(events1); - nftId = result1.itemId; - - // Get the pre-upgrade NFT data - nftOld = (await api.query.nonfungible.tokenData(collectionId, nftId)).toJSON(); - - // Get the pre-upgrade spec version - oldVersion = (api.consts.system.version.toJSON() as any).specVersion; - }); - - console.log(`Now waiting for the parachain upgrade from ${oldVersion!}...`); - - let newVersion = oldVersion!; - let connectionFailCounter = 0; - - // Cooperate with polkadot-launch if it's running (assuming custom name change to 'polkadot-launch'), and send a custom signal - find('name', 'polkadot-launch', true).then((list) => { - for (const proc of list) { - process.kill(proc.pid, 'SIGUSR1'); - } - }); - - // And wait for the parachain upgrade - { - // Catch warnings like 'RPC methods not decorated' and keep the 'waiting' message in front - const stdlog = console.warn.bind(console); - let warnCount = 0; - console.warn = function(...args){ - if (arguments.length <= 2 || !args[2].includes('RPC methods not decorated')) { - warnCount++; - stdlog.apply(console, args as any); - } - }; - - let oldWarnCount = 0; - while (newVersion == oldVersion! && connectionFailCounter < 5) { - await new Promise(resolve => setTimeout(resolve, 12000)); - try { - await usingApi(async api => { - await waitNewBlocks(api); - newVersion = (api.consts.system.version.toJSON() as any).specVersion; - if (warnCount > oldWarnCount) { - console.log(`Still waiting for the parachain upgrade from ${oldVersion!}...`); - oldWarnCount = warnCount; - } - }); - } catch (_) { - connectionFailCounter++; - } - } - } - - await usingApi(async api => { - // ---------- Collection comparison ----------- - const collectionNew = (await api.query.common.collectionById(collectionId)).toJSON() as any; - - // Make sure the extra fields are what they should be - expect(( - await api.rpc.unique.collectionProperties(collectionId, ['_old_constOnChainSchema']) - )[0].value.toHex()).to.be.equal(collectionOld.constOnChainSchema); - - expect(( - await api.rpc.unique.collectionProperties(collectionId, ['_old_variableOnChainSchema']) - )[0].value.toHex()).to.be.equal(collectionOld.variableOnChainSchema); - - expect(( - await api.rpc.unique.collectionProperties(collectionId, ['_old_offchainSchema']) - )[0].value.toHex()).to.be.equal(collectionOld.offchainSchema); - - expect(( - await api.rpc.unique.collectionProperties(collectionId, ['_old_schemaVersion']) - )[0].value.toHuman()).to.be.equal(collectionOld.schemaVersion); - - expect(collectionNew.permissions).to.be.deep.equal({ - access: collectionOld.access, - mintMode: collectionOld.mintMode, - nesting: null, - }); - - expect(collectionNew.externalCollection).to.be.equal(false); - - // Get rid of extra fields to perform comparison on the rest of the collection - delete collectionNew.permissions; - delete collectionNew.externalCollection; - delete collectionOld.schemaVersion; - delete collectionOld.constOnChainSchema; - delete collectionOld.variableOnChainSchema; - delete collectionOld.offchainSchema; - delete collectionOld.mintMode; - delete collectionOld.access; - delete 
collectionOld.metaUpdatePermission; // todo look into, doesn't migrate - - expect(collectionNew).to.be.deep.equal(collectionOld); - - // ---------- NFT comparison ----------- - const nftNew = (await api.query.nonfungible.tokenData(collectionId, nftId)).toJSON() as any; - - // Make sure the extra fields are what they should be - expect((await api.rpc.unique.tokenProperties(collectionId, nftId, ['_old_constData']))[0].value.toHex()).to.be.equal(nftOld.constData); - - expect((await api.rpc.unique.tokenProperties(collectionId, nftId, ['_old_variableData']))[0].value.toHex()).to.be.equal(nftOld.variableData); - - // Get rid of extra fields to perform comparison on the rest of the NFT - delete nftOld.constData; - delete nftOld.variableData; - - expect(nftNew).to.be.deep.equal(nftOld); - }); - }); -}); diff --git a/tests/src/nesting/nest.test.ts b/tests/src/nesting/nest.test.ts index 6db38e950c..be20a96c4e 100644 --- a/tests/src/nesting/nest.test.ts +++ b/tests/src/nesting/nest.test.ts @@ -1,826 +1,667 @@ -import {expect} from 'chai'; -import {tokenIdToAddress} from '../eth/util/helpers'; -import usingApi, {executeTransaction} from '../substrate/substrate-api'; -import { - addCollectionAdminExpectSuccess, - addToAllowListExpectSuccess, - createCollectionExpectSuccess, - createItemExpectSuccess, - enableAllowListExpectSuccess, - enablePublicMintingExpectSuccess, - getTokenChildren, - getTokenOwner, - getTopmostTokenOwner, - normalizeAccountId, - setCollectionPermissionsExpectSuccess, - transferExpectFailure, - transferExpectSuccess, - transferFromExpectSuccess, - setCollectionLimitsExpectSuccess, -} from '../util/helpers'; -import {IKeyringPair} from '@polkadot/types/types'; - -let alice: IKeyringPair; -let bob: IKeyringPair; -let charlie: IKeyringPair; +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. -describe('Integration Test: Composite nesting tests', () => { - before(async () => { - await usingApi(async (_, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - }); - }); +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. - it('Performs the full suite: bundles a token, transfers, and unnests', async () => { - await usingApi(async api => { - const collection = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await setCollectionPermissionsExpectSuccess(alice, collection, {nesting: {tokenOwner: true}}); - const targetToken = await createItemExpectSuccess(alice, collection, 'NFT'); +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. - // Create a nested token - const nestedToken = await createItemExpectSuccess(alice, collection, 'NFT', {Ethereum: tokenIdToAddress(collection, targetToken)}); - expect(await getTopmostTokenOwner(api, collection, nestedToken)).to.be.deep.equal({Substrate: alice.address}); - expect(await getTokenOwner(api, collection, nestedToken)).to.be.deep.equal({Ethereum: tokenIdToAddress(collection, targetToken).toLowerCase()}); +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . 
- // Create a token to be nested - const newToken = await createItemExpectSuccess(alice, collection, 'NFT'); - - // Nest - await transferExpectSuccess(collection, newToken, alice, {Ethereum: tokenIdToAddress(collection, targetToken)}); - expect(await getTopmostTokenOwner(api, collection, newToken)).to.be.deep.equal({Substrate: alice.address}); - expect(await getTokenOwner(api, collection, newToken)).to.be.deep.equal({Ethereum: tokenIdToAddress(collection, targetToken).toLowerCase()}); - - // Move bundle to different user - await transferExpectSuccess(collection, targetToken, alice, {Substrate: bob.address}); - expect(await getTopmostTokenOwner(api, collection, nestedToken)).to.be.deep.equal({Substrate: bob.address}); - expect(await getTokenOwner(api, collection, nestedToken)).to.be.deep.equal({Ethereum: tokenIdToAddress(collection, targetToken).toLowerCase()}); - - // Unnest - await transferFromExpectSuccess(collection, newToken, bob, {Ethereum: tokenIdToAddress(collection, targetToken)}, {Substrate: bob.address}); - expect(await getTokenOwner(api, collection, newToken)).to.be.deep.equal({Substrate: bob.address}); - }); - }); +import {IKeyringPair} from '@polkadot/types/types'; +import {expect, itSub, Pallets, usingPlaygrounds} from '../util'; - it('Transfers an already bundled token', async () => { - await usingApi(async api => { - const collection = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await setCollectionPermissionsExpectSuccess(alice, collection, {nesting: {tokenOwner: true}}); - - const tokenA = await createItemExpectSuccess(alice, collection, 'NFT'); - const tokenB = await createItemExpectSuccess(alice, collection, 'NFT'); - - // Create a nested token - const tokenC = await createItemExpectSuccess(alice, collection, 'NFT', {Ethereum: tokenIdToAddress(collection, tokenA)}); - expect(await getTopmostTokenOwner(api, collection, tokenC)).to.be.deep.equal({Substrate: alice.address}); - expect(await getTokenOwner(api, collection, tokenC)).to.be.deep.equal({Ethereum: tokenIdToAddress(collection, tokenA).toLowerCase()}); - - // Transfer the nested token to another token - await expect(executeTransaction( - api, - alice, - api.tx.unique.transferFrom( - normalizeAccountId({Ethereum: tokenIdToAddress(collection, tokenA)}), - normalizeAccountId({Ethereum: tokenIdToAddress(collection, tokenB)}), - collection, - tokenC, - 1, - ), - )).to.not.be.rejected; - expect(await getTopmostTokenOwner(api, collection, tokenC)).to.be.deep.equal({Substrate: alice.address}); - expect(await getTokenOwner(api, collection, tokenC)).to.be.deep.equal({Ethereum: tokenIdToAddress(collection, tokenB).toLowerCase()}); - }); - }); +describe('Integration Test: Composite nesting tests', () => { + let alice: IKeyringPair; + let bob: IKeyringPair; - it('Checks token children', async () => { - await usingApi(async api => { - const collectionA = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await setCollectionLimitsExpectSuccess(alice, collectionA, {ownerCanTransfer: true}); - await setCollectionPermissionsExpectSuccess(alice, collectionA, {nesting: {tokenOwner: true}}); - const collectionB = await createCollectionExpectSuccess({mode: {type: 'Fungible', decimalPoints: 0}}); - - const targetToken = await createItemExpectSuccess(alice, collectionA, 'NFT'); - const targetAddress = {Ethereum: tokenIdToAddress(collectionA, targetToken)}; - let children = await getTokenChildren(api, collectionA, targetToken); - expect(children.length).to.be.equal(0, 'Children length check at creation'); - - // 
Create a nested NFT token - const tokenA = await createItemExpectSuccess(alice, collectionA, 'NFT', targetAddress); - children = await getTokenChildren(api, collectionA, targetToken); - expect(children.length).to.be.equal(1, 'Children length check at nesting #1'); - expect(children).to.have.deep.members([ - {token: tokenA, collection: collectionA}, - ], 'Children contents check at nesting #1'); - - // Create then nest - const tokenB = await createItemExpectSuccess(alice, collectionA, 'NFT'); - await transferExpectSuccess(collectionA, tokenB, alice, targetAddress); - children = await getTokenChildren(api, collectionA, targetToken); - expect(children.length).to.be.equal(2, 'Children length check at nesting #2'); - expect(children).to.have.deep.members([ - {token: tokenA, collection: collectionA}, - {token: tokenB, collection: collectionA}, - ], 'Children contents check at nesting #2'); - - // Move token B to a different user outside the nesting tree - await transferFromExpectSuccess(collectionA, tokenB, alice, targetAddress, bob); - children = await getTokenChildren(api, collectionA, targetToken); - expect(children.length).to.be.equal(1, 'Children length check at unnesting'); - expect(children).to.be.have.deep.members([ - {token: tokenA, collection: collectionA}, - ], 'Children contents check at unnesting'); - - // Create a fungible token in another collection and then nest - const tokenC = await createItemExpectSuccess(alice, collectionB, 'Fungible'); - await transferExpectSuccess(collectionB, tokenC, alice, targetAddress, 1, 'Fungible'); - children = await getTokenChildren(api, collectionA, targetToken); - expect(children.length).to.be.equal(2, 'Children length check at nesting #3 (from another collection)'); - expect(children).to.be.have.deep.members([ - {token: tokenA, collection: collectionA}, - {token: tokenC, collection: collectionB}, - ], 'Children contents check at nesting #3 (from another collection)'); - - // Move the fungible token inside token A deeper in the nesting tree - await transferFromExpectSuccess(collectionB, tokenC, alice, targetAddress, {Ethereum: tokenIdToAddress(collectionA, tokenA)}, 1, 'Fungible'); - children = await getTokenChildren(api, collectionA, targetToken); - expect(children.length).to.be.equal(1, 'Children length check at deeper nesting'); - expect(children).to.be.have.deep.members([ - {token: tokenA, collection: collectionA}, - ], 'Children contents check at deeper nesting'); - }); + before(async () => { + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob] = await helper.arrange.createAccounts([50n, 10n], donor); + }); + }); + + itSub('Performs the full suite: bundles a token, transfers, and unnests', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {permissions: {nesting: {tokenOwner: true}}}); + const targetToken = await collection.mintToken(alice); + + // Create an immediately nested token + const nestedToken = await collection.mintToken(alice, targetToken.nestingAccount()); + expect(await nestedToken.getTopmostOwner()).to.be.deep.equal({Substrate: alice.address}); + expect(await nestedToken.getOwner()).to.be.deep.equal(targetToken.nestingAccount().toLowerCase()); + + // Create a token to be nested + const newToken = await collection.mintToken(alice); + + // Nest + await newToken.nest(alice, targetToken); + expect(await newToken.getTopmostOwner()).to.be.deep.equal({Substrate: alice.address}); + expect(await 
newToken.getOwner()).to.be.deep.equal(targetToken.nestingAccount().toLowerCase()); + + // Move bundle to different user + await targetToken.transfer(alice, {Substrate: bob.address}); + expect(await nestedToken.getTopmostOwner()).to.be.deep.equal({Substrate: bob.address}); + expect(await nestedToken.getOwner()).to.be.deep.equal(targetToken.nestingAccount().toLowerCase()); + expect(await newToken.getTopmostOwner()).to.be.deep.equal({Substrate: bob.address}); + expect(await newToken.getOwner()).to.be.deep.equal(targetToken.nestingAccount().toLowerCase()); + + // Unnest + await newToken.unnest(bob, targetToken, {Substrate: bob.address}); + expect(await newToken.getTopmostOwner()).to.be.deep.equal({Substrate: bob.address}); + expect(await newToken.getOwner()).to.be.deep.equal({Substrate: bob.address}); + }); + + itSub('Transfers an already bundled token', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {permissions: {nesting: {tokenOwner: true}}}); + const tokenA = await collection.mintToken(alice); + const tokenB = await collection.mintToken(alice); + + // Create a nested token + const tokenC = await collection.mintToken(alice, tokenA.nestingAccount()); + expect(await tokenC.getOwner()).to.be.deep.equal(tokenA.nestingAccount().toLowerCase()); + + // Transfer the nested token to another token + await expect(tokenC.transferFrom(alice, tokenA.nestingAccount(), tokenB.nestingAccount())).to.be.fulfilled; + expect(await tokenC.getTopmostOwner()).to.be.deep.equal({Substrate: alice.address}); + expect(await tokenC.getOwner()).to.be.deep.equal(tokenB.nestingAccount().toLowerCase()); + }); + + itSub('Checks token children', async ({helper}) => { + const collectionA = await helper.nft.mintCollection(alice, {permissions: {nesting: {tokenOwner: true}}}); + const collectionB = await helper.ft.mintCollection(alice); + + const targetToken = await collectionA.mintToken(alice); + expect((await targetToken.getChildren()).length).to.be.equal(0, 'Children length check at creation'); + + // Create a nested NFT token + const tokenA = await collectionA.mintToken(alice, targetToken.nestingAccount()); + expect(await targetToken.getChildren()).to.have.deep.members([ + {tokenId: tokenA.tokenId, collectionId: collectionA.collectionId}, + ], 'Children contents check at nesting #1').and.be.length(1, 'Children length check at nesting #1'); + + // Create then nest + const tokenB = await collectionA.mintToken(alice); + await tokenB.nest(alice, targetToken); + expect(await targetToken.getChildren()).to.have.deep.members([ + {tokenId: tokenA.tokenId, collectionId: collectionA.collectionId}, + {tokenId: tokenB.tokenId, collectionId: collectionA.collectionId}, + ], 'Children contents check at nesting #2').and.be.length(2, 'Children length check at nesting #2'); + + // Move token B to a different user outside the nesting tree + await tokenB.unnest(alice, targetToken, {Substrate: bob.address}); + expect(await targetToken.getChildren()).to.be.have.deep.members([ + {tokenId: tokenA.tokenId, collectionId: collectionA.collectionId}, + ], 'Children contents check at nesting #3 (unnesting)').and.be.length(1, 'Children length check at nesting #3 (unnesting)'); + + // Create a fungible token in another collection and then nest + await collectionB.mint(alice, 10n); + await collectionB.transfer(alice, targetToken.nestingAccount(), 2n); + expect(await targetToken.getChildren()).to.be.have.deep.members([ + {tokenId: tokenA.tokenId, collectionId: collectionA.collectionId}, + {tokenId: 0, collectionId: 
collectionB.collectionId}, + ], 'Children contents check at nesting #4 (from another collection)') + .and.be.length(2, 'Children length check at nesting #4 (from another collection)'); + + // Move part of the fungible token inside token A deeper in the nesting tree + await collectionB.transferFrom(alice, targetToken.nestingAccount(), tokenA.nestingAccount(), 1n); + expect(await targetToken.getChildren()).to.be.have.deep.members([ + {tokenId: tokenA.tokenId, collectionId: collectionA.collectionId}, + {tokenId: 0, collectionId: collectionB.collectionId}, + ], 'Children contents check at nesting #5 (deeper)').and.be.length(2, 'Children length check at nesting #5 (deeper)'); + expect(await tokenA.getChildren()).to.be.have.deep.members([ + {tokenId: 0, collectionId: collectionB.collectionId}, + ], 'Children contents check at nesting #5.5 (deeper)').and.be.length(1, 'Children length check at nesting #5.5 (deeper)'); + + // Move the remaining part of the fungible token inside token A deeper in the nesting tree + await collectionB.transferFrom(alice, targetToken.nestingAccount(), tokenA.nestingAccount(), 1n); + expect(await targetToken.getChildren()).to.be.have.deep.members([ + {tokenId: tokenA.tokenId, collectionId: collectionA.collectionId}, + ], 'Children contents check at nesting #6 (deeper)').and.be.length(1, 'Children length check at nesting #6 (deeper)'); + expect(await tokenA.getChildren()).to.be.have.deep.members([ + {tokenId: 0, collectionId: collectionB.collectionId}, + ], 'Children contents check at nesting #6.5 (deeper)').and.be.length(1, 'Children length check at nesting #6.5 (deeper)'); }); }); -describe('Integration Test: Various token type nesting', async () => { +describe('Integration Test: Various token type nesting', () => { + let alice: IKeyringPair; + let bob: IKeyringPair; + let charlie: IKeyringPair; + before(async () => { - await usingApi(async (_, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - charlie = privateKeyWrapper('//Charlie'); + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob, charlie] = await helper.arrange.createAccounts([50n, 10n, 10n], donor); }); }); - it('Admin (NFT): allows an Admin to nest a token', async () => { - await usingApi(async api => { - const collection = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await setCollectionPermissionsExpectSuccess(alice, collection, {nesting: {collectionAdmin: true}}); - await addCollectionAdminExpectSuccess(alice, collection, bob.address); - const targetToken = await createItemExpectSuccess(alice, collection, 'NFT', charlie.address); + itSub('Admin (NFT): allows an Admin to nest a token', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {permissions: {nesting: {collectionAdmin: true}}}); + await collection.addAdmin(alice, {Substrate: bob.address}); + const targetToken = await collection.mintToken(alice, {Substrate: charlie.address}); - // Create a nested token - const nestedToken = await createItemExpectSuccess(bob, collection, 'NFT', {Ethereum: tokenIdToAddress(collection, targetToken)}); - expect(await getTopmostTokenOwner(api, collection, nestedToken)).to.be.deep.equal({Substrate: charlie.address}); - expect(await getTokenOwner(api, collection, nestedToken)).to.be.deep.equal({Ethereum: tokenIdToAddress(collection, targetToken).toLowerCase()}); + // Create an immediately nested token + const nestedToken = await 
collection.mintToken(bob, targetToken.nestingAccount()); + expect(await nestedToken.getTopmostOwner()).to.be.deep.equal({Substrate: charlie.address}); + expect(await nestedToken.getOwner()).to.be.deep.equal(targetToken.nestingAccount().toLowerCase()); - // Create a token to be nested and nest - const newToken = await createItemExpectSuccess(bob, collection, 'NFT'); - await transferExpectSuccess(collection, newToken, bob, {Ethereum: tokenIdToAddress(collection, targetToken)}); - expect(await getTopmostTokenOwner(api, collection, newToken)).to.be.deep.equal({Substrate: charlie.address}); - expect(await getTokenOwner(api, collection, newToken)).to.be.deep.equal({Ethereum: tokenIdToAddress(collection, targetToken).toLowerCase()}); - }); + // Create a token to be nested and nest + const newToken = await collection.mintToken(bob); + await newToken.nest(bob, targetToken); + expect(await newToken.getTopmostOwner()).to.be.deep.equal({Substrate: charlie.address}); + expect(await newToken.getOwner()).to.be.deep.equal(targetToken.nestingAccount().toLowerCase()); }); - it('Admin (NFT): Admin and Token Owner can operate together', async () => { - await usingApi(async api => { - const collection = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await setCollectionPermissionsExpectSuccess(alice, collection, {nesting: {tokenOwner: true, collectionAdmin: true}}); - await addCollectionAdminExpectSuccess(alice, collection, bob.address); - const targetToken = await createItemExpectSuccess(alice, collection, 'NFT', charlie.address); + itSub('Admin (NFT): Admin and Token Owner can operate together', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {permissions: {nesting: {collectionAdmin: true, tokenOwner: true}}}); + await collection.addAdmin(alice, {Substrate: bob.address}); + const targetToken = await collection.mintToken(alice, {Substrate: charlie.address}); - // Create a nested token by an administrator - const nestedToken = await createItemExpectSuccess(bob, collection, 'NFT', {Ethereum: tokenIdToAddress(collection, targetToken)}); - expect(await getTopmostTokenOwner(api, collection, nestedToken)).to.be.deep.equal({Substrate: charlie.address}); - expect(await getTokenOwner(api, collection, nestedToken)).to.be.deep.equal({Ethereum: tokenIdToAddress(collection, targetToken).toLowerCase()}); + // Create an immediately nested token by an administrator + const nestedToken = await collection.mintToken(bob, targetToken.nestingAccount()); + expect(await nestedToken.getTopmostOwner()).to.be.deep.equal({Substrate: charlie.address}); + expect(await nestedToken.getOwner()).to.be.deep.equal(targetToken.nestingAccount().toLowerCase()); - // Create a token and allow the owner to nest too - const newToken = await createItemExpectSuccess(alice, collection, 'NFT', charlie.address); - await transferExpectSuccess(collection, newToken, charlie, {Ethereum: tokenIdToAddress(collection, nestedToken)}); - expect(await getTopmostTokenOwner(api, collection, newToken)).to.be.deep.equal({Substrate: charlie.address}); - expect(await getTokenOwner(api, collection, newToken)).to.be.deep.equal({Ethereum: tokenIdToAddress(collection, nestedToken).toLowerCase()}); - }); + // Create a token to be nested and nest + const newToken = await collection.mintToken(alice, {Substrate: charlie.address}); + await newToken.nest(charlie, targetToken); + expect(await newToken.getTopmostOwner()).to.be.deep.equal({Substrate: charlie.address}); + expect(await 
newToken.getOwner()).to.be.deep.equal(targetToken.nestingAccount().toLowerCase()); }); - it('Admin (NFT): allows an Admin to nest a token (Restricted nesting)', async () => { - await usingApi(async api => { - const collectionA = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await addCollectionAdminExpectSuccess(alice, collectionA, bob.address); - const collectionB = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await addCollectionAdminExpectSuccess(alice, collectionB, bob.address); - await setCollectionPermissionsExpectSuccess(alice, collectionA, {nesting: {collectionAdmin: true, restricted:[collectionA, collectionB]}}); - const targetToken = await createItemExpectSuccess(alice, collectionA, 'NFT', charlie.address); - - // Create a nested token - const nestedToken = await createItemExpectSuccess(bob, collectionB, 'NFT', {Ethereum: tokenIdToAddress(collectionA, targetToken)}); - expect(await getTopmostTokenOwner(api, collectionB, nestedToken)).to.be.deep.equal({Substrate: charlie.address}); - expect(await getTokenOwner(api, collectionB, nestedToken)).to.be.deep.equal({Ethereum: tokenIdToAddress(collectionA, targetToken).toLowerCase()}); - - // Create a token to be nested and nest - const newToken = await createItemExpectSuccess(bob, collectionB, 'NFT'); - await transferExpectSuccess(collectionB, newToken, bob, {Ethereum: tokenIdToAddress(collectionA, targetToken)}); - expect(await getTopmostTokenOwner(api, collectionB, newToken)).to.be.deep.equal({Substrate: charlie.address}); - expect(await getTokenOwner(api, collectionB, newToken)).to.be.deep.equal({Ethereum: tokenIdToAddress(collectionA, targetToken).toLowerCase()}); - }); + itSub('Admin (NFT): allows an Admin to nest a token (Restricted nesting)', async ({helper}) => { + const collectionA = await helper.nft.mintCollection(alice); + await collectionA.addAdmin(alice, {Substrate: bob.address}); + const collectionB = await helper.nft.mintCollection(alice); + await collectionB.addAdmin(alice, {Substrate: bob.address}); + await collectionA.setPermissions(alice, {nesting: {collectionAdmin: true, restricted:[collectionB.collectionId]}}); + const targetToken = await collectionA.mintToken(alice, {Substrate: charlie.address}); + + // Create an immediately nested token + const nestedToken = await collectionB.mintToken(bob, targetToken.nestingAccount()); + expect(await nestedToken.getTopmostOwner()).to.be.deep.equal({Substrate: charlie.address}); + expect(await nestedToken.getOwner()).to.be.deep.equal(targetToken.nestingAccount().toLowerCase()); + + // Create a token to be nested and nest + const newToken = await collectionB.mintToken(bob); + await newToken.nest(bob, targetToken); + expect(await newToken.getTopmostOwner()).to.be.deep.equal({Substrate: charlie.address}); + expect(await newToken.getOwner()).to.be.deep.equal(targetToken.nestingAccount().toLowerCase()); }); // ---------- Non-Fungible ---------- - it('NFT: allows an Owner to nest/unnest their token', async () => { - await usingApi(async api => { - const collection = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await setCollectionPermissionsExpectSuccess(alice, collection, {nesting: {tokenOwner: true}}); - const targetToken = await createItemExpectSuccess(alice, collection, 'NFT'); - - // Create a nested token - const nestedToken = await createItemExpectSuccess(alice, collection, 'NFT', {Ethereum: tokenIdToAddress(collection, targetToken)}); - expect(await getTopmostTokenOwner(api, collection, nestedToken)).to.be.deep.equal({Substrate: 
alice.address}); - expect(await getTokenOwner(api, collection, nestedToken)).to.be.deep.equal({Ethereum: tokenIdToAddress(collection, targetToken).toLowerCase()}); - - // Create a token to be nested and nest - const newToken = await createItemExpectSuccess(alice, collection, 'NFT'); - await transferExpectSuccess(collection, newToken, alice, {Ethereum: tokenIdToAddress(collection, targetToken)}); - expect(await getTopmostTokenOwner(api, collection, newToken)).to.be.deep.equal({Substrate: alice.address}); - expect(await getTokenOwner(api, collection, newToken)).to.be.deep.equal({Ethereum: tokenIdToAddress(collection, targetToken).toLowerCase()}); - }); + itSub('NFT: allows an Owner to nest/unnest their token', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {permissions: {access: 'AllowList', mintMode: true, nesting: {tokenOwner: true}}}); + await collection.addToAllowList(alice, {Substrate: charlie.address}); + const targetToken = await collection.mintToken(charlie); + await collection.addToAllowList(alice, targetToken.nestingAccount()); + + // Create an immediately nested token + const nestedToken = await collection.mintToken(charlie, targetToken.nestingAccount()); + expect(await nestedToken.getTopmostOwner()).to.be.deep.equal({Substrate: charlie.address}); + expect(await nestedToken.getOwner()).to.be.deep.equal(targetToken.nestingAccount().toLowerCase()); + + // Create a token to be nested and nest + const newToken = await collection.mintToken(charlie); + await newToken.nest(charlie, targetToken); + expect(await newToken.getTopmostOwner()).to.be.deep.equal({Substrate: charlie.address}); + expect(await newToken.getOwner()).to.be.deep.equal(targetToken.nestingAccount().toLowerCase()); }); - it('NFT: allows an Owner to nest/unnest their token (Restricted nesting)', async () => { - await usingApi(async api => { - const collection = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await setCollectionPermissionsExpectSuccess(alice, collection, {nesting: {tokenOwner: true, restricted:[collection]}}); - const targetToken = await createItemExpectSuccess(alice, collection, 'NFT'); + itSub('NFT: allows an Owner to nest/unnest their token (Restricted nesting)', async ({helper}) => { + const collectionA = await helper.nft.mintCollection(alice); + const collectionB = await helper.nft.mintCollection(alice); + //await collectionB.addAdmin(alice, {Substrate: bob.address}); + const targetToken = await collectionA.mintToken(alice, {Substrate: charlie.address}); - // Create a nested token - const nestedToken = await createItemExpectSuccess(alice, collection, 'NFT', {Ethereum: tokenIdToAddress(collection, targetToken)}); - expect(await getTopmostTokenOwner(api, collection, nestedToken)).to.be.deep.equal({Substrate: alice.address}); - expect(await getTokenOwner(api, collection, nestedToken)).to.be.deep.equal({Ethereum: tokenIdToAddress(collection, targetToken).toLowerCase()}); + await collectionA.setPermissions(alice, {access: 'AllowList', mintMode: true, nesting: {tokenOwner: true, restricted:[collectionB.collectionId]}}); + await collectionA.addToAllowList(alice, {Substrate: charlie.address}); + await collectionA.addToAllowList(alice, targetToken.nestingAccount()); - // Create a token to be nested and nest - const newToken = await createItemExpectSuccess(alice, collection, 'NFT'); - await transferExpectSuccess(collection, newToken, alice, {Ethereum: tokenIdToAddress(collection, targetToken)}); - expect(await getTopmostTokenOwner(api, collection, 
newToken)).to.be.deep.equal({Substrate: alice.address}); - expect(await getTokenOwner(api, collection, newToken)).to.be.deep.equal({Ethereum: tokenIdToAddress(collection, targetToken).toLowerCase()}); - }); + await collectionB.setPermissions(alice, {access: 'AllowList', mintMode: true}); + await collectionB.addToAllowList(alice, {Substrate: charlie.address}); + await collectionB.addToAllowList(alice, targetToken.nestingAccount()); + + // Create an immediately nested token + const nestedToken = await collectionB.mintToken(charlie, targetToken.nestingAccount()); + expect(await nestedToken.getTopmostOwner()).to.be.deep.equal({Substrate: charlie.address}); + expect(await nestedToken.getOwner()).to.be.deep.equal(targetToken.nestingAccount().toLowerCase()); + + // Create a token to be nested and nest + const newToken = await collectionB.mintToken(charlie); + await newToken.nest(charlie, targetToken); + expect(await newToken.getTopmostOwner()).to.be.deep.equal({Substrate: charlie.address}); + expect(await newToken.getOwner()).to.be.deep.equal(targetToken.nestingAccount().toLowerCase()); }); // ---------- Fungible ---------- - it('Fungible: allows an Owner to nest/unnest their token', async () => { - await usingApi(async api => { - const collectionNFT = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await setCollectionPermissionsExpectSuccess(alice, collectionNFT, {nesting: {tokenOwner: true}}); - const targetToken = await createItemExpectSuccess(alice, collectionNFT, 'NFT', {Substrate: alice.address}); - const targetAddress = {Ethereum: tokenIdToAddress(collectionNFT, targetToken)}; - - const collectionFT = await createCollectionExpectSuccess({mode: {type: 'Fungible', decimalPoints: 0}}); - - // Create a nested token - await expect(executeTransaction(api, alice, api.tx.unique.createItem( - collectionFT, - targetAddress, - {Fungible: {Value: 10}}, - ))).to.not.be.rejected; - - // Nest a new token - const newToken = await createItemExpectSuccess(alice, collectionFT, 'Fungible'); - await transferExpectSuccess(collectionFT, newToken, alice, targetAddress, 1, 'Fungible'); - }); + itSub('Fungible: allows an Owner to nest/unnest their token', async ({helper}) => { + const collectionNFT = await helper.nft.mintCollection(alice, {permissions: {access: 'AllowList', mintMode: true, nesting: {tokenOwner: true}}}); + const collectionFT = await helper.ft.mintCollection(alice); + const targetToken = await collectionNFT.mintToken(alice, {Substrate: charlie.address}); + + await collectionNFT.addToAllowList(alice, {Substrate: charlie.address}); + await collectionNFT.addToAllowList(alice, targetToken.nestingAccount()); + + await collectionFT.setPermissions(alice, {access: 'AllowList', mintMode: true}); + await collectionFT.addToAllowList(alice, {Substrate: charlie.address}); + await collectionFT.addToAllowList(alice, targetToken.nestingAccount()); + + // Create an immediately nested token + await collectionFT.mint(charlie, 5n, targetToken.nestingAccount()); + expect(await collectionFT.getBalance(targetToken.nestingAccount())).to.be.equal(5n); + + // Create a token to be nested and nest + await collectionFT.mint(charlie, 5n); + await collectionFT.transfer(charlie, targetToken.nestingAccount(), 2n); + expect(await collectionFT.getBalance(targetToken.nestingAccount())).to.be.equal(7n); }); - it('Fungible: allows an Owner to nest/unnest their token (Restricted nesting)', async () => { - await usingApi(async api => { - const collectionNFT = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - 
const targetToken = await createItemExpectSuccess(alice, collectionNFT, 'NFT', {Substrate: alice.address}); - const targetAddress = {Ethereum: tokenIdToAddress(collectionNFT, targetToken)}; + itSub('Fungible: allows an Owner to nest/unnest their token (Restricted nesting)', async ({helper}) => { + const collectionNFT = await helper.nft.mintCollection(alice); + const collectionFT = await helper.ft.mintCollection(alice); + const targetToken = await collectionNFT.mintToken(alice, {Substrate: charlie.address}); - const collectionFT = await createCollectionExpectSuccess({mode: {type: 'Fungible', decimalPoints: 0}}); + await collectionNFT.setPermissions(alice, {access: 'AllowList', mintMode: true, nesting: {tokenOwner: true, restricted:[collectionFT.collectionId]}}); + await collectionNFT.addToAllowList(alice, {Substrate: charlie.address}); + await collectionNFT.addToAllowList(alice, targetToken.nestingAccount()); - await setCollectionPermissionsExpectSuccess(alice, collectionNFT, {nesting: {tokenOwner: true, restricted: [collectionFT]}}); + await collectionFT.setPermissions(alice, {access: 'AllowList', mintMode: true}); + await collectionFT.addToAllowList(alice, {Substrate: charlie.address}); + await collectionFT.addToAllowList(alice, targetToken.nestingAccount()); - // Create a nested token - await expect(executeTransaction(api, alice, api.tx.unique.createItem( - collectionFT, - targetAddress, - {Fungible: {Value: 10}}, - ))).to.not.be.rejected; + // Create an immediately nested token + await collectionFT.mint(charlie, 5n, targetToken.nestingAccount()); + expect(await collectionFT.getBalance(targetToken.nestingAccount())).to.be.equal(5n); - // Nest a new token - const newToken = await createItemExpectSuccess(alice, collectionFT, 'Fungible'); - await transferExpectSuccess(collectionFT, newToken, alice, targetAddress, 1, 'Fungible'); - }); + // Create a token to be nested and nest + await collectionFT.mint(charlie, 5n); + await collectionFT.transfer(charlie, targetToken.nestingAccount(), 2n); + expect(await collectionFT.getBalance(targetToken.nestingAccount())).to.be.equal(7n); }); // ---------- Re-Fungible ---------- - it('ReFungible: allows an Owner to nest/unnest their token', async () => { - await usingApi(async api => { - const collectionNFT = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await setCollectionPermissionsExpectSuccess(alice, collectionNFT, {nesting: {tokenOwner: true}}); - const targetToken = await createItemExpectSuccess(alice, collectionNFT, 'NFT', {Substrate: alice.address}); - const targetAddress = {Ethereum: tokenIdToAddress(collectionNFT, targetToken)}; - - const collectionRFT = await createCollectionExpectSuccess({mode: {type: 'ReFungible'}}); - - // Create a nested token - await expect(executeTransaction(api, alice, api.tx.unique.createItem( - collectionRFT, - targetAddress, - {ReFungible: {pieces: 100}}, - ))).to.not.be.rejected; - - // Nest a new token - const newToken = await createItemExpectSuccess(alice, collectionRFT, 'ReFungible'); - await transferExpectSuccess(collectionRFT, newToken, alice, targetAddress, 100, 'ReFungible'); - }); + itSub.ifWithPallets('ReFungible: allows an Owner to nest/unnest their token', [Pallets.ReFungible], async ({helper}) => { + const collectionNFT = await helper.nft.mintCollection(alice, {permissions: {access: 'AllowList', mintMode: true, nesting: {tokenOwner: true}}}); + const collectionRFT = await helper.rft.mintCollection(alice); + const targetToken = await collectionNFT.mintToken(alice, {Substrate: 
charlie.address}); + + await collectionNFT.addToAllowList(alice, {Substrate: charlie.address}); + await collectionNFT.addToAllowList(alice, targetToken.nestingAccount()); + + await collectionRFT.setPermissions(alice, {access: 'AllowList', mintMode: true}); + await collectionRFT.addToAllowList(alice, {Substrate: charlie.address}); + await collectionRFT.addToAllowList(alice, targetToken.nestingAccount()); + + // Create an immediately nested token + const nestedToken = await collectionRFT.mintToken(charlie, 5n, targetToken.nestingAccount()); + expect(await nestedToken.getBalance(targetToken.nestingAccount())).to.be.equal(5n); + + // Create a token to be nested and nest + const newToken = await collectionRFT.mintToken(charlie, 5n); + await newToken.transfer(charlie, targetToken.nestingAccount(), 2n); + expect(await newToken.getBalance(targetToken.nestingAccount())).to.be.equal(2n); }); - it('ReFungible: allows an Owner to nest/unnest their token (Restricted nesting)', async () => { - await usingApi(async api => { - const collectionNFT = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - const targetToken = await createItemExpectSuccess(alice, collectionNFT, 'NFT', {Substrate: alice.address}); - const targetAddress = {Ethereum: tokenIdToAddress(collectionNFT, targetToken)}; + itSub.ifWithPallets('ReFungible: allows an Owner to nest/unnest their token (Restricted nesting)', [Pallets.ReFungible], async ({helper}) => { + const collectionNFT = await helper.nft.mintCollection(alice); + const collectionRFT = await helper.rft.mintCollection(alice); + const targetToken = await collectionNFT.mintToken(alice, {Substrate: charlie.address}); - const collectionRFT = await createCollectionExpectSuccess({mode: {type: 'ReFungible'}}); + await collectionNFT.setPermissions(alice, {access: 'AllowList', mintMode: true, nesting: {tokenOwner: true, restricted:[collectionRFT.collectionId]}}); + await collectionNFT.addToAllowList(alice, {Substrate: charlie.address}); + await collectionNFT.addToAllowList(alice, targetToken.nestingAccount()); - await setCollectionPermissionsExpectSuccess(alice, collectionNFT, {nesting: {tokenOwner: true, restricted:[collectionRFT]}}); + await collectionRFT.setPermissions(alice, {access: 'AllowList', mintMode: true}); + await collectionRFT.addToAllowList(alice, {Substrate: charlie.address}); + await collectionRFT.addToAllowList(alice, targetToken.nestingAccount()); - // Create a nested token - await expect(executeTransaction(api, alice, api.tx.unique.createItem( - collectionRFT, - targetAddress, - {ReFungible: {pieces: 100}}, - ))).to.not.be.rejected; + // Create an immediately nested token + const nestedToken = await collectionRFT.mintToken(charlie, 5n, targetToken.nestingAccount()); + expect(await nestedToken.getBalance(targetToken.nestingAccount())).to.be.equal(5n); - // Nest a new token - const newToken = await createItemExpectSuccess(alice, collectionRFT, 'ReFungible'); - await transferExpectSuccess(collectionRFT, newToken, alice, targetAddress, 100, 'ReFungible'); - }); + // Create a token to be nested and nest + const newToken = await collectionRFT.mintToken(charlie, 5n); + await newToken.transfer(charlie, targetToken.nestingAccount(), 2n); + expect(await newToken.getBalance(targetToken.nestingAccount())).to.be.equal(2n); }); }); -describe('Negative Test: Nesting', async() => { +describe('Negative Test: Nesting', () => { + let alice: IKeyringPair; + let bob: IKeyringPair; + before(async () => { - await usingApi(async (_, privateKeyWrapper) => { - alice = 
privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob] = await helper.arrange.createAccounts([100n, 50n], donor); }); }); - it('Disallows excessive token nesting', async () => { - await usingApi(async api => { - const collection = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await setCollectionPermissionsExpectSuccess(alice, collection, {nesting: {tokenOwner: true}}); - const targetToken = await createItemExpectSuccess(alice, collection, 'NFT'); - - const maxNestingLevel = 5; - let prevToken = targetToken; + itSub('Disallows excessive token nesting', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {permissions: {nesting: {tokenOwner: true}}}); + let token = await collection.mintToken(alice); - // Create a nested-token matryoshka - for (let i = 0; i < maxNestingLevel; i++) { - const nestedToken = await createItemExpectSuccess( - alice, - collection, - 'NFT', - {Ethereum: tokenIdToAddress(collection, prevToken)}, - ); + const maxNestingLevel = 5; - prevToken = nestedToken; - } + // Create a nested-token matryoshka + for (let i = 0; i < maxNestingLevel; i++) { + token = await collection.mintToken(alice, token.nestingAccount()); + } - // The nesting depth is limited by `maxNestingLevel` - await expect(executeTransaction(api, alice, api.tx.unique.createItem( - collection, - {Ethereum: tokenIdToAddress(collection, prevToken)}, - {nft: {}} as any, - )), 'while creating nested token').to.be.rejectedWith(/^structure\.DepthLimit$/); - - expect(await getTopmostTokenOwner(api, collection, prevToken)).to.be.deep.equal({Substrate: alice.address}); - }); + // The nesting depth is limited by `maxNestingLevel` + await expect(collection.mintToken(alice, token.nestingAccount())) + .to.be.rejectedWith(/structure\.DepthLimit/); + expect(await token.getTopmostOwner()).to.be.deep.equal({Substrate: alice.address}); + expect(await token.getChildren()).to.be.length(0); }); // ---------- Admin ------------ - it('Admin (NFT): disallows an Admin to operate nesting when only TokenOwner is allowed', async () => { - await usingApi(async api => { - const collection = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await setCollectionPermissionsExpectSuccess(alice, collection, {nesting: {tokenOwner: true}}); - await addCollectionAdminExpectSuccess(alice, collection, bob.address); - const targetToken = await createItemExpectSuccess(alice, collection, 'NFT'); - - // Try to create a nested token as collection admin when it's disallowed - await expect(executeTransaction(api, bob, api.tx.unique.createItem( - collection, - {Ethereum: tokenIdToAddress(collection, targetToken)}, - {nft: {}} as any, - )), 'while creating nested token').to.be.rejectedWith(/common\.UserIsNotAllowedToNest/); - - // Try to create and nest a token in the wrong collection - const newToken = await createItemExpectSuccess(bob, collection, 'NFT'); - await expect(executeTransaction( - api, - bob, - api.tx.unique.transfer({Ethereum: tokenIdToAddress(collection, targetToken)}, collection, newToken, 1), - ), 'while nesting new token').to.be.rejectedWith(/common\.UserIsNotAllowedToNest/); - expect(await getTokenOwner(api, collection, newToken)).to.be.deep.equal({Substrate: bob.address}); - }); - }); + itSub('Admin (NFT): disallows an Admin to operate nesting when only TokenOwner is allowed', async ({helper}) => { + const collection = await 
helper.nft.mintCollection(alice, {permissions: {nesting: {tokenOwner: true}}}); + await collection.addAdmin(alice, {Substrate: bob.address}); + const targetToken = await collection.mintToken(alice); - it('Admin (NFT): disallows a Token Owner to operate nesting when only Admin is allowed', async () => { - await usingApi(async api => { - const collection = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await setCollectionPermissionsExpectSuccess(alice, collection, {nesting: {collectionAdmin: true}}); - await addToAllowListExpectSuccess(alice, collection, bob.address); - await enableAllowListExpectSuccess(alice, collection); - await enablePublicMintingExpectSuccess(alice, collection); - const targetToken = await createItemExpectSuccess(alice, collection, 'NFT'); - - // Try to create a nested token as collection admin when it's disallowed - await expect(executeTransaction(api, bob, api.tx.unique.createItem( - collection, - {Ethereum: tokenIdToAddress(collection, targetToken)}, - {nft: {}} as any, - )), 'while creating nested token').to.be.rejectedWith(/common\.AddressNotInAllowlist/); - - // Try to create and nest a token in the wrong collection - const newToken = await createItemExpectSuccess(bob, collection, 'NFT'); - await expect(executeTransaction( - api, - bob, - api.tx.unique.transfer({Ethereum: tokenIdToAddress(collection, targetToken)}, collection, newToken, 1), - ), 'while nesting new token').to.be.rejectedWith(/common\.AddressNotInAllowlist/); - expect(await getTokenOwner(api, collection, newToken)).to.be.deep.equal({Substrate: bob.address}); - }); - }); + // Try to create an immediately nested token as collection admin when it's disallowed + await expect(collection.mintToken(bob, targetToken.nestingAccount())) + .to.be.rejectedWith(/common\.UserIsNotAllowedToNest/); - it('Admin (NFT): disallows an Admin to nest and unnest someone else\'s token', async () => { - await usingApi(async api => { - const collection = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await setCollectionLimitsExpectSuccess(alice, collection, {ownerCanTransfer: true}); - await setCollectionPermissionsExpectSuccess(alice, collection, {nesting: {collectionAdmin: true}}); - - await addToAllowListExpectSuccess(alice, collection, bob.address); - await enableAllowListExpectSuccess(alice, collection); - await enablePublicMintingExpectSuccess(alice, collection); - - // Create a token to attempt to be nested into - const targetToken = await createItemExpectSuccess(bob, collection, 'NFT'); - const targetAddress = {Ethereum: tokenIdToAddress(collection, targetToken).toLowerCase()}; - - // Try to nest somebody else's token - const newToken = await createItemExpectSuccess(bob, collection, 'NFT'); - await expect(executeTransaction( - api, - alice, - api.tx.unique.transferFrom(targetAddress, {Substrate: bob.address}, collection, newToken, 1), - ), 'while nesting another\'s token token').to.be.rejectedWith(/common\.AddressNotInAllowlist/); - expect(await getTokenOwner(api, collection, newToken)).to.be.deep.equal({Substrate: bob.address}); - - // Nest a token as admin and try to unnest it, now belonging to someone else - const nestedToken = await createItemExpectSuccess(alice, collection, 'NFT', targetAddress); - await expect(executeTransaction( - api, - alice, - api.tx.unique.transferFrom(targetAddress, normalizeAccountId(alice), collection, nestedToken, 1), - ), 'while unnesting another\'s token').to.be.rejectedWith(/common\.AddressNotInAllowlist/); - expect(await getTokenOwner(api, collection, 
nestedToken)).to.be.deep.equal(targetAddress); - expect(await getTopmostTokenOwner(api, collection, nestedToken)).to.be.deep.equal({Substrate: bob.address}); - }); + // Try to create a token to be nested and nest + const newToken = await collection.mintToken(bob); + await expect(newToken.nest(bob, targetToken)) + .to.be.rejectedWith(/common\.UserIsNotAllowedToNest/); + + expect(await targetToken.getChildren()).to.be.length(0); + expect(await newToken.getOwner()).to.be.deep.equal({Substrate: bob.address}); }); - it('Admin (NFT): disallows an Admin to nest a token from an unlisted collection (Restricted nesting)', async () => { - await usingApi(async api => { - const collectionA = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - const collectionB = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await setCollectionPermissionsExpectSuccess(alice, collectionA, {nesting: {collectionAdmin: true, restricted:[collectionA]}}); + itSub('Admin (NFT): disallows a Token Owner to operate nesting when only Admin is allowed', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {permissions: {access: 'AllowList', mintMode: true, nesting: {collectionAdmin: true}}}); + const targetToken = await collection.mintToken(alice, {Substrate: bob.address}); + await collection.addToAllowList(alice, {Substrate: bob.address}); + await collection.addToAllowList(alice, targetToken.nestingAccount()); - // Create a token to attempt to be nested into - const targetToken = await createItemExpectSuccess(alice, collectionA, 'NFT'); + // Try to create a nested token as token owner when it's disallowed + await expect(collection.mintToken(bob, targetToken.nestingAccount())) + .to.be.rejectedWith(/common\.UserIsNotAllowedToNest/); - // Try to create and nest a token in the wrong collection - const newToken = await createItemExpectSuccess(alice, collectionB, 'NFT'); - await expect(executeTransaction( - api, - alice, - api.tx.unique.transfer({Ethereum: tokenIdToAddress(collectionA, targetToken)}, collectionB, newToken, 1), - ), 'while nesting a foreign token').to.be.rejectedWith(/common\.SourceCollectionIsNotAllowedToNest/); - expect(await getTokenOwner(api, collectionB, newToken)).to.be.deep.equal({Substrate: alice.address}); - }); + // Try to create a token to be nested and nest + const newToken = await collection.mintToken(bob); + await expect(newToken.nest(bob, targetToken)) + .to.be.rejectedWith(/common\.UserIsNotAllowedToNest/); + + expect(await targetToken.getChildren()).to.be.length(0); + expect(await newToken.getOwner()).to.be.deep.equal({Substrate: bob.address}); }); - // ---------- Non-Fungible ---------- + itSub('Admin (NFT): disallows an Admin to unnest someone else\'s token', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {limits: {ownerCanTransfer: true}, permissions: {access: 'AllowList', mintMode: true, nesting: {collectionAdmin: true}}}); + const targetToken = await collection.mintToken(alice, {Substrate: bob.address}); + await collection.addToAllowList(alice, {Substrate: bob.address}); + await collection.addToAllowList(alice, targetToken.nestingAccount()); - it('NFT: disallows to nest token if nesting is disabled', async () => { - await usingApi(async api => { - const collection = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await setCollectionPermissionsExpectSuccess(alice, collection, {nesting: {}}); - const targetToken = await
createItemExpectSuccess(alice, collection, 'NFT'); - - // Try to create a nested token - await expect(executeTransaction(api, alice, api.tx.unique.createItem( - collection, - {Ethereum: tokenIdToAddress(collection, targetToken)}, - {nft: {}} as any, - )), 'while creating nested token').to.be.rejectedWith(/^common\.UserIsNotAllowedToNest$/); - - // Create a token to be nested - const newToken = await createItemExpectSuccess(alice, collection, 'NFT'); - // Try to nest - await expect(executeTransaction(api, alice, api.tx.unique.transfer({Ethereum: tokenIdToAddress(collection, targetToken)}, collection, newToken, 1)), 'while nesting new token').to.be.rejectedWith(/common\.UserIsNotAllowedToNest/); - expect(await getTopmostTokenOwner(api, collection, newToken)).to.be.deep.equal({Substrate: alice.address}); - expect(await getTokenOwner(api, collection, newToken)).to.be.deep.equal({Substrate: alice.address}); - }); - }); + // Try to nest somebody else's token + const newToken = await collection.mintToken(bob); + await expect(newToken.nest(alice, targetToken)) + .to.be.rejectedWith(/common\.NoPermission/); + + // Try to unnest a token belonging to someone else as collection admin + const nestedToken = await collection.mintToken(alice, targetToken.nestingAccount()); + await expect(nestedToken.unnest(alice, targetToken, {Substrate: bob.address})) + .to.be.rejectedWith(/common\.AddressNotInAllowlist/); - it('NFT: disallows a non-Owner to nest someone else\'s token', async () => { - await usingApi(async api => { - const collection = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await setCollectionPermissionsExpectSuccess(alice, collection, {nesting: {tokenOwner: true}}); + expect(await targetToken.getChildren()).to.be.length(1); + expect(await nestedToken.getTopmostOwner()).to.be.deep.equal({Substrate: bob.address}); + expect(await nestedToken.getOwner()).to.be.deep.equal(targetToken.nestingAccount().toLowerCase()); + }); - await addToAllowListExpectSuccess(alice, collection, bob.address); - await enableAllowListExpectSuccess(alice, collection); - await enablePublicMintingExpectSuccess(alice, collection); + itSub('Admin (NFT): disallows an Admin to nest a token from an unlisted collection (Restricted nesting)', async ({helper}) => { + const collectionA = await helper.nft.mintCollection(alice); + const collectionB = await helper.nft.mintCollection(alice); + await collectionA.setPermissions(alice, {nesting: {collectionAdmin: true, restricted: [collectionA.collectionId]}}); + const targetToken = await collectionA.mintToken(alice); - // Create a token to attempt to be nested into - const targetToken = await createItemExpectSuccess(bob, collection, 'NFT'); + // Try to create a nested token from another collection + await expect(collectionB.mintToken(alice, targetToken.nestingAccount())) + .to.be.rejectedWith(/common\.SourceCollectionIsNotAllowedToNest/); - // Try to create a nested token in the wrong collection - await expect(executeTransaction(api, alice, api.tx.unique.createItem( - collection, - {Ethereum: tokenIdToAddress(collection, targetToken)}, - {nft: {}} as any, - )), 'while creating nested token').to.be.rejectedWith(/common\.UserIsNotAllowedToNest/); + // Create a token in another collection yet to be nested and try to nest + const newToken = await collectionB.mintToken(alice); + await expect(newToken.nest(alice, targetToken)) + .to.be.rejectedWith(/common\.SourceCollectionIsNotAllowedToNest/); - // Try to create and nest a token in the wrong collection - const newToken = await 
createItemExpectSuccess(alice, collection, 'NFT'); - await expect(executeTransaction(api, alice, api.tx.unique.transfer({Ethereum: tokenIdToAddress(collection, targetToken)}, collection, newToken, 1)), 'while nesting new token').to.be.rejectedWith(/common\.AddressNotInAllowlist/); - expect(await getTokenOwner(api, collection, newToken)).to.be.deep.equal({Substrate: alice.address}); - }); + expect(await targetToken.getChildren()).to.be.length(0); + expect(await newToken.getOwner()).to.be.deep.equal({Substrate: alice.address}); }); - it('NFT: disallows a non-Owner to nest someone else\'s token (Restricted nesting)', async () => { - await usingApi(async api => { - const collection = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await setCollectionPermissionsExpectSuccess(alice, collection, {nesting: {tokenOwner: true, restricted:[collection]}}); + // ---------- Non-Fungible ---------- - await addToAllowListExpectSuccess(alice, collection, bob.address); - await enableAllowListExpectSuccess(alice, collection); - await enablePublicMintingExpectSuccess(alice, collection); + itSub('NFT: disallows to nest token if nesting is disabled', async ({helper}) => { + // Collection is implicitly not allowed nesting at creation + const collection = await helper.nft.mintCollection(alice); + const targetToken = await collection.mintToken(alice); - // Create a token to attempt to be nested into - const targetToken = await createItemExpectSuccess(bob, collection, 'NFT'); + // Try to create a nested token as token owner when it's disallowed + await expect(collection.mintToken(alice, targetToken.nestingAccount())) + .to.be.rejectedWith(/common\.UserIsNotAllowedToNest/); - // Try to create a nested token in the wrong collection - await expect(executeTransaction(api, alice, api.tx.unique.createItem( - collection, - {Ethereum: tokenIdToAddress(collection, targetToken)}, - {nft: {}} as any, - )), 'while creating nested token').to.be.rejectedWith(/common\.UserIsNotAllowedToNest/); + // Try to create a token to be nested and nest + const newToken = await collection.mintToken(alice); + await expect(newToken.nest(alice, targetToken)) + .to.be.rejectedWith(/common\.UserIsNotAllowedToNest/); - // Try to create and nest a token in the wrong collection - const newToken = await createItemExpectSuccess(alice, collection, 'NFT'); - await expect(executeTransaction(api, alice, api.tx.unique.transfer({Ethereum: tokenIdToAddress(collection, targetToken)}, collection, newToken, 1)), 'while nesting new token').to.be.rejectedWith(/common\.AddressNotInAllowlist/); - expect(await getTokenOwner(api, collection, newToken)).to.be.deep.equal({Substrate: alice.address}); - }); + expect(await targetToken.getChildren()).to.be.length(0); + expect(await newToken.getOwner()).to.be.deep.equal({Substrate: alice.address}); }); - it('NFT: disallows to nest token in an unlisted collection', async () => { - await usingApi(async api => { - const collection = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await setCollectionPermissionsExpectSuccess(alice, collection, {nesting: {tokenOwner: true, restricted:[]}}); + itSub('NFT: disallows a non-Owner to nest someone else\'s token', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice); + const targetToken = await collection.mintToken(alice); - // Create a token to attempt to be nested into - const targetToken = await createItemExpectSuccess(alice, collection, 'NFT'); + await collection.setPermissions(alice, {access: 'AllowList', mintMode: true, 
nesting: {tokenOwner: true}}); + await collection.addToAllowList(alice, {Substrate: bob.address}); + await collection.addToAllowList(alice, targetToken.nestingAccount()); - // Try to create a nested token in the wrong collection - await expect(executeTransaction(api, alice, api.tx.unique.createItem( - collection, - {Ethereum: tokenIdToAddress(collection, targetToken)}, - {nft: {}} as any, - )), 'while creating nested token').to.be.rejectedWith(/common\.SourceCollectionIsNotAllowedToNest/); + // Try to create a token to be nested and nest + const newToken = await collection.mintToken(alice); + await expect(newToken.nest(bob, targetToken)).to.be.rejectedWith(/common\.NoPermission/); - // Try to create and nest a token in the wrong collection - const newToken = await createItemExpectSuccess(alice, collection, 'NFT'); - await expect(executeTransaction(api, alice, api.tx.unique.transfer({Ethereum: tokenIdToAddress(collection, targetToken)}, collection, newToken, 1)), 'while nesting new token').to.be.rejectedWith(/common\.SourceCollectionIsNotAllowedToNest/); - expect(await getTokenOwner(api, collection, newToken)).to.be.deep.equal({Substrate: alice.address}); - }); + expect(await targetToken.getChildren()).to.be.length(0); + expect(await newToken.getOwner()).to.be.deep.equal({Substrate: alice.address}); }); - // ---------- Fungible ---------- + itSub('NFT: disallows a non-Owner to nest someone else\'s token (Restricted nesting)', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice); + const targetToken = await collection.mintToken(alice); - it('Fungible: disallows to nest token if nesting is disabled', async () => { - await usingApi(async api => { - const collectionNFT = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await setCollectionPermissionsExpectSuccess(alice, collectionNFT, {nesting: {}}); - const targetToken = await createItemExpectSuccess(alice, collectionNFT, 'NFT'); - const targetAddress = {Ethereum: tokenIdToAddress(collectionNFT, targetToken)}; - - const collectionFT = await createCollectionExpectSuccess({mode: {type: 'Fungible', decimalPoints: 0}}); - - // Try to create a nested token - await expect(executeTransaction(api, alice, api.tx.unique.createItem( - collectionFT, - targetAddress, - {Fungible: {Value: 10}}, - )), 'while creating nested token').to.be.rejectedWith(/^common\.UserIsNotAllowedToNest$/); - - // Create a token to be nested - const newToken = await createItemExpectSuccess(alice, collectionFT, 'Fungible'); - // Try to nest - await expect(executeTransaction(api, alice, api.tx.unique.transfer(targetAddress, collectionFT, newToken, 1)), 'while nesting new token').to.be.rejectedWith(/common\.UserIsNotAllowedToNest/); - - // Create another token to be nested - const newToken2 = await createItemExpectSuccess(alice, collectionFT, 'Fungible'); - // Try to nest inside a fungible token - await expect(executeTransaction(api, alice, api.tx.unique.transfer({Ethereum: tokenIdToAddress(collectionFT, newToken)}, collectionFT, newToken2, 1)), 'while nesting new token inside fungible').to.be.rejectedWith(/fungible\.FungibleDisallowsNesting/); - }); - }); + await collection.setPermissions(alice, {access: 'AllowList', mintMode: true, nesting: {tokenOwner: true}}); + await collection.addToAllowList(alice, {Substrate: bob.address}); + await collection.addToAllowList(alice, targetToken.nestingAccount()); - it('Fungible: disallows a non-Owner to nest someone else\'s token', async () => { - await usingApi(async api => { - const collectionNFT = 
await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await setCollectionPermissionsExpectSuccess(alice, collectionNFT, {nesting: {tokenOwner: true}}); + const collectionB = await helper.nft.mintCollection(alice, {permissions: {access: 'AllowList', mintMode: true}}); + await collectionB.addToAllowList(alice, {Substrate: bob.address}); + await collectionB.addToAllowList(alice, targetToken.nestingAccount()); - await addToAllowListExpectSuccess(alice, collectionNFT, bob.address); - await enableAllowListExpectSuccess(alice, collectionNFT); - await enablePublicMintingExpectSuccess(alice, collectionNFT); + // Try to create a token to be nested and nest + const newToken = await collectionB.mintToken(alice); + await expect(newToken.nest(bob, targetToken)).to.be.rejectedWith(/common\.NoPermission/); - // Create a token to attempt to be nested into - const targetToken = await createItemExpectSuccess(bob, collectionNFT, 'NFT'); - const targetAddress = {Ethereum: tokenIdToAddress(collectionNFT, targetToken)}; + expect(await targetToken.getChildren()).to.be.length(0); + expect(await newToken.getOwner()).to.be.deep.equal({Substrate: alice.address}); + }); - const collectionFT = await createCollectionExpectSuccess({mode: {type: 'Fungible', decimalPoints: 0}}); + itSub('NFT: disallows to nest token in an unlisted collection', async ({helper}) => { + // Create collection with restricted nesting -- even self is not allowed + const collection = await helper.nft.mintCollection(alice, {permissions: {access: 'AllowList', mintMode: true, nesting: {tokenOwner: true, restricted: []}}}); + const targetToken = await collection.mintToken(alice, {Substrate: bob.address}); - // Try to create a nested token in the wrong collection - await expect(executeTransaction(api, alice, api.tx.unique.createItem( - collectionFT, - targetAddress, - {Fungible: {Value: 10}}, - )), 'while creating nested token').to.be.rejectedWith(/common\.UserIsNotAllowedToNest/); + await collection.addToAllowList(alice, {Substrate: bob.address}); + await collection.addToAllowList(alice, targetToken.nestingAccount()); - // Try to create and nest a token in the wrong collection - const newToken = await createItemExpectSuccess(alice, collectionFT, 'Fungible'); - await expect(executeTransaction(api, alice, api.tx.unique.transfer(targetAddress, collectionFT, newToken, 1)), 'while nesting new token').to.be.rejectedWith(/common\.UserIsNotAllowedToNest/); - }); + // Try to mint in own collection after allowlisting the accounts + await expect(collection.mintToken(bob, targetToken.nestingAccount())) + .to.be.rejectedWith(/common\.SourceCollectionIsNotAllowedToNest/); }); - it('Fungible: disallows a non-Owner to nest someone else\'s token (Restricted nesting)', async () => { - await usingApi(async api => { - const collectionNFT = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await addToAllowListExpectSuccess(alice, collectionNFT, bob.address); - await enableAllowListExpectSuccess(alice, collectionNFT); - await enablePublicMintingExpectSuccess(alice, collectionNFT); + // ---------- Fungible ---------- + + itSub('Fungible: disallows to nest token if nesting is disabled', async ({helper}) => { + const collectionNFT = await helper.nft.mintCollection(alice); + const collectionFT = await helper.ft.mintCollection(alice); + const targetToken = await collectionNFT.mintToken(alice); - // Create a token to attempt to be nested into - const targetToken = await createItemExpectSuccess(bob, collectionNFT, 'NFT'); - const targetAddress = {Ethereum: 
tokenIdToAddress(collectionNFT, targetToken)}; + // Try to create an immediately nested token + await expect(collectionFT.mint(alice, 5n, targetToken.nestingAccount())) + .to.be.rejectedWith(/common\.UserIsNotAllowedToNest/); - const collectionFT = await createCollectionExpectSuccess({mode: {type: 'Fungible', decimalPoints: 0}}); - await setCollectionPermissionsExpectSuccess(alice, collectionNFT, {nesting: {tokenOwner: true, restricted:[collectionFT]}}); + // Try to create a token to be nested and nest + await collectionFT.mint(alice, 5n); + await expect(collectionFT.transfer(alice, targetToken.nestingAccount(), 2n)) + .to.be.rejectedWith(/common\.UserIsNotAllowedToNest/); + expect(await collectionFT.getBalance({Substrate: alice.address})).to.be.equal(5n); + }); - // Try to create a nested token in the wrong collection - await expect(executeTransaction(api, alice, api.tx.unique.createItem( - collectionFT, - targetAddress, - {Fungible: {Value: 10}}, - )), 'while creating nested token').to.be.rejectedWith(/common\.UserIsNotAllowedToNest/); + itSub('Fungible: disallows a non-Owner to unnest someone else\'s token', async ({helper}) => { + const collectionNFT = await helper.nft.mintCollection(alice, {permissions: {nesting: {collectionAdmin: true, tokenOwner: true}}}); + const collectionFT = await helper.ft.mintCollection(alice); + const targetToken = await collectionNFT.mintToken(alice, {Substrate: bob.address}); - // Try to create and nest a token in the wrong collection - const newToken = await createItemExpectSuccess(alice, collectionFT, 'Fungible'); - await expect(executeTransaction(api, alice, api.tx.unique.transfer(targetAddress, collectionFT, newToken, 1)), 'while nesting new token').to.be.rejectedWith(/common\.UserIsNotAllowedToNest/); - }); + // Nest some tokens as Alice into Bob's token + await collectionFT.mint(alice, 5n, targetToken.nestingAccount()); + + // Try to pull it out + await expect(collectionFT.transferFrom(alice, targetToken.nestingAccount(), {Substrate: bob.address}, 1n)) + .to.be.rejectedWith(/common\.ApprovedValueTooLow/); + expect(await collectionFT.getBalance(targetToken.nestingAccount())).to.be.equal(5n); }); - it('Fungible: disallows to nest token in an unlisted collection', async () => { - await usingApi(async api => { - const collectionNFT = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await setCollectionPermissionsExpectSuccess(alice, collectionNFT, {nesting: {tokenOwner: true, restricted:[]}}); + itSub('Fungible: disallows a non-Owner to unnest someone else\'s token (Restricted nesting)', async ({helper}) => { + const collectionNFT = await helper.nft.mintCollection(alice); + const collectionFT = await helper.ft.mintCollection(alice); + const targetToken = await collectionNFT.mintToken(alice, {Substrate: bob.address}); - // Create a token to attempt to be nested into - const targetToken = await createItemExpectSuccess(alice, collectionNFT, 'NFT'); - const targetAddress = {Ethereum: tokenIdToAddress(collectionNFT, targetToken)}; + await collectionNFT.setPermissions(alice, {nesting: {collectionAdmin: true, tokenOwner: true, restricted: [collectionFT.collectionId]}}); - const collectionFT = await createCollectionExpectSuccess({mode: {type: 'Fungible', decimalPoints: 0}}); + // Nest some tokens as Alice into Bob's token + await collectionFT.mint(alice, 5n, targetToken.nestingAccount()); - // Try to create a nested token in the wrong collection - await expect(executeTransaction(api, alice, api.tx.unique.createItem( - collectionFT, - targetAddress, - 
{Fungible: {Value: 10}}, - )), 'while creating a nested token').to.be.rejectedWith(/common\.SourceCollectionIsNotAllowedToNest/); + // Try to pull it out as Alice still + await expect(collectionFT.transferFrom(alice, targetToken.nestingAccount(), {Substrate: bob.address}, 1n)) + .to.be.rejectedWith(/common\.ApprovedValueTooLow/); + expect(await collectionFT.getBalance(targetToken.nestingAccount())).to.be.equal(5n); + }); - // Try to create and nest a token in the wrong collection - const newToken = await createItemExpectSuccess(alice, collectionFT, 'Fungible'); - await expect(executeTransaction(api, alice, api.tx.unique.transfer(targetAddress, collectionFT, newToken, 1)), 'while nesting new token').to.be.rejectedWith(/common\.SourceCollectionIsNotAllowedToNest/); - }); + itSub('Fungible: disallows to nest token in an unlisted collection', async ({helper}) => { + const collectionNFT = await helper.nft.mintCollection(alice, {permissions: {nesting: {collectionAdmin: true, tokenOwner: true, restricted: []}}}); + const collectionFT = await helper.ft.mintCollection(alice); + const targetToken = await collectionNFT.mintToken(alice); + + // Try to mint an immediately nested token + await expect(collectionFT.mint(alice, 5n, targetToken.nestingAccount())) + .to.be.rejectedWith(/common\.SourceCollectionIsNotAllowedToNest/); + + // Mint a token and try to nest it + await collectionFT.mint(alice, 5n); + await expect(collectionFT.transfer(alice, targetToken.nestingAccount(), 1n)) + .to.be.rejectedWith(/common\.SourceCollectionIsNotAllowedToNest/); + + expect(await collectionFT.getBalance(targetToken.nestingAccount())).to.be.equal(0n); + expect(await collectionFT.getBalance({Substrate: alice.address})).to.be.equal(5n); }); // ---------- Re-Fungible ---------- - it('ReFungible: disallows to nest token if nesting is disabled', async () => { - await usingApi(async api => { - const collectionNFT = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await setCollectionPermissionsExpectSuccess(alice, collectionNFT, {nesting: {}}); - const targetToken = await createItemExpectSuccess(alice, collectionNFT, 'NFT'); - const targetAddress = {Ethereum: tokenIdToAddress(collectionNFT, targetToken)}; - - const collectionRFT = await createCollectionExpectSuccess({mode: {type: 'ReFungible'}}); - - // Create a nested token - await expect(executeTransaction(api, alice, api.tx.unique.createItem( - collectionRFT, - targetAddress, - {ReFungible: {pieces: 100}}, - )), 'while creating a nested token').to.be.rejectedWith(/^common\.UserIsNotAllowedToNest$/); - - // Create a token to be nested - const newToken = await createItemExpectSuccess(alice, collectionRFT, 'ReFungible'); - // Try to nest - await transferExpectFailure(collectionRFT, newToken, alice, targetAddress, 100); - // Try to nest - await expect(executeTransaction(api, alice, api.tx.unique.transfer(targetAddress, collectionRFT, newToken, 1)), 'while nesting new token').to.be.rejectedWith(/common\.UserIsNotAllowedToNest/); - - // Create another token to be nested - const newToken2 = await createItemExpectSuccess(alice, collectionRFT, 'ReFungible'); - // Try to nest inside a fungible token - await expect(executeTransaction(api, alice, api.tx.unique.transfer({Ethereum: tokenIdToAddress(collectionRFT, newToken)}, collectionRFT, newToken2, 1)), 'while nesting new token inside refungible').to.be.rejectedWith(/refungible\.RefungibleDisallowsNesting/); - }); + itSub.ifWithPallets('ReFungible: disallows to nest token if nesting is disabled', [Pallets.ReFungible], 
async ({helper}) => { + const collectionNFT = await helper.nft.mintCollection(alice); + const collectionRFT = await helper.rft.mintCollection(alice); + const targetToken = await collectionNFT.mintToken(alice); + + // Try to create an immediately nested token + await expect(collectionRFT.mintToken(alice, 5n, targetToken.nestingAccount())) + .to.be.rejectedWith(/common\.UserIsNotAllowedToNest/); + + // Try to create a token to be nested and nest + const token = await collectionRFT.mintToken(alice, 5n); + await expect(token.transfer(alice, targetToken.nestingAccount(), 2n)) + .to.be.rejectedWith(/common\.UserIsNotAllowedToNest/); + expect(await token.getBalance({Substrate: alice.address})).to.be.equal(5n); }); - it('ReFungible: disallows a non-Owner to nest someone else\'s token', async () => { - await usingApi(async api => { - const collectionNFT = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await setCollectionPermissionsExpectSuccess(alice, collectionNFT, {nesting: {tokenOwner: true}}); + itSub.ifWithPallets('ReFungible: disallows a non-Owner to nest someone else\'s token', [Pallets.ReFungible], async ({helper}) => { + const collectionNFT = await helper.nft.mintCollection(alice); + const collectionRFT = await helper.rft.mintCollection(alice); + const targetToken = await collectionNFT.mintToken(alice); - await addToAllowListExpectSuccess(alice, collectionNFT, bob.address); - await enableAllowListExpectSuccess(alice, collectionNFT); - await enablePublicMintingExpectSuccess(alice, collectionNFT); + await collectionNFT.setPermissions(alice, {access: 'AllowList', mintMode: true, nesting: {tokenOwner: true}}); + await collectionNFT.addToAllowList(alice, {Substrate: bob.address}); + await collectionNFT.addToAllowList(alice, targetToken.nestingAccount()); - // Create a token to attempt to be nested into - const targetToken = await createItemExpectSuccess(bob, collectionNFT, 'NFT'); - const targetAddress = {Ethereum: tokenIdToAddress(collectionNFT, targetToken)}; + // Try to create a token to be nested and nest + const newToken = await collectionRFT.mintToken(alice); + await expect(newToken.transfer(bob, targetToken.nestingAccount())).to.be.rejectedWith(/common\.TokenValueTooLow/); - const collectionRFT = await createCollectionExpectSuccess({mode: {type: 'ReFungible'}}); + expect(await targetToken.getChildren()).to.be.length(0); + expect(await newToken.getBalance({Substrate: alice.address})).to.be.equal(1n); - // Try to create a nested token in the wrong collection - await expect(executeTransaction(api, alice, api.tx.unique.createItem( - collectionRFT, - targetAddress, - {ReFungible: {pieces: 100}}, - )), 'while creating a nested token').to.be.rejectedWith(/common\.UserIsNotAllowedToNest/); + // Nest some tokens as Alice into Bob's token + await newToken.transfer(alice, targetToken.nestingAccount()); - // Try to create and nest a token in the wrong collection - const newToken = await createItemExpectSuccess(alice, collectionRFT, 'ReFungible'); - await expect(executeTransaction(api, alice, api.tx.unique.transfer(targetAddress, collectionRFT, newToken, 1)), 'while nesting new token').to.be.rejectedWith(/common\.UserIsNotAllowedToNest/); - }); + // Try to pull it out + await expect(newToken.transferFrom(bob, targetToken.nestingAccount(), {Substrate: alice.address}, 1n)) + .to.be.rejectedWith(/common\.ApprovedValueTooLow/); + expect(await newToken.getBalance(targetToken.nestingAccount())).to.be.equal(1n); }); - it('ReFungible: disallows a non-Owner to nest someone else\'s token 
(Restricted nesting)', async () => { - await usingApi(async api => { - const collectionNFT = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await addToAllowListExpectSuccess(alice, collectionNFT, bob.address); - await enableAllowListExpectSuccess(alice, collectionNFT); - await enablePublicMintingExpectSuccess(alice, collectionNFT); + itSub.ifWithPallets('ReFungible: disallows a non-Owner to nest someone else\'s token (Restricted nesting)', [Pallets.ReFungible], async ({helper}) => { + const collectionNFT = await helper.nft.mintCollection(alice); + const collectionRFT = await helper.rft.mintCollection(alice); + const targetToken = await collectionNFT.mintToken(alice); - // Create a token to attempt to be nested into - const targetToken = await createItemExpectSuccess(bob, collectionNFT, 'NFT'); - const targetAddress = {Ethereum: tokenIdToAddress(collectionNFT, targetToken)}; + await collectionNFT.setPermissions(alice, {access: 'AllowList', mintMode: true, nesting: {tokenOwner: true, restricted: [collectionRFT.collectionId]}}); + await collectionNFT.addToAllowList(alice, {Substrate: bob.address}); + await collectionNFT.addToAllowList(alice, targetToken.nestingAccount()); - const collectionRFT = await createCollectionExpectSuccess({mode: {type: 'ReFungible'}}); - await setCollectionPermissionsExpectSuccess(alice, collectionNFT, {nesting: {tokenOwner: true, restricted:[collectionRFT]}}); + // Try to create a token to be nested and nest + const newToken = await collectionRFT.mintToken(alice); + await expect(newToken.transfer(bob, targetToken.nestingAccount())).to.be.rejectedWith(/common\.TokenValueTooLow/); - // Try to create a nested token in the wrong collection - await expect(executeTransaction(api, alice, api.tx.unique.createItem( - collectionRFT, - targetAddress, - {ReFungible: {pieces: 100}}, - )), 'while creating a nested token').to.be.rejectedWith(/common\.UserIsNotAllowedToNest/); + expect(await targetToken.getChildren()).to.be.length(0); + expect(await newToken.getBalance({Substrate: alice.address})).to.be.equal(1n); - // Try to create and nest a token in the wrong collection - const newToken = await createItemExpectSuccess(alice, collectionRFT, 'ReFungible'); - await expect(executeTransaction(api, alice, api.tx.unique.transfer(targetAddress, collectionRFT, newToken, 1)), 'while nesting new token').to.be.rejectedWith(/common\.UserIsNotAllowedToNest/); - }); - }); - - it('ReFungible: disallows to nest token to an unlisted collection', async () => { - await usingApi(async api => { - const collectionNFT = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await setCollectionPermissionsExpectSuccess(alice, collectionNFT, {nesting: {tokenOwner: true, restricted:[]}}); + // Nest some tokens as Alice into Bob's token + await newToken.transfer(alice, targetToken.nestingAccount()); - // Create a token to attempt to be nested into - const targetToken = await createItemExpectSuccess(alice, collectionNFT, 'NFT'); - const targetAddress = {Ethereum: tokenIdToAddress(collectionNFT, targetToken)}; + // Try to pull it out + await expect(newToken.transferFrom(bob, targetToken.nestingAccount(), {Substrate: alice.address}, 1n)) + .to.be.rejectedWith(/common\.ApprovedValueTooLow/); + expect(await newToken.getBalance(targetToken.nestingAccount())).to.be.equal(1n); + }); - const collectionRFT = await createCollectionExpectSuccess({mode: {type: 'ReFungible'}}); + itSub.ifWithPallets('ReFungible: disallows to nest token to an unlisted collection', [Pallets.ReFungible], async 
({helper}) => { + const collectionNFT = await helper.nft.mintCollection(alice, {permissions: {nesting: {tokenOwner: true, restricted: []}}}); + const collectionRFT = await helper.rft.mintCollection(alice); + const targetToken = await collectionNFT.mintToken(alice); - // Try to create a nested token in the wrong collection - await expect(executeTransaction(api, alice, api.tx.unique.createItem( - collectionRFT, - targetAddress, - {ReFungible: {pieces: 100}}, - )), 'while creating a nested token').to.be.rejectedWith(/common\.SourceCollectionIsNotAllowedToNest/); + // Try to create an immediately nested token + await expect(collectionRFT.mintToken(alice, 5n, targetToken.nestingAccount())) + .to.be.rejectedWith(/common\.SourceCollectionIsNotAllowedToNest/); - // Try to create and nest a token in the wrong collection - const newToken = await createItemExpectSuccess(alice, collectionRFT, 'ReFungible'); - await expect(executeTransaction(api, alice, api.tx.unique.transfer(targetAddress, collectionRFT, newToken, 1)), 'while nesting new token').to.be.rejectedWith(/common\.SourceCollectionIsNotAllowedToNest/); - }); + // Try to create a token to be nested and nest + const token = await collectionRFT.mintToken(alice, 5n); + await expect(token.transfer(alice, targetToken.nestingAccount(), 2n)) + .to.be.rejectedWith(/common\.SourceCollectionIsNotAllowedToNest/); + expect(await token.getBalance({Substrate: alice.address})).to.be.equal(5n); }); }); diff --git a/tests/src/nesting/properties.test.ts b/tests/src/nesting/properties.test.ts deleted file mode 100644 index ab0a88ee65..0000000000 --- a/tests/src/nesting/properties.test.ts +++ /dev/null @@ -1,976 +0,0 @@ -import {expect} from 'chai'; -import usingApi, {executeTransaction} from '../substrate/substrate-api'; -import { - addCollectionAdminExpectSuccess, - createCollectionExpectSuccess, - setCollectionPermissionsExpectSuccess, - createItemExpectSuccess, - getCreateCollectionResult, - transferExpectSuccess, -} from '../util/helpers'; -import {IKeyringPair} from '@polkadot/types/types'; -import {tokenIdToAddress} from '../eth/util/helpers'; - -let alice: IKeyringPair; -let bob: IKeyringPair; -let charlie: IKeyringPair; - -describe('Composite Properties Test', () => { - before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - }); - }); - - it('Makes sure collectionById supplies required fields', async () => { - await usingApi(async api => { - const collectionId = await createCollectionExpectSuccess(); - - const collectionOption = await api.rpc.unique.collectionById(collectionId); - expect(collectionOption.isSome).to.be.true; - let collection = collectionOption.unwrap(); - expect(collection.tokenPropertyPermissions.toHuman()).to.be.empty; - expect(collection.properties.toHuman()).to.be.empty; - - const propertyPermissions = [ - {key: 'mindgame', permission: {collectionAdmin: true, mutable: false, tokenOwner: true}}, - {key: 'skullduggery', permission: {collectionAdmin: false, mutable: true, tokenOwner: false}}, - ]; - await expect(executeTransaction( - api, - alice, - api.tx.unique.setTokenPropertyPermissions(collectionId, propertyPermissions), - )).to.not.be.rejected; - - const collectionProperties = [ - {key: 'black_hole', value: 'LIGO'}, - {key: 'electron', value: 'come bond'}, - ]; - await expect(executeTransaction( - api, - alice, - api.tx.unique.setCollectionProperties(collectionId, collectionProperties), - )).to.not.be.rejected; - - collection = (await 
api.rpc.unique.collectionById(collectionId)).unwrap(); - expect(collection.tokenPropertyPermissions.toHuman()).to.be.deep.equal(propertyPermissions); - expect(collection.properties.toHuman()).to.be.deep.equal(collectionProperties); - }); - }); -}); - -// ---------- COLLECTION PROPERTIES - -describe('Integration Test: Collection Properties', () => { - before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - }); - }); - - it('Reads properties from a collection', async () => { - await usingApi(async api => { - const collection = await createCollectionExpectSuccess(); - const properties = (await api.query.common.collectionProperties(collection)).toJSON(); - expect(properties.map).to.be.empty; - expect(properties.consumedSpace).to.equal(0); - }); - }); - - it('Sets properties for a collection', async () => { - await usingApi(async api => { - const events = await executeTransaction(api, bob, api.tx.unique.createCollectionEx({mode: 'NFT'})); - const {collectionId} = getCreateCollectionResult(events); - - // As owner - await expect(executeTransaction( - api, - bob, - api.tx.unique.setCollectionProperties(collectionId, [{key: 'electron', value: 'come bond'}]), - )).to.not.be.rejected; - - await addCollectionAdminExpectSuccess(bob, collectionId, alice.address); - - // As administrator - await expect(executeTransaction( - api, - alice, - api.tx.unique.setCollectionProperties(collectionId, [{key: 'black_hole'}]), - )).to.not.be.rejected; - - const properties = (await api.rpc.unique.collectionProperties(collectionId, ['electron', 'black_hole'])).toHuman(); - expect(properties).to.be.deep.equal([ - {key: 'electron', value: 'come bond'}, - {key: 'black_hole', value: ''}, - ]); - }); - }); - - it('Check valid names for collection properties keys', async () => { - await usingApi(async api => { - const events = await executeTransaction(api, bob, api.tx.unique.createCollectionEx({mode: 'NFT'})); - const {collectionId} = getCreateCollectionResult(events); - - // alpha symbols - await expect(executeTransaction( - api, - bob, - api.tx.unique.setCollectionProperties(collectionId, [{key: 'alpha'}]), - )).to.not.be.rejected; - - // numeric symbols - await expect(executeTransaction( - api, - bob, - api.tx.unique.setCollectionProperties(collectionId, [{key: '123'}]), - )).to.not.be.rejected; - - // underscore symbol - await expect(executeTransaction( - api, - bob, - api.tx.unique.setCollectionProperties(collectionId, [{key: 'black_hole'}]), - )).to.not.be.rejected; - - // dash symbol - await expect(executeTransaction( - api, - bob, - api.tx.unique.setCollectionProperties(collectionId, [{key: 'semi-automatic'}]), - )).to.not.be.rejected; - - // underscore symbol - await expect(executeTransaction( - api, - bob, - api.tx.unique.setCollectionProperties(collectionId, [{key: 'build.rs'}]), - )).to.not.be.rejected; - - const propertyKeys = ['alpha', '123', 'black_hole', 'semi-automatic', 'build.rs']; - const properties = (await api.rpc.unique.collectionProperties(collectionId, propertyKeys)).toHuman(); - expect(properties).to.be.deep.equal([ - {key: 'alpha', value: ''}, - {key: '123', value: ''}, - {key: 'black_hole', value: ''}, - {key: 'semi-automatic', value: ''}, - {key: 'build.rs', value: ''}, - ]); - }); - }); - - it('Changes properties of a collection', async () => { - await usingApi(async api => { - const collection = await createCollectionExpectSuccess(); - - await expect(executeTransaction( - api, - alice, - 
api.tx.unique.setCollectionProperties(collection, [{key: 'electron', value: 'come bond'}, {key: 'black_hole'}]), - )).to.not.be.rejected; - - // Mutate the properties - await expect(executeTransaction( - api, - alice, - api.tx.unique.setCollectionProperties(collection, [{key: 'electron', value: 'bonded'}, {key: 'black_hole', value: 'LIGO'}]), - )).to.not.be.rejected; - - const properties = (await api.rpc.unique.collectionProperties(collection, ['electron', 'black_hole'])).toHuman(); - expect(properties).to.be.deep.equal([ - {key: 'electron', value: 'bonded'}, - {key: 'black_hole', value: 'LIGO'}, - ]); - }); - }); - - it('Deletes properties of a collection', async () => { - await usingApi(async api => { - const collection = await createCollectionExpectSuccess(); - - await expect(executeTransaction( - api, - alice, - api.tx.unique.setCollectionProperties(collection, [{key: 'electron', value: 'come bond'}, {key: 'black_hole', value: 'LIGO'}]), - )).to.not.be.rejected; - - await expect(executeTransaction( - api, - alice, - api.tx.unique.deleteCollectionProperties(collection, ['electron']), - )).to.not.be.rejected; - - const properties = (await api.rpc.unique.collectionProperties(collection, ['electron', 'black_hole'])).toHuman(); - expect(properties).to.be.deep.equal([ - {key: 'black_hole', value: 'LIGO'}, - ]); - }); - }); -}); - -describe('Negative Integration Test: Collection Properties', () => { - before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - }); - }); - - it('Fails to set properties in a collection if not its onwer/administrator', async () => { - await usingApi(async api => { - const collection = await createCollectionExpectSuccess(); - - await expect(executeTransaction( - api, - bob, - api.tx.unique.setCollectionProperties(collection, [{key: 'electron', value: 'come bond'}, {key: 'black_hole', value: 'LIGO'}]), - )).to.be.rejectedWith(/common\.NoPermission/); - - const properties = (await api.query.common.collectionProperties(collection)).toJSON(); - expect(properties.map).to.be.empty; - expect(properties.consumedSpace).to.equal(0); - }); - }); - - it('Fails to set properties that exceed the limits', async () => { - await usingApi(async api => { - const collection = await createCollectionExpectSuccess(); - const spaceLimit = (await api.query.common.collectionProperties(collection)).toJSON().spaceLimit as number; - - // Mute the general tx parsing error, too many bytes to process - { - console.error = () => {}; - await expect(executeTransaction( - api, - alice, - api.tx.unique.setCollectionProperties(collection, [{key: 'electron', value: 'low high '.repeat(Math.ceil(spaceLimit! / 9))}]), - )).to.be.rejected; - } - - let properties = (await api.rpc.unique.collectionProperties(collection, ['electron'])).toJSON(); - expect(properties).to.be.empty; - - await expect(executeTransaction( - api, - alice, - api.tx.unique.setCollectionProperties(collection, [ - {key: 'electron', value: 'low high '.repeat(Math.ceil(spaceLimit! / 18))}, - {key: 'black_hole', value: '0'.repeat(Math.ceil(spaceLimit! 
/ 2))}, - ]), - )).to.be.rejectedWith(/common\.NoSpaceForProperty/); - - properties = (await api.rpc.unique.collectionProperties(collection, ['electron', 'black hole'])).toJSON(); - expect(properties).to.be.empty; - }); - }); - - it('Fails to set more properties than it is allowed', async () => { - await usingApi(async api => { - const collection = await createCollectionExpectSuccess(); - - const propertiesToBeSet = []; - for (let i = 0; i < 65; i++) { - propertiesToBeSet.push({ - key: 'electron_' + i, - value: Math.random() > 0.5 ? 'high' : 'low', - }); - } - - await expect(executeTransaction( - api, - alice, - api.tx.unique.setCollectionProperties(collection, propertiesToBeSet), - )).to.be.rejectedWith(/common\.PropertyLimitReached/); - - const properties = (await api.query.common.collectionProperties(collection)).toJSON(); - expect(properties.map).to.be.empty; - expect(properties.consumedSpace).to.equal(0); - }); - }); - - it('Fails to set properties with invalid names', async () => { - await usingApi(async api => { - const collection = await createCollectionExpectSuccess(); - - const invalidProperties = [ - [{key: 'electron', value: 'negative'}, {key: 'string theory', value: 'understandable'}], - [{key: 'Mr/Sandman', value: 'Bring me a gene'}], - [{key: 'déjà vu', value: 'hmm...'}], - ]; - - for (let i = 0; i < invalidProperties.length; i++) { - await expect(executeTransaction( - api, - alice, - api.tx.unique.setCollectionProperties(collection, invalidProperties[i]), - ), `on rejecting the new badly-named property #${i}`).to.be.rejectedWith(/common\.InvalidCharacterInPropertyKey/); - } - - await expect(executeTransaction( - api, - alice, - api.tx.unique.setCollectionProperties(collection, [{key: '', value: 'nothing must not exist'}]), - ), 'on rejecting an unnamed property').to.be.rejectedWith(/common\.EmptyPropertyKey/); - - await expect(executeTransaction( - api, - alice, - api.tx.unique.setCollectionProperties(collection, [ - {key: 'CRISPR-Cas9', value: 'rewriting nature!'}, - ]), - ), 'on setting the correctly-but-still-badly-named property').to.not.be.rejected; - - const keys = invalidProperties.flatMap(propertySet => propertySet.map(property => property.key)).concat('CRISPR-Cas9').concat(''); - - const properties = (await api.rpc.unique.collectionProperties(collection, keys)).toHuman(); - expect(properties).to.be.deep.equal([ - {key: 'CRISPR-Cas9', value: 'rewriting nature!'}, - ]); - - for (let i = 0; i < invalidProperties.length; i++) { - await expect(executeTransaction( - api, - alice, - api.tx.unique.deleteCollectionProperties(collection, invalidProperties[i].map(propertySet => propertySet.key)), - ), `on trying to delete the non-existent badly-named property #${i}`).to.be.rejectedWith(/common\.InvalidCharacterInPropertyKey/); - } - }); - }); -}); - -// ---------- ACCESS RIGHTS - -describe('Integration Test: Access Rights to Token Properties', () => { - before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - }); - }); - - it('Reads access rights to properties of a collection', async () => { - await usingApi(async api => { - const collection = await createCollectionExpectSuccess(); - const propertyRights = (await api.query.common.collectionPropertyPermissions(collection)).toJSON(); - expect(propertyRights).to.be.empty; - }); - }); - - it('Sets access rights to properties of a collection', async () => { - await usingApi(async api => { - const collection = await 
createCollectionExpectSuccess(); - - await expect(executeTransaction( - api, - alice, - api.tx.unique.setTokenPropertyPermissions(collection, [{key: 'skullduggery', permission: {mutable: true}}]), - )).to.not.be.rejected; - - await addCollectionAdminExpectSuccess(alice, collection, bob.address); - - await expect(executeTransaction( - api, - alice, - api.tx.unique.setTokenPropertyPermissions(collection, [{key: 'mindgame', permission: {collectionAdmin: true, tokenOwner: false}}]), - )).to.not.be.rejected; - - const propertyRights = (await api.rpc.unique.propertyPermissions(collection, ['skullduggery', 'mindgame'])).toHuman(); - expect(propertyRights).to.be.deep.equal([ - {key: 'skullduggery', permission: {'mutable': true, 'collectionAdmin': false, 'tokenOwner': false}}, - {key: 'mindgame', permission: {'mutable': false, 'collectionAdmin': true, 'tokenOwner': false}}, - ]); - }); - }); - - it('Changes access rights to properties of a collection', async () => { - await usingApi(async api => { - const collection = await createCollectionExpectSuccess(); - - await expect(executeTransaction( - api, - alice, - api.tx.unique.setTokenPropertyPermissions(collection, [{key: 'skullduggery', permission: {mutable: true, collectionAdmin: true}}]), - )).to.not.be.rejected; - - await expect(executeTransaction( - api, - alice, - api.tx.unique.setTokenPropertyPermissions(collection, [{key: 'skullduggery', permission: {mutable: false, tokenOwner: true}}]), - )).to.not.be.rejected; - - const propertyRights = (await api.rpc.unique.propertyPermissions(collection, ['skullduggery'])).toHuman(); - expect(propertyRights).to.be.deep.equal([ - {key: 'skullduggery', permission: {'mutable': false, 'collectionAdmin': false, 'tokenOwner': true}}, - ]); - }); - }); -}); - -describe('Negative Integration Test: Access Rights to Token Properties', () => { - before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - }); - }); - - it('Prevents from setting access rights to properties of a collection if not an onwer/admin', async () => { - await usingApi(async api => { - const collection = await createCollectionExpectSuccess(); - - await expect(executeTransaction( - api, - bob, - api.tx.unique.setTokenPropertyPermissions(collection, [{key: 'skullduggery', permission: {mutable: true, tokenOwner: true}}]), - )).to.be.rejectedWith(/common\.NoPermission/); - - const propertyRights = (await api.rpc.unique.propertyPermissions(collection, ['skullduggery'])).toJSON(); - expect(propertyRights).to.be.empty; - }); - }); - - it('Prevents from adding too many possible properties', async () => { - await usingApi(async api => { - const collection = await createCollectionExpectSuccess(); - - const constitution = []; - for (let i = 0; i < 65; i++) { - constitution.push({ - key: 'property_' + i, - permission: Math.random() > 0.5 ? 
{mutable: true, collectionAdmin: true, tokenOwner: true} : {}, - }); - } - - await expect(executeTransaction( - api, - alice, - api.tx.unique.setTokenPropertyPermissions(collection, constitution), - )).to.be.rejectedWith(/common\.PropertyLimitReached/); - - const propertyRights = (await api.query.common.collectionPropertyPermissions(collection)).toJSON(); - expect(propertyRights).to.be.empty; - }); - }); - - it('Prevents access rights to be modified if constant', async () => { - await usingApi(async api => { - const collection = await createCollectionExpectSuccess(); - - await expect(executeTransaction( - api, - alice, - api.tx.unique.setTokenPropertyPermissions(collection, [{key: 'skullduggery', permission: {mutable: false, tokenOwner: true}}]), - )).to.not.be.rejected; - - await expect(executeTransaction( - api, - alice, - api.tx.unique.setTokenPropertyPermissions(collection, [{key: 'skullduggery', permission: {}}]), - )).to.be.rejectedWith(/common\.NoPermission/); - - const propertyRights = (await api.rpc.unique.propertyPermissions(collection, ['skullduggery'])).toHuman(); - expect(propertyRights).to.deep.equal([ - {key: 'skullduggery', permission: {'mutable': false, 'collectionAdmin': false, 'tokenOwner': true}}, - ]); - }); - }); - - it('Prevents adding properties with invalid names', async () => { - await usingApi(async api => { - const collection = await createCollectionExpectSuccess(); - - const invalidProperties = [ - [{key: 'skullduggery', permission: {tokenOwner: true}}, {key: 'im possible', permission: {collectionAdmin: true}}], - [{key: 'G#4', permission: {tokenOwner: true}}], - [{key: 'HÆMILTON', permission: {mutable: false, collectionAdmin: true, tokenOwner: true}}], - ]; - - for (let i = 0; i < invalidProperties.length; i++) { - await expect(executeTransaction( - api, - alice, - api.tx.unique.setTokenPropertyPermissions(collection, invalidProperties[i]), - ), `on setting the new badly-named property #${i}`).to.be.rejectedWith(/common\.InvalidCharacterInPropertyKey/); - } - - await expect(executeTransaction( - api, - alice, - api.tx.unique.setTokenPropertyPermissions(collection, [{key: '', permission: {}}]), - ), 'on rejecting an unnamed property').to.be.rejectedWith(/common\.EmptyPropertyKey/); - - const correctKey = '--0x03116e387820CA05'; // PolkadotJS would parse this as an already encoded hex-string - await expect(executeTransaction( - api, - alice, - api.tx.unique.setTokenPropertyPermissions(collection, [ - {key: correctKey, permission: {collectionAdmin: true}}, - ]), - ), 'on setting the correctly-but-still-badly-named property').to.not.be.rejected; - - const keys = invalidProperties.flatMap(propertySet => propertySet.map(property => property.key)).concat(correctKey).concat(''); - - const propertyRights = (await api.rpc.unique.propertyPermissions(collection, keys)).toHuman(); - expect(propertyRights).to.be.deep.equal([ - {key: correctKey, permission: {mutable: false, collectionAdmin: true, tokenOwner: false}}, - ]); - }); - }); -}); - -// ---------- TOKEN PROPERTIES - -describe('Integration Test: Token Properties', () => { - let collection: number; - let token: number; - let nestedToken: number; - let permissions: {permission: any, signers: IKeyringPair[]}[]; - - before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - charlie = privateKeyWrapper('//Charlie'); - - permissions = [ - {permission: {mutable: true, collectionAdmin: true}, signers: [alice, bob]}, - {permission: 
{mutable: false, collectionAdmin: true}, signers: [alice, bob]}, - {permission: {mutable: true, tokenOwner: true}, signers: [charlie]}, - {permission: {mutable: false, tokenOwner: true}, signers: [charlie]}, - {permission: {mutable: true, collectionAdmin: true, tokenOwner: true}, signers: [alice, bob, charlie]}, - {permission: {mutable: false, collectionAdmin: true, tokenOwner: true}, signers: [alice, bob, charlie]}, - ]; - }); - }); - - beforeEach(async () => { - await usingApi(async () => { - collection = await createCollectionExpectSuccess(); - await setCollectionPermissionsExpectSuccess(alice, collection, {nesting: {tokenOwner: true}}); - - token = await createItemExpectSuccess(alice, collection, 'NFT'); - nestedToken = await createItemExpectSuccess(alice, collection, 'NFT', {Ethereum: tokenIdToAddress(collection, token)}); - - await addCollectionAdminExpectSuccess(alice, collection, bob.address); - await transferExpectSuccess(collection, token, alice, charlie); - }); - }); - - it('Reads yet empty properties of a token', async () => { - await usingApi(async api => { - const collection = await createCollectionExpectSuccess(); - const token = await createItemExpectSuccess(alice, collection, 'NFT'); - - const properties = (await api.query.nonfungible.tokenProperties(collection, token)).toJSON(); - expect(properties.map).to.be.empty; - expect(properties.consumedSpace).to.be.equal(0); - - const tokenData = (await api.rpc.unique.tokenData(collection, token, ['anything'])).toJSON().properties; - expect(tokenData).to.be.empty; - }); - }); - - it('Assigns properties to a token according to permissions', async () => { - await usingApi(async api => { - const propertyKeys: string[] = []; - let i = 0; - for (const permission of permissions) { - for (const signer of permission.signers) { - const key = i + '_' + signer.address; - propertyKeys.push(key); - - await expect(executeTransaction( - api, - alice, - api.tx.unique.setTokenPropertyPermissions(collection, [{key: key, permission: permission.permission}]), - ), `on setting permission ${i} by ${signer.address}`).to.not.be.rejected; - - await expect(executeTransaction( - api, - signer, - api.tx.unique.setTokenProperties(collection, token, [{key: key, value: 'Serotonin increase'}]), - ), `on adding property ${i} by ${signer.address}`).to.not.be.rejected; - } - - i++; - } - - const properties = (await api.rpc.unique.tokenProperties(collection, token, propertyKeys)).toHuman() as any[]; - const tokensData = (await api.rpc.unique.tokenData(collection, token, propertyKeys)).toHuman().properties as any[]; - for (let i = 0; i < properties.length; i++) { - expect(properties[i].value).to.be.equal('Serotonin increase'); - expect(tokensData[i].value).to.be.equal('Serotonin increase'); - } - }); - }); - - it('Changes properties of a token according to permissions', async () => { - await usingApi(async api => { - const propertyKeys: string[] = []; - let i = 0; - for (const permission of permissions) { - if (!permission.permission.mutable) continue; - - for (const signer of permission.signers) { - const key = i + '_' + signer.address; - propertyKeys.push(key); - - await expect(executeTransaction( - api, - alice, - api.tx.unique.setTokenPropertyPermissions(collection, [{key: key, permission: permission.permission}]), - ), `on setting permission ${i} by ${signer.address}`).to.not.be.rejected; - - await expect(executeTransaction( - api, - signer, - api.tx.unique.setTokenProperties(collection, token, [{key: key, value: 'Serotonin increase'}]), - ), `on adding property 
${i} by ${signer.address}`).to.not.be.rejected; - - await expect(executeTransaction( - api, - signer, - api.tx.unique.setTokenProperties(collection, token, [{key: key, value: 'Serotonin stable'}]), - ), `on changing property ${i} by ${signer.address}`).to.not.be.rejected; - } - - i++; - } - - const properties = (await api.rpc.unique.tokenProperties(collection, token, propertyKeys)).toHuman() as any[]; - const tokensData = (await api.rpc.unique.tokenData(collection, token, propertyKeys)).toHuman().properties as any[]; - for (let i = 0; i < properties.length; i++) { - expect(properties[i].value).to.be.equal('Serotonin stable'); - expect(tokensData[i].value).to.be.equal('Serotonin stable'); - } - }); - }); - - it('Deletes properties of a token according to permissions', async () => { - await usingApi(async api => { - const propertyKeys: string[] = []; - let i = 0; - - for (const permission of permissions) { - if (!permission.permission.mutable) continue; - - for (const signer of permission.signers) { - const key = i + '_' + signer.address; - propertyKeys.push(key); - - await expect(executeTransaction( - api, - alice, - api.tx.unique.setTokenPropertyPermissions(collection, [{key: key, permission: permission.permission}]), - ), `on setting permission ${i} by ${signer.address}`).to.not.be.rejected; - - await expect(executeTransaction( - api, - signer, - api.tx.unique.setTokenProperties(collection, token, [{key: key, value: 'Serotonin increase'}]), - ), `on adding property ${i} by ${signer.address}`).to.not.be.rejected; - - await expect(executeTransaction( - api, - signer, - api.tx.unique.deleteTokenProperties(collection, token, [key]), - ), `on deleting property ${i} by ${signer.address}`).to.not.be.rejected; - } - - i++; - } - - const properties = (await api.rpc.unique.tokenProperties(collection, token, propertyKeys)).toJSON() as any[]; - expect(properties).to.be.empty; - const tokensData = (await api.rpc.unique.tokenData(collection, token, propertyKeys)).toJSON().properties as any[]; - expect(tokensData).to.be.empty; - expect((await api.query.nonfungible.tokenProperties(collection, token)).toJSON().consumedSpace).to.be.equal(0); - }); - }); - - it('Assigns properties to a nested token according to permissions', async () => { - await usingApi(async api => { - const propertyKeys: string[] = []; - let i = 0; - for (const permission of permissions) { - for (const signer of permission.signers) { - const key = i + '_' + signer.address; - propertyKeys.push(key); - - await expect(executeTransaction( - api, - alice, - api.tx.unique.setTokenPropertyPermissions(collection, [{key: key, permission: permission.permission}]), - ), `on setting permission ${i} by ${signer.address}`).to.not.be.rejected; - - await expect(executeTransaction( - api, - signer, - api.tx.unique.setTokenProperties(collection, nestedToken, [{key: key, value: 'Serotonin increase'}]), - ), `on adding property ${i} by ${signer.address}`).to.not.be.rejected; - } - - i++; - } - - const properties = (await api.rpc.unique.tokenProperties(collection, nestedToken, propertyKeys)).toHuman() as any[]; - const tokensData = (await api.rpc.unique.tokenData(collection, nestedToken, propertyKeys)).toHuman().properties as any[]; - for (let i = 0; i < properties.length; i++) { - expect(properties[i].value).to.be.equal('Serotonin increase'); - expect(tokensData[i].value).to.be.equal('Serotonin increase'); - } - }); - }); - - it('Changes properties of a nested token according to permissions', async () => { - await usingApi(async api => { - const 
propertyKeys: string[] = []; - let i = 0; - for (const permission of permissions) { - if (!permission.permission.mutable) continue; - - for (const signer of permission.signers) { - const key = i + '_' + signer.address; - propertyKeys.push(key); - - await expect(executeTransaction( - api, - alice, - api.tx.unique.setTokenPropertyPermissions(collection, [{key: key, permission: permission.permission}]), - ), `on setting permission ${i} by ${signer.address}`).to.not.be.rejected; - - await expect(executeTransaction( - api, - signer, - api.tx.unique.setTokenProperties(collection, nestedToken, [{key: key, value: 'Serotonin increase'}]), - ), `on adding property ${i} by ${signer.address}`).to.not.be.rejected; - - await expect(executeTransaction( - api, - signer, - api.tx.unique.setTokenProperties(collection, nestedToken, [{key: key, value: 'Serotonin stable'}]), - ), `on changing property ${i} by ${signer.address}`).to.not.be.rejected; - } - - i++; - } - - const properties = (await api.rpc.unique.tokenProperties(collection, nestedToken, propertyKeys)).toHuman() as any[]; - const tokensData = (await api.rpc.unique.tokenData(collection, nestedToken, propertyKeys)).toHuman().properties as any[]; - for (let i = 0; i < properties.length; i++) { - expect(properties[i].value).to.be.equal('Serotonin stable'); - expect(tokensData[i].value).to.be.equal('Serotonin stable'); - } - }); - }); - - it('Deletes properties of a nested token according to permissions', async () => { - await usingApi(async api => { - const propertyKeys: string[] = []; - let i = 0; - - for (const permission of permissions) { - if (!permission.permission.mutable) continue; - - for (const signer of permission.signers) { - const key = i + '_' + signer.address; - propertyKeys.push(key); - - await expect(executeTransaction( - api, - alice, - api.tx.unique.setTokenPropertyPermissions(collection, [{key: key, permission: permission.permission}]), - ), `on setting permission ${i} by ${signer.address}`).to.not.be.rejected; - - await expect(executeTransaction( - api, - signer, - api.tx.unique.setTokenProperties(collection, nestedToken, [{key: key, value: 'Serotonin increase'}]), - ), `on adding property ${i} by ${signer.address}`).to.not.be.rejected; - - await expect(executeTransaction( - api, - signer, - api.tx.unique.deleteTokenProperties(collection, nestedToken, [key]), - ), `on deleting property ${i} by ${signer.address}`).to.not.be.rejected; - } - - i++; - } - - const properties = (await api.rpc.unique.tokenProperties(collection, nestedToken, propertyKeys)).toJSON() as any[]; - expect(properties).to.be.empty; - const tokensData = (await api.rpc.unique.tokenData(collection, nestedToken, propertyKeys)).toJSON().properties as any[]; - expect(tokensData).to.be.empty; - expect((await api.query.nonfungible.tokenProperties(collection, nestedToken)).toJSON().consumedSpace).to.be.equal(0); - }); - }); -}); - -describe('Negative Integration Test: Token Properties', () => { - let collection: number; - let token: number; - let originalSpace: number; - let constitution: {permission: any, signers: IKeyringPair[], sinner: IKeyringPair}[]; - - before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - charlie = privateKeyWrapper('//Charlie'); - const dave = privateKeyWrapper('//Dave'); - - constitution = [ - {permission: {mutable: true, collectionAdmin: true}, signers: [alice, bob], sinner: charlie}, - {permission: {mutable: false, collectionAdmin: true}, signers: 
[alice, bob], sinner: charlie}, - {permission: {mutable: true, tokenOwner: true}, signers: [charlie], sinner: alice}, - {permission: {mutable: false, tokenOwner: true}, signers: [charlie], sinner: alice}, - {permission: {mutable: true, collectionAdmin: true, tokenOwner: true}, signers: [alice, bob, charlie], sinner: dave}, - {permission: {mutable: false, collectionAdmin: true, tokenOwner: true}, signers: [alice, bob, charlie], sinner: dave}, - ]; - }); - }); - - beforeEach(async () => { - collection = await createCollectionExpectSuccess(); - token = await createItemExpectSuccess(alice, collection, 'NFT'); - await addCollectionAdminExpectSuccess(alice, collection, bob.address); - await transferExpectSuccess(collection, token, alice, charlie); - - await usingApi(async api => { - let i = 0; - for (const passage of constitution) { - const signer = passage.signers[0]; - - await expect(executeTransaction( - api, - alice, - api.tx.unique.setTokenPropertyPermissions(collection, [{key: `${i}`, permission: passage.permission}]), - ), `on setting permission ${i} by ${signer.address}`).to.not.be.rejected; - - await expect(executeTransaction( - api, - signer, - api.tx.unique.setTokenProperties(collection, token, [{key: `${i}`, value: 'Serotonin increase'}]), - ), `on adding property ${i} by ${signer.address}`).to.not.be.rejected; - - i++; - } - - originalSpace = (await api.query.nonfungible.tokenProperties(collection, token)).toJSON().consumedSpace as number; - }); - }); - - it('Forbids changing/deleting properties of a token if the user is outside of permissions', async () => { - await usingApi(async api => { - let i = -1; - for (const forbiddance of constitution) { - i++; - if (!forbiddance.permission.mutable) continue; - - await expect(executeTransaction( - api, - forbiddance.sinner, - api.tx.unique.setTokenProperties(collection, token, [{key: `${i}`, value: 'Serotonin down'}]), - ), `on failing to change property ${i} by ${forbiddance.sinner.address}`).to.be.rejectedWith(/common\.NoPermission/); - - await expect(executeTransaction( - api, - forbiddance.sinner, - api.tx.unique.deleteTokenProperties(collection, token, [`${i}`]), - ), `on failing to delete property ${i} by ${forbiddance.sinner.address}`).to.be.rejectedWith(/common\.NoPermission/); - } - - const properties = (await api.query.nonfungible.tokenProperties(collection, token)).toJSON(); - expect(properties.consumedSpace).to.be.equal(originalSpace); - }); - }); - - it('Forbids changing/deleting properties of a token if the property is permanent (immutable)', async () => { - await usingApi(async api => { - let i = -1; - for (const permission of constitution) { - i++; - if (permission.permission.mutable) continue; - - await expect(executeTransaction( - api, - permission.signers[0], - api.tx.unique.setTokenProperties(collection, token, [{key: `${i}`, value: 'Serotonin down'}]), - ), `on failing to change property ${i} by ${permission.signers[0].address}`).to.be.rejectedWith(/common\.NoPermission/); - - await expect(executeTransaction( - api, - permission.signers[0], - api.tx.unique.deleteTokenProperties(collection, token, [i.toString()]), - ), `on failing to delete property ${i} by ${permission.signers[0].address}`).to.be.rejectedWith(/common\.NoPermission/); - } - - const properties = (await api.query.nonfungible.tokenProperties(collection, token)).toJSON(); - expect(properties.consumedSpace).to.be.equal(originalSpace); - }); - }); - - it('Forbids adding properties to a token if the property is not declared / forbidden with the \'None\' 
permission', async () => { - await usingApi(async api => { - await expect(executeTransaction( - api, - alice, - api.tx.unique.setTokenProperties(collection, token, [{key: 'non-existent', value: 'I exist!'}]), - ), 'on failing to add a previously non-existent property').to.be.rejectedWith(/common\.NoPermission/); - - await expect(executeTransaction( - api, - alice, - api.tx.unique.setTokenPropertyPermissions(collection, [{key: 'now-existent', permission: {}}]), - ), 'on setting a new non-permitted property').to.not.be.rejected; - - await expect(executeTransaction( - api, - alice, - api.tx.unique.setTokenProperties(collection, token, [{key: 'now-existent', value: 'I exist!'}]), - ), 'on failing to add a property forbidden by the \'None\' permission').to.be.rejectedWith(/common\.NoPermission/); - - expect((await api.rpc.unique.tokenProperties(collection, token, ['non-existent', 'now-existent'])).toJSON()).to.be.empty; - const properties = (await api.query.nonfungible.tokenProperties(collection, token)).toJSON(); - expect(properties.consumedSpace).to.be.equal(originalSpace); - }); - }); - - it('Forbids adding too many properties to a token', async () => { - await usingApi(async api => { - await expect(executeTransaction( - api, - alice, - api.tx.unique.setTokenPropertyPermissions(collection, [ - {key: 'a_holy_book', permission: {collectionAdmin: true, tokenOwner: true}}, - {key: 'young_years', permission: {collectionAdmin: true, tokenOwner: true}}, - ]), - ), 'on setting a new non-permitted property').to.not.be.rejected; - - // Mute the general tx parsing error - { - console.error = () => {}; - await expect(executeTransaction( - api, - alice, - api.tx.unique.setCollectionProperties(collection, [{key: 'a_holy_book', value: 'word '.repeat(6554)}]), - )).to.be.rejected; - } - - await expect(executeTransaction( - api, - alice, - api.tx.unique.setTokenProperties(collection, token, [ - {key: 'a_holy_book', value: 'word '.repeat(3277)}, - {key: 'young_years', value: 'neverending'.repeat(1490)}, - ]), - )).to.be.rejectedWith(/common\.NoSpaceForProperty/); - - expect((await api.rpc.unique.tokenProperties(collection, token, ['a_holy_book', 'young_years'])).toJSON()).to.be.empty; - const propertiesMap = (await api.query.nonfungible.tokenProperties(collection, token)).toJSON(); - expect(propertiesMap.consumedSpace).to.be.equal(originalSpace); - }); - }); -}); diff --git a/tests/src/nesting/propertyPermissions.test.ts b/tests/src/nesting/propertyPermissions.test.ts new file mode 100644 index 0000000000..b8bd0cd818 --- /dev/null +++ b/tests/src/nesting/propertyPermissions.test.ts @@ -0,0 +1,198 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . 
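+ +// Covers token property permissions: reading, setting and changing them as the collection owner/admin, +// plus negative cases (non-owner signers, exceeding the permission limit, constant permissions, invalid or empty key names).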
+ +import {IKeyringPair} from '@polkadot/types/types'; +import {itSub, Pallets, usingPlaygrounds, expect} from '../util'; +import {UniqueNFTCollection, UniqueRFTCollection} from '../util/playgrounds/unique'; + +describe('Integration Test: Access Rights to Token Properties', () => { + let alice: IKeyringPair; + let bob: IKeyringPair; + + before(async () => { + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob] = await helper.arrange.createAccounts([100n, 10n], donor); + }); + }); + + itSub('Reads access rights to properties of a collection', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice); + const propertyRights = (await helper.callRpc('api.query.common.collectionPropertyPermissions', [collection.collectionId])).toJSON(); + expect(propertyRights).to.be.empty; + }); + + async function testSetsAccessRightsToProperties(collection: UniqueNFTCollection | UniqueRFTCollection) { + await expect(collection.setTokenPropertyPermissions(alice, [{key: 'skullduggery', permission: {mutable: true}}])) + .to.be.fulfilled; + + await collection.addAdmin(alice, {Substrate: bob.address}); + + await expect(collection.setTokenPropertyPermissions(bob, [{key: 'mindgame', permission: {collectionAdmin: true, tokenOwner: false}}])) + .to.be.fulfilled; + + const propertyRights = await collection.getPropertyPermissions(['skullduggery', 'mindgame']); + expect(propertyRights).to.include.deep.members([ + {key: 'skullduggery', permission: {mutable: true, collectionAdmin: false, tokenOwner: false}}, + {key: 'mindgame', permission: {mutable: false, collectionAdmin: true, tokenOwner: false}}, + ]); + } + + itSub('Sets access rights to properties of a collection (NFT)', async ({helper}) => { + await testSetsAccessRightsToProperties(await helper.nft.mintCollection(alice)); + }); + + itSub.ifWithPallets('Sets access rights to properties of a collection (ReFungible)', [Pallets.ReFungible], async ({helper}) => { + await testSetsAccessRightsToProperties(await helper.rft.mintCollection(alice)); + }); + + async function testChangesAccessRightsToProperty(collection: UniqueNFTCollection | UniqueRFTCollection) { + await expect(collection.setTokenPropertyPermissions(alice, [{key: 'skullduggery', permission: {mutable: true, collectionAdmin: true}}])) + .to.be.fulfilled; + + await expect(collection.setTokenPropertyPermissions(alice, [{key: 'skullduggery', permission: {mutable: false, tokenOwner: true}}])) + .to.be.fulfilled; + + const propertyRights = await collection.getPropertyPermissions(); + expect(propertyRights).to.be.deep.equal([ + {key: 'skullduggery', permission: {'mutable': false, 'collectionAdmin': false, 'tokenOwner': true}}, + ]); + } + + itSub('Changes access rights to properties of a NFT collection', async ({helper}) => { + await testChangesAccessRightsToProperty(await helper.nft.mintCollection(alice)); + }); + + itSub.ifWithPallets('Changes access rights to properties of a ReFungible collection', [Pallets.ReFungible], async ({helper}) => { + await testChangesAccessRightsToProperty(await helper.rft.mintCollection(alice)); + }); +}); + +describe('Negative Integration Test: Access Rights to Token Properties', () => { + let alice: IKeyringPair; + let bob: IKeyringPair; + + before(async () => { + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob] = await helper.arrange.createAccounts([50n, 10n], donor); + }); + }); + + async function 
testPreventsFromSettingAccessRightsNotAdminOrOwner(collection: UniqueNFTCollection | UniqueRFTCollection) { + await expect(collection.setTokenPropertyPermissions(bob, [{key: 'skullduggery', permission: {mutable: true, tokenOwner: true}}])) + .to.be.rejectedWith(/common\.NoPermission/); + + const propertyRights = await collection.getPropertyPermissions(['skullduggery']); + expect(propertyRights).to.be.empty; + } + + itSub('Prevents from setting access rights to properties of an NFT collection if not an owner/admin', async ({helper}) => { + await testPreventsFromSettingAccessRightsNotAdminOrOwner(await helper.nft.mintCollection(alice)); + }); + + itSub.ifWithPallets('Prevents from setting access rights to properties of a ReFungible collection if not an owner/admin', [Pallets.ReFungible], async ({helper}) => { + await testPreventsFromSettingAccessRightsNotAdminOrOwner(await helper.rft.mintCollection(alice)); + }); + + async function testPreventFromAddingTooManyPossibleProperties(collection: UniqueNFTCollection | UniqueRFTCollection) { + const constitution = []; + for (let i = 0; i < 65; i++) { + constitution.push({ + key: 'property_' + i, + permission: Math.random() > 0.5 ? {mutable: true, collectionAdmin: true, tokenOwner: true} : {}, + }); + } + + await expect(collection.setTokenPropertyPermissions(alice, constitution)) + .to.be.rejectedWith(/common\.PropertyLimitReached/); + + const propertyRights = await collection.getPropertyPermissions(); + expect(propertyRights).to.be.empty; + } + + itSub('Prevents from adding too many possible properties (NFT)', async ({helper}) => { + await testPreventFromAddingTooManyPossibleProperties(await helper.nft.mintCollection(alice)); + }); + + itSub.ifWithPallets('Prevents from adding too many possible properties (ReFungible)', [Pallets.ReFungible], async ({helper}) => { + await testPreventFromAddingTooManyPossibleProperties(await helper.rft.mintCollection(alice)); + }); + + async function testPreventAccessRightsModifiedIfConstant(collection: UniqueNFTCollection | UniqueRFTCollection) { + await expect(collection.setTokenPropertyPermissions(alice, [{key: 'skullduggery', permission: {mutable: false, tokenOwner: true}}])) + .to.be.fulfilled; + + await expect(collection.setTokenPropertyPermissions(alice, [{key: 'skullduggery', permission: {collectionAdmin: true}}])) + .to.be.rejectedWith(/common\.NoPermission/); + + const propertyRights = await collection.getPropertyPermissions(['skullduggery']); + expect(propertyRights).to.deep.equal([ + {key: 'skullduggery', permission: {'mutable': false, 'collectionAdmin': false, 'tokenOwner': true}}, + ]); + } + + itSub('Prevents access rights to be modified if constant (NFT)', async ({helper}) => { + await testPreventAccessRightsModifiedIfConstant(await helper.nft.mintCollection(alice)); + }); + + itSub.ifWithPallets('Prevents access rights to be modified if constant (ReFungible)', [Pallets.ReFungible], async ({helper}) => { + await testPreventAccessRightsModifiedIfConstant(await helper.rft.mintCollection(alice)); + }); + + async function testPreventsAddingPropertiesWithInvalidNames(collection: UniqueNFTCollection | UniqueRFTCollection) { + const invalidProperties = [ + [{key: 'skullduggery', permission: {tokenOwner: true}}, {key: 'im possible', permission: {collectionAdmin: true}}], + [{key: 'G#4', permission: {tokenOwner: true}}], + [{key: 'HÆMILTON', permission: {mutable: false, collectionAdmin: true, tokenOwner: true}}], + ]; + + for (let i = 0; i < invalidProperties.length; i++) { + await expect( 
collection.setTokenPropertyPermissions(alice, invalidProperties[i]), + `on setting the new badly-named property #${i}`, + ).to.be.rejectedWith(/common\.InvalidCharacterInPropertyKey/); + } + + await expect( + collection.setTokenPropertyPermissions(alice, [{key: '', permission: {}}]), + 'on rejecting an unnamed property', + ).to.be.rejectedWith(/common\.EmptyPropertyKey/); + + const correctKey = '--0x03116e387820CA05'; // PolkadotJS would parse this as an already encoded hex-string + await expect( + collection.setTokenPropertyPermissions(alice, [ + {key: correctKey, permission: {collectionAdmin: true}}, + ]), + 'on setting the correctly-but-still-badly-named property', + ).to.be.fulfilled; + + const keys = invalidProperties.flatMap(propertySet => propertySet.map(property => property.key)).concat(correctKey).concat(''); + + const propertyRights = await collection.getPropertyPermissions(keys); + expect(propertyRights).to.be.deep.equal([ + {key: correctKey, permission: {mutable: false, collectionAdmin: true, tokenOwner: false}}, + ]); + } + + itSub('Prevents adding properties with invalid names (NFT)', async ({helper}) => { + await testPreventsAddingPropertiesWithInvalidNames(await helper.nft.mintCollection(alice)); + }); + + itSub.ifWithPallets('Prevents adding properties with invalid names (ReFungible)', [Pallets.ReFungible], async ({helper}) => { + await testPreventsAddingPropertiesWithInvalidNames(await helper.rft.mintCollection(alice)); + }); +}); \ No newline at end of file diff --git a/tests/src/nesting/rules-smoke.test.ts b/tests/src/nesting/rules-smoke.test.ts deleted file mode 100644 index 2ca6d7af09..0000000000 --- a/tests/src/nesting/rules-smoke.test.ts +++ /dev/null @@ -1,61 +0,0 @@ -import {expect} from 'chai'; -import {tokenIdToAddress} from '../eth/util/helpers'; -import usingApi, {executeTransaction} from '../substrate/substrate-api'; -import {createCollectionExpectSuccess, createFungibleItemExpectSuccess, createItemExpectSuccess, CrossAccountId, getCreateCollectionResult} from '../util/helpers'; -import {IKeyringPair} from '@polkadot/types/types'; - -describe('nesting check', () => { - let alice!: IKeyringPair; - let nestTarget!: CrossAccountId; - before(async() => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - const bob = privateKeyWrapper('//Bob'); - const events = await executeTransaction(api, alice, api.tx.unique.createCollectionEx({ - mode: 'NFT', - permissions: { - nesting: {tokenOwner: true, restricted: []}, - }, - })); - const collection = getCreateCollectionResult(events).collectionId; - const token = await createItemExpectSuccess(alice, collection, 'NFT', {Substrate: bob.address}); - nestTarget = {Ethereum: tokenIdToAddress(collection, token)}; - }); - }); - - it('called for fungible', async () => { - await usingApi(async api => { - const collection = await createCollectionExpectSuccess({mode: {type: 'Fungible',decimalPoints:0}}); - await expect(executeTransaction(api, alice, api.tx.unique.createItem(collection, nestTarget, {Fungible: {Value: 1}}))) - .to.be.rejectedWith(/^common\.UserIsNotAllowedToNest$/); - - await createFungibleItemExpectSuccess(alice, collection, {Value:1n}, {Substrate: alice.address}); - await expect(executeTransaction(api, alice, api.tx.unique.transfer(nestTarget, collection, 0, 1n))) - .to.be.rejectedWith(/^common\.UserIsNotAllowedToNest$/); - }); - }); - - it('called for nonfungible', async () => { - await usingApi(async api => { - const collection = await createCollectionExpectSuccess({mode: 
{type: 'NFT'}}); - await expect(executeTransaction(api, alice, api.tx.unique.createItem(collection, nestTarget, {NFT: {properties: []}}))) - .to.be.rejectedWith(/^common\.UserIsNotAllowedToNest$/); - - const token = await createItemExpectSuccess(alice, collection, 'NFT', {Substrate: alice.address}); - await expect(executeTransaction(api, alice, api.tx.unique.transfer(nestTarget, collection, token, 1n))) - .to.be.rejectedWith(/^common\.UserIsNotAllowedToNest$/); - }); - }); - - it('called for refungible', async () => { - await usingApi(async api => { - const collection = await createCollectionExpectSuccess({mode: {type: 'ReFungible'}}); - await expect(executeTransaction(api, alice, api.tx.unique.createItem(collection, nestTarget, {ReFungible: {}}))) - .to.be.rejectedWith(/^common\.UserIsNotAllowedToNest$/); - - const token = await createItemExpectSuccess(alice, collection, 'ReFungible', {Substrate: alice.address}); - await expect(executeTransaction(api, alice, api.tx.unique.transfer(nestTarget, collection, token, 1n))) - .to.be.rejectedWith(/^common\.UserIsNotAllowedToNest$/); - }); - }); -}); diff --git a/tests/src/nesting/tokenProperties.test.ts b/tests/src/nesting/tokenProperties.test.ts new file mode 100644 index 0000000000..ae2b1c13ca --- /dev/null +++ b/tests/src/nesting/tokenProperties.test.ts @@ -0,0 +1,594 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +import {IKeyringPair} from '@polkadot/types/types'; +import {itSub, Pallets, requirePalletsOrSkip, usingPlaygrounds, expect} from '../util'; +import {UniqueHelper, UniqueNFToken, UniqueRFToken} from '../util/playgrounds/unique'; + +describe('Integration Test: Token Properties', () => { + let alice: IKeyringPair; // collection owner + let bob: IKeyringPair; // collection admin + let charlie: IKeyringPair; // token owner + + let permissions: {permission: any, signers: IKeyringPair[]}[]; + + before(async () => { + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob, charlie] = await helper.arrange.createAccounts([100n, 100n, 100n], donor); + }); + + permissions = [ + {permission: {mutable: true, collectionAdmin: true}, signers: [alice, bob]}, + {permission: {mutable: false, collectionAdmin: true}, signers: [alice, bob]}, + {permission: {mutable: true, tokenOwner: true}, signers: [charlie]}, + {permission: {mutable: false, tokenOwner: true}, signers: [charlie]}, + {permission: {mutable: true, collectionAdmin: true, tokenOwner: true}, signers: [alice, bob, charlie]}, + {permission: {mutable: false, collectionAdmin: true, tokenOwner: true}, signers: [alice, bob, charlie]}, + ]; + }); + + async function mintCollectionWithAllPermissionsAndToken(helper: UniqueHelper, mode: 'NFT' | 'RFT'): Promise<[UniqueNFToken | UniqueRFToken, bigint]> { + const collection = await (mode == 'NFT' ? 
helper.nft : helper.rft).mintCollection(alice, { + tokenPropertyPermissions: permissions.flatMap(({permission, signers}, i) => + signers.map(signer => {return {key: `${i+1}_${signer.address}`, permission};})), + }); + return mode == 'NFT' ? [await collection.mintToken(alice), 1n] : [await collection.mintToken(alice, 100n), 100n]; + } + + async function testReadsYetEmptyProperties(token: UniqueNFToken | UniqueRFToken) { + const properties = await token.getProperties(); + expect(properties).to.be.empty; + + const tokenData = await token.getData(); + expect(tokenData!.properties).to.be.empty; + } + + itSub('Reads yet empty properties of a token (NFT)', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice); + const token = await collection.mintToken(alice); + await testReadsYetEmptyProperties(token); + }); + + itSub.ifWithPallets('Reads yet empty properties of a token (ReFungible)', [Pallets.ReFungible], async ({helper}) => { + const collection = await helper.rft.mintCollection(alice); + const token = await collection.mintToken(alice); + await testReadsYetEmptyProperties(token); + }); + + async function testAssignPropertiesAccordingToPermissions(token: UniqueNFToken | UniqueRFToken, pieces: bigint) { + await token.collection.addAdmin(alice, {Substrate: bob.address}); + await token.transfer(alice, {Substrate: charlie.address}, pieces); + + const propertyKeys: string[] = []; + let i = 0; + for (const permission of permissions) { + i++; + let j = 0; + for (const signer of permission.signers) { + j++; + const key = i + '_' + signer.address; + propertyKeys.push(key); + + await expect( + token.setProperties(signer, [{key: key, value: 'Serotonin increase'}]), + `on adding property #${i} by signer #${j}`, + ).to.be.fulfilled; + } + } + + const properties = await token.getProperties(propertyKeys); + const tokenData = await token.getData(); + for (let i = 0; i < properties.length; i++) { + expect(properties[i].value).to.be.equal('Serotonin increase'); + expect(tokenData!.properties[i].value).to.be.equal('Serotonin increase'); + } + } + + itSub('Assigns properties to a token according to permissions (NFT)', async ({helper}) => { + const [token, amount] = await mintCollectionWithAllPermissionsAndToken(helper, 'NFT'); + await testAssignPropertiesAccordingToPermissions(token, amount); + }); + + itSub.ifWithPallets('Assigns properties to a token according to permissions (ReFungible)', [Pallets.ReFungible], async ({helper}) => { + const [token, amount] = await mintCollectionWithAllPermissionsAndToken(helper, 'RFT'); + await testAssignPropertiesAccordingToPermissions(token, amount); + }); + + async function testChangesPropertiesAccordingPermission(token: UniqueNFToken | UniqueRFToken, pieces: bigint) { + await token.collection.addAdmin(alice, {Substrate: bob.address}); + await token.transfer(alice, {Substrate: charlie.address}, pieces); + + const propertyKeys: string[] = []; + let i = 0; + for (const permission of permissions) { + i++; + if (!permission.permission.mutable) continue; + + let j = 0; + for (const signer of permission.signers) { + j++; + const key = i + '_' + signer.address; + propertyKeys.push(key); + + await expect( + token.setProperties(signer, [{key, value: 'Serotonin increase'}]), + `on adding property #${i} by signer #${j}`, + ).to.be.fulfilled; + + await expect( + token.setProperties(signer, [{key, value: 'Serotonin stable'}]), + `on changing property #${i} by signer #${j}`, + ).to.be.fulfilled; + } + } + + const properties = await 
token.getProperties(propertyKeys); + const tokenData = await token.getData(); + for (let i = 0; i < properties.length; i++) { + expect(properties[i].value).to.be.equal('Serotonin stable'); + expect(tokenData!.properties[i].value).to.be.equal('Serotonin stable'); + } + } + + itSub('Changes properties of a token according to permissions (NFT)', async ({helper}) => { + const [token, amount] = await mintCollectionWithAllPermissionsAndToken(helper, 'NFT'); + await testChangesPropertiesAccordingPermission(token, amount); + }); + + itSub.ifWithPallets('Changes properties of a token according to permissions (ReFungible)', [Pallets.ReFungible], async ({helper}) => { + const [token, amount] = await mintCollectionWithAllPermissionsAndToken(helper, 'RFT'); + await testChangesPropertiesAccordingPermission(token, amount); + }); + + async function testDeletePropertiesAccordingPermission(token: UniqueNFToken | UniqueRFToken, pieces: bigint) { + await token.collection.addAdmin(alice, {Substrate: bob.address}); + await token.transfer(alice, {Substrate: charlie.address}, pieces); + + const propertyKeys: string[] = []; + let i = 0; + + for (const permission of permissions) { + i++; + if (!permission.permission.mutable) continue; + + let j = 0; + for (const signer of permission.signers) { + j++; + const key = i + '_' + signer.address; + propertyKeys.push(key); + + await expect( + token.setProperties(signer, [{key, value: 'Serotonin increase'}]), + `on adding property #${i} by signer #${j}`, + ).to.be.fulfilled; + + await expect( + token.deleteProperties(signer, [key]), + `on deleting property #${i} by signer #${j}`, + ).to.be.fulfilled; + } + } + + expect(await token.getProperties(propertyKeys)).to.be.empty; + expect((await token.getData())!.properties).to.be.empty; + } + + itSub('Deletes properties of a token according to permissions (NFT)', async ({helper}) => { + const [token, amount] = await mintCollectionWithAllPermissionsAndToken(helper, 'NFT'); + await testDeletePropertiesAccordingPermission(token, amount); + }); + + itSub.ifWithPallets('Deletes properties of a token according to permissions (ReFungible)', [Pallets.ReFungible], async ({helper}) => { + const [token, amount] = await mintCollectionWithAllPermissionsAndToken(helper, 'RFT'); + await testDeletePropertiesAccordingPermission(token, amount); + }); + + itSub('Assigns properties to a nested token according to permissions', async ({helper}) => { + const collectionA = await helper.nft.mintCollection(alice, {permissions: {nesting: {tokenOwner: true}}}); + const collectionB = await helper.nft.mintCollection(alice, { + tokenPropertyPermissions: permissions.flatMap(({permission, signers}, i) => + signers.map(signer => {return {key: `${i+1}_${signer.address}`, permission};})), + }); + const targetToken = await collectionA.mintToken(alice); + const nestedToken = await collectionB.mintToken(alice, targetToken.nestingAccount()); + + await collectionB.addAdmin(alice, {Substrate: bob.address}); + await targetToken.transfer(alice, {Substrate: charlie.address}); + + const propertyKeys: string[] = []; + let i = 0; + for (const permission of permissions) { + i++; + let j = 0; + for (const signer of permission.signers) { + j++; + const key = i + '_' + signer.address; + propertyKeys.push(key); + + await expect( + nestedToken.setProperties(signer, [{key, value: 'Serotonin increase'}]), + `on adding property #${i} by signer #${j}`, + ).to.be.fulfilled; + } + } + + const properties = await nestedToken.getProperties(propertyKeys); + const tokenData = await 
nestedToken.getData(); + for (let i = 0; i < properties.length; i++) { + expect(properties[i].value).to.be.equal('Serotonin increase'); + expect(tokenData!.properties[i].value).to.be.equal('Serotonin increase'); + } + expect(await targetToken.getProperties()).to.be.empty; + }); + + itSub('Changes properties of a nested token according to permissions', async ({helper}) => { + const collectionA = await helper.nft.mintCollection(alice, {permissions: {nesting: {tokenOwner: true}}}); + const collectionB = await helper.nft.mintCollection(alice, { + tokenPropertyPermissions: permissions.flatMap(({permission, signers}, i) => + signers.map(signer => {return {key: `${i+1}_${signer.address}`, permission};})), + }); + const targetToken = await collectionA.mintToken(alice); + const nestedToken = await collectionB.mintToken(alice, targetToken.nestingAccount()); + + await collectionB.addAdmin(alice, {Substrate: bob.address}); + await targetToken.transfer(alice, {Substrate: charlie.address}); + + const propertyKeys: string[] = []; + let i = 0; + for (const permission of permissions) { + i++; + if (!permission.permission.mutable) continue; + + let j = 0; + for (const signer of permission.signers) { + j++; + const key = i + '_' + signer.address; + propertyKeys.push(key); + + await expect( + nestedToken.setProperties(signer, [{key, value: 'Serotonin increase'}]), + `on adding property #${i} by signer #${j}`, + ).to.be.fulfilled; + + await expect( + nestedToken.setProperties(signer, [{key, value: 'Serotonin stable'}]), + `on changing property #${i} by signer #${j}`, + ).to.be.fulfilled; + } + } + + const properties = await nestedToken.getProperties(propertyKeys); + const tokenData = await nestedToken.getData(); + for (let i = 0; i < properties.length; i++) { + expect(properties[i].value).to.be.equal('Serotonin stable'); + expect(tokenData!.properties[i].value).to.be.equal('Serotonin stable'); + } + expect(await targetToken.getProperties()).to.be.empty; + }); + + itSub('Deletes properties of a nested token according to permissions', async ({helper}) => { + const collectionA = await helper.nft.mintCollection(alice, {permissions: {nesting: {tokenOwner: true}}}); + const collectionB = await helper.nft.mintCollection(alice, { + tokenPropertyPermissions: permissions.flatMap(({permission, signers}, i) => + signers.map(signer => {return {key: `${i+1}_${signer.address}`, permission};})), + }); + const targetToken = await collectionA.mintToken(alice); + const nestedToken = await collectionB.mintToken(alice, targetToken.nestingAccount()); + + await collectionB.addAdmin(alice, {Substrate: bob.address}); + await targetToken.transfer(alice, {Substrate: charlie.address}); + + const propertyKeys: string[] = []; + let i = 0; + for (const permission of permissions) { + i++; + if (!permission.permission.mutable) continue; + + let j = 0; + for (const signer of permission.signers) { + j++; + const key = i + '_' + signer.address; + propertyKeys.push(key); + + await expect( + nestedToken.setProperties(signer, [{key, value: 'Serotonin increase'}]), + `on adding property #${i} by signer #${j}`, + ).to.be.fulfilled; + + await expect( + nestedToken.deleteProperties(signer, [key]), + `on deleting property #${i} by signer #${j}`, + ).to.be.fulfilled; + } + } + + expect(await nestedToken.getProperties(propertyKeys)).to.be.empty; + expect((await nestedToken.getData())!.properties).to.be.empty; + expect(await targetToken.getProperties()).to.be.empty; + }); +}); + +describe('Negative Integration Test: Token Properties', () => { + let alice: 
IKeyringPair; // collection owner + let bob: IKeyringPair; // collection admin + let charlie: IKeyringPair; // token owner + + let constitution: {permission: any, signers: IKeyringPair[], sinner: IKeyringPair}[]; + + before(async () => { + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + let dave: IKeyringPair; + [alice, bob, charlie, dave] = await helper.arrange.createAccounts([100n, 100n, 100n, 100n], donor); + + // todo:playgrounds probably separate these tests later + constitution = [ + {permission: {mutable: true, collectionAdmin: true}, signers: [alice, bob], sinner: charlie}, + {permission: {mutable: false, collectionAdmin: true}, signers: [alice, bob], sinner: charlie}, + {permission: {mutable: true, tokenOwner: true}, signers: [charlie], sinner: alice}, + {permission: {mutable: false, tokenOwner: true}, signers: [charlie], sinner: alice}, + {permission: {mutable: true, collectionAdmin: true, tokenOwner: true}, signers: [alice, bob, charlie], sinner: dave}, + {permission: {mutable: false, collectionAdmin: true, tokenOwner: true}, signers: [alice, bob, charlie], sinner: dave}, + ]; + }); + }); + + async function mintCollectionWithAllPermissionsAndToken(helper: UniqueHelper, mode: 'NFT' | 'RFT'): Promise<[UniqueNFToken | UniqueRFToken, bigint]> { + const collection = await (mode == 'NFT' ? helper.nft : helper.rft).mintCollection(alice, { + tokenPropertyPermissions: constitution.map(({permission}, i) => {return {key: `${i+1}`, permission};}), + }); + return mode == 'NFT' ? [await collection.mintToken(alice), 1n] : [await collection.mintToken(alice, 100n), 100n]; + } + + async function getConsumedSpace(api: any, collectionId: number, tokenId: number, mode: 'NFT' | 'RFT'): Promise<number> { + return (await (mode == 'NFT' ? api.query.nonfungible : api.query.refungible).tokenProperties(collectionId, tokenId)).toJSON().consumedSpace; + } + + async function prepare(token: UniqueNFToken | UniqueRFToken, pieces: bigint): Promise<number> { + await token.collection.addAdmin(alice, {Substrate: bob.address}); + await token.transfer(alice, {Substrate: charlie.address}, pieces); + + let i = 0; + for (const passage of constitution) { + i++; + const signer = passage.signers[0]; + await expect( + token.setProperties(signer, [{key: `${i}`, value: 'Serotonin increase'}]), + `on adding property ${i} by ${signer.address}`, + ).to.be.fulfilled; + } + + const originalSpace = await getConsumedSpace(token.collection.helper.getApi(), token.collectionId, token.tokenId, pieces == 1n ? 'NFT' : 'RFT'); + return originalSpace; + } + + async function testForbidsChangingDeletingPropertiesUserOutsideOfPermissions(token: UniqueNFToken | UniqueRFToken, pieces: bigint) { + const originalSpace = await prepare(token, pieces); + + let i = 0; + for (const forbiddance of constitution) { + i++; + if (!forbiddance.permission.mutable) continue; + + await expect( + token.setProperties(forbiddance.sinner, [{key: `${i}`, value: 'Serotonin down'}]), + `on failing to change property ${i} by the malefactor`, + ).to.be.rejectedWith(/common\.NoPermission/); + + await expect( + token.deleteProperties(forbiddance.sinner, [`${i}`]), + `on failing to delete property ${i} by the malefactor`, + ).to.be.rejectedWith(/common\.NoPermission/); + } + + const consumedSpace = await getConsumedSpace(token.collection.helper.getApi(), token.collectionId, token.tokenId, pieces == 1n ? 
'NFT' : 'RFT'); + expect(consumedSpace).to.be.equal(originalSpace); + } + + itSub('Forbids changing/deleting properties of a token if the user is outside of permissions (NFT)', async ({helper}) => { + const [token, amount] = await mintCollectionWithAllPermissionsAndToken(helper, 'NFT'); + await testForbidsChangingDeletingPropertiesUserOutsideOfPermissions(token, amount); + }); + + itSub.ifWithPallets('Forbids changing/deleting properties of a token if the user is outside of permissions (ReFungible)', [Pallets.ReFungible], async ({helper}) => { + const [token, amount] = await mintCollectionWithAllPermissionsAndToken(helper, 'RFT'); + await testForbidsChangingDeletingPropertiesUserOutsideOfPermissions(token, amount); + }); + + async function testForbidsChangingDeletingPropertiesIfPropertyImmutable(token: UniqueNFToken | UniqueRFToken, pieces: bigint) { + const originalSpace = await prepare(token, pieces); + + let i = 0; + for (const permission of constitution) { + i++; + if (permission.permission.mutable) continue; + + await expect( + token.setProperties(permission.signers[0], [{key: `${i}`, value: 'Serotonin down'}]), + `on failing to change property ${i} by signer #0`, + ).to.be.rejectedWith(/common\.NoPermission/); + + await expect( + token.deleteProperties(permission.signers[0], [i.toString()]), + `on failing to delete property ${i} by signer #0`, + ).to.be.rejectedWith(/common\.NoPermission/); + } + + const consumedSpace = await getConsumedSpace(token.collection.helper.getApi(), token.collectionId, token.tokenId, pieces == 1n ? 'NFT' : 'RFT'); + expect(consumedSpace).to.be.equal(originalSpace); + } + + itSub('Forbids changing/deleting properties of a token if the property is permanent (immutable) (NFT)', async ({helper}) => { + const [token, amount] = await mintCollectionWithAllPermissionsAndToken(helper, 'NFT'); + await testForbidsChangingDeletingPropertiesIfPropertyImmutable(token, amount); + }); + + itSub.ifWithPallets('Forbids changing/deleting properties of a token if the property is permanent (immutable) (ReFungible)', [Pallets.ReFungible], async ({helper}) => { + const [token, amount] = await mintCollectionWithAllPermissionsAndToken(helper, 'RFT'); + await testForbidsChangingDeletingPropertiesIfPropertyImmutable(token, amount); + }); + + async function testForbidsAddingPropertiesIfPropertyNotDeclared(token: UniqueNFToken | UniqueRFToken, pieces: bigint) { + const originalSpace = await prepare(token, pieces); + + await expect( + token.setProperties(alice, [{key: 'non-existent', value: 'I exist!'}]), + 'on failing to add a previously non-existent property', + ).to.be.rejectedWith(/common\.NoPermission/); + + await expect( + token.collection.setTokenPropertyPermissions(alice, [{key: 'now-existent', permission: {}}]), + 'on setting a new non-permitted property', + ).to.be.fulfilled; + + await expect( + token.setProperties(alice, [{key: 'now-existent', value: 'I exist!'}]), + 'on failing to add a property forbidden by the \'None\' permission', + ).to.be.rejectedWith(/common\.NoPermission/); + + expect(await token.getProperties(['non-existent', 'now-existent'])).to.be.empty; + + const consumedSpace = await getConsumedSpace(token.collection.helper.getApi(), token.collectionId, token.tokenId, pieces == 1n ? 
'NFT' : 'RFT'); + expect(consumedSpace).to.be.equal(originalSpace); + } + + itSub('Forbids adding properties to a token if the property is not declared / forbidden with the \'None\' permission (NFT)', async ({helper}) => { + const [token, amount] = await mintCollectionWithAllPermissionsAndToken(helper, 'NFT'); + await testForbidsAddingPropertiesIfPropertyNotDeclared(token, amount); + }); + + itSub.ifWithPallets('Forbids adding properties to a token if the property is not declared / forbidden with the \'None\' permission (ReFungible)', [Pallets.ReFungible], async ({helper}) => { + const [token, amount] = await mintCollectionWithAllPermissionsAndToken(helper, 'RFT'); + await testForbidsAddingPropertiesIfPropertyNotDeclared(token, amount); + }); + + async function testForbidsAddingTooManyProperties(token: UniqueNFToken | UniqueRFToken, pieces: bigint) { + const originalSpace = await prepare(token, pieces); + + await expect( + token.collection.setTokenPropertyPermissions(alice, [ + {key: 'a_holy_book', permission: {collectionAdmin: true, tokenOwner: true}}, + {key: 'young_years', permission: {collectionAdmin: true, tokenOwner: true}}, + ]), + 'on setting new permissions for properties', + ).to.be.fulfilled; + + // Mute the general tx parsing error + { + console.error = () => {}; + await expect(token.setProperties(alice, [{key: 'a_holy_book', value: 'word '.repeat(6554)}])) + .to.be.rejected; + } + + await expect(token.setProperties(alice, [ + {key: 'a_holy_book', value: 'word '.repeat(3277)}, + {key: 'young_years', value: 'neverending'.repeat(1490)}, + ])).to.be.rejectedWith(/common\.NoSpaceForProperty/); + + expect(await token.getProperties(['a_holy_book', 'young_years'])).to.be.empty; + const consumedSpace = await getConsumedSpace(token.collection.helper.getApi(), token.collectionId, token.tokenId, pieces == 1n ? 
'NFT' : 'RFT'); + expect(consumedSpace).to.be.equal(originalSpace); + } + + itSub('Forbids adding too many properties to a token (NFT)', async ({helper}) => { + const [token, amount] = await mintCollectionWithAllPermissionsAndToken(helper, 'NFT'); + await testForbidsAddingTooManyProperties(token, amount); + }); + + itSub.ifWithPallets('Forbids adding too many properties to a token (ReFungible)', [Pallets.ReFungible], async ({helper}) => { + const [token, amount] = await mintCollectionWithAllPermissionsAndToken(helper, 'RFT'); + await testForbidsAddingTooManyProperties(token, amount); + }); +}); + +describe('ReFungible token properties permissions tests', () => { + let alice: IKeyringPair; + let bob: IKeyringPair; + let charlie: IKeyringPair; + + before(async function() { + await usingPlaygrounds(async (helper, privateKey) => { + requirePalletsOrSkip(this, helper, [Pallets.ReFungible]); + + const donor = await privateKey({filename: __filename}); + [alice, bob, charlie] = await helper.arrange.createAccounts([100n, 100n, 100n], donor); + }); + }); + + async function prepare(helper: UniqueHelper): Promise<UniqueRFToken> { + const collection = await helper.rft.mintCollection(alice); + const token = await collection.mintToken(alice, 100n); + + await collection.addAdmin(alice, {Substrate: bob.address}); + await collection.setTokenPropertyPermissions(alice, [{key: 'fractals', permission: {mutable: true, tokenOwner: true}}]); + + return token; + } + + itSub('Forbids adding token property with tokenOwner==true when signer doesn\'t have all pieces', async ({helper}) => { + const token = await prepare(helper); + + await token.transfer(alice, {Substrate: charlie.address}, 33n); + + await expect(token.setProperties(alice, [ + {key: 'fractals', value: 'multiverse'}, + ])).to.be.rejectedWith(/common\.NoPermission/); + }); + + itSub('Forbids mutating token property with tokenOwner==true when signer doesn\'t have all pieces', async ({helper}) => { + const token = await prepare(helper); + + await expect(token.collection.setTokenPropertyPermissions(alice, [{key: 'fractals', permission: {mutable:true, tokenOwner: true}}])) + .to.be.fulfilled; + + await expect(token.setProperties(alice, [ + {key: 'fractals', value: 'multiverse'}, + ])).to.be.fulfilled; + + await token.transfer(alice, {Substrate: charlie.address}, 33n); + + await expect(token.setProperties(alice, [ + {key: 'fractals', value: 'want to rule the world'}, + ])).to.be.rejectedWith(/common\.NoPermission/); + }); + + itSub('Forbids deleting token property with tokenOwner==true when signer doesn\'t have all pieces', async ({helper}) => { + const token = await prepare(helper); + + await expect(token.setProperties(alice, [ + {key: 'fractals', value: 'one headline - why believe it'}, + ])).to.be.fulfilled; + + await token.transfer(alice, {Substrate: charlie.address}, 33n); + + await expect(token.deleteProperties(alice, ['fractals'])). 
+ to.be.rejectedWith(/common\.NoPermission/); + }); + + itSub('Allows token property mutation with collectionOwner==true when admin doesn\'t have all pieces', async ({helper}) => { + const token = await prepare(helper); + + await token.transfer(alice, {Substrate: charlie.address}, 33n); + + await expect(token.collection.setTokenPropertyPermissions(alice, [{key: 'fractals', permission: {mutable:true, collectionAdmin: true}}])) + .to.be.fulfilled; + + await expect(token.setProperties(alice, [ + {key: 'fractals', value: 'multiverse'}, + ])).to.be.fulfilled; + }); +}); diff --git a/tests/src/nesting/unnest.test.ts b/tests/src/nesting/unnest.test.ts index 419b1bd5dd..0da84556bb 100644 --- a/tests/src/nesting/unnest.test.ts +++ b/tests/src/nesting/unnest.test.ts @@ -1,158 +1,126 @@ -import {expect} from 'chai'; -import {tokenIdToAddress} from '../eth/util/helpers'; -import usingApi, {executeTransaction} from '../substrate/substrate-api'; -import { - createCollectionExpectSuccess, - createItemExpectSuccess, - getBalance, - getTokenOwner, - normalizeAccountId, - setCollectionPermissionsExpectSuccess, - transferExpectSuccess, - transferFromExpectSuccess, -} from '../util/helpers'; -import {IKeyringPair} from '@polkadot/types/types'; +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . 
-let alice: IKeyringPair; -let bob: IKeyringPair; +import {IKeyringPair} from '@polkadot/types/types'; +import {expect, itSub, Pallets, usingPlaygrounds} from '../util'; describe('Integration Test: Unnesting', () => { + let alice: IKeyringPair; + before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice] = await helper.arrange.createAccounts([50n], donor); }); }); - it('NFT: allows the owner to successfully unnest a token', async () => { - await usingApi(async api => { - const collection = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await setCollectionPermissionsExpectSuccess(alice, collection, {nesting: {tokenOwner: true}}); - const targetToken = await createItemExpectSuccess(alice, collection, 'NFT'); - const targetAddress = {Ethereum: tokenIdToAddress(collection, targetToken)}; - - // Create a nested token - const nestedToken = await createItemExpectSuccess(alice, collection, 'NFT', targetAddress); - - // Unnest - await expect(executeTransaction( - api, - alice, - api.tx.unique.transferFrom(normalizeAccountId(targetAddress), normalizeAccountId(alice), collection, nestedToken, 1), - ), 'while unnesting').to.not.be.rejected; - expect(await getTokenOwner(api, collection, nestedToken)).to.be.deep.equal({Substrate: alice.address}); - - // Nest and burn - await transferExpectSuccess(collection, nestedToken, alice, targetAddress); - await expect(executeTransaction( - api, - alice, - api.tx.unique.burnFrom(collection, normalizeAccountId(targetAddress), nestedToken, 1), - ), 'while burning').to.not.be.rejected; - await expect(getTokenOwner(api, collection, nestedToken)).to.be.rejected; - }); + itSub('NFT: allows the owner to successfully unnest a token', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {permissions: {nesting: {tokenOwner: true}}}); + const targetToken = await collection.mintToken(alice); + + // Create a nested token + const nestedToken = await collection.mintToken(alice, targetToken.nestingAccount()); + + // Unnest + await expect(nestedToken.transferFrom(alice, targetToken.nestingAccount(), {Substrate: alice.address}), 'while unnesting').to.be.fulfilled; + expect(await nestedToken.getOwner()).to.be.deep.equal({Substrate: alice.address}); + + // Nest and burn + await nestedToken.nest(alice, targetToken); + await expect(nestedToken.burnFrom(alice, targetToken.nestingAccount()), 'while burning').to.be.fulfilled; + await expect(nestedToken.getOwner()).to.be.rejected; }); - it('Fungible: allows the owner to successfully unnest a token', async () => { - await usingApi(async api => { - const collection = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await setCollectionPermissionsExpectSuccess(alice, collection, {nesting: {tokenOwner: true}}); - const targetToken = await createItemExpectSuccess(alice, collection, 'NFT'); - const targetAddress = {Ethereum: tokenIdToAddress(collection, targetToken)}; - - const collectionFT = await createCollectionExpectSuccess({mode: {type: 'Fungible', decimalPoints: 0}}); - const nestedToken = await createItemExpectSuccess(alice, collectionFT, 'Fungible'); - - // Nest and unnest - await transferExpectSuccess(collectionFT, nestedToken, alice, targetAddress, 1, 'Fungible'); - await transferFromExpectSuccess(collectionFT, nestedToken, alice, targetAddress, alice, 1, 'Fungible'); 
- - // Nest and burn - await transferExpectSuccess(collectionFT, nestedToken, alice, targetAddress, 1, 'Fungible'); - const balanceBefore = await getBalance(api, collectionFT, normalizeAccountId(targetAddress), nestedToken); - await expect(executeTransaction( - api, - alice, - api.tx.unique.burnFrom(collectionFT, normalizeAccountId(targetAddress), nestedToken, 1), - ), 'while burning').to.not.be.rejected; - const balanceAfter = await getBalance(api, collectionFT, normalizeAccountId(targetAddress), nestedToken); - expect(balanceAfter + BigInt(1)).to.be.equal(balanceBefore); - }); + itSub('Fungible: allows the owner to successfully unnest a token', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {permissions: {nesting: {tokenOwner: true}}}); + const targetToken = await collection.mintToken(alice); + + const collectionFT = await helper.ft.mintCollection(alice); + + // Nest and unnest + await collectionFT.mint(alice, 10n, targetToken.nestingAccount()); + await expect(collectionFT.transferFrom(alice, targetToken.nestingAccount(), {Substrate: alice.address}, 9n), 'while unnesting').to.be.fulfilled; + expect(await collectionFT.getBalance({Substrate: alice.address})).to.be.equal(9n); + expect(await collectionFT.getBalance(targetToken.nestingAccount())).to.be.equal(1n); + + // Nest and burn + await collectionFT.transfer(alice, targetToken.nestingAccount(), 5n); + await expect(collectionFT.burnTokensFrom(alice, targetToken.nestingAccount(), 6n), 'while burning').to.be.fulfilled; + expect(await collectionFT.getBalance({Substrate: alice.address})).to.be.equal(4n); + expect(await collectionFT.getBalance(targetToken.nestingAccount())).to.be.equal(0n); + expect(await targetToken.getChildren()).to.be.length(0); }); - it('ReFungible: allows the owner to successfully unnest a token', async () => { - await usingApi(async api => { - const collection = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await setCollectionPermissionsExpectSuccess(alice, collection, {nesting: {tokenOwner: true}}); - const targetToken = await createItemExpectSuccess(alice, collection, 'NFT'); - const targetAddress = {Ethereum: tokenIdToAddress(collection, targetToken)}; - - const collectionRFT = await createCollectionExpectSuccess({mode: {type: 'ReFungible'}}); - const nestedToken = await createItemExpectSuccess(alice, collectionRFT, 'ReFungible'); - - // Nest and unnest - await transferExpectSuccess(collectionRFT, nestedToken, alice, targetAddress, 1, 'ReFungible'); - await transferFromExpectSuccess(collectionRFT, nestedToken, alice, targetAddress, alice, 1, 'ReFungible'); - - // Nest and burn - await transferExpectSuccess(collectionRFT, nestedToken, alice, targetAddress, 1, 'ReFungible'); - await expect(executeTransaction( - api, - alice, - api.tx.unique.burnFrom(collectionRFT, normalizeAccountId(targetAddress), nestedToken, 1), - ), 'while burning').to.not.be.rejected; - const balance = await getBalance(api, collectionRFT, normalizeAccountId(targetAddress), nestedToken); - expect(balance).to.be.equal(0n); - }); + itSub.ifWithPallets('ReFungible: allows the owner to successfully unnest a token', [Pallets.ReFungible], async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {permissions: {nesting: {tokenOwner: true}}}); + const targetToken = await collection.mintToken(alice); + + const collectionRFT = await helper.rft.mintCollection(alice); + + // Nest and unnest + const token = await collectionRFT.mintToken(alice, 10n, targetToken.nestingAccount()); + await 
expect(token.transferFrom(alice, targetToken.nestingAccount(), {Substrate: alice.address}, 9n), 'while unnesting').to.be.fulfilled; + expect(await token.getBalance({Substrate: alice.address})).to.be.equal(9n); + expect(await token.getBalance(targetToken.nestingAccount())).to.be.equal(1n); + + // Nest and burn + await token.transfer(alice, targetToken.nestingAccount(), 5n); + await expect(token.burnFrom(alice, targetToken.nestingAccount(), 6n), 'while burning').to.be.fulfilled; + expect(await token.getBalance({Substrate: alice.address})).to.be.equal(4n); + expect(await token.getBalance(targetToken.nestingAccount())).to.be.equal(0n); + expect(await targetToken.getChildren()).to.be.length(0); }); }); describe('Negative Test: Unnesting', () => { + let alice: IKeyringPair; + let bob: IKeyringPair; + before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob] = await helper.arrange.createAccounts([50n, 10n], donor); }); }); - it('Disallows a non-owner to unnest/burn a token', async () => { - await usingApi(async api => { - const collection = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await setCollectionPermissionsExpectSuccess(alice, collection, {nesting: {tokenOwner: true}}); - const targetToken = await createItemExpectSuccess(alice, collection, 'NFT'); - const targetAddress = {Ethereum: tokenIdToAddress(collection, targetToken)}; - - // Create a nested token - const nestedToken = await createItemExpectSuccess(alice, collection, 'NFT', targetAddress); - - // Try to unnest - await expect(executeTransaction( - api, - bob, - api.tx.unique.transferFrom(normalizeAccountId(targetAddress), normalizeAccountId(bob), collection, nestedToken, 1), - ), 'while unnesting').to.be.rejectedWith(/^common\.ApprovedValueTooLow$/); - expect(await getTokenOwner(api, collection, nestedToken)).to.be.deep.equal({Ethereum: tokenIdToAddress(collection, targetToken).toLowerCase()}); - - // Try to burn - await expect(executeTransaction( - api, - bob, - api.tx.unique.burnFrom(collection, normalizeAccountId(bob.address), nestedToken, 1), - ), 'while burning').to.not.be.rejectedWith(/^common\.ApprovedValueTooLow$/); - expect(await getTokenOwner(api, collection, nestedToken)).to.be.deep.equal({Ethereum: tokenIdToAddress(collection, targetToken).toLowerCase()}); - }); + itSub('Disallows a non-owner to unnest/burn a token', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {permissions: {nesting: {tokenOwner: true}}}); + const targetToken = await collection.mintToken(alice); + + // Create a nested token + const nestedToken = await collection.mintToken(alice, targetToken.nestingAccount()); + + // Try to unnest + await expect(nestedToken.unnest(bob, targetToken, {Substrate: alice.address})).to.be.rejectedWith(/common\.ApprovedValueTooLow/); + expect(await nestedToken.getOwner()).to.be.deep.equal(targetToken.nestingAccount().toLowerCase()); + + // Try to burn + await expect(nestedToken.burnFrom(bob, targetToken.nestingAccount())).to.be.rejectedWith(/common\.ApprovedValueTooLow/); + expect(await nestedToken.getOwner()).to.be.deep.equal(targetToken.nestingAccount().toLowerCase()); }); // todo another test for creating excessive depth matryoshka with Ethereum? 
// Recursive nesting - it('Prevents Ouroboros creation', async () => { - const collection = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await setCollectionPermissionsExpectSuccess(alice, collection, {nesting: {tokenOwner: true}}); - const targetToken = await createItemExpectSuccess(alice, collection, 'NFT'); - - // Create a nested token ouroboros - const nestedToken = await createItemExpectSuccess(alice, collection, 'NFT', {Ethereum: tokenIdToAddress(collection, targetToken)}); - await expect(transferExpectSuccess(collection, targetToken, alice, {Ethereum: tokenIdToAddress(collection, nestedToken)})).to.be.rejectedWith(/^structure\.OuroborosDetected$/); + itSub('Prevents Ouroboros creation', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {permissions: {nesting: {tokenOwner: true}}}); + const targetToken = await collection.mintToken(alice); + + // Fail to create a nested token ouroboros + const nestedToken = await collection.mintToken(alice, targetToken.nestingAccount()); + await expect(targetToken.nest(alice, nestedToken)).to.be.rejectedWith(/^structure\.OuroborosDetected$/); }); }); diff --git a/tests/src/nextSponsoring.test.ts b/tests/src/nextSponsoring.test.ts index 7d66901e7f..33ceca0d4c 100644 --- a/tests/src/nextSponsoring.test.ts +++ b/tests/src/nextSponsoring.test.ts @@ -14,96 +14,84 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . -import {ApiPromise} from '@polkadot/api'; import {IKeyringPair} from '@polkadot/types/types'; -import chai from 'chai'; -import chaiAsPromised from 'chai-as-promised'; -import {default as usingApi} from './substrate/substrate-api'; -import { - createCollectionExpectSuccess, - setCollectionSponsorExpectSuccess, - confirmSponsorshipExpectSuccess, - createItemExpectSuccess, - transferExpectSuccess, - normalizeAccountId, - getNextSponsored, -} from './util/helpers'; - -chai.use(chaiAsPromised); -const expect = chai.expect; - +import {expect, itSub, Pallets, usingPlaygrounds} from './util'; +const SPONSORING_TIMEOUT = 5; describe('Integration Test getNextSponsored(collection_id, owner, item_id):', () => { let alice: IKeyringPair; let bob: IKeyringPair; before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob] = await helper.arrange.createAccounts([20n, 10n], donor); }); }); - it('NFT', async () => { - await usingApi(async (api: ApiPromise) => { - - // Not existing collection - expect(await getNextSponsored(api, 0, normalizeAccountId(alice), 0)).to.be.equal(-1); + itSub('NFT', async ({helper}) => { + // Non-existing collection + expect(await helper.collection.getTokenNextSponsored(0, 0, {Substrate: alice.address})).to.be.null; - const collectionId = await createCollectionExpectSuccess(); - const itemId = await createItemExpectSuccess(alice, collectionId, 'NFT', alice.address); + const collection = await helper.nft.mintCollection(alice, {}); + const token = await collection.mintToken(alice); - // Check with Disabled sponsoring state - expect(await getNextSponsored(api, collectionId, normalizeAccountId(alice), itemId)).to.be.equal(-1); - await setCollectionSponsorExpectSuccess(collectionId, bob.address); + // Check with Disabled sponsoring state + expect(await token.getNextSponsored({Substrate: alice.address})).to.be.null; + 
+ // Check with Unconfirmed sponsoring state + await collection.setSponsor(alice, bob.address); + expect(await token.getNextSponsored({Substrate: alice.address})).to.be.null; - // Check with Unconfirmed sponsoring state - expect(await getNextSponsored(api, collectionId, normalizeAccountId(alice), itemId)).to.be.equal(-1); - await confirmSponsorshipExpectSuccess(collectionId, '//Bob'); + // Check with Confirmed sponsoring state + await collection.confirmSponsorship(bob); + expect(await token.getNextSponsored({Substrate: alice.address})).to.be.equal(0); - // Check with Confirmed sponsoring state - expect(await getNextSponsored(api, collectionId, normalizeAccountId(alice), itemId)).to.be.equal(0); + // Check after transfer + await token.transfer(alice, {Substrate: bob.address}); + expect(await token.getNextSponsored({Substrate: alice.address})).to.be.lessThanOrEqual(SPONSORING_TIMEOUT); - // After transfer - await transferExpectSuccess(collectionId, itemId, alice, bob, 1); - expect(await getNextSponsored(api, collectionId, normalizeAccountId(alice), itemId)).to.be.lessThanOrEqual(5); - - // Not existing token - expect(await getNextSponsored(api, collectionId, normalizeAccountId(alice), itemId+1)).to.be.equal(-1); - }); + // Non-existing token + expect(await collection.getTokenNextSponsored(0, {Substrate: alice.address})).to.be.null; }); - it('Fungible', async () => { - await usingApi(async (api: ApiPromise) => { + itSub('Fungible', async ({helper}) => { + const collection = await helper.ft.mintCollection(alice, {}); + await collection.mint(alice, 10n); - const createMode = 'Fungible'; - const funCollectionId = await createCollectionExpectSuccess({mode: {type: createMode, decimalPoints: 0}}); - await createItemExpectSuccess(alice, funCollectionId, createMode); - await setCollectionSponsorExpectSuccess(funCollectionId, bob.address); - await confirmSponsorshipExpectSuccess(funCollectionId, '//Bob'); - expect(await getNextSponsored(api, funCollectionId, normalizeAccountId(alice), 0)).to.be.equal(0); + // Check with Disabled sponsoring state + expect(await collection.getTokenNextSponsored(0, {Substrate: alice.address})).to.be.null; - await transferExpectSuccess(funCollectionId, 0, alice, bob, 10, 'Fungible'); - expect(await getNextSponsored(api, funCollectionId, normalizeAccountId(alice), 0)).to.be.lessThanOrEqual(5); - }); + await collection.setSponsor(alice, bob.address); + await collection.confirmSponsorship(bob); + + // Check with Confirmed sponsoring state + expect(await collection.getTokenNextSponsored(0, {Substrate: alice.address})).to.be.equal(0); + + // Check after transfer + await collection.transfer(alice, {Substrate: bob.address}); + expect(await collection.getTokenNextSponsored(0, {Substrate: alice.address})).to.be.lessThanOrEqual(SPONSORING_TIMEOUT); }); - it('ReFungible', async () => { - await usingApi(async (api: ApiPromise) => { + itSub.ifWithPallets('ReFungible', [Pallets.ReFungible], async ({helper}) => { + const collection = await helper.rft.mintCollection(alice, {}); + const token = await collection.mintToken(alice, 10n); - const createMode = 'ReFungible'; - const refunCollectionId = await createCollectionExpectSuccess({mode: {type: createMode}}); - const refunItemId = await createItemExpectSuccess(alice, refunCollectionId, createMode); - await setCollectionSponsorExpectSuccess(refunCollectionId, bob.address); - await confirmSponsorshipExpectSuccess(refunCollectionId, '//Bob'); - expect(await getNextSponsored(api, refunCollectionId, normalizeAccountId(alice), 
refunItemId)).to.be.equal(0); + // Check with Disabled sponsoring state + expect(await token.getNextSponsored({Substrate: alice.address})).to.be.null; - await transferExpectSuccess(refunCollectionId, refunItemId, alice, bob, 10, 'ReFungible'); - expect(await getNextSponsored(api, refunCollectionId, normalizeAccountId(alice), refunItemId)).to.be.lessThanOrEqual(5); + await collection.setSponsor(alice, bob.address); + await collection.confirmSponsorship(bob); - // Not existing token - expect(await getNextSponsored(api, refunCollectionId, normalizeAccountId(alice), refunItemId+1)).to.be.equal(-1); - }); + // Check with Confirmed sponsoring state + expect(await token.getNextSponsored({Substrate: alice.address})).to.be.equal(0); + + // Check after transfer + await token.transfer(alice, {Substrate: bob.address}); + expect(await token.getNextSponsored({Substrate: alice.address})).to.be.lessThanOrEqual(SPONSORING_TIMEOUT); + + // Non-existing token + expect(await collection.getTokenNextSponsored(0, {Substrate: alice.address})).to.be.null; }); }); diff --git a/tests/src/pallet-presence.test.ts b/tests/src/pallet-presence.test.ts index cecf67b5a7..69b7a92a17 100644 --- a/tests/src/pallet-presence.test.ts +++ b/tests/src/pallet-presence.test.ts @@ -14,13 +14,7 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . -import {ApiPromise} from '@polkadot/api'; -import {expect} from 'chai'; -import usingApi from './substrate/substrate-api'; - -function getModuleNames(api: ApiPromise): string[] { - return api.runtimeMetadata.asLatest.pallets.map(m => m.name.toString().toLowerCase()); -} +import {itSub, usingPlaygrounds, expect} from './util'; // Pallets that must always be present const requiredPallets = [ @@ -49,9 +43,10 @@ const requiredPallets = [ 'inflation', 'unique', 'nonfungible', - 'refungible', - 'scheduler', 'charging', + 'configuration', + 'tokens', + 'xtokens', 'maintenance', ]; @@ -64,32 +59,40 @@ const consensusPallets = [ describe('Pallet presence', () => { before(async () => { - await usingApi(async api => { - const chain = await api.rpc.system.chain(); + await usingPlaygrounds(async helper => { + const chain = await helper.callRpc('api.rpc.system.chain', []); + + const refungible = 'refungible'; + // const scheduler = 'scheduler'; + const foreignAssets = 'foreignassets'; + const rmrkPallets = ['rmrkcore', 'rmrkequip']; + const appPromotion = 'apppromotion'; - if (!chain.eq('UNIQUE')) { - requiredPallets.push(...['rmrkcore', 'rmrkequip']); + if (chain.eq('OPAL by UNIQUE')) { + requiredPallets.push( + refungible, + // scheduler, + foreignAssets, + appPromotion, + ...rmrkPallets, + ); + } else if (chain.eq('QUARTZ by UNIQUE')) { + requiredPallets.push(refungible); + } else if (chain.eq('UNIQUE')) { + // Insert Unique additional pallets here } }); }); - it('Required pallets are present', async () => { - await usingApi(async api => { - for (let i=0; i < requiredPallets.length; i++) { - expect(getModuleNames(api)).to.include(requiredPallets[i]); - } - }); + itSub('Required pallets are present', async ({helper}) => { + expect(helper.fetchAllPalletNames()).to.contain.members([...requiredPallets]); }); - it('Governance and consensus pallets are present', async () => { - await usingApi(async api => { - for (let i=0; i < consensusPallets.length; i++) { - expect(getModuleNames(api)).to.include(consensusPallets[i]); - } - }); + itSub('Governance and consensus pallets are present', async ({helper}) => { + expect(helper.fetchAllPalletNames()).to.contain.members([...consensusPallets]); }); - it('No extra pallets are included', async () => { - await usingApi(async api => { - expect(getModuleNames(api).sort()).to.be.deep.equal([...requiredPallets, ...consensusPallets].sort()); - }); + + itSub('No extra pallets are included', async ({helper}) => { +
expect(helper.fetchAllPalletNames().sort()).to.be.deep.equal([...requiredPallets, ...consensusPallets].sort()); }); }); diff --git a/tests/src/refungible.test.ts b/tests/src/refungible.test.ts index 72890520d7..03bae16176 100644 --- a/tests/src/refungible.test.ts +++ b/tests/src/refungible.test.ts @@ -14,176 +14,244 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . -import {default as usingApi} from './substrate/substrate-api'; import {IKeyringPair} from '@polkadot/types/types'; -import { - createCollectionExpectSuccess, - getBalance, - createMultipleItemsExpectSuccess, - isTokenExists, - getLastTokenId, - getAllowance, - approve, - transferFrom, - createCollection, - createRefungibleToken, - transfer, - burnItem, - repartitionRFT, -} from './util/helpers'; - -import chai from 'chai'; -import chaiAsPromised from 'chai-as-promised'; -chai.use(chaiAsPromised); -const expect = chai.expect; - -let alice: IKeyringPair; -let bob: IKeyringPair; - -describe('integration test: Refungible functionality:', () => { - before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - }); - }); +import {itSub, Pallets, requirePalletsOrSkip, usingPlaygrounds, expect} from './util'; - it('Create refungible collection and token', async () => { - await usingApi(async api => { - const createCollectionResult = await createCollection(api, alice, {mode: {type: 'ReFungible'}}); - expect(createCollectionResult.success).to.be.true; - const collectionId = createCollectionResult.collectionId; +const MAX_REFUNGIBLE_PIECES = 1_000_000_000_000_000_000_000n; - const itemCountBefore = await getLastTokenId(api, collectionId); - const result = await createRefungibleToken(api, alice, collectionId, 100n); +describe('integration test: Refungible functionality:', async () => { + let donor: IKeyringPair; + let alice: IKeyringPair; + let bob: IKeyringPair; - const itemCountAfter = await getLastTokenId(api, collectionId); + before(async function() { + await usingPlaygrounds(async (helper, privateKey) => { + requirePalletsOrSkip(this, helper, [Pallets.ReFungible]); - // What to expect - // tslint:disable-next-line:no-unused-expression - expect(result.success).to.be.true; - expect(itemCountAfter).to.be.equal(itemCountBefore + 1); - expect(collectionId).to.be.equal(result.collectionId); - expect(itemCountAfter.toString()).to.be.equal(result.itemId.toString()); + donor = await privateKey({filename: __filename}); + [alice, bob] = await helper.arrange.createAccounts([100n, 10n], donor); }); }); - - it('Transfer token pieces', async () => { - await usingApi(async api => { - const collectionId = (await createCollection(api, alice, {mode: {type: 'ReFungible'}})).collectionId; - const tokenId = (await createRefungibleToken(api, alice, collectionId, 100n)).itemId; - - expect(await getBalance(api, collectionId, alice, tokenId)).to.be.equal(100n); - expect(await transfer(api, collectionId, tokenId, alice, bob, 60n)).to.be.true; - - expect(await getBalance(api, collectionId, alice, tokenId)).to.be.equal(40n); - expect(await getBalance(api, collectionId, bob, tokenId)).to.be.equal(60n); - await expect(transfer(api, collectionId, tokenId, alice, bob, 41n)).to.eventually.be.rejected; - }); + + itSub('Create refungible collection and token', async ({helper}) => { + const collection = await helper.rft.mintCollection(alice, {name: 'test', description: 'test', tokenPrefix: 'test'}); + + const 
itemCountBefore = await collection.getLastTokenId(); + const token = await collection.mintToken(alice, 100n); + + const itemCountAfter = await collection.getLastTokenId(); + + // What to expect + expect(token?.tokenId).to.be.gte(itemCountBefore); + expect(itemCountAfter).to.be.equal(itemCountBefore + 1); + expect(itemCountAfter.toString()).to.be.equal(token?.tokenId.toString()); }); - - it('Create multiple tokens', async () => { - const collectionId = await createCollectionExpectSuccess({mode: {type: 'ReFungible'}}); - const args = [ - {ReFungible: {pieces: 1}}, - {ReFungible: {pieces: 2}}, - {ReFungible: {pieces: 100}}, - ]; - await createMultipleItemsExpectSuccess(alice, collectionId, args); - - await usingApi(async api => { - const tokenId = await getLastTokenId(api, collectionId); - expect(tokenId).to.be.equal(3); - expect(await getBalance(api, collectionId, alice, tokenId)).to.be.equal(100n); - }); + + itSub('Checking RPC methods when interacting with maximum allowed values (MAX_REFUNGIBLE_PIECES)', async ({helper}) => { + const collection = await helper.rft.mintCollection(alice, {name: 'test', description: 'test', tokenPrefix: 'test'}); + + const token = await collection.mintToken(alice, MAX_REFUNGIBLE_PIECES); + + expect(await collection.getTokenBalance(token.tokenId, {Substrate: alice.address})).to.be.equal(MAX_REFUNGIBLE_PIECES); + + await collection.transferToken(alice, token.tokenId, {Substrate: bob.address}, MAX_REFUNGIBLE_PIECES); + expect(await collection.getTokenBalance(token.tokenId, {Substrate: bob.address})).to.be.equal(MAX_REFUNGIBLE_PIECES); + expect(await token.getTotalPieces()).to.be.equal(MAX_REFUNGIBLE_PIECES); + + await expect(collection.mintToken(alice, MAX_REFUNGIBLE_PIECES + 1n)) + .to.eventually.be.rejectedWith(/refungible\.WrongRefungiblePieces/); + }); + + itSub('RPC method tokenOwners for refungible collection and token', async ({helper}) => { + const ethAcc = {Ethereum: '0x67fb3503a61b284dc83fa96dceec4192db47dc7c'}; + const facelessCrowd = (await helper.arrange.createAccounts(Array(7).fill(0n), donor)).map(keyring => {return {Substrate: keyring.address};}); + + const collection = await helper.rft.mintCollection(alice, {name: 'test', description: 'test', tokenPrefix: 'test'}); + + const token = await collection.mintToken(alice, 10_000n); + + await token.transfer(alice, {Substrate: bob.address}, 1000n); + await token.transfer(alice, ethAcc, 900n); + + for (let i = 0; i < 7; i++) { + await token.transfer(alice, facelessCrowd[i], 50n * BigInt(i + 1)); + } + + const owners = await token.getTop10Owners(); + + // What to expect + expect(owners).to.deep.include.members([{Substrate: alice.address}, ethAcc, {Substrate: bob.address}, ...facelessCrowd]); + expect(owners.length).to.be.equal(10); + + const [eleven] = await helper.arrange.createAccounts([0n], donor); + expect(await token.transfer(alice, {Substrate: eleven.address}, 10n)).to.be.true; + expect((await token.getTop10Owners()).length).to.be.equal(10); + }); + + itSub('Transfer token pieces', async ({helper}) => { + const collection = await helper.rft.mintCollection(alice, {name: 'test', description: 'test', tokenPrefix: 'test'}); + const token = await collection.mintToken(alice, 100n); + + expect(await token.getBalance({Substrate: alice.address})).to.be.equal(100n); + expect(await token.transfer(alice, {Substrate: bob.address}, 60n)).to.be.true; + + expect(await token.getBalance({Substrate: alice.address})).to.be.equal(40n); + expect(await token.getBalance({Substrate: bob.address})).to.be.equal(60n); + + await 
expect(token.transfer(alice, {Substrate: bob.address}, 41n)) + .to.eventually.be.rejectedWith(/common\.TokenValueTooLow/); }); - it('Burn some pieces', async () => { - await usingApi(async api => { - const collectionId = (await createCollection(api, alice, {mode: {type: 'ReFungible'}})).collectionId; - const tokenId = (await createRefungibleToken(api, alice, collectionId, 100n)).itemId; - expect(await isTokenExists(api, collectionId, tokenId)).to.be.true; - expect(await getBalance(api, collectionId, alice, tokenId)).to.be.equal(100n); - expect(await burnItem(api, alice, collectionId, tokenId, 99n)).to.be.true; - expect(await isTokenExists(api, collectionId, tokenId)).to.be.true; - expect(await getBalance(api, collectionId, alice, tokenId)).to.be.equal(1n); - }); + itSub('Create multiple tokens', async ({helper}) => { + const collection = await helper.rft.mintCollection(alice, {name: 'test', description: 'test', tokenPrefix: 'test'}); + // TODO: fix mintMultipleTokens + // await collection.mintMultipleTokens(alice, [ + // {owner: {Substrate: alice.address}, pieces: 1n}, + // {owner: {Substrate: alice.address}, pieces: 2n}, + // {owner: {Substrate: alice.address}, pieces: 100n}, + // ]); + await helper.rft.mintMultipleTokensWithOneOwner(alice, collection.collectionId, {Substrate: alice.address}, [ + {pieces: 1n}, + {pieces: 2n}, + {pieces: 100n}, + ]); + const lastTokenId = await collection.getLastTokenId(); + expect(lastTokenId).to.be.equal(3); + expect(await collection.getTokenBalance(lastTokenId, {Substrate: alice.address})).to.be.equal(100n); }); - it('Burn all pieces', async () => { - await usingApi(async api => { - const collectionId = (await createCollection(api, alice, {mode: {type: 'ReFungible'}})).collectionId; - const tokenId = (await createRefungibleToken(api, alice, collectionId, 100n)).itemId; - expect(await isTokenExists(api, collectionId, tokenId)).to.be.true; - expect(await getBalance(api, collectionId, alice, tokenId)).to.be.equal(100n); - expect(await burnItem(api, alice, collectionId, tokenId, 100n)).to.be.true; - expect(await isTokenExists(api, collectionId, tokenId)).to.be.false; - }); + itSub('Burn some pieces', async ({helper}) => { + const collection = await helper.rft.mintCollection(alice, {name: 'test', description: 'test', tokenPrefix: 'test'}); + const token = await collection.mintToken(alice, 100n); + expect(await collection.doesTokenExist(token.tokenId)).to.be.true; + expect(await token.getBalance({Substrate: alice.address})).to.be.equal(100n); + expect(await token.burn(alice, 99n)).to.be.true; + expect(await collection.doesTokenExist(token.tokenId)).to.be.true; + expect(await token.getBalance({Substrate: alice.address})).to.be.equal(1n); }); - it('Burn some pieces for multiple users', async () => { - await usingApi(async api => { - const collectionId = (await createCollection(api, alice, {mode: {type: 'ReFungible'}})).collectionId; - const tokenId = (await createRefungibleToken(api, alice, collectionId, 100n)).itemId; - expect(await isTokenExists(api, collectionId, tokenId)).to.be.true; - - expect(await getBalance(api, collectionId, alice, tokenId)).to.be.equal(100n); - expect(await transfer(api, collectionId, tokenId, alice, bob, 60n)).to.be.true; - - - expect(await getBalance(api, collectionId, alice, tokenId)).to.be.equal(40n); - expect(await getBalance(api, collectionId, bob, tokenId)).to.be.equal(60n); - expect(await burnItem(api, alice, collectionId, tokenId, 40n)).to.be.true; - - expect(await getBalance(api, collectionId, alice, 
tokenId)).to.be.equal(0n); - expect(await isTokenExists(api, collectionId, tokenId)).to.be.true; - expect(await burnItem(api, bob, collectionId, tokenId, 59n)).to.be.true; - - expect(await getBalance(api, collectionId, bob, tokenId)).to.be.equal(1n); - expect(await isTokenExists(api, collectionId, tokenId)).to.be.true; - expect(await burnItem(api, bob, collectionId, tokenId, 1n)).to.be.true; - - expect(await isTokenExists(api, collectionId, tokenId)).to.be.false; - }); + itSub('Burn all pieces', async ({helper}) => { + const collection = await helper.rft.mintCollection(alice, {name: 'test', description: 'test', tokenPrefix: 'test'}); + const token = await collection.mintToken(alice, 100n); + + expect(await collection.doesTokenExist(token.tokenId)).to.be.true; + expect(await token.getBalance({Substrate: alice.address})).to.be.equal(100n); + + expect(await token.burn(alice, 100n)).to.be.true; + expect(await collection.doesTokenExist(token.tokenId)).to.be.false; }); - it('Set allowance for token', async () => { - await usingApi(async api => { - const collectionId = (await createCollection(api, alice, {mode: {type: 'ReFungible'}})).collectionId; - const tokenId = (await createRefungibleToken(api, alice, collectionId, 100n)).itemId; + itSub('Burn some pieces for multiple users', async ({helper}) => { + const collection = await helper.rft.mintCollection(alice, {name: 'test', description: 'test', tokenPrefix: 'test'}); + const token = await collection.mintToken(alice, 100n); - expect(await getBalance(api, collectionId, alice, tokenId)).to.be.equal(100n); + expect(await collection.doesTokenExist(token.tokenId)).to.be.true; + + expect(await token.getBalance({Substrate: alice.address})).to.be.equal(100n); + expect(await token.transfer(alice, {Substrate: bob.address}, 60n)).to.be.true; - expect(await approve(api, collectionId, tokenId, alice, bob, 60n)).to.be.true; - expect(await getAllowance(api, collectionId, alice, bob, tokenId)).to.be.equal(60n); + expect(await token.getBalance({Substrate: alice.address})).to.be.equal(40n); + expect(await token.getBalance({Substrate: bob.address})).to.be.equal(60n); - expect(await transferFrom(api, collectionId, tokenId, bob, alice, bob, 20n)).to.be.true; - expect(await getBalance(api, collectionId, alice, tokenId)).to.be.equal(80n); - expect(await getBalance(api, collectionId, bob, tokenId)).to.be.equal(20n); - expect(await getAllowance(api, collectionId, alice, bob, tokenId)).to.be.equal(40n); - }); + expect(await token.burn(alice, 40n)).to.be.true; + + expect(await collection.doesTokenExist(token.tokenId)).to.be.true; + expect(await token.getBalance({Substrate: alice.address})).to.be.equal(0n); + + expect(await token.burn(bob, 59n)).to.be.true; + + expect(await token.getBalance({Substrate: bob.address})).to.be.equal(1n); + expect(await collection.doesTokenExist(token.tokenId)).to.be.true; + + expect(await token.burn(bob, 1n)).to.be.true; + + expect(await collection.doesTokenExist(token.tokenId)).to.be.false; }); - it('Repartition', async () => { - await usingApi(async api => { - const collectionId = (await createCollection(api, alice, {mode: {type: 'ReFungible'}})).collectionId; - const tokenId = (await createRefungibleToken(api, alice, collectionId, 100n)).itemId; + itSub('Set allowance for token', async ({helper}) => { + const collection = await helper.rft.mintCollection(alice, {name: 'test', description: 'test', tokenPrefix: 'test'}); + const token = await collection.mintToken(alice, 100n); + + expect(await token.getBalance({Substrate: 
alice.address})).to.be.equal(100n); - expect(await repartitionRFT(api, collectionId, alice, tokenId, 200n)).to.be.true; - expect(await getBalance(api, collectionId, alice, tokenId)).to.be.equal(200n); + expect(await token.approve(alice, {Substrate: bob.address}, 60n)).to.be.true; + expect(await token.getApprovedPieces({Substrate: alice.address}, {Substrate: bob.address})).to.be.equal(60n); - expect(await transfer(api, collectionId, tokenId, alice, bob, 110n)).to.be.true; - expect(await getBalance(api, collectionId, alice, tokenId)).to.be.equal(90n); - expect(await getBalance(api, collectionId, bob, tokenId)).to.be.equal(110n); + expect(await token.transferFrom(bob, {Substrate: alice.address}, {Substrate: bob.address}, 20n)).to.be.true; + expect(await token.getBalance({Substrate: alice.address})).to.be.equal(80n); + expect(await token.getBalance({Substrate: bob.address})).to.be.equal(20n); + expect(await token.getApprovedPieces({Substrate: alice.address}, {Substrate: bob.address})).to.be.equal(40n); + }); - await expect(repartitionRFT(api, collectionId, alice, tokenId, 80n)).to.eventually.be.rejected; + itSub('Repartition', async ({helper}) => { + const collection = await helper.rft.mintCollection(alice, {name: 'test', description: 'test', tokenPrefix: 'test'}); + const token = await collection.mintToken(alice, 100n); + + expect(await token.repartition(alice, 200n)).to.be.true; + expect(await token.getBalance({Substrate: alice.address})).to.be.equal(200n); + expect(await token.getTotalPieces()).to.be.equal(200n); + + expect(await token.transfer(alice, {Substrate: bob.address}, 110n)).to.be.true; + expect(await token.getBalance({Substrate: alice.address})).to.be.equal(90n); + expect(await token.getBalance({Substrate: bob.address})).to.be.equal(110n); + + await expect(token.repartition(alice, 80n)) + .to.eventually.be.rejectedWith(/refungible\.RepartitionWhileNotOwningAllPieces/); + + expect(await token.transfer(alice, {Substrate: bob.address}, 90n)).to.be.true; + expect(await token.getBalance({Substrate: alice.address})).to.be.equal(0n); + expect(await token.getBalance({Substrate: bob.address})).to.be.equal(200n); + + expect(await token.repartition(bob, 150n)).to.be.true; + await expect(token.transfer(bob, {Substrate: alice.address}, 160n)) + .to.eventually.be.rejectedWith(/common\.TokenValueTooLow/); + }); - expect(await transfer(api, collectionId, tokenId, alice, bob, 90n)).to.be.true; - expect(await getBalance(api, collectionId, alice, tokenId)).to.be.equal(0n); - expect(await getBalance(api, collectionId, bob, tokenId)).to.be.equal(200n); + itSub('Repartition with increased amount', async ({helper}) => { + const collection = await helper.rft.mintCollection(alice, {name: 'test', description: 'test', tokenPrefix: 'test'}); + const token = await collection.mintToken(alice, 100n); + await token.repartition(alice, 200n); + const chainEvents = helper.chainLog.slice(-1)[0].events; + const event = chainEvents.find((event: any) => event.section === 'common' && event.method === 'ItemCreated'); + expect(event).to.deep.include({ + section: 'common', + method: 'ItemCreated', + index: [66, 2], + data: [ + collection.collectionId, + token.tokenId, + {substrate: alice.address}, + 100n, + ], + }); + }); - expect(await repartitionRFT(api, collectionId, bob, tokenId, 150n)).to.be.true; - await expect(transfer(api, collectionId, tokenId, bob, alice, 160n)).to.eventually.be.rejected; + itSub('Repartition with decreased amount', async ({helper}) => { + const collection = await helper.rft.mintCollection(alice, 
{name: 'test', description: 'test', tokenPrefix: 'test'}); + const token = await collection.mintToken(alice, 100n); + await token.repartition(alice, 50n); + const chainEvents = helper.chainLog.slice(-1)[0].events; + const event = chainEvents.find((event: any) => event.section === 'common' && event.method === 'ItemDestroyed'); + expect(event).to.deep.include({ + section: 'common', + method: 'ItemDestroyed', + index: [66, 3], + data: [ + collection.collectionId, + token.tokenId, + {substrate: alice.address}, + 50n, + ], }); }); + + itSub('Create new collection with properties', async ({helper}) => { + const properties = [{key: 'key1', value: 'val1'}]; + const tokenPropertyPermissions = [{key: 'key1', permission: {tokenOwner: true, mutable: false, collectionAdmin: true}}]; + const collection = await helper.rft.mintCollection(alice, {name: 'test', description: 'test', tokenPrefix: 'test', properties, tokenPropertyPermissions}); + const info = await collection.getData(); + expect(info?.raw.properties).to.be.deep.equal(properties); + expect(info?.raw.tokenPropertyPermissions).to.be.deep.equal(tokenPropertyPermissions); + }); }); + diff --git a/tests/src/removeCollectionAdmin.test.ts b/tests/src/removeCollectionAdmin.test.ts index 03bf7ed04f..8c3e62d285 100644 --- a/tests/src/removeCollectionAdmin.test.ts +++ b/tests/src/removeCollectionAdmin.test.ts @@ -14,125 +14,99 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . -import chai from 'chai'; -import chaiAsPromised from 'chai-as-promised'; -import {default as usingApi, submitTransactionAsync, submitTransactionExpectFailAsync} from './substrate/substrate-api'; -import {createCollectionExpectSuccess, destroyCollectionExpectSuccess, getAdminList, normalizeAccountId, queryCollectionExpectSuccess} from './util/helpers'; - -chai.use(chaiAsPromised); -const expect = chai.expect; +import {IKeyringPair} from '@polkadot/types/types'; +import {itSub, usingPlaygrounds, expect} from './util'; describe('Integration Test removeCollectionAdmin(collection_id, account_id):', () => { - it('Remove collection admin.', async () => { - await usingApi(async (api, privateKeyWrapper) => { - const collectionId = await createCollectionExpectSuccess(); - const alice = privateKeyWrapper('//Alice'); - const bob = privateKeyWrapper('//Bob'); - const collection = await queryCollectionExpectSuccess(api, collectionId); - expect(collection.owner.toString()).to.be.deep.eq(alice.address); - // first - add collection admin Bob - const addAdminTx = api.tx.unique.addCollectionAdmin(collectionId, normalizeAccountId(bob.address)); - await submitTransactionAsync(alice, addAdminTx); - - const adminListAfterAddAdmin = await getAdminList(api, collectionId); - expect(adminListAfterAddAdmin).to.be.deep.contains(normalizeAccountId(bob.address)); - - // then remove bob from admins of collection - const removeAdminTx = api.tx.unique.removeCollectionAdmin(collectionId, normalizeAccountId(bob.address)); - await submitTransactionAsync(alice, removeAdminTx); - - const adminListAfterRemoveAdmin = await getAdminList(api, collectionId); - expect(adminListAfterRemoveAdmin).not.to.be.deep.contains(normalizeAccountId(bob.address)); + let alice: IKeyringPair; + let bob: IKeyringPair; + + before(async () => { + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob] = await helper.arrange.createAccounts([20n, 10n], donor); }); }); - it('Remove admin from collection that has 
no admins', async () => { - await usingApi(async (api, privateKeyWrapper) => { - const alice = privateKeyWrapper('//Alice'); - const collectionId = await createCollectionExpectSuccess(); + itSub('Remove collection admin', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'RemoveCollectionAdmin-1', tokenPrefix: 'RCA'}); + const collectionInfo = await collection.getData(); + expect(collectionInfo?.raw.owner.toString()).to.be.deep.eq(alice.address); + // first - add collection admin Bob + await collection.addAdmin(alice, {Substrate: bob.address}); - const adminListBeforeAddAdmin = await getAdminList(api, collectionId); - expect(adminListBeforeAddAdmin).to.have.lengthOf(0); + const adminListAfterAddAdmin = await collection.getAdmins(); + expect(adminListAfterAddAdmin).to.be.deep.contains({Substrate: bob.address}); - const tx = api.tx.unique.removeCollectionAdmin(collectionId, normalizeAccountId(alice.address)); - await submitTransactionAsync(alice, tx); - }); + // then remove bob from admins of collection + await collection.removeAdmin(alice, {Substrate: bob.address}); + + const adminListAfterRemoveAdmin = await collection.getAdmins(); + expect(adminListAfterRemoveAdmin).not.to.be.deep.contains({Substrate: bob.address}); + }); + + itSub('Remove admin from collection that has no admins', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'RemoveCollectionAdmin-2', tokenPrefix: 'RCA'}); + + const adminListBeforeAddAdmin = await collection.getAdmins(); + expect(adminListBeforeAddAdmin).to.have.lengthOf(0); + + await collection.removeAdmin(alice, {Substrate: alice.address}); }); }); describe('Negative Integration Test removeCollectionAdmin(collection_id, account_id):', () => { - it('Can\'t remove collection admin from not existing collection', async () => { - await usingApi(async (api, privateKeyWrapper) => { - // tslint:disable-next-line: no-bitwise - const collectionId = (1 << 32) - 1; - const alice = privateKeyWrapper('//Alice'); - const bob = privateKeyWrapper('//Bob'); - - const changeOwnerTx = api.tx.unique.removeCollectionAdmin(collectionId, normalizeAccountId(bob.address)); - await expect(submitTransactionExpectFailAsync(alice, changeOwnerTx)).to.be.rejected; - - // Verifying that nothing bad happened (network is live, new collections can be created, etc.) 
- await createCollectionExpectSuccess(); + let alice: IKeyringPair; + let bob: IKeyringPair; + let charlie: IKeyringPair; + + before(async () => { + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob, charlie] = await helper.arrange.createAccounts([20n, 10n, 10n], donor); }); }); - it('Can\'t remove collection admin from deleted collection', async () => { - await usingApi(async (api, privateKeyWrapper) => { - // tslint:disable-next-line: no-bitwise - const collectionId = await createCollectionExpectSuccess(); - const alice = privateKeyWrapper('//Alice'); - const bob = privateKeyWrapper('//Bob'); + itSub('Can\'t remove collection admin from not existing collection', async ({helper}) => { + const collectionId = (1 << 32) - 1; - await destroyCollectionExpectSuccess(collectionId); + await expect(helper.collection.removeAdmin(alice, collectionId, {Substrate: bob.address})) + .to.be.rejectedWith(/common\.CollectionNotFound/); + }); - const changeOwnerTx = api.tx.unique.removeCollectionAdmin(collectionId, normalizeAccountId(bob.address)); - await expect(submitTransactionExpectFailAsync(alice, changeOwnerTx)).to.be.rejected; + itSub('Can\'t remove collection admin from deleted collection', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'RemoveCollectionAdmin-Neg-2', tokenPrefix: 'RCA'}); - // Verifying that nothing bad happened (network is live, new collections can be created, etc.) - await createCollectionExpectSuccess(); - }); - }); + expect(await collection.burn(alice)).to.be.true; - it('Regular user can\'t remove collection admin', async () => { - await usingApi(async (api, privateKeyWrapper) => { - const collectionId = await createCollectionExpectSuccess(); - const alice = privateKeyWrapper('//Alice'); - const bob = privateKeyWrapper('//Bob'); - const charlie = privateKeyWrapper('//Charlie'); + await expect(helper.collection.removeAdmin(alice, collection.collectionId, {Substrate: bob.address})) + .to.be.rejectedWith(/common\.CollectionNotFound/); + }); - const addAdminTx = api.tx.unique.addCollectionAdmin(collectionId, normalizeAccountId(bob.address)); - await submitTransactionAsync(alice, addAdminTx); + itSub('Regular user can\'t remove collection admin', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'RemoveCollectionAdmin-Neg-3', tokenPrefix: 'RCA'}); - const changeOwnerTx = api.tx.unique.removeCollectionAdmin(collectionId, normalizeAccountId(bob.address)); - await expect(submitTransactionExpectFailAsync(charlie, changeOwnerTx)).to.be.rejected; + await collection.addAdmin(alice, {Substrate: bob.address}); - // Verifying that nothing bad happened (network is live, new collections can be created, etc.) 
- await createCollectionExpectSuccess(); - }); + await expect(collection.removeAdmin(charlie, {Substrate: bob.address})) + .to.be.rejectedWith(/common\.NoPermission/); }); - it('Admin can\'t remove collection admin.', async () => { - await usingApi(async (api, privateKeyWrapper) => { - const collectionId = await createCollectionExpectSuccess(); - const alice = privateKeyWrapper('//Alice'); - const bob = privateKeyWrapper('//Bob'); - const charlie = privateKeyWrapper('//Charlie'); - - const addBobAdminTx = api.tx.unique.addCollectionAdmin(collectionId, normalizeAccountId(bob.address)); - await submitTransactionAsync(alice, addBobAdminTx); - const addCharlieAdminTx = api.tx.unique.addCollectionAdmin(collectionId, normalizeAccountId(charlie.address)); - await submitTransactionAsync(alice, addCharlieAdminTx); + itSub('Admin can\'t remove collection admin.', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'RemoveCollectionAdmin-Neg-4', tokenPrefix: 'RCA'}); + + await collection.addAdmin(alice, {Substrate: bob.address}); + await collection.addAdmin(alice, {Substrate: charlie.address}); - const adminListAfterAddAdmin = await getAdminList(api, collectionId); - expect(adminListAfterAddAdmin).to.be.deep.contains(normalizeAccountId(bob.address)); - expect(adminListAfterAddAdmin).to.be.deep.contains(normalizeAccountId(charlie.address)); + const adminListAfterAddAdmin = await collection.getAdmins(); + expect(adminListAfterAddAdmin).to.be.deep.contains({Substrate: bob.address}); + expect(adminListAfterAddAdmin).to.be.deep.contains({Substrate: charlie.address}); - const removeAdminTx = api.tx.unique.removeCollectionAdmin(collectionId, normalizeAccountId(bob.address)); - await expect(submitTransactionExpectFailAsync(charlie, removeAdminTx)).to.be.rejected; + await expect(collection.removeAdmin(charlie, {Substrate: bob.address})) + .to.be.rejectedWith(/common\.NoPermission/); - const adminListAfterRemoveAdmin = await getAdminList(api, collectionId); - expect(adminListAfterRemoveAdmin).to.be.deep.contains(normalizeAccountId(bob.address)); - expect(adminListAfterRemoveAdmin).to.be.deep.contains(normalizeAccountId(charlie.address)); - }); + const adminListAfterRemoveAdmin = await collection.getAdmins(); + expect(adminListAfterRemoveAdmin).to.be.deep.contains({Substrate: bob.address}); + expect(adminListAfterRemoveAdmin).to.be.deep.contains({Substrate: charlie.address}); }); }); diff --git a/tests/src/removeCollectionSponsor.test.ts b/tests/src/removeCollectionSponsor.test.ts index c0e0788e79..467d69b110 100644 --- a/tests/src/removeCollectionSponsor.test.ts +++ b/tests/src/removeCollectionSponsor.test.ts @@ -14,138 +14,112 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . 
-import chai from 'chai'; -import chaiAsPromised from 'chai-as-promised'; -import {default as usingApi, submitTransactionExpectFailAsync} from './substrate/substrate-api'; -import { - createCollectionExpectSuccess, - setCollectionSponsorExpectSuccess, - destroyCollectionExpectSuccess, - confirmSponsorshipExpectSuccess, - confirmSponsorshipExpectFailure, - createItemExpectSuccess, - findUnusedAddress, - removeCollectionSponsorExpectSuccess, - removeCollectionSponsorExpectFailure, - normalizeAccountId, - addCollectionAdminExpectSuccess, - getCreatedCollectionCount, -} from './util/helpers'; import {IKeyringPair} from '@polkadot/types/types'; - -chai.use(chaiAsPromised); -const expect = chai.expect; - -let alice: IKeyringPair; -let bob: IKeyringPair; +import {itSub, usingPlaygrounds, expect} from './util'; describe('integration test: ext. removeCollectionSponsor():', () => { + let donor: IKeyringPair; + let alice: IKeyringPair; + let bob: IKeyringPair; before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); + await usingPlaygrounds(async (helper, privateKey) => { + donor = await privateKey({filename: __filename}); + [alice, bob] = await helper.arrange.createAccounts([10n, 10n], donor); }); }); - it('Removing NFT collection sponsor stops sponsorship', async () => { - const collectionId = await createCollectionExpectSuccess(); - await setCollectionSponsorExpectSuccess(collectionId, bob.address); - await confirmSponsorshipExpectSuccess(collectionId, '//Bob'); - await removeCollectionSponsorExpectSuccess(collectionId); - - await usingApi(async (api, privateKeyWrapper) => { - // Find unused address - const zeroBalance = await findUnusedAddress(api, privateKeyWrapper); - - // Mint token for unused address - const itemId = await createItemExpectSuccess(alice, collectionId, 'NFT', zeroBalance.address); - - // Transfer this tokens from unused address to Alice - should fail - const sponsorBalanceBefore = (await api.query.system.account(bob.address)).data.free.toBigInt(); - const zeroToAlice = api.tx.unique.transfer(normalizeAccountId(alice.address), collectionId, itemId, 0); - const badTransaction = async function () { - await submitTransactionExpectFailAsync(zeroBalance, zeroToAlice); - }; - await expect(badTransaction()).to.be.rejectedWith('Inability to pay some fees'); - const sponsorBalanceAfter = (await api.query.system.account(bob.address)).data.free.toBigInt(); - - expect(sponsorBalanceAfter).to.be.equal(sponsorBalanceBefore); - }); + itSub('Removing NFT collection sponsor stops sponsorship', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'RemoveCollectionSponsor-1', tokenPrefix: 'RCS'}); + await collection.setSponsor(alice, bob.address); + await collection.confirmSponsorship(bob); + await collection.removeSponsor(alice); + + // Find unused address + const [zeroBalance] = await helper.arrange.createAccounts([0n], donor); + + // Mint token for unused address + const token = await collection.mintToken(alice, {Substrate: zeroBalance.address}); + + // Transfer this tokens from unused address to Alice - should fail + const sponsorBalanceBefore = await helper.balance.getSubstrate(bob.address); + await expect(token.transfer(zeroBalance, {Substrate: alice.address})) + .to.be.rejectedWith('Inability to pay some fees'); + const sponsorBalanceAfter = await helper.balance.getSubstrate(bob.address); + + expect(sponsorBalanceAfter).to.be.equal(sponsorBalanceBefore); }); - 
it('Remove a sponsor after it was already removed', async () => { - const collectionId = await createCollectionExpectSuccess(); - await setCollectionSponsorExpectSuccess(collectionId, bob.address); - await confirmSponsorshipExpectSuccess(collectionId, '//Bob'); - await removeCollectionSponsorExpectSuccess(collectionId); - await removeCollectionSponsorExpectSuccess(collectionId); + itSub('Remove a sponsor after it was already removed', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'RemoveCollectionSponsor-2', tokenPrefix: 'RCS'}); + await collection.setSponsor(alice, bob.address); + await collection.confirmSponsorship(bob); + await expect(collection.removeSponsor(alice)).to.not.be.rejected; + await expect(collection.removeSponsor(alice)).to.not.be.rejected; }); - it('Remove sponsor in a collection that never had the sponsor set', async () => { - const collectionId = await createCollectionExpectSuccess(); - await removeCollectionSponsorExpectSuccess(collectionId); + itSub('Remove sponsor in a collection that never had the sponsor set', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'RemoveCollectionSponsor-3', tokenPrefix: 'RCS'}); + await expect(collection.removeSponsor(alice)).to.not.be.rejected; }); - it('Remove sponsor for a collection that had the sponsor set, but not confirmed', async () => { - const collectionId = await createCollectionExpectSuccess(); - await setCollectionSponsorExpectSuccess(collectionId, bob.address); - await removeCollectionSponsorExpectSuccess(collectionId); + itSub('Remove sponsor for a collection that had the sponsor set, but not confirmed', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'RemoveCollectionSponsor-4', tokenPrefix: 'RCS'}); + await collection.setSponsor(alice, bob.address); + await expect(collection.removeSponsor(alice)).to.not.be.rejected; }); }); describe('(!negative test!) integration test: ext. removeCollectionSponsor():', () => { + let alice: IKeyringPair; + let bob: IKeyringPair; + let charlie: IKeyringPair; + before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob, charlie] = await helper.arrange.createAccounts([20n, 10n, 10n], donor); }); }); - it('(!negative test!) Remove sponsor for a collection that never existed', async () => { - // Find the collection that never existed - let collectionId = 0; - await usingApi(async (api) => { - collectionId = await getCreatedCollectionCount(api) + 1; - }); - - await removeCollectionSponsorExpectFailure(collectionId); + itSub('(!negative test!) Remove sponsor for a collection that never existed', async ({helper}) => { + const collectionId = (1 << 32) - 1; + await expect(helper.collection.removeSponsor(alice, collectionId)).to.be.rejectedWith(/common\.CollectionNotFound/); }); - it('(!negative test!) Remove sponsor for a collection with collection admin permissions', async () => { - const collectionId = await createCollectionExpectSuccess(); - await setCollectionSponsorExpectSuccess(collectionId, bob.address); - await addCollectionAdminExpectSuccess(alice, collectionId, bob.address); - await removeCollectionSponsorExpectFailure(collectionId, '//Bob'); + itSub('(!negative test!) 
Remove sponsor for a collection with collection admin permissions', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'RemoveCollectionSponsor-Neg-1', tokenPrefix: 'RCS'}); + await collection.setSponsor(alice, bob.address); + await collection.addAdmin(alice, {Substrate: charlie.address}); + await expect(collection.removeSponsor(charlie)).to.be.rejectedWith(/common\.NoPermission/); }); - it('(!negative test!) Remove sponsor for a collection by regular user', async () => { - const collectionId = await createCollectionExpectSuccess(); - await setCollectionSponsorExpectSuccess(collectionId, bob.address); - await removeCollectionSponsorExpectFailure(collectionId, '//Bob'); + itSub('(!negative test!) Remove sponsor for a collection by regular user', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'RemoveCollectionSponsor-Neg-2', tokenPrefix: 'RCS'}); + await collection.setSponsor(alice, bob.address); + await expect(collection.removeSponsor(charlie)).to.be.rejectedWith(/common\.NoPermission/); }); - it('(!negative test!) Remove sponsor in a destroyed collection', async () => { - const collectionId = await createCollectionExpectSuccess(); - await setCollectionSponsorExpectSuccess(collectionId, bob.address); - await destroyCollectionExpectSuccess(collectionId); - await removeCollectionSponsorExpectFailure(collectionId); + itSub('(!negative test!) Remove sponsor in a destroyed collection', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'RemoveCollectionSponsor-Neg-3', tokenPrefix: 'RCS'}); + await collection.setSponsor(alice, bob.address); + await collection.burn(alice); + await expect(collection.removeSponsor(alice)).to.be.rejectedWith(/common\.CollectionNotFound/); }); - it('Set - remove - confirm: fails', async () => { - const collectionId = await createCollectionExpectSuccess(); - await setCollectionSponsorExpectSuccess(collectionId, bob.address); - await removeCollectionSponsorExpectSuccess(collectionId); - await confirmSponsorshipExpectFailure(collectionId, '//Bob'); + itSub('Set - remove - confirm: fails', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'RemoveCollectionSponsor-Neg-4', tokenPrefix: 'RCS'}); + await collection.setSponsor(alice, bob.address); + await collection.removeSponsor(alice); + await expect(collection.confirmSponsorship(bob)).to.be.rejectedWith(/unique\.ConfirmUnsetSponsorFail/); }); - it('Set - confirm - remove - confirm: Sponsor cannot come back', async () => { - const collectionId = await createCollectionExpectSuccess(); - await setCollectionSponsorExpectSuccess(collectionId, bob.address); - await confirmSponsorshipExpectSuccess(collectionId, '//Bob'); - await removeCollectionSponsorExpectSuccess(collectionId); - await confirmSponsorshipExpectFailure(collectionId, '//Bob'); + itSub('Set - confirm - remove - confirm: Sponsor cannot come back', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'RemoveCollectionSponsor-Neg-5', tokenPrefix: 'RCS'}); + await collection.setSponsor(alice, bob.address); + await collection.confirmSponsorship(bob); + await collection.removeSponsor(alice); + await expect(collection.confirmSponsorship(bob)).to.be.rejectedWith(/unique\.ConfirmUnsetSponsorFail/); }); - }); diff --git a/tests/src/removeFromAllowList.test.ts b/tests/src/removeFromAllowList.test.ts deleted file mode 100644 index f788309d66..0000000000 --- 
a/tests/src/removeFromAllowList.test.ts +++ /dev/null @@ -1,146 +0,0 @@ -// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. -// This file is part of Unique Network. - -// Unique Network is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Unique Network is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Unique Network. If not, see . - -import chai from 'chai'; -import chaiAsPromised from 'chai-as-promised'; -import {default as usingApi} from './substrate/substrate-api'; -import { - createCollectionExpectSuccess, - destroyCollectionExpectSuccess, - enableAllowListExpectSuccess, - addToAllowListExpectSuccess, - removeFromAllowListExpectSuccess, - isAllowlisted, - findNotExistingCollection, - removeFromAllowListExpectFailure, - disableAllowListExpectSuccess, - normalizeAccountId, - addCollectionAdminExpectSuccess, -} from './util/helpers'; -import {IKeyringPair} from '@polkadot/types/types'; - -chai.use(chaiAsPromised); -const expect = chai.expect; - -describe('Integration Test removeFromAllowList', () => { - let alice: IKeyringPair; - let bob: IKeyringPair; - - before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - }); - }); - - it('ensure bob is not in allowlist after removal', async () => { - await usingApi(async api => { - const collectionId = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await enableAllowListExpectSuccess(alice, collectionId); - await addToAllowListExpectSuccess(alice, collectionId, bob.address); - - await removeFromAllowListExpectSuccess(alice, collectionId, normalizeAccountId(bob.address)); - expect(await isAllowlisted(api, collectionId, bob.address)).to.be.false; - }); - }); - - it('allows removal from collection with unset allowlist status', async () => { - await usingApi(async () => { - const collectionWithoutAllowlistId = await createCollectionExpectSuccess(); - await enableAllowListExpectSuccess(alice, collectionWithoutAllowlistId); - await addToAllowListExpectSuccess(alice, collectionWithoutAllowlistId, bob.address); - await disableAllowListExpectSuccess(alice, collectionWithoutAllowlistId); - - await removeFromAllowListExpectSuccess(alice, collectionWithoutAllowlistId, normalizeAccountId(bob.address)); - }); - }); -}); - -describe('Negative Integration Test removeFromAllowList', () => { - let alice: IKeyringPair; - let bob: IKeyringPair; - - before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - }); - }); - - it('fails on removal from not existing collection', async () => { - await usingApi(async (api) => { - const collectionId = await findNotExistingCollection(api); - - await removeFromAllowListExpectFailure(alice, collectionId, normalizeAccountId(bob.address)); - }); - }); - - it('fails on removal from removed collection', async () => { - await usingApi(async () => { - const collectionId = await createCollectionExpectSuccess(); - await enableAllowListExpectSuccess(alice, collectionId); - await 
addToAllowListExpectSuccess(alice, collectionId, bob.address); - await destroyCollectionExpectSuccess(collectionId); - - await removeFromAllowListExpectFailure(alice, collectionId, normalizeAccountId(bob.address)); - }); - }); -}); - -describe('Integration Test removeFromAllowList with collection admin permissions', () => { - let alice: IKeyringPair; - let bob: IKeyringPair; - let charlie: IKeyringPair; - - before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - charlie = privateKeyWrapper('//Charlie'); - }); - }); - - it('ensure address is not in allowlist after removal', async () => { - await usingApi(async api => { - const collectionId = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await enableAllowListExpectSuccess(alice, collectionId); - await addCollectionAdminExpectSuccess(alice, collectionId, bob.address); - await addToAllowListExpectSuccess(alice, collectionId, charlie.address); - await removeFromAllowListExpectSuccess(bob, collectionId, normalizeAccountId(charlie.address)); - expect(await isAllowlisted(api, collectionId, charlie.address)).to.be.false; - }); - }); - - it('Collection admin allowed to remove from allowlist with unset allowlist status', async () => { - await usingApi(async () => { - const collectionWithoutAllowlistId = await createCollectionExpectSuccess(); - await enableAllowListExpectSuccess(alice, collectionWithoutAllowlistId); - await addCollectionAdminExpectSuccess(alice, collectionWithoutAllowlistId, bob.address); - await addToAllowListExpectSuccess(alice, collectionWithoutAllowlistId, charlie.address); - await disableAllowListExpectSuccess(alice, collectionWithoutAllowlistId); - await removeFromAllowListExpectSuccess(bob, collectionWithoutAllowlistId, normalizeAccountId(charlie.address)); - }); - }); - - it('Regular user can`t remove from allowlist', async () => { - await usingApi(async () => { - const collectionWithoutAllowlistId = await createCollectionExpectSuccess(); - await enableAllowListExpectSuccess(alice, collectionWithoutAllowlistId); - await addToAllowListExpectSuccess(alice, collectionWithoutAllowlistId, charlie.address); - await removeFromAllowListExpectFailure(bob, collectionWithoutAllowlistId, normalizeAccountId(charlie.address)); - }); - }); -}); diff --git a/tests/src/rmrk/acceptNft.test.ts b/tests/src/rmrk/acceptNft.seqtest.ts similarity index 94% rename from tests/src/rmrk/acceptNft.test.ts rename to tests/src/rmrk/acceptNft.seqtest.ts index 439f333cce..63d9b9c292 100644 --- a/tests/src/rmrk/acceptNft.test.ts +++ b/tests/src/rmrk/acceptNft.seqtest.ts @@ -7,19 +7,19 @@ import { acceptNft, } from './util/tx'; import {NftIdTuple} from './util/fetch'; -import {isNftChildOfAnother, expectTxFailure} from './util/helpers'; -import {Pallets, requirePallets} from '../util/helpers'; +import {isNftChildOfAnother, expectTxFailure, requirePallets, Pallets} from './util/helpers'; describe('integration test: accept NFT', () => { let api: any; - before(async function() { + before(async function() { api = await getApiConnection(); await requirePallets(this, [Pallets.RmrkCore]); }); - + + const alice = '//Alice'; const bob = '//Bob'; - + const createTestCollection = async (issuerUri: string) => { return await createCollection( api, @@ -103,5 +103,5 @@ describe('integration test: accept NFT', () => { expect(isChild).to.be.false; }); - after(() => { api.disconnect(); }); + after(async() => { await api.disconnect(); }); }); diff --git 
a/tests/src/rmrk/addResource.test.ts b/tests/src/rmrk/addResource.seqtest.ts similarity index 87% rename from tests/src/rmrk/addResource.test.ts rename to tests/src/rmrk/addResource.seqtest.ts index cddfe8edf7..eb661268a8 100644 --- a/tests/src/rmrk/addResource.test.ts +++ b/tests/src/rmrk/addResource.seqtest.ts @@ -1,7 +1,7 @@ import {expect} from 'chai'; import {getApiConnection} from '../substrate/substrate-api'; import {NftIdTuple} from './util/fetch'; -import {expectTxFailure, getResourceById} from './util/helpers'; +import {expectTxFailure, getResourceById, requirePallets, Pallets} from './util/helpers'; import { addNftBasicResource, acceptNftResource, @@ -12,11 +12,10 @@ import { addNftComposableResource, } from './util/tx'; import {RmrkTraitsResourceResourceInfo as ResourceInfo} from '@polkadot/types/lookup'; -import {Pallets, requirePallets} from '../util/helpers'; describe('integration test: add NFT resource', () => { - const Alice = '//Alice'; - const Bob = '//Bob'; + const alice = '//Alice'; + const bob = '//Bob'; const src = 'test-res-src'; const metadata = 'test-res-metadata'; const license = 'test-res-license'; @@ -33,7 +32,7 @@ describe('integration test: add NFT resource', () => { it('add resource', async () => { const collectionIdAlice = await createCollection( api, - Alice, + alice, 'test-metadata', null, 'test-symbol', @@ -41,15 +40,15 @@ describe('integration test: add NFT resource', () => { const nftAlice = await mintNft( api, - Alice, - Alice, + alice, + alice, collectionIdAlice, 'nft-metadata', ); await addNftBasicResource( api, - Alice, + alice, 'added', collectionIdAlice, nftAlice, @@ -63,22 +62,22 @@ describe('integration test: add NFT resource', () => { it('add a resource to the nested NFT', async () => { const collectionIdAlice = await createCollection( api, - Alice, + alice, 'test-metadata', null, 'test-symbol', ); - const parentNftId = await mintNft(api, Alice, Alice, collectionIdAlice, 'parent-nft-metadata'); - const childNftId = await mintNft(api, Alice, Alice, collectionIdAlice, 'child-nft-metadata'); + const parentNftId = await mintNft(api, alice, alice, collectionIdAlice, 'parent-nft-metadata'); + const childNftId = await mintNft(api, alice, alice, collectionIdAlice, 'child-nft-metadata'); const newOwnerNFT: NftIdTuple = [collectionIdAlice, parentNftId]; - await sendNft(api, 'sent', Alice, collectionIdAlice, childNftId, newOwnerNFT); + await sendNft(api, 'sent', alice, collectionIdAlice, childNftId, newOwnerNFT); await addNftBasicResource( api, - Alice, + alice, 'added', collectionIdAlice, childNftId, @@ -92,7 +91,7 @@ describe('integration test: add NFT resource', () => { it('add multiple resources', async () => { const collectionIdAlice = await createCollection( api, - Alice, + alice, 'test-metadata', null, 'test-symbol', @@ -100,8 +99,8 @@ describe('integration test: add NFT resource', () => { const nftAlice = await mintNft( api, - Alice, - Alice, + alice, + alice, collectionIdAlice, 'nft-metadata', ); @@ -152,7 +151,7 @@ describe('integration test: add NFT resource', () => { const firstBasicResourceId = await addNftBasicResource( api, - Alice, + alice, 'added', collectionIdAlice, nftAlice, @@ -164,7 +163,7 @@ describe('integration test: add NFT resource', () => { const secondBasicResourceId = await addNftBasicResource( api, - Alice, + alice, 'added', collectionIdAlice, nftAlice, @@ -176,7 +175,7 @@ describe('integration test: add NFT resource', () => { const composableResourceId = await addNftComposableResource( api, - Alice, + alice, 'added', 
collectionIdAlice, nftAlice, @@ -190,7 +189,7 @@ describe('integration test: add NFT resource', () => { const slotResourceId = await addNftSlotResource( api, - Alice, + alice, 'added', collectionIdAlice, nftAlice, @@ -218,7 +217,7 @@ describe('integration test: add NFT resource', () => { it('[negative]: unable to add a resource to the non-existing NFT', async () => { const collectionIdAlice = await createCollection( api, - Alice, + alice, 'test-metadata', null, 'test-symbol', @@ -226,7 +225,7 @@ describe('integration test: add NFT resource', () => { const tx = addNftBasicResource( api, - Alice, + alice, 'added', collectionIdAlice, nonexistentId, @@ -242,7 +241,7 @@ describe('integration test: add NFT resource', () => { it('[negative]: unable to add a resource by a not-an-owner user', async () => { const collectionIdAlice = await createCollection( api, - Alice, + alice, 'test-metadata', null, 'test-symbol', @@ -250,15 +249,15 @@ describe('integration test: add NFT resource', () => { const nftAlice = await mintNft( api, - Alice, - Alice, + alice, + alice, collectionIdAlice, 'nft-metadata', ); const tx = addNftBasicResource( api, - Bob, + bob, 'added', collectionIdAlice, nftAlice, @@ -274,22 +273,22 @@ describe('integration test: add NFT resource', () => { it('[negative]: unable to add a resource to the nested NFT if it isnt root owned by the caller', async () => { const collectionIdAlice = await createCollection( api, - Alice, + alice, 'test-metadata', null, 'test-symbol', ); - const parentNftId = await mintNft(api, Alice, Alice, collectionIdAlice, 'parent-nft-metadata'); - const childNftId = await mintNft(api, Alice, Alice, collectionIdAlice, 'child-nft-metadata'); + const parentNftId = await mintNft(api, alice, alice, collectionIdAlice, 'parent-nft-metadata'); + const childNftId = await mintNft(api, alice, alice, collectionIdAlice, 'child-nft-metadata'); const newOwnerNFT: NftIdTuple = [collectionIdAlice, parentNftId]; - await sendNft(api, 'sent', Alice, collectionIdAlice, childNftId, newOwnerNFT); + await sendNft(api, 'sent', alice, collectionIdAlice, childNftId, newOwnerNFT); const tx = addNftBasicResource( api, - Bob, + bob, 'added', collectionIdAlice, childNftId, @@ -305,7 +304,7 @@ describe('integration test: add NFT resource', () => { it('accept resource', async () => { const collectionIdBob = await createCollection( api, - Bob, + bob, 'test-metadata', null, 'test-symbol', @@ -313,15 +312,15 @@ describe('integration test: add NFT resource', () => { const nftAlice = await mintNft( api, - Bob, - Alice, + bob, + alice, collectionIdBob, 'nft-metadata', ); const resourceId = await addNftBasicResource( api, - Bob, + bob, 'pending', collectionIdBob, nftAlice, @@ -331,13 +330,13 @@ describe('integration test: add NFT resource', () => { thumb, ); - await acceptNftResource(api, Alice, collectionIdBob, nftAlice, resourceId); + await acceptNftResource(api, alice, collectionIdBob, nftAlice, resourceId); }); it('[negative]: unable to accept a non-existing resource', async () => { const collectionIdBob = await createCollection( api, - Bob, + bob, 'test-metadata', null, 'test-symbol', @@ -345,20 +344,20 @@ describe('integration test: add NFT resource', () => { const nftAlice = await mintNft( api, - Bob, - Alice, + bob, + alice, collectionIdBob, 'nft-metadata', ); - const tx = acceptNftResource(api, Alice, collectionIdBob, nftAlice, nonexistentId); + const tx = acceptNftResource(api, alice, collectionIdBob, nftAlice, nonexistentId); await expectTxFailure(/rmrkCore\.ResourceDoesntExist/, tx); }); 
it('[negative]: unable to accept a resource by a not-an-NFT-owner user', async () => { const collectionIdBob = await createCollection( api, - Bob, + bob, 'test-metadata', null, 'test-symbol', @@ -366,15 +365,15 @@ describe('integration test: add NFT resource', () => { const nftAlice = await mintNft( api, - Bob, - Alice, + bob, + alice, collectionIdBob, 'nft-metadata', ); const resourceId = await addNftBasicResource( api, - Bob, + bob, 'pending', collectionIdBob, nftAlice, @@ -384,7 +383,7 @@ describe('integration test: add NFT resource', () => { thumb, ); - const tx = acceptNftResource(api, Bob, collectionIdBob, nftAlice, resourceId); + const tx = acceptNftResource(api, bob, collectionIdBob, nftAlice, resourceId); await expectTxFailure(/rmrkCore\.NoPermission/, tx); }); @@ -392,7 +391,7 @@ describe('integration test: add NFT resource', () => { it('[negative]: unable to accept a resource to a non-target NFT', async () => { const collectionIdBob = await createCollection( api, - Bob, + bob, 'test-metadata', null, 'test-symbol', @@ -400,23 +399,23 @@ describe('integration test: add NFT resource', () => { const nftAlice = await mintNft( api, - Bob, - Alice, + bob, + alice, collectionIdBob, 'nft-metadata', ); const wrongNft = await mintNft( api, - Bob, - Alice, + bob, + alice, collectionIdBob, 'nft-metadata', ); const resourceId = await addNftBasicResource( api, - Bob, + bob, 'pending', collectionIdBob, nftAlice, @@ -426,13 +425,11 @@ describe('integration test: add NFT resource', () => { thumb, ); - const tx = acceptNftResource(api, Bob, collectionIdBob, wrongNft, resourceId); + const tx = acceptNftResource(api, bob, collectionIdBob, wrongNft, resourceId); await expectTxFailure(/rmrkCore\.ResourceDoesntExist/, tx); }); - after(() => { - api.disconnect(); - }); + after(async() => { await api.disconnect(); }); }); diff --git a/tests/src/rmrk/addTheme.test.ts b/tests/src/rmrk/addTheme.seqtest.ts similarity index 94% rename from tests/src/rmrk/addTheme.test.ts rename to tests/src/rmrk/addTheme.seqtest.ts index dad7b728d9..8f582f66e5 100644 --- a/tests/src/rmrk/addTheme.test.ts +++ b/tests/src/rmrk/addTheme.seqtest.ts @@ -1,15 +1,14 @@ import {expect} from 'chai'; import {getApiConnection} from '../substrate/substrate-api'; import {createBase, addTheme} from './util/tx'; -import {expectTxFailure} from './util/helpers'; +import {expectTxFailure, requirePallets, Pallets} from './util/helpers'; import {getThemeNames} from './util/fetch'; -import {Pallets, requirePallets} from '../util/helpers'; describe('integration test: add Theme to Base', () => { let api: any; before(async function() { api = await getApiConnection(); - await requirePallets(this, [Pallets.RmrkCore]); + await requirePallets(this, [Pallets.RmrkEquip]); }); const alice = '//Alice'; @@ -126,5 +125,5 @@ describe('integration test: add Theme to Base', () => { await expectTxFailure(/rmrkEquip\.PermissionError/, tx); }); - after(() => { api.disconnect(); }); + after(async() => { await api.disconnect(); }); }); diff --git a/tests/src/rmrk/burnNft.test.ts b/tests/src/rmrk/burnNft.seqtest.ts similarity index 80% rename from tests/src/rmrk/burnNft.test.ts rename to tests/src/rmrk/burnNft.seqtest.ts index 29864a6709..0b45a3fe43 100644 --- a/tests/src/rmrk/burnNft.test.ts +++ b/tests/src/rmrk/burnNft.seqtest.ts @@ -1,8 +1,7 @@ import {getApiConnection} from '../substrate/substrate-api'; -import {expectTxFailure} from './util/helpers'; +import {expectTxFailure, requirePallets, Pallets} from './util/helpers'; import {NftIdTuple, getChildren} from 
'./util/fetch'; import {burnNft, createCollection, sendNft, mintNft} from './util/tx'; -import {Pallets, requirePallets} from '../util/helpers'; import chai from 'chai'; import chaiAsPromised from 'chai-as-promised'; @@ -11,8 +10,8 @@ chai.use(chaiAsPromised); const expect = chai.expect; describe('integration test: burn nft', () => { - const Alice = '//Alice'; - const Bob = '//Bob'; + const alice = '//Alice'; + const bob = '//Bob'; let api: any; before(async function() { @@ -20,29 +19,30 @@ describe('integration test: burn nft', () => { await requirePallets(this, [Pallets.RmrkCore]); }); + it('burn nft', async () => { await createCollection( api, - Alice, + alice, 'test-metadata', null, 'test-symbol', ).then(async (collectionId) => { const nftId = await mintNft( api, - Alice, - Alice, + alice, + alice, collectionId, 'nft-metadata', ); - await burnNft(api, Alice, collectionId, nftId); + await burnNft(api, alice, collectionId, nftId); }); }); it('burn nft with children', async () => { const collectionId = await createCollection( api, - Alice, + alice, 'test-metadata', null, 'test-symbol', @@ -50,23 +50,23 @@ describe('integration test: burn nft', () => { const parentNftId = await mintNft( api, - Alice, - Alice, + alice, + alice, collectionId, 'nft-metadata', ); const childNftId = await mintNft( api, - Alice, - Alice, + alice, + alice, collectionId, 'nft-metadata', ); const newOwnerNFT: NftIdTuple = [collectionId, parentNftId]; - await sendNft(api, 'sent', Alice, collectionId, childNftId, newOwnerNFT); + await sendNft(api, 'sent', alice, collectionId, childNftId, newOwnerNFT); const childrenBefore = await getChildren(api, collectionId, parentNftId); expect(childrenBefore.length === 1, 'Error: parent NFT should have children') @@ -79,7 +79,7 @@ describe('integration test: burn nft', () => { expect(child.nftId.eq(childNftId), 'Error: invalid child NFT Id') .to.be.true; - await burnNft(api, Alice, collectionId, parentNftId); + await burnNft(api, alice, collectionId, parentNftId); const childrenAfter = await getChildren(api, collectionId, parentNftId); @@ -89,7 +89,7 @@ describe('integration test: burn nft', () => { it('burn child nft', async () => { const collectionId = await createCollection( api, - Alice, + alice, 'test-metadata', null, 'test-symbol', @@ -97,23 +97,23 @@ describe('integration test: burn nft', () => { const parentNftId = await mintNft( api, - Alice, - Alice, + alice, + alice, collectionId, 'nft-metadata', ); const childNftId = await mintNft( api, - Alice, - Alice, + alice, + alice, collectionId, 'nft-metadata', ); const newOwnerNFT: NftIdTuple = [collectionId, parentNftId]; - await sendNft(api, 'sent', Alice, collectionId, childNftId, newOwnerNFT); + await sendNft(api, 'sent', alice, collectionId, childNftId, newOwnerNFT); const childrenBefore = await getChildren(api, collectionId, parentNftId); expect(childrenBefore.length === 1, 'Error: parent NFT should have children') @@ -126,7 +126,7 @@ describe('integration test: burn nft', () => { expect(child.nftId.eq(childNftId), 'Error: invalid child NFT Id') .to.be.true; - await burnNft(api, Alice, collectionId, childNftId); + await burnNft(api, alice, collectionId, childNftId); const childrenAfter = await getChildren(api, collectionId, parentNftId); @@ -136,12 +136,12 @@ describe('integration test: burn nft', () => { it('[negative] burn non-existing NFT', async () => { await createCollection( api, - Alice, + alice, 'test-metadata', null, 'test-symbol', ).then(async (collectionId) => { - const tx = burnNft(api, Alice, collectionId, 
99999); + const tx = burnNft(api, alice, collectionId, 99999); await expectTxFailure(/rmrkCore\.NoAvailableNftId/, tx); }); }); @@ -149,24 +149,22 @@ describe('integration test: burn nft', () => { it('[negative] burn not an owner NFT user', async () => { await createCollection( api, - Alice, + alice, 'test-metadata', null, 'test-symbol', ).then(async (collectionId) => { const nftId = await mintNft( api, - Alice, - Alice, + alice, + alice, collectionId, 'nft-metadata', ); - const tx = burnNft(api, Bob, collectionId, nftId); + const tx = burnNft(api, bob, collectionId, nftId); await expectTxFailure(/rmrkCore\.NoPermission/, tx); }); }); - after(() => { - api.disconnect(); - }); + after(async() => { await api.disconnect(); }); }); diff --git a/tests/src/rmrk/changeCollectionIssuer.test.ts b/tests/src/rmrk/changeCollectionIssuer.seqtest.ts similarity index 68% rename from tests/src/rmrk/changeCollectionIssuer.test.ts rename to tests/src/rmrk/changeCollectionIssuer.seqtest.ts index 6d2dd3714a..aeecec03dd 100644 --- a/tests/src/rmrk/changeCollectionIssuer.test.ts +++ b/tests/src/rmrk/changeCollectionIssuer.seqtest.ts @@ -1,14 +1,13 @@ import {getApiConnection} from '../substrate/substrate-api'; -import {expectTxFailure} from './util/helpers'; +import {expectTxFailure, requirePallets, Pallets} from './util/helpers'; import { changeIssuer, createCollection, } from './util/tx'; -import {Pallets, requirePallets} from '../util/helpers'; describe('integration test: collection issuer', () => { - const Alice = '//Alice'; - const Bob = '//Bob'; + const alice = '//Alice'; + const bob = '//Bob'; let api: any; before(async function() { @@ -16,21 +15,23 @@ describe('integration test: collection issuer', () => { await requirePallets(this, [Pallets.RmrkCore]); }); + + it('change collection issuer', async () => { await createCollection( api, - Alice, + alice, 'test-metadata', null, 'test-symbol', ).then(async (collectionId) => { - await changeIssuer(api, Alice, collectionId, Bob); + await changeIssuer(api, alice, collectionId, bob); }); }); it('[negative] change not an owner NFT collection issuer', async () => { - await createCollection(api, Bob, 'test-metadata', null, 'test-symbol').then(async (collectionId) => { - const tx = changeIssuer(api, Alice, collectionId, Bob); + await createCollection(api, bob, 'test-metadata', null, 'test-symbol').then(async (collectionId) => { + const tx = changeIssuer(api, alice, collectionId, bob); await expectTxFailure(/rmrkCore\.NoPermission/, tx); }); }); @@ -38,17 +39,15 @@ describe('integration test: collection issuer', () => { it('[negative] change non-existigit NFT collection issuer', async () => { await createCollection( api, - Alice, + alice, 'test-metadata', null, 'test-symbol', ).then(async () => { - const tx = changeIssuer(api, Alice, 99999, Bob); + const tx = changeIssuer(api, alice, 99999, bob); await expectTxFailure(/rmrkCore\.CollectionUnknown/, tx); }); }); - after(() => { - api.disconnect(); - }); + after(async() => { await api.disconnect(); }); }); diff --git a/tests/src/rmrk/createBase.test.ts b/tests/src/rmrk/createBase.seqtest.ts similarity index 91% rename from tests/src/rmrk/createBase.test.ts rename to tests/src/rmrk/createBase.seqtest.ts index 3f8234b100..c57106645b 100644 --- a/tests/src/rmrk/createBase.test.ts +++ b/tests/src/rmrk/createBase.seqtest.ts @@ -1,12 +1,12 @@ import {getApiConnection} from '../substrate/substrate-api'; +import {requirePallets, Pallets} from './util/helpers'; import {createCollection, createBase} from './util/tx'; -import 
{Pallets, requirePallets} from '../util/helpers'; describe('integration test: create new Base', () => { let api: any; before(async function() { api = await getApiConnection(); - await requirePallets(this, [Pallets.RmrkCore]); + await requirePallets(this, [Pallets.RmrkCore, Pallets.RmrkEquip]); }); const alice = '//Alice'; @@ -84,5 +84,5 @@ describe('integration test: create new Base', () => { ]); }); - after(() => { api.disconnect(); }); + after(async() => { await api.disconnect(); }); }); diff --git a/tests/src/rmrk/createCollection.test.ts b/tests/src/rmrk/createCollection.seqtest.ts similarity index 81% rename from tests/src/rmrk/createCollection.test.ts rename to tests/src/rmrk/createCollection.seqtest.ts index c15e0fe65a..90e1fef04a 100644 --- a/tests/src/rmrk/createCollection.test.ts +++ b/tests/src/rmrk/createCollection.seqtest.ts @@ -1,14 +1,16 @@ import {getApiConnection} from '../substrate/substrate-api'; +import {requirePallets, Pallets} from './util/helpers'; import {createCollection} from './util/tx'; -import {Pallets, requirePallets} from '../util/helpers'; describe('Integration test: create new collection', () => { let api: any; - before(async function() { + before(async function () { api = await getApiConnection(); await requirePallets(this, [Pallets.RmrkCore]); }); + + const alice = '//Alice'; it('create NFT collection', async () => { @@ -19,5 +21,5 @@ describe('Integration test: create new collection', () => { await createCollection(api, alice, 'no-limit-metadata', null, 'no-limit-symbol'); }); - after(() => { api.disconnect(); }); + after(async() => { await api.disconnect(); }); }); diff --git a/tests/src/rmrk/deleteCollection.test.ts b/tests/src/rmrk/deleteCollection.seqtest.ts similarity index 67% rename from tests/src/rmrk/deleteCollection.test.ts rename to tests/src/rmrk/deleteCollection.seqtest.ts index 9ee5e40d91..63c97bd7a1 100644 --- a/tests/src/rmrk/deleteCollection.test.ts +++ b/tests/src/rmrk/deleteCollection.seqtest.ts @@ -1,49 +1,46 @@ import {getApiConnection} from '../substrate/substrate-api'; -import {expectTxFailure} from './util/helpers'; +import {expectTxFailure, requirePallets, Pallets} from './util/helpers'; import {createCollection, deleteCollection} from './util/tx'; -import {Pallets, requirePallets} from '../util/helpers'; describe('integration test: delete collection', () => { let api: any; - before(async function() { + before(async function () { api = await getApiConnection(); await requirePallets(this, [Pallets.RmrkCore]); }); - const Alice = '//Alice'; - const Bob = '//Bob'; + const alice = '//Alice'; + const bob = '//Bob'; it('delete NFT collection', async () => { await createCollection( api, - Alice, + alice, 'test-metadata', null, 'test-symbol', ).then(async (collectionId) => { - await deleteCollection(api, Alice, collectionId.toString()); + await deleteCollection(api, alice, collectionId.toString()); }); }); it('[negative] delete non-existing NFT collection', async () => { - const tx = deleteCollection(api, Alice, '99999'); + const tx = deleteCollection(api, alice, '99999'); await expectTxFailure(/rmrkCore\.CollectionUnknown/, tx); }); it('[negative] delete not an owner NFT collection', async () => { await createCollection( api, - Alice, + alice, 'test-metadata', null, 'test-symbol', ).then(async (collectionId) => { - const tx = deleteCollection(api, Bob, collectionId.toString()); + const tx = deleteCollection(api, bob, collectionId.toString()); await expectTxFailure(/rmrkCore.NoPermission/, tx); }); }); - after(() => { - api.disconnect(); 
- }); + after(async() => { await api.disconnect(); }); }); diff --git a/tests/src/rmrk/equipNft.test.ts b/tests/src/rmrk/equipNft.seqtest.ts similarity index 87% rename from tests/src/rmrk/equipNft.test.ts rename to tests/src/rmrk/equipNft.seqtest.ts index c9242af73a..61c3ed123b 100644 --- a/tests/src/rmrk/equipNft.test.ts +++ b/tests/src/rmrk/equipNft.seqtest.ts @@ -1,8 +1,8 @@ import {ApiPromise} from '@polkadot/api'; import {expect} from 'chai'; import {getApiConnection} from '../substrate/substrate-api'; -import {getNft, getParts, NftIdTuple} from './util/fetch'; -import {expectTxFailure} from './util/helpers'; +import {getNft, NftIdTuple} from './util/fetch'; +import {expectTxFailure, requirePallets, Pallets} from './util/helpers'; import { addNftComposableResource, addNftSlotResource, @@ -13,10 +13,9 @@ import { sendNft, unequipNft, } from './util/tx'; -import {Pallets, requirePallets} from '../util/helpers'; -const Alice = '//Alice'; -const Bob = '//Bob'; +const alice = '//Alice'; +const bob = '//Bob'; const composableParts: number[] = [5, 2, 7]; const composableSrc = 'test-cmp-src'; @@ -34,7 +33,7 @@ const slotId = 1; async function createTestCollection(api: ApiPromise): Promise<number> { return createCollection( api, - Alice, + alice, 'test-metadata', null, 'test-symbol', @@ -44,8 +43,8 @@ async function mintTestNft(api: ApiPromise, collectionId: number): Promise<number> { return await mintNft( api, - Alice, - Alice, + alice, + alice, collectionId, 'nft-metadata', ); @@ -56,13 +55,13 @@ const parentNFT: NftIdTuple = [collectionId, parentNftId]; - await sendNft(api, 'sent', Alice, collectionId, nftChildId, parentNFT); + await sendNft(api, 'sent', alice, collectionId, nftChildId, parentNFT); return nftChildId; } async function createTestBase(api: ApiPromise): Promise<number> { - return createBase(api, Alice, 'test-base', 'DTBase', [ + return createBase(api, alice, 'test-base', 'DTBase', [ { SlotPart: { id: slotId, @@ -77,7 +76,7 @@ async function addTestComposable(api: ApiPromise, collectionId: number, nftId: number, baseId: number) { await addNftComposableResource( api, - Alice, + alice, 'added', collectionId, nftId, @@ -93,7 +92,7 @@ async function addTestComposable(api: ApiPromise, collectionId: number, nftId: n async function addTestSlot(api: ApiPromise, collectionId: number, nftId: number, baseId: number, slotId: number): Promise<number> { return await addNftSlotResource( api, - Alice, + alice, 'added', collectionId, nftId, @@ -123,9 +122,10 @@ async function checkEquipStatus( describe.skip('integration test: Equip NFT', () => { let api: any; - before(async function() { + + before(async function () { api = await getApiConnection(); - await requirePallets(this, [Pallets.RmrkCore]); + await requirePallets(this, [Pallets.RmrkCore, Pallets.RmrkEquip]); }); it('equip nft', async () => { @@ -141,7 +141,7 @@ describe.skip('integration test: Equip NFT', () => { const equipperNFT: NftIdTuple = [collectionId, nftParentId]; const itemNFT: NftIdTuple = [collectionId, nftChildId]; - await equipNft(api, Alice, itemNFT, equipperNFT, resourceId, baseId, slotId); + await equipNft(api, alice, itemNFT, equipperNFT, resourceId, baseId, slotId); await checkEquipStatus(api, 'equipped', collectionId, nftChildId); }); @@ -159,11 +159,11 @@ describe.skip('integration test: Equip NFT', () => { const equipperNFT: NftIdTuple = 
[collectionId, nftParentId]; const itemNFT: NftIdTuple = [collectionId, nftChildId]; - await equipNft(api, Alice, itemNFT, equipperNFT, resourceId, baseId, slotId); + await equipNft(api, alice, itemNFT, equipperNFT, resourceId, baseId, slotId); await checkEquipStatus(api, 'equipped', collectionId, nftChildId); - await unequipNft(api, Alice, itemNFT, equipperNFT, resourceId, baseId, slotId); + await unequipNft(api, alice, itemNFT, equipperNFT, resourceId, baseId, slotId); await checkEquipStatus(api, 'unequipped', collectionId, nftChildId); }); @@ -172,8 +172,8 @@ describe.skip('integration test: Equip NFT', () => { const nftChildId = await mintNft( api, - Alice, - Alice, + alice, + alice, collectionId, 'nft-metadata', ); @@ -184,7 +184,7 @@ describe.skip('integration test: Equip NFT', () => { const baseId = 0; const resourceId = 0; - const tx = equipNft(api, Alice, itemNFT, invalidEquipperNFT, resourceId, baseId, slotId); + const tx = equipNft(api, alice, itemNFT, invalidEquipperNFT, resourceId, baseId, slotId); await expectTxFailure(/rmrkCore\.NoAvailableNftId/, tx); }); @@ -192,8 +192,8 @@ describe.skip('integration test: Equip NFT', () => { const collectionId = await createTestCollection(api); const nftParentId = await mintNft( api, - Alice, - Alice, + alice, + alice, collectionId, 'nft-metadata', ); @@ -207,7 +207,7 @@ describe.skip('integration test: Equip NFT', () => { const resourceId = 0; - const tx = equipNft(api, Alice, invalidItemNFT, equipperNFT, resourceId, baseId, slotId); + const tx = equipNft(api, alice, invalidItemNFT, equipperNFT, resourceId, baseId, slotId); await expectTxFailure(/rmrkCore\.NoAvailableNftId/, tx); }); @@ -225,7 +225,7 @@ describe.skip('integration test: Equip NFT', () => { const resourceId = await addTestSlot(api, collectionId, nftChildId, baseId, slotId); - const tx = equipNft(api, Bob, itemNFT, equipperNFT, resourceId, baseId, slotId); + const tx = equipNft(api, bob, itemNFT, equipperNFT, resourceId, baseId, slotId); await expectTxFailure(/rmrkEquip\.PermissionError/, tx); }); @@ -243,7 +243,7 @@ describe.skip('integration test: Equip NFT', () => { const equipperNFT: NftIdTuple = [collectionId, nftParentId]; const itemNFT: NftIdTuple = [collectionId, nftGrandchildId]; - const tx = equipNft(api, Alice, itemNFT, equipperNFT, resourceId, baseId, slotId); + const tx = equipNft(api, alice, itemNFT, equipperNFT, resourceId, baseId, slotId); await expectTxFailure(/rmrkEquip\.MustBeDirectParent/, tx); }); @@ -262,7 +262,7 @@ describe.skip('integration test: Equip NFT', () => { const invalidBaseId = 99999; - const tx = equipNft(api, Alice, itemNFT, equipperNFT, resourceId, invalidBaseId, slotId); + const tx = equipNft(api, alice, itemNFT, equipperNFT, resourceId, invalidBaseId, slotId); await expectTxFailure(/rmrkEquip\.NoResourceForThisBaseFoundOnNft/, tx); }); @@ -280,7 +280,7 @@ describe.skip('integration test: Equip NFT', () => { const itemNFT: NftIdTuple = [collectionId, nftChildId]; const incorrectSlotId = slotId + 1; - const tx = equipNft(api, Alice, itemNFT, equipperNFT, resourceId, baseId, incorrectSlotId); + const tx = equipNft(api, alice, itemNFT, equipperNFT, resourceId, baseId, incorrectSlotId); await expectTxFailure(/rmrkEquip\.ItemHasNoResourceToEquipThere/, tx); }); @@ -289,7 +289,7 @@ describe.skip('integration test: Equip NFT', () => { const nftParentId = await mintTestNft(api, collectionId); const nftChildId = await mintChildNft(api, collectionId, nftParentId); - const baseId = await createBase(api, Alice, 'test-base', 'DTBase', [ + const 
baseId = await createBase(api, alice, 'test-base', 'DTBase', [ { FixedPart: { id: slotId, @@ -306,7 +306,7 @@ describe.skip('integration test: Equip NFT', () => { const equipperNFT: NftIdTuple = [collectionId, nftParentId]; const itemNFT: NftIdTuple = [collectionId, nftChildId]; - const tx = equipNft(api, Alice, itemNFT, equipperNFT, resourceId, baseId, slotId); + const tx = equipNft(api, alice, itemNFT, equipperNFT, resourceId, baseId, slotId); await expectTxFailure(/rmrkEquip\.CantEquipFixedPart/, tx); }); @@ -315,7 +315,7 @@ describe.skip('integration test: Equip NFT', () => { const nftParentId = await mintTestNft(api, collectionId); const nftChildId = await mintChildNft(api, collectionId, nftParentId); - const baseId = await createBase(api, Alice, 'test-base', 'DTBase', [ + const baseId = await createBase(api, alice, 'test-base', 'DTBase', [ { SlotPart: { id: 1, @@ -332,11 +332,9 @@ describe.skip('integration test: Equip NFT', () => { const equipperNFT: NftIdTuple = [collectionId, nftParentId]; const itemNFT: NftIdTuple = [collectionId, nftChildId]; - const tx = equipNft(api, Alice, itemNFT, equipperNFT, resourceId, baseId, slotId); + const tx = equipNft(api, alice, itemNFT, equipperNFT, resourceId, baseId, slotId); await expectTxFailure(/rmrkEquip\.CollectionNotEquippable/, tx); }); - after(() => { - api.disconnect(); - }); + after(async() => { await api.disconnect(); }); }); diff --git a/tests/src/rmrk/getOwnedNfts.test.ts b/tests/src/rmrk/getOwnedNfts.seqtest.ts similarity index 92% rename from tests/src/rmrk/getOwnedNfts.test.ts rename to tests/src/rmrk/getOwnedNfts.seqtest.ts index 039148ae92..8badf9a569 100644 --- a/tests/src/rmrk/getOwnedNfts.test.ts +++ b/tests/src/rmrk/getOwnedNfts.seqtest.ts @@ -1,16 +1,18 @@ import {expect} from 'chai'; import {getApiConnection} from '../substrate/substrate-api'; +import {requirePallets, Pallets} from './util/helpers'; import {getOwnedNfts} from './util/fetch'; import {mintNft, createCollection} from './util/tx'; -import {Pallets, requirePallets} from '../util/helpers'; describe('integration test: get owned NFTs', () => { let api: any; - before(async function() { + + before(async function () { api = await getApiConnection(); await requirePallets(this, [Pallets.RmrkCore]); }); + const alice = '//Alice'; it('fetch all NFTs owned by a user', async () => { @@ -74,5 +76,5 @@ describe('integration test: get owned NFTs', () => { }); }); - after(() => { api.disconnect(); }); + after(async() => { await api.disconnect(); }); }); diff --git a/tests/src/rmrk/lockCollection.test.ts b/tests/src/rmrk/lockCollection.seqtest.ts similarity index 73% rename from tests/src/rmrk/lockCollection.test.ts rename to tests/src/rmrk/lockCollection.seqtest.ts index 361088c96d..1891985656 100644 --- a/tests/src/rmrk/lockCollection.test.ts +++ b/tests/src/rmrk/lockCollection.seqtest.ts @@ -1,15 +1,14 @@ import {getApiConnection} from '../substrate/substrate-api'; -import {expectTxFailure} from './util/helpers'; +import {expectTxFailure, requirePallets, Pallets} from './util/helpers'; import {createCollection, lockCollection, mintNft} from './util/tx'; -import {Pallets, requirePallets} from '../util/helpers'; describe('integration test: lock collection', () => { - const Alice = '//Alice'; - const Bob = '//Bob'; - const Max = 5; + const alice = '//Alice'; + const bob = '//Bob'; + const max = 5; let api: any; - before(async function() { + before(async function () { api = await getApiConnection(); await requirePallets(this, [Pallets.RmrkCore]); }); @@ -17,29 +16,29 @@ 
describe('integration test: lock collection', () => { it('lock collection', async () => { await createCollection( api, - Alice, + alice, 'test-metadata', null, 'test-symbol', ).then(async (collectionId) => { - await lockCollection(api, Alice, collectionId); + await lockCollection(api, alice, collectionId); }); }); it('[negative] lock non-existing NFT collection', async () => { - const tx = lockCollection(api, Alice, 99999); + const tx = lockCollection(api, alice, 99999); await expectTxFailure(/rmrkCore\.CollectionUnknown/, tx); }); it('[negative] lock not an owner NFT collection issuer', async () => { await createCollection( api, - Alice, + alice, 'test-metadata', null, 'test-symbol', ).then(async (collectionId) => { - const tx = lockCollection(api, Bob, collectionId); + const tx = lockCollection(api, bob, collectionId); await expectTxFailure(/rmrkCore\.NoPermission/, tx); }); }); @@ -47,39 +46,39 @@ describe('integration test: lock collection', () => { it('lock collection with minting', async () => { await createCollection( api, - Alice, + alice, 'test-metadata', - Max, + max, 'test-symbol', ).then(async (collectionId) => { for (let i = 0; i < 5; i++) { await mintNft( api, - Alice, - Alice, + alice, + alice, collectionId, 'test-metadata', null, null, ); } - await lockCollection(api, Alice, collectionId, Max); + await lockCollection(api, alice, collectionId, max); }); }); it('[negative] unable to mint NFT inside a locked collection', async () => { await createCollection( api, - Alice, + alice, 'test-metadata', - Max, + max, 'test-symbol', ).then(async (collectionId) => { - await lockCollection(api, Alice, collectionId); + await lockCollection(api, alice, collectionId); const tx = mintNft( api, - Alice, - Alice, + alice, + alice, collectionId, 'test-metadata', null, @@ -90,11 +89,11 @@ describe('integration test: lock collection', () => { }); it('[negative] unable to mint NFT inside a full collection', async () => { - await createCollection(api, Alice, 'test-metadata', 1, 'test-symbol').then(async (collectionId) => { + await createCollection(api, alice, 'test-metadata', 1, 'test-symbol').then(async (collectionId) => { await mintNft( api, - Alice, - Alice, + alice, + alice, collectionId, 'test-metadata', null, @@ -102,8 +101,8 @@ describe('integration test: lock collection', () => { ); const tx = mintNft( api, - Alice, - Alice, + alice, + alice, collectionId, 'test-metadata', null, @@ -113,7 +112,5 @@ describe('integration test: lock collection', () => { }); }); - after(() => { - api.disconnect(); - }); + after(async() => { await api.disconnect(); }); }); diff --git a/tests/src/rmrk/mintNft.test.ts b/tests/src/rmrk/mintNft.seqtest.ts similarity index 96% rename from tests/src/rmrk/mintNft.test.ts rename to tests/src/rmrk/mintNft.seqtest.ts index b0e49dae19..6b4d184050 100644 --- a/tests/src/rmrk/mintNft.test.ts +++ b/tests/src/rmrk/mintNft.seqtest.ts @@ -1,17 +1,18 @@ import {expect} from 'chai'; import {getApiConnection} from '../substrate/substrate-api'; import {getNft} from './util/fetch'; -import {expectTxFailure} from './util/helpers'; +import {expectTxFailure, requirePallets, Pallets} from './util/helpers'; import {createCollection, mintNft} from './util/tx'; -import {Pallets, requirePallets} from '../util/helpers'; describe('integration test: mint new NFT', () => { let api: any; - before(async function() { + + before(async function () { api = await getApiConnection(); await requirePallets(this, [Pallets.RmrkCore]); }); + const alice = '//Alice'; const bob = '//Bob'; const 
maxCollectionId = 0xFFFFFFFF; @@ -206,5 +207,5 @@ describe('integration test: mint new NFT', () => { expect(nft.isSome).to.be.false; }); - after(() => { api.disconnect(); }); + after(async() => { await api.disconnect(); }); }); diff --git a/tests/src/rmrk/rejectNft.test.ts b/tests/src/rmrk/rejectNft.seqtest.ts similarity index 92% rename from tests/src/rmrk/rejectNft.test.ts rename to tests/src/rmrk/rejectNft.seqtest.ts index 4721370a5c..5f04388d85 100644 --- a/tests/src/rmrk/rejectNft.test.ts +++ b/tests/src/rmrk/rejectNft.seqtest.ts @@ -6,17 +6,18 @@ import { sendNft, rejectNft, } from './util/tx'; -import {getChildren, NftIdTuple} from './util/fetch'; -import {isNftChildOfAnother, expectTxFailure} from './util/helpers'; -import {Pallets, requirePallets} from '../util/helpers'; +import {NftIdTuple} from './util/fetch'; +import {isNftChildOfAnother, expectTxFailure, requirePallets, Pallets} from './util/helpers'; describe('integration test: reject NFT', () => { let api: any; - before(async function() { + before(async function () { api = await getApiConnection(); await requirePallets(this, [Pallets.RmrkCore]); }); + + const alice = '//Alice'; const bob = '//Bob'; @@ -89,5 +90,5 @@ describe('integration test: reject NFT', () => { await expectTxFailure(/rmrkCore\.CannotRejectNonPendingNft/, tx); }); - after(() => { api.disconnect(); }); + after(async() => { await api.disconnect(); }); }); diff --git a/tests/src/rmrk/removeResource.test.ts b/tests/src/rmrk/removeResource.seqtest.ts similarity index 73% rename from tests/src/rmrk/removeResource.test.ts rename to tests/src/rmrk/removeResource.seqtest.ts index 7e78e120f0..31a66106fe 100644 --- a/tests/src/rmrk/removeResource.test.ts +++ b/tests/src/rmrk/removeResource.seqtest.ts @@ -1,29 +1,22 @@ -import {expect} from 'chai'; -import privateKey from '../substrate/privateKey'; -import {executeTransaction, getApiConnection} from '../substrate/substrate-api'; -import {getNft, NftIdTuple} from './util/fetch'; -import {expectTxFailure} from './util/helpers'; +import {getApiConnection} from '../substrate/substrate-api'; +import {NftIdTuple} from './util/fetch'; +import {expectTxFailure, requirePallets, Pallets} from './util/helpers'; import { - acceptNft, acceptResourceRemoval, addNftBasicResource, - createBase, - createCollection, - mintNft, removeNftResource, sendNft, + acceptResourceRemoval, addNftBasicResource, createCollection, mintNft, removeNftResource, sendNft, } from './util/tx'; -import {Pallets, requirePallets} from '../util/helpers'; + describe('Integration test: remove nft resource', () => { let api: any; - let ss58Format: string; before(async function() { api = await getApiConnection(); - ss58Format = api.registry.getChainProperties()!.toJSON().ss58Format; await requirePallets(this, [Pallets.RmrkCore]); }); - const Alice = '//Alice'; - const Bob = '//Bob'; + const alice = '//Alice'; + const bob = '//Bob'; const src = 'test-basic-src'; const metadata = 'test-basic-metadata'; const license = 'test-basic-license'; @@ -32,7 +25,7 @@ describe('Integration test: remove nft resource', () => { it('deleting a resource directly by the NFT owner', async () => { const collectionIdAlice = await createCollection( api, - Alice, + alice, 'test-metadata', null, 'test-symbol', @@ -40,15 +33,15 @@ describe('Integration test: remove nft resource', () => { const nftAlice = await mintNft( api, - Alice, - Alice, + alice, + alice, collectionIdAlice, 'nft-metadata', ); const resourceId = await addNftBasicResource( api, - Alice, + alice, 'added', 
collectionIdAlice, nftAlice, @@ -58,24 +51,24 @@ describe('Integration test: remove nft resource', () => { thumb, ); - await removeNftResource(api, 'removed', Alice, collectionIdAlice, nftAlice, resourceId); + await removeNftResource(api, 'removed', alice, collectionIdAlice, nftAlice, resourceId); }); it('deleting resources indirectly by the NFT owner', async () => { const collectionIdAlice = await createCollection( api, - Alice, + alice, 'test-metadata', null, 'test-symbol', ); - const parentNftId = await mintNft(api, Alice, Alice, collectionIdAlice, 'parent-nft-metadata'); - const childNftId = await mintNft(api, Alice, Alice, collectionIdAlice, 'child-nft-metadata'); + const parentNftId = await mintNft(api, alice, alice, collectionIdAlice, 'parent-nft-metadata'); + const childNftId = await mintNft(api, alice, alice, collectionIdAlice, 'child-nft-metadata'); const resourceId = await addNftBasicResource( api, - Alice, + alice, 'added', collectionIdAlice, childNftId, @@ -87,15 +80,15 @@ describe('Integration test: remove nft resource', () => { const newOwnerNFT: NftIdTuple = [collectionIdAlice, parentNftId]; - await sendNft(api, 'sent', Alice, collectionIdAlice, childNftId, newOwnerNFT); + await sendNft(api, 'sent', alice, collectionIdAlice, childNftId, newOwnerNFT); - await removeNftResource(api, 'removed', Alice, collectionIdAlice, childNftId, resourceId); + await removeNftResource(api, 'removed', alice, collectionIdAlice, childNftId, resourceId); }); it('deleting a resource by the collection owner', async () => { const collectionIdAlice = await createCollection( api, - Alice, + alice, 'test-metadata', null, 'test-symbol', @@ -103,15 +96,15 @@ describe('Integration test: remove nft resource', () => { const nftBob = await mintNft( api, - Alice, - Bob, + alice, + bob, collectionIdAlice, 'nft-metadata', ); const resourceId = await addNftBasicResource( api, - Alice, + alice, 'pending', collectionIdAlice, nftBob, @@ -121,14 +114,14 @@ describe('Integration test: remove nft resource', () => { thumb, ); - await removeNftResource(api, 'pending', Alice, collectionIdAlice, nftBob, resourceId); - await acceptResourceRemoval(api, Bob, collectionIdAlice, nftBob, resourceId); + await removeNftResource(api, 'pending', alice, collectionIdAlice, nftBob, resourceId); + await acceptResourceRemoval(api, bob, collectionIdAlice, nftBob, resourceId); }); it('deleting a resource in a nested NFT by the collection owner', async () => { const collectionIdAlice = await createCollection( api, - Alice, + alice, 'test-metadata', null, 'test-symbol', @@ -136,22 +129,22 @@ describe('Integration test: remove nft resource', () => { const parentNftId = await mintNft( api, - Alice, - Bob, + alice, + bob, collectionIdAlice, 'parent-nft-metadata', ); const childNftId = await mintNft( api, - Alice, - Bob, + alice, + bob, collectionIdAlice, 'child-nft-metadata', ); const resourceId = await addNftBasicResource( api, - Alice, + alice, 'pending', collectionIdAlice, childNftId, @@ -163,16 +156,16 @@ describe('Integration test: remove nft resource', () => { const newOwnerNFT: NftIdTuple = [collectionIdAlice, parentNftId]; - await sendNft(api, 'sent', Bob, collectionIdAlice, childNftId, newOwnerNFT); + await sendNft(api, 'sent', bob, collectionIdAlice, childNftId, newOwnerNFT); - await removeNftResource(api, 'pending', Alice, collectionIdAlice, childNftId, resourceId); - await acceptResourceRemoval(api, Bob, collectionIdAlice, childNftId, resourceId); + await removeNftResource(api, 'pending', alice, collectionIdAlice, childNftId, 
resourceId); + await acceptResourceRemoval(api, bob, collectionIdAlice, childNftId, resourceId); }); it('[negative]: can\'t delete a resource in a non-existing collection', async () => { const collectionIdAlice = await createCollection( api, - Alice, + alice, 'test-metadata', null, 'test-symbol', @@ -180,15 +173,15 @@ describe('Integration test: remove nft resource', () => { const nftAlice = await mintNft( api, - Alice, - Alice, + alice, + alice, collectionIdAlice, 'nft-metadata', ); const resourceId = await addNftBasicResource( api, - Alice, + alice, 'added', collectionIdAlice, nftAlice, @@ -198,14 +191,14 @@ describe('Integration test: remove nft resource', () => { thumb, ); - const tx = removeNftResource(api, 'removed', Alice, 0xFFFFFFFF, nftAlice, resourceId); + const tx = removeNftResource(api, 'removed', alice, 0xFFFFFFFF, nftAlice, resourceId); await expectTxFailure(/rmrkCore\.CollectionUnknown/, tx); // FIXME: inappropriate error message (NoAvailableNftId) }); it('[negative]: only collection owner can delete a resource', async () => { const collectionIdAlice = await createCollection( api, - Alice, + alice, 'test-metadata', null, 'test-symbol', @@ -213,15 +206,15 @@ describe('Integration test: remove nft resource', () => { const nftAlice = await mintNft( api, - Alice, - Alice, + alice, + alice, collectionIdAlice, 'nft-metadata', ); const resourceId = await addNftBasicResource( api, - Alice, + alice, 'added', collectionIdAlice, nftAlice, @@ -231,14 +224,14 @@ describe('Integration test: remove nft resource', () => { thumb, ); - const tx = removeNftResource(api, 'removed', Bob, collectionIdAlice, nftAlice, resourceId); + const tx = removeNftResource(api, 'removed', bob, collectionIdAlice, nftAlice, resourceId); await expectTxFailure(/rmrkCore\.NoPermission/, tx); }); it('[negative]: cannot delete a resource that does not exist', async () => { const collectionIdAlice = await createCollection( api, - Alice, + alice, 'test-metadata', null, 'test-symbol', @@ -246,20 +239,20 @@ describe('Integration test: remove nft resource', () => { const nftAlice = await mintNft( api, - Alice, - Alice, + alice, + alice, collectionIdAlice, 'nft-metadata', ); - const tx = removeNftResource(api, 'removed', Alice, collectionIdAlice, nftAlice, 127); + const tx = removeNftResource(api, 'removed', alice, collectionIdAlice, nftAlice, 127); await expectTxFailure(/rmrkCore\.ResourceDoesntExist/, tx); }); it('[negative]: Cannot accept deleting resource without owner attempt do delete it', async () => { const collectionIdAlice = await createCollection( api, - Alice, + alice, 'test-metadata', null, 'test-symbol', @@ -267,15 +260,15 @@ describe('Integration test: remove nft resource', () => { const nftBob = await mintNft( api, - Alice, - Bob, + alice, + bob, collectionIdAlice, 'nft-metadata', ); const resourceId = await addNftBasicResource( api, - Alice, + alice, 'pending', collectionIdAlice, nftBob, @@ -285,14 +278,14 @@ describe('Integration test: remove nft resource', () => { thumb, ); - const tx = acceptResourceRemoval(api, Bob, collectionIdAlice, nftBob, resourceId); + const tx = acceptResourceRemoval(api, bob, collectionIdAlice, nftBob, resourceId); await expectTxFailure(/rmrkCore\.ResourceNotPending/, tx); }); it('[negative]: cannot confirm the deletion of a non-existing resource', async () => { const collectionIdAlice = await createCollection( api, - Alice, + alice, 'test-metadata', null, 'test-symbol', @@ -300,20 +293,20 @@ describe('Integration test: remove nft resource', () => { const nftBob = await 
mintNft( api, - Alice, - Bob, + alice, + bob, collectionIdAlice, 'nft-metadata', ); - const tx = acceptResourceRemoval(api, Bob, collectionIdAlice, nftBob, 127); + const tx = acceptResourceRemoval(api, bob, collectionIdAlice, nftBob, 127); await expectTxFailure(/rmrkCore\.ResourceDoesntExist/, tx); }); it('[negative]: Non-owner user cannot confirm the deletion of resource', async () => { const collectionIdAlice = await createCollection( api, - Alice, + alice, 'test-metadata', null, 'test-symbol', @@ -321,15 +314,15 @@ const nftAlice = await mintNft( api, - Alice, - Alice, + alice, + alice, collectionIdAlice, 'nft-metadata', ); const resourceId = await addNftBasicResource( api, - Alice, + alice, 'added', collectionIdAlice, nftAlice, @@ -339,11 +332,9 @@ thumb, ); - const tx = acceptResourceRemoval(api, Bob, collectionIdAlice, nftAlice, resourceId); + const tx = acceptResourceRemoval(api, bob, collectionIdAlice, nftAlice, resourceId); await expectTxFailure(/rmrkCore\.NoPermission/, tx); }); - after(() => { - api.disconnect(); - }); + after(async() => { await api.disconnect(); }); }); diff --git a/tests/src/rmrk/rmrkIsolation.seqtest.ts b/tests/src/rmrk/rmrkIsolation.seqtest.ts new file mode 100644 index 0000000000..aa32205f46 --- /dev/null +++ b/tests/src/rmrk/rmrkIsolation.seqtest.ts @@ -0,0 +1,225 @@ +import {executeTransaction} from '../substrate/substrate-api'; +import {IKeyringPair} from '@polkadot/types/types'; +import {itSub, expect, usingPlaygrounds, Pallets, requirePalletsOrSkip} from '../util'; +import {UniqueHelper} from '../util/playgrounds/unique'; + +let alice: IKeyringPair; +let bob: IKeyringPair; + +async function createRmrkCollection(helper: UniqueHelper, sender: IKeyringPair): Promise<{uniqueId: number, rmrkId: number}> { + const result = await helper.executeExtrinsic(sender, 'api.tx.rmrkCore.createCollection', ['metadata', null, 'symbol'], true); + + const uniqueId = helper.util.extractCollectionIdFromCreationResult(result); + + let rmrkId = null; + result.result.events.forEach(({event: {data, method, section}}) => { + if ((section === 'rmrkCore') && (method === 'CollectionCreated')) { + rmrkId = parseInt(data[1].toString(), 10); + } + }); + + if (rmrkId === null) { + throw Error('No rmrkCore.CollectionCreated event was found!'); + } + + return { + uniqueId, + rmrkId, + }; + } + +async function createRmrkNft(helper: UniqueHelper, sender: IKeyringPair, collectionId: number): Promise<number> { + const result = await helper.executeExtrinsic( + sender, + 'api.tx.rmrkCore.mintNft', + [ + sender.address, + collectionId, + sender.address, + null, + 'nft-metadata', + true, + null, + ], + true, + ); + + let rmrkNftId = null; + result.result.events.forEach(({event: {data, method, section}}) => { + if ((section === 'rmrkCore') && (method === 'NftMinted')) { + rmrkNftId = parseInt(data[2].toString(), 10); + } + }); + + if (rmrkNftId === null) { + throw Error('No rmrkCore.NftMinted event was found!'); + } + + return rmrkNftId; +} + +describe('RMRK External Integration Test', async () => { + before(async function() { + await usingPlaygrounds(async (_helper, privateKey) => { + alice = await privateKey('//Alice'); + }); + }); + + itSub.ifWithPallets('Creates a new RMRK collection that is mapped to a different ID and is tagged as external', [Pallets.RmrkCore], async ({helper}) => { + // throw away collection to bump last Unique collection ID to test ID mapping + await 
helper.nft.mintCollection(alice, {tokenPrefix: 'unqt'}); + + const collectionIds = await createRmrkCollection(helper, alice); + + expect(collectionIds.rmrkId).to.be.lessThan(collectionIds.uniqueId, 'collection ID mapping'); + + const collection = (await helper.nft.getCollectionObject(collectionIds.uniqueId).getData())!; // (await getDetailedCollectionInfo(api, collectionIds.uniqueId))!; + expect(collection.raw.readOnly, 'tagged external').to.be.true; + }); +}); + +describe('Negative Integration Test: External Collections, Internal Ops', async () => { + let uniqueCollectionId: number; + let rmrkCollectionId: number; + let rmrkNftId: number; + let normalizedAlice: {Substrate: string}; + + before(async function() { + await usingPlaygrounds(async (helper, privateKey) => { + alice = await privateKey('//Alice'); + bob = await privateKey('//Bob'); + normalizedAlice = {Substrate: helper.address.normalizeSubstrateToChainFormat(alice.address)}; + + requirePalletsOrSkip(this, helper, [Pallets.RmrkCore]); + + const collectionIds = await createRmrkCollection(helper, alice); + uniqueCollectionId = collectionIds.uniqueId; + rmrkCollectionId = collectionIds.rmrkId; + + rmrkNftId = await createRmrkNft(helper, alice, rmrkCollectionId); + }); + }); + + itSub.ifWithPallets('[Negative] Forbids Unique operations with an external collection, handled by dispatch_call', [Pallets.RmrkCore], async ({helper}) => { + // Collection item creation + + await expect(helper.nft.mintToken(alice, {collectionId: uniqueCollectionId, owner: {Substrate: alice.address}}), 'creating item') + .to.be.rejectedWith(/common\.CollectionIsExternal/); + + const txCreateMultipleItems = helper.getApi().tx.unique.createMultipleItems(uniqueCollectionId, normalizedAlice, [{NFT: {}}, {NFT: {}}]); + await expect(executeTransaction(helper.getApi(), alice, txCreateMultipleItems), 'creating multiple') + .to.be.rejectedWith(/common\.CollectionIsExternal/); + + await expect(helper.nft.mintMultipleTokens(alice, uniqueCollectionId, [{owner: {Substrate: alice.address}}]), 'creating multiple ex') + .to.be.rejectedWith(/common\.CollectionIsExternal/); + + // Collection properties + + await expect(helper.nft.setProperties(alice, uniqueCollectionId, [{key: 'a', value: '1'}, {key: 'b'}]), 'setting collection properties') + .to.be.rejectedWith(/common\.CollectionIsExternal/); + + await expect(helper.nft.deleteProperties(alice, uniqueCollectionId, ['a']), 'deleting collection properties') + .to.be.rejectedWith(/common\.CollectionIsExternal/); + + await expect(helper.nft.setTokenPropertyPermissions(alice, uniqueCollectionId, [{key: 'a', permission: {mutable: true}}]), 'setting property permissions') + .to.be.rejectedWith(/common\.CollectionIsExternal/); + + // NFT + + await expect(helper.nft.burnToken(alice, uniqueCollectionId, rmrkNftId, 1n), 'burning') + .to.be.rejectedWith(/common\.CollectionIsExternal/); + + await expect(helper.nft.burnTokenFrom(alice, uniqueCollectionId, rmrkNftId, {Substrate: alice.address}, 1n), 'burning-from') + .to.be.rejectedWith(/common\.CollectionIsExternal/); + + await expect(helper.nft.transferToken(alice, uniqueCollectionId, rmrkNftId, {Substrate: bob.address}), 'transferring') + .to.be.rejectedWith(/common\.CollectionIsExternal/); + + await expect(helper.nft.approveToken(alice, uniqueCollectionId, rmrkNftId, {Substrate: bob.address}), 'approving') + .to.be.rejectedWith(/common\.CollectionIsExternal/); + + await expect(helper.nft.transferTokenFrom(alice, uniqueCollectionId, rmrkNftId, {Substrate: alice.address}, {Substrate: 
bob.address}), 'transferring-from') + .to.be.rejectedWith(/common\.CollectionIsExternal/); + + // NFT properties + + await expect(helper.nft.setTokenProperties(alice, uniqueCollectionId, rmrkNftId, [{key: 'a', value: '2'}]), 'setting token properties') + .to.be.rejectedWith(/common\.CollectionIsExternal/); + + await expect(helper.nft.deleteTokenProperties(alice, uniqueCollectionId, rmrkNftId, ['a'])) + .to.be.rejectedWith(/common\.CollectionIsExternal/); + }); + + itSub.ifWithPallets('[Negative] Forbids Unique collection operations with an external collection', [Pallets.RmrkCore], async ({helper}) => { + await expect(helper.nft.burn(alice, uniqueCollectionId), 'destroying collection') + .to.be.rejectedWith(/common\.CollectionIsExternal/); + + // Allow list + + await expect(helper.nft.addToAllowList(alice, uniqueCollectionId, {Substrate: bob.address}), 'adding to allow list') + .to.be.rejectedWith(/common\.CollectionIsExternal/); + + await expect(helper.nft.removeFromAllowList(alice, uniqueCollectionId, {Substrate: bob.address}), 'removing from allowlist') + .to.be.rejectedWith(/common\.CollectionIsExternal/); + + // Owner / Admin / Sponsor + + await expect(helper.nft.changeOwner(alice, uniqueCollectionId, bob.address), 'changing owner') + .to.be.rejectedWith(/common\.CollectionIsExternal/); + + await expect(helper.nft.addAdmin(alice, uniqueCollectionId, {Substrate: bob.address}), 'adding admin') + .to.be.rejectedWith(/common\.CollectionIsExternal/); + + await expect(helper.nft.removeAdmin(alice, uniqueCollectionId, {Substrate: bob.address}), 'removing admin') + .to.be.rejectedWith(/common\.CollectionIsExternal/); + + await expect(helper.nft.setSponsor(alice, uniqueCollectionId, bob.address), 'setting sponsor') + .to.be.rejectedWith(/common\.CollectionIsExternal/); + + await expect(helper.nft.confirmSponsorship(alice, uniqueCollectionId), 'confirming sponsor') + .to.be.rejectedWith(/common\.CollectionIsExternal/); + + await expect(helper.nft.removeSponsor(alice, uniqueCollectionId), 'removing sponsor') + .to.be.rejectedWith(/common\.CollectionIsExternal/); + + // Limits / permissions / transfers + + const txSetTransfers = helper.getApi().tx.unique.setTransfersEnabledFlag(uniqueCollectionId, true); + await expect(executeTransaction(helper.getApi(), alice, txSetTransfers), 'setting transfers enabled flag') + .to.be.rejectedWith(/common\.CollectionIsExternal/); + + await expect(helper.nft.setLimits(alice, uniqueCollectionId, {transfersEnabled: false}), 'setting collection limits') + .to.be.rejectedWith(/common\.CollectionIsExternal/); + + await expect(helper.nft.setPermissions(alice, uniqueCollectionId, {access: 'AllowList'}), 'setting collection permissions') + .to.be.rejectedWith(/common\.CollectionIsExternal/); + }); +}); + +describe('Negative Integration Test: Internal Collections, External Ops', async () => { + let collectionId: number; + let nftId: number; + + before(async () => { + await usingPlaygrounds(async (helper, privateKey) => { + alice = await privateKey('//Alice'); + bob = await privateKey('//Bob'); + + const collection = await helper.nft.mintCollection(alice, {tokenPrefix: 'iceo'}); + collectionId = collection.collectionId; + nftId = (await collection.mintToken(alice)).tokenId; + }); + }); + + itSub.ifWithPallets('[Negative] Forbids RMRK operations with the internal collection and NFT (due to the lack of mapping)', [Pallets.RmrkCore], async ({helper}) => { + const api = helper.getApi(); + + const txChangeOwner = api.tx.rmrkCore.changeCollectionIssuer(collectionId, 
bob.address); + await expect(executeTransaction(api, alice, txChangeOwner), 'changing collection issuer') + .to.be.rejectedWith(/rmrkCore\.CollectionUnknown/); + + const maxBurns = 10; + const txBurnItem = api.tx.rmrkCore.burnNft(collectionId, nftId, maxBurns); + await expect(executeTransaction(api, alice, txBurnItem), 'burning NFT').to.be.rejectedWith(/rmrkCore\.CollectionUnknown/); + }); +}); diff --git a/tests/src/rmrk/sendNft.test.ts b/tests/src/rmrk/sendNft.seqtest.ts similarity index 98% rename from tests/src/rmrk/sendNft.test.ts rename to tests/src/rmrk/sendNft.seqtest.ts index 78a7c65721..9b47ee28b0 100644 --- a/tests/src/rmrk/sendNft.test.ts +++ b/tests/src/rmrk/sendNft.seqtest.ts @@ -2,12 +2,11 @@ import {expect} from 'chai'; import {getApiConnection} from '../substrate/substrate-api'; import {createCollection, mintNft, sendNft} from './util/tx'; import {NftIdTuple} from './util/fetch'; -import {isNftChildOfAnother, expectTxFailure} from './util/helpers'; -import {Pallets, requirePallets} from '../util/helpers'; +import {isNftChildOfAnother, expectTxFailure, requirePallets, Pallets} from './util/helpers'; describe('integration test: send NFT', () => { let api: any; - before(async function() { + before(async function () { api = await getApiConnection(); await requirePallets(this, [Pallets.RmrkCore]); }); @@ -252,5 +251,5 @@ describe('integration test: send NFT', () => { await expectTxFailure(/rmrkCore\.NoPermission/, tx); }); - after(() => { api.disconnect(); }); + after(async() => { await api.disconnect(); }); }); diff --git a/tests/src/rmrk/setCollectionProperty.test.ts b/tests/src/rmrk/setCollectionProperty.seqtest.ts similarity index 75% rename from tests/src/rmrk/setCollectionProperty.test.ts rename to tests/src/rmrk/setCollectionProperty.seqtest.ts index f19d5ed382..e1dc113391 100644 --- a/tests/src/rmrk/setCollectionProperty.test.ts +++ b/tests/src/rmrk/setCollectionProperty.seqtest.ts @@ -1,14 +1,13 @@ import {getApiConnection} from '../substrate/substrate-api'; -import {expectTxFailure} from './util/helpers'; +import {expectTxFailure, requirePallets, Pallets} from './util/helpers'; import {createCollection, setPropertyCollection} from './util/tx'; -import {Pallets, requirePallets} from '../util/helpers'; describe('integration test: set collection property', () => { - const Alice = '//Alice'; - const Bob = '//Bob'; + const alice = '//Alice'; + const bob = '//Bob'; let api: any; - before(async function() { + before(async function () { api = await getApiConnection(); await requirePallets(this, [Pallets.RmrkCore]); }); @@ -16,16 +15,16 @@ describe('integration test: set collection property', () => { it('set collection property', async () => { await createCollection( api, - Alice, + alice, 'test-metadata', null, 'test-symbol', ).then(async (collectionId) => { - await setPropertyCollection(api, Alice, collectionId, 'test_key', '42'); - await setPropertyCollection(api, Alice, collectionId, 'test_key', '10'); + await setPropertyCollection(api, alice, collectionId, 'test_key', '42'); + await setPropertyCollection(api, alice, collectionId, 'test_key', '10'); await setPropertyCollection( api, - Alice, + alice, collectionId, 'second_test_key', '111', @@ -36,7 +35,7 @@ describe('integration test: set collection property', () => { it('[negative] set non-existing collection property', async () => { const tx = setPropertyCollection( api, - Alice, + alice, 9999, 'test_key', '42', @@ -47,14 +46,14 @@ describe('integration test: set collection property', () => { it('[negative] set property 
not an owner NFT collection issuer', async () => { await createCollection( api, - Bob, + bob, 'test-metadata', null, 'test-symbol', ).then(async (collectionId) => { const tx = setPropertyCollection( api, - Alice, + alice, collectionId, 'test_key', '42', @@ -63,7 +62,5 @@ describe('integration test: set collection property', () => { }); }); - after(() => { - api.disconnect(); - }); + after(async() => { await api.disconnect(); }); }); diff --git a/tests/src/rmrk/setEquippableList.test.ts b/tests/src/rmrk/setEquippableList.seqtest.ts similarity index 94% rename from tests/src/rmrk/setEquippableList.test.ts rename to tests/src/rmrk/setEquippableList.seqtest.ts index 5e6d3a009d..52a5630725 100644 --- a/tests/src/rmrk/setEquippableList.test.ts +++ b/tests/src/rmrk/setEquippableList.seqtest.ts @@ -1,11 +1,10 @@ import {getApiConnection} from '../substrate/substrate-api'; -import {expectTxFailure} from './util/helpers'; +import {expectTxFailure, requirePallets, Pallets} from './util/helpers'; import {createCollection, createBase, setEquippableList} from './util/tx'; -import {Pallets, requirePallets} from '../util/helpers'; describe("integration test: set slot's Equippable List", () => { let api: any; - before(async function() { + before(async function () { api = await getApiConnection(); await requirePallets(this, [Pallets.RmrkCore]); }); @@ -111,5 +110,5 @@ describe("integration test: set slot's Equippable List", () => { await expectTxFailure(/rmrkEquip\.PartDoesntExist/, tx); }); - after(() => { api.disconnect(); }); + after(async() => { await api.disconnect(); }); }); diff --git a/tests/src/rmrk/setNftProperty.test.ts b/tests/src/rmrk/setNftProperty.seqtest.ts similarity index 94% rename from tests/src/rmrk/setNftProperty.test.ts rename to tests/src/rmrk/setNftProperty.seqtest.ts index 6b2ec7173a..003cf7d4ee 100644 --- a/tests/src/rmrk/setNftProperty.test.ts +++ b/tests/src/rmrk/setNftProperty.seqtest.ts @@ -1,12 +1,11 @@ import {getApiConnection} from '../substrate/substrate-api'; import {NftIdTuple} from './util/fetch'; -import {expectTxFailure} from './util/helpers'; +import {expectTxFailure, requirePallets, Pallets} from './util/helpers'; import {createCollection, mintNft, sendNft, setNftProperty} from './util/tx'; -import {Pallets, requirePallets} from '../util/helpers'; describe('integration test: set NFT property', () => { let api: any; - before(async function() { + before(async function () { api = await getApiConnection(); await requirePallets(this, [Pallets.RmrkCore]); }); @@ -85,5 +84,5 @@ describe('integration test: set NFT property', () => { await expectTxFailure(/rmrkCore\.NoPermission/, tx); }); - after(() => { api.disconnect(); }); + after(async() => { await api.disconnect(); }); }); diff --git a/tests/src/rmrk/setResourcePriorities.test.ts b/tests/src/rmrk/setResourcePriorities.seqtest.ts similarity index 89% rename from tests/src/rmrk/setResourcePriorities.test.ts rename to tests/src/rmrk/setResourcePriorities.seqtest.ts index c425abe68a..f271854044 100644 --- a/tests/src/rmrk/setResourcePriorities.test.ts +++ b/tests/src/rmrk/setResourcePriorities.seqtest.ts @@ -1,11 +1,10 @@ import {getApiConnection} from '../substrate/substrate-api'; -import {expectTxFailure} from './util/helpers'; +import {expectTxFailure, requirePallets, Pallets} from './util/helpers'; import {mintNft, createCollection, setResourcePriorities} from './util/tx'; -import {Pallets, requirePallets} from '../util/helpers'; describe('integration test: set NFT resource priorities', () => { let api: any; - 
before(async function() { + before(async function () { api = await getApiConnection(); await requirePallets(this, [Pallets.RmrkCore]); }); @@ -45,8 +44,6 @@ describe('integration test: set NFT resource priorities', () => { }); it('[negative] set NFT resource priorities to non-existing NFT', async () => { - const owner = alice; - const collectionId = 0; const maxNftId = 0xFFFFFFFF; @@ -55,5 +52,5 @@ describe('integration test: set NFT resource priorities', () => { await expectTxFailure(/rmrkCore\.NoAvailableNftId/, tx); }); - after(() => { api.disconnect(); }); + after(async() => { await api.disconnect(); }); }); diff --git a/tests/src/rmrk/util/fetch.ts b/tests/src/rmrk/util/fetch.ts index 8122a669b9..d9458ed8df 100644 --- a/tests/src/rmrk/util/fetch.ts +++ b/tests/src/rmrk/util/fetch.ts @@ -1,5 +1,5 @@ import {ApiPromise} from '@polkadot/api'; -import type {Option, Bytes} from '@polkadot/types-codec'; +import type {Option} from '@polkadot/types-codec'; import type { RmrkTraitsCollectionCollectionInfo as Collection, RmrkTraitsNftNftInfo as Nft, diff --git a/tests/src/rmrk/util/helpers.ts b/tests/src/rmrk/util/helpers.ts index c43fccb5a9..3d44f81bb8 100644 --- a/tests/src/rmrk/util/helpers.ts +++ b/tests/src/rmrk/util/helpers.ts @@ -10,6 +10,8 @@ import privateKey from '../../substrate/privateKey'; import {NftIdTuple, getChildren, getOwnedNfts, getCollectionProperties, getNftProperties, getResources} from './fetch'; import chaiAsPromised from 'chai-as-promised'; import chai from 'chai'; +import {getApiConnection} from '../../substrate/substrate-api'; +import {Context} from 'mocha'; chai.use(chaiAsPromised); const expect = chai.expect; @@ -19,6 +21,46 @@ interface TxResult { successData: T | null; } +export enum Pallets { + Inflation = 'inflation', + RmrkCore = 'rmrkcore', + RmrkEquip = 'rmrkequip', + ReFungible = 'refungible', + Fungible = 'fungible', + NFT = 'nonfungible', + Scheduler = 'scheduler', + AppPromotion = 'apppromotion', +} + +let modulesNames: any; +export function getModuleNames(api: ApiPromise): string[] { + if (typeof modulesNames === 'undefined') + modulesNames = api.runtimeMetadata.asLatest.pallets.map(m => m.name.toString().toLowerCase()); + return modulesNames; +} + +export async function missingRequiredPallets(requiredPallets: string[]): Promise { + const api = await getApiConnection(); + const pallets = getModuleNames(api); + await api.disconnect(); + + return requiredPallets.filter(p => !pallets.includes(p)); +} + +export async function requirePallets(mocha: Context, requiredPallets: string[]) { + const missingPallets = await missingRequiredPallets(requiredPallets); + + if (missingPallets.length > 0) { + const skippingTestMsg = `\tSkipping test "${mocha.test?.title}".`; + const missingPalletsMsg = `\tThe following pallets are missing:\n\t- ${missingPallets.join('\n\t- ')}`; + const skipMsg = `${skippingTestMsg}\n${missingPalletsMsg}`; + + console.error('\x1b[38:5:208m%s\x1b[0m', skipMsg); + + mocha.skip(); + } +} + export function makeNftOwner(api: ApiPromise, owner: string | NftIdTuple): NftOwner { const isNftSending = (typeof owner !== 'string'); @@ -107,7 +149,7 @@ export async function isNftChildOfAnother( export function isTxResultSuccess(events: EventRecord[]): boolean { let success = false; - events.forEach(({event: {data, method, section}}) => { + events.forEach(({event: {method}}) => { if (method == 'ExtrinsicSuccess') { success = true; } diff --git a/tests/src/rmrk/util/tx.ts b/tests/src/rmrk/util/tx.ts index fe18443a10..b218784baa 100644 --- 
a/tests/src/rmrk/util/tx.ts +++ b/tests/src/rmrk/util/tx.ts @@ -230,7 +230,7 @@ export async function mintNft( royaltyOptional, metadata, transferable, - resources, + resources as any, ); const events = await executeTransaction(api, issuer, tx); diff --git a/tests/src/rpc.load.ts b/tests/src/rpc.load.ts deleted file mode 100644 index e16126103a..0000000000 --- a/tests/src/rpc.load.ts +++ /dev/null @@ -1,155 +0,0 @@ -// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. -// This file is part of Unique Network. - -// Unique Network is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Unique Network is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Unique Network. If not, see . - -import usingApi, {submitTransactionAsync} from './substrate/substrate-api'; -import {IKeyringPair} from '@polkadot/types/types'; -import {Abi, BlueprintPromise as Blueprint, CodePromise, ContractPromise as Contract} from '@polkadot/api-contract'; -import {ApiPromise, Keyring} from '@polkadot/api'; -import {findUnusedAddress} from './util/helpers'; -import fs from 'fs'; - -const value = 0; -const gasLimit = 500000n * 1000000n; -const endowment = '1000000000000000'; - -/*eslint no-async-promise-executor: "off"*/ -function deployBlueprint(alice: IKeyringPair, code: CodePromise): Promise { - return new Promise(async (resolve) => { - const unsub = await code - .createBlueprint() - .signAndSend(alice, (result) => { - if (result.status.isInBlock || result.status.isFinalized) { - // here we have an additional field in the result, containing the blueprint - resolve(result.blueprint); - unsub(); - } - }); - }); -} - -/*eslint no-async-promise-executor: "off"*/ -function deployContract(alice: IKeyringPair, blueprint: Blueprint) : Promise { - return new Promise(async (resolve) => { - const unsub = await blueprint.tx - .new(endowment, gasLimit) - .signAndSend(alice, (result) => { - if (result.status.isInBlock || result.status.isFinalized) { - unsub(); - resolve(result); - } - }); - }); -} - -async function prepareDeployer(api: ApiPromise, privateKeyWrapper: ((account: string) => IKeyringPair)) { - // Find unused address - const deployer = await findUnusedAddress(api, privateKeyWrapper); - - // Transfer balance to it - const alice = privateKeyWrapper('//Alice'); - const amount = BigInt(endowment) + 10n**15n; - const tx = api.tx.balances.transfer(deployer.address, amount); - await submitTransactionAsync(alice, tx); - - return deployer; -} - -async function deployLoadTester(api: ApiPromise, privateKeyWrapper: ((account: string) => IKeyringPair)): Promise<[Contract, IKeyringPair]> { - const metadata = JSON.parse(fs.readFileSync('./src/load_test_sc/metadata.json').toString('utf-8')); - const abi = new Abi(metadata); - - const deployer = await prepareDeployer(api, privateKeyWrapper); - - const wasm = fs.readFileSync('./src/load_test_sc/loadtester.wasm'); - - const code = new CodePromise(api, abi, wasm); - - const blueprint = await deployBlueprint(deployer, code); - const contract = (await deployContract(deployer, blueprint))['contract'] as Contract; - - return [contract, deployer]; 
-} - -async function getScData(contract: Contract, deployer: IKeyringPair) { - const result = await contract.query.get(deployer.address, value, gasLimit); - - if(!result.result.isSuccess) { - throw 'Failed to get value'; - } - return result.result.asSuccess.data; -} - - -describe('RPC Tests', () => { - it('Simple RPC Load Test', async () => { - await usingApi(async api => { - let count = 0; - let hrTime = process.hrtime(); - let microsec1 = hrTime[0] * 1000000 + hrTime[1] / 1000; - let rate = 0; - const checkPoint = 1000; - - /* eslint no-constant-condition: "off" */ - while (true) { - await api.rpc.system.chain(); - count++; - process.stdout.write(`RPC reads: ${count} times at rate ${rate} r/s \r`); - - if (count % checkPoint == 0) { - hrTime = process.hrtime(); - const microsec2 = hrTime[0] * 1000000 + hrTime[1] / 1000; - rate = 1000000*checkPoint/(microsec2 - microsec1); - microsec1 = microsec2; - } - } - }); - }); - - it('Smart Contract RPC Load Test', async () => { - await usingApi(async (api, privateKeyWrapper) => { - - // Deploy smart contract - const [contract, deployer] = await deployLoadTester(api, privateKeyWrapper); - - // Fill smart contract up with data - const bob = privateKeyWrapper('//Bob'); - const tx = contract.tx.bloat(value, gasLimit, 200); - await submitTransactionAsync(bob, tx); - - // Run load test - let count = 0; - let hrTime = process.hrtime(); - let microsec1 = hrTime[0] * 1000000 + hrTime[1] / 1000; - let rate = 0; - const checkPoint = 10; - - /* eslint no-constant-condition: "off" */ - while (true) { - await getScData(contract, deployer); - count++; - process.stdout.write(`SC reads: ${count} times at rate ${rate} r/s \r`); - - if (count % checkPoint == 0) { - hrTime = process.hrtime(); - const microsec2 = hrTime[0] * 1000000 + hrTime[1] / 1000; - rate = 1000000*checkPoint/(microsec2 - microsec1); - microsec1 = microsec2; - } - } - }); - }); - -}); diff --git a/tests/src/rpc.test.ts b/tests/src/rpc.test.ts index 423686cfe5..5fd5a1ff7d 100644 --- a/tests/src/rpc.test.ts +++ b/tests/src/rpc.test.ts @@ -1,12 +1,68 @@ -import {expect} from 'chai'; -import usingApi from './substrate/substrate-api'; -import {createCollectionExpectSuccess, getTokenOwner} from './util/helpers'; - -describe('getTokenOwner', () => { - it('returns None for fungible collection', async () => { - await usingApi(async api => { - const collection = await createCollectionExpectSuccess({mode: {type: 'Fungible', decimalPoints: 0}}); - await expect(getTokenOwner(api, collection, 0)).to.be.rejectedWith(/^owner == null$/); +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . 
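The rmrk specs renamed to *.seqtest.ts above all gate themselves on the pallets they need through the requirePallets/Pallets helpers added to tests/src/rmrk/util/helpers.ts. A minimal usage sketch, not part of the diff (import paths as they appear in those files; the suite name is illustrative):

// Sketch only: mirrors the before/after hooks used by the rmrk *.seqtest.ts files above.
import {getApiConnection} from '../substrate/substrate-api';
import {requirePallets, Pallets} from './util/helpers';

describe('example: pallet-gated rmrk suite', () => {
  let api: any;

  before(async function () {
    // A regular function (not an arrow) is required so requirePallets can call this.skip().
    api = await getApiConnection();
    // Skips the whole suite when the node was built without pallet-rmrk-core.
    await requirePallets(this, [Pallets.RmrkCore]);
  });

  after(async () => { await api.disconnect(); });
});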
+ +import {IKeyringPair} from '@polkadot/types/types'; +import {usingPlaygrounds, itSub, expect} from './util'; +import {CrossAccountId} from './util/playgrounds/unique'; + +describe('integration test: RPC methods', () => { + let donor: IKeyringPair; + let alice: IKeyringPair; + let bob: IKeyringPair; + + before(async () => { + await usingPlaygrounds(async (helper, privateKey) => { + donor = await privateKey({filename: __filename}); + [alice, bob] = await helper.arrange.createAccounts([20n, 10n], donor); }); }); + + itSub('returns None for fungible collection', async ({helper}) => { + const collection = await helper.ft.mintCollection(alice, {name: 'RPC-1', tokenPrefix: 'RPC'}); + const owner = (await helper.callRpc('api.rpc.unique.tokenOwner', [collection.collectionId, 0])).toJSON() as any; + expect(owner).to.be.null; + }); + + itSub('RPC method tokenOwners for fungible collection and token', async ({helper}) => { + // Set-up a few token owners of all stripes + const ethAcc = {Ethereum: '0x67fb3503a61b284dc83fa96dceec4192db47dc7c'}; + const facelessCrowd = (await helper.arrange.createAccounts([0n, 0n, 0n, 0n, 0n, 0n, 0n], donor)) + .map(i => {return {Substrate: i.address};}); + + const collection = await helper.ft.mintCollection(alice, {name: 'RPC-2', tokenPrefix: 'RPC'}); + // mint some maximum (u128) amounts of tokens possible + await collection.mint(alice, (1n << 128n) - 1n); + + await collection.transfer(alice, {Substrate: bob.address}, 1000n); + await collection.transfer(alice, ethAcc, 900n); + + for (let i = 0; i < facelessCrowd.length; i++) { + await collection.transfer(alice, facelessCrowd[i], 1n); + } + // Set-up over + + const owners = await helper.callRpc('api.rpc.unique.tokenOwners', [collection.collectionId, 0]); + const ids = (owners.toJSON() as any[]).map(CrossAccountId.fromLowerCaseKeys); + + expect(ids).to.deep.include.members([{Substrate: alice.address}, ethAcc, {Substrate: bob.address}, ...facelessCrowd]); + expect(owners.length == 10).to.be.true; + + // Make sure only 10 results are returned with this RPC + const [eleven] = await helper.arrange.createAccounts([0n], donor); + expect(await collection.transfer(alice, {Substrate: eleven.address}, 10n)).to.be.true; + expect((await helper.callRpc('api.rpc.unique.tokenOwners', [collection.collectionId, 0])).length).to.be.equal(10); + }); }); \ No newline at end of file diff --git a/tests/src/setCollectionLimits.test.ts b/tests/src/setCollectionLimits.test.ts index e765ab6c93..9d4aacb7b2 100644 --- a/tests/src/setCollectionLimits.test.ts +++ b/tests/src/setCollectionLimits.test.ts @@ -15,26 +15,8 @@ // along with Unique Network. If not, see . 
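The rewritten rpc.test.ts above goes through the playgrounds helper (helper.callRpc('api.rpc.unique.tokenOwners', ...)). For reference, a hedged sketch of the raw equivalent, assuming the unique RPC definitions are registered on the ApiPromise as substrate/substrate-api.ts does; the cast is only because custom RPC namespaces are not in the default typings, and the wrapper name is illustrative:

import {ApiPromise} from '@polkadot/api';

// Sketch: raw equivalent of helper.callRpc('api.rpc.unique.tokenOwners', [collectionId, tokenId]).
// The RPC returns at most 10 owners, which is what the test above relies on.
async function tokenOwnersRaw(api: ApiPromise, collectionId: number, tokenId: number) {
  const owners = await (api.rpc as any).unique.tokenOwners(collectionId, tokenId);
  return owners.toJSON();
}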
// https://unique-network.readthedocs.io/en/latest/jsapi.html#setchainlimits -import {ApiPromise} from '@polkadot/api'; import {IKeyringPair} from '@polkadot/types/types'; -import chai from 'chai'; -import chaiAsPromised from 'chai-as-promised'; -import usingApi, {submitTransactionAsync, submitTransactionExpectFailAsync} from './substrate/substrate-api'; -import { - createCollectionExpectSuccess, getCreatedCollectionCount, - getCreateItemResult, - setCollectionLimitsExpectFailure, - setCollectionLimitsExpectSuccess, - addCollectionAdminExpectSuccess, - queryCollectionExpectSuccess, -} from './util/helpers'; - -chai.use(chaiAsPromised); -const expect = chai.expect; - -let alice: IKeyringPair; -let bob: IKeyringPair; -let collectionIdForTesting: number; +import {itSub, usingPlaygrounds, expect} from './util'; const accountTokenOwnershipLimit = 0; const sponsoredDataSize = 0; @@ -42,197 +24,177 @@ const sponsorTransferTimeout = 1; const tokenLimit = 10; describe('setCollectionLimits positive', () => { - let tx; + let alice: IKeyringPair; + let bob: IKeyringPair; + before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - collectionIdForTesting = await createCollectionExpectSuccess({name: 'A', description: 'B', tokenPrefix: 'C', mode: {type: 'NFT'}}); - }); - }); - it('execute setCollectionLimits with predefined params ', async () => { - await usingApi(async (api: ApiPromise) => { - tx = api.tx.unique.setCollectionLimits( - collectionIdForTesting, - { - accountTokenOwnershipLimit: accountTokenOwnershipLimit, - sponsoredDataSize: sponsoredDataSize, - tokenLimit: tokenLimit, - sponsorTransferTimeout, - ownerCanTransfer: true, - ownerCanDestroy: true, - }, - ); - const events = await submitTransactionAsync(alice, tx); - const result = getCreateItemResult(events); - - // get collection limits defined previously - const collectionInfo = await queryCollectionExpectSuccess(api, collectionIdForTesting); - - // tslint:disable-next-line:no-unused-expression - expect(result.success).to.be.true; - expect(collectionInfo.limits.accountTokenOwnershipLimit.unwrap().toNumber()).to.be.equal(accountTokenOwnershipLimit); - expect(collectionInfo.limits.sponsoredDataSize.unwrap().toNumber()).to.be.equal(sponsoredDataSize); - expect(collectionInfo.limits.tokenLimit.unwrap().toNumber()).to.be.equal(tokenLimit); - expect(collectionInfo.limits.sponsorTransferTimeout.unwrap().toNumber()).to.be.equal(sponsorTransferTimeout); - expect(collectionInfo.limits.ownerCanTransfer.unwrap().toJSON()).to.be.true; - expect(collectionInfo.limits.ownerCanDestroy.unwrap().toJSON()).to.be.true; + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob] = await helper.arrange.createAccounts([20n, 10n], donor); }); }); - it('Set the same token limit twice', async () => { - await usingApi(async (api: ApiPromise) => { + itSub('execute setCollectionLimits with predefined params', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'SetCollectionLimits-1', tokenPrefix: 'SCL'}); - const collectionLimits = { - accountTokenOwnershipLimit: accountTokenOwnershipLimit, - sponsoredMintSize: sponsoredDataSize, - tokenLimit: tokenLimit, + await collection.setLimits( + alice, + { + accountTokenOwnershipLimit, + sponsoredDataSize, + tokenLimit, sponsorTransferTimeout, ownerCanTransfer: true, ownerCanDestroy: true, - }; - - // The first time - const tx1 = 
api.tx.unique.setCollectionLimits( - collectionIdForTesting, - collectionLimits, - ); - const events1 = await submitTransactionAsync(alice, tx1); - const result1 = getCreateItemResult(events1); - expect(result1.success).to.be.true; - const collectionInfo1 = await queryCollectionExpectSuccess(api, collectionIdForTesting); - expect(collectionInfo1.limits.tokenLimit.unwrap().toNumber()).to.be.equal(tokenLimit); - - // The second time - const tx2 = api.tx.unique.setCollectionLimits( - collectionIdForTesting, - collectionLimits, - ); - const events2 = await submitTransactionAsync(alice, tx2); - const result2 = getCreateItemResult(events2); - expect(result2.success).to.be.true; - const collectionInfo2 = await queryCollectionExpectSuccess(api, collectionIdForTesting); - expect(collectionInfo2.limits.tokenLimit.unwrap().toNumber()).to.be.equal(tokenLimit); - }); + }, + ); + + // get collection limits defined previously + const collectionInfo = await collection.getEffectiveLimits(); + + expect(collectionInfo.accountTokenOwnershipLimit).to.be.equal(accountTokenOwnershipLimit); + expect(collectionInfo.sponsoredDataSize).to.be.equal(sponsoredDataSize); + expect(collectionInfo.tokenLimit).to.be.equal(tokenLimit); + expect(collectionInfo.sponsorTransferTimeout).to.be.equal(sponsorTransferTimeout); + expect(collectionInfo.ownerCanTransfer).to.be.true; + expect(collectionInfo.ownerCanDestroy).to.be.true; }); - it('Set the same token limit twice', async () => { - await usingApi(async (api: ApiPromise) => { + itSub('Set the same token limit twice', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'SetCollectionLimits-2', tokenPrefix: 'SCL'}); + + const collectionLimits = { + accountTokenOwnershipLimit, + sponsoredDataSize, + tokenLimit, + sponsorTransferTimeout, + ownerCanTransfer: true, + ownerCanDestroy: true, + }; + + await collection.setLimits(alice, collectionLimits); + + const collectionInfo1 = await collection.getEffectiveLimits(); + + expect(collectionInfo1.tokenLimit).to.be.equal(tokenLimit); + + await collection.setLimits(alice, collectionLimits); + const collectionInfo2 = await collection.getEffectiveLimits(); + expect(collectionInfo2.tokenLimit).to.be.equal(tokenLimit); + }); + + itSub('execute setCollectionLimits from admin collection', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'SetCollectionLimits-3', tokenPrefix: 'SCL'}); + await collection.addAdmin(alice, {Substrate: bob.address}); + + const collectionLimits = { + accountTokenOwnershipLimit, + sponsoredDataSize, + // sponsoredMintSize, + tokenLimit, + }; + + await expect(collection.setLimits(bob, collectionLimits)).to.not.be.rejected; + }); }); describe('setCollectionLimits negative', () => { - let tx; + let alice: IKeyringPair; + let bob: IKeyringPair; + before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - collectionIdForTesting = await createCollectionExpectSuccess({name: 'A', description: 'B', tokenPrefix: 'C', mode: {type: 'NFT'}}); + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await
privateKey({filename: __filename}); + [alice, bob] = await helper.arrange.createAccounts([20n, 10n], donor); }); }); - it('execute setCollectionLimits for not exists collection', async () => { - await usingApi(async (api: ApiPromise) => { - const collectionCount = await getCreatedCollectionCount(api); - const nonExistedCollectionId = collectionCount + 1; - tx = api.tx.unique.setCollectionLimits( - nonExistedCollectionId, - { - accountTokenOwnershipLimit, - sponsoredDataSize, - // sponsoredMintSize, - tokenLimit, - }, - ); - await expect(submitTransactionExpectFailAsync(alice, tx)).to.be.rejected; - }); + + itSub('execute setCollectionLimits for not exists collection', async ({helper}) => { + const nonExistentCollectionId = (1 << 32) - 1; + await expect(helper.collection.setLimits( + alice, + nonExistentCollectionId, + { + accountTokenOwnershipLimit, + sponsoredDataSize, + // sponsoredMintSize, + tokenLimit, + }, + )).to.be.rejectedWith(/common\.CollectionNotFound/); }); - it('execute setCollectionLimits from user who is not owner of this collection', async () => { - await usingApi(async (api: ApiPromise) => { - tx = api.tx.unique.setCollectionLimits( - collectionIdForTesting, - { - accountTokenOwnershipLimit, - sponsoredDataSize, - // sponsoredMintSize, - tokenLimit, - }, - ); - await expect(submitTransactionExpectFailAsync(bob, tx)).to.be.rejected; - }); + + itSub('execute setCollectionLimits from user who is not owner of this collection', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'SetCollectionLimits-Neg-1', tokenPrefix: 'SCL'}); + + await expect(collection.setLimits(bob, { + accountTokenOwnershipLimit, + sponsoredDataSize, + // sponsoredMintSize, + tokenLimit, + })).to.be.rejectedWith(/common\.NoPermission/); }); - it('fails when trying to enable OwnerCanTransfer after it was disabled', async () => { - const collectionId = await createCollectionExpectSuccess(); - await setCollectionLimitsExpectSuccess(alice, collectionId, { - accountTokenOwnershipLimit: accountTokenOwnershipLimit, - sponsoredMintSize: sponsoredDataSize, - tokenLimit: tokenLimit, + itSub('fails when trying to enable OwnerCanTransfer after it was disabled', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'SetCollectionLimits-Neg-2', tokenPrefix: 'SCL'}); + + await collection.setLimits(alice, { + accountTokenOwnershipLimit, + sponsoredDataSize, + tokenLimit, sponsorTransferTimeout, ownerCanTransfer: false, ownerCanDestroy: true, }); - await setCollectionLimitsExpectFailure(alice, collectionId, { - accountTokenOwnershipLimit: accountTokenOwnershipLimit, - sponsoredMintSize: sponsoredDataSize, - tokenLimit: tokenLimit, + + await expect(collection.setLimits(alice, { + accountTokenOwnershipLimit, + sponsoredDataSize, + tokenLimit, sponsorTransferTimeout, ownerCanTransfer: true, ownerCanDestroy: true, - }); + })).to.be.rejectedWith(/common\.OwnerPermissionsCantBeReverted/); }); - it('fails when trying to enable OwnerCanDestroy after it was disabled', async () => { - const collectionId = await createCollectionExpectSuccess(); - await setCollectionLimitsExpectSuccess(alice, collectionId, { - accountTokenOwnershipLimit: accountTokenOwnershipLimit, - sponsoredMintSize: sponsoredDataSize, - tokenLimit: tokenLimit, + itSub('fails when trying to enable OwnerCanDestroy after it was disabled', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'SetCollectionLimits-Neg-3', tokenPrefix: 'SCL'}); + + await 
collection.setLimits(alice, { + accountTokenOwnershipLimit, + sponsoredDataSize, + tokenLimit, sponsorTransferTimeout, ownerCanTransfer: true, ownerCanDestroy: false, }); - await setCollectionLimitsExpectFailure(alice, collectionId, { + + await expect(collection.setLimits(alice, { + accountTokenOwnershipLimit, + sponsoredDataSize, + tokenLimit, + sponsorTransferTimeout, + ownerCanTransfer: true, + ownerCanDestroy: true, + })).to.be.rejectedWith(/common\.OwnerPermissionsCantBeReverted/); + }); + + itSub('Setting the higher token limit fails', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'SetCollectionLimits-Neg-4', tokenPrefix: 'SCL'}); + + const collectionLimits = { accountTokenOwnershipLimit: accountTokenOwnershipLimit, sponsoredMintSize: sponsoredDataSize, tokenLimit: tokenLimit, sponsorTransferTimeout, ownerCanTransfer: true, ownerCanDestroy: true, - }); - }); - - it('Setting the higher token limit fails', async () => { - await usingApi(async () => { + }; - const collectionId = await createCollectionExpectSuccess(); - const collectionLimits = { - accountTokenOwnershipLimit: accountTokenOwnershipLimit, - sponsoredMintSize: sponsoredDataSize, - tokenLimit: tokenLimit, - sponsorTransferTimeout, - ownerCanTransfer: true, - ownerCanDestroy: true, - }; + // The first time + await collection.setLimits(alice, collectionLimits); - // The first time - await setCollectionLimitsExpectSuccess(alice, collectionId, collectionLimits); - - // The second time - higher token limit - collectionLimits.tokenLimit += 1; - await setCollectionLimitsExpectFailure(alice, collectionId, collectionLimits); - }); + // The second time - higher token limit + collectionLimits.tokenLimit += 1; + await expect(collection.setLimits(alice, collectionLimits)).to.be.rejectedWith(/common\.CollectionTokenLimitExceeded/); }); - }); diff --git a/tests/src/setCollectionSponsor.test.ts b/tests/src/setCollectionSponsor.test.ts index 5606c91977..8c7619a8c0 100644 --- a/tests/src/setCollectionSponsor.test.ts +++ b/tests/src/setCollectionSponsor.test.ts @@ -14,89 +14,106 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . -import chai from 'chai'; -import chaiAsPromised from 'chai-as-promised'; -import {default as usingApi} from './substrate/substrate-api'; -import {createCollectionExpectSuccess, - setCollectionSponsorExpectSuccess, - destroyCollectionExpectSuccess, - setCollectionSponsorExpectFailure, - addCollectionAdminExpectSuccess, - getCreatedCollectionCount, -} from './util/helpers'; import {IKeyringPair} from '@polkadot/types/types'; - -chai.use(chaiAsPromised); - -let alice: IKeyringPair; -let bob: IKeyringPair; -let charlie: IKeyringPair; +import {itSub, usingPlaygrounds, expect, Pallets} from './util'; describe('integration test: ext. 
setCollectionSponsor():', () => { + let alice: IKeyringPair; + let bob: IKeyringPair; + let charlie: IKeyringPair; before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - charlie = privateKeyWrapper('//Charlie'); + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob, charlie] = await helper.arrange.createAccounts([20n, 10n, 10n], donor); }); }); - it('Set NFT collection sponsor', async () => { - const collectionId = await createCollectionExpectSuccess(); - await setCollectionSponsorExpectSuccess(collectionId, bob.address); + itSub('Set NFT collection sponsor', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'SetCollectionSponsor-1-NFT', tokenPrefix: 'SCS'}); + await expect(collection.setSponsor(alice, bob.address)).to.be.not.rejected; + + expect((await collection.getData())?.raw.sponsorship).to.deep.equal({ + Unconfirmed: bob.address, + }); }); - it('Set Fungible collection sponsor', async () => { - const collectionId = await createCollectionExpectSuccess({mode: {type: 'Fungible', decimalPoints: 0}}); - await setCollectionSponsorExpectSuccess(collectionId, bob.address); + + itSub('Set Fungible collection sponsor', async ({helper}) => { + const collection = await helper.ft.mintCollection(alice, {name: 'SetCollectionSponsor-1-FT', tokenPrefix: 'SCS'}); + await expect(collection.setSponsor(alice, bob.address)).to.be.not.rejected; + + expect((await collection.getData())?.raw.sponsorship).to.deep.equal({ + Unconfirmed: bob.address, + }); }); - it('Set ReFungible collection sponsor', async () => { - const collectionId = await createCollectionExpectSuccess({mode: {type: 'ReFungible'}}); - await setCollectionSponsorExpectSuccess(collectionId, bob.address); + + itSub.ifWithPallets('Set ReFungible collection sponsor', [Pallets.ReFungible], async ({helper}) => { + const collection = await helper.rft.mintCollection(alice, {name: 'SetCollectionSponsor-1-RFT', tokenPrefix: 'SCS'}); + await expect(collection.setSponsor(alice, bob.address)).to.be.not.rejected; + + expect((await collection.getData())?.raw.sponsorship).to.deep.equal({ + Unconfirmed: bob.address, + }); }); - it('Set the same sponsor repeatedly', async () => { - const collectionId = await createCollectionExpectSuccess(); - await setCollectionSponsorExpectSuccess(collectionId, bob.address); - await setCollectionSponsorExpectSuccess(collectionId, bob.address); + itSub('Set the same sponsor repeatedly', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'SetCollectionSponsor-2', tokenPrefix: 'SCS'}); + await expect(collection.setSponsor(alice, bob.address)).to.be.not.rejected; + await expect(collection.setSponsor(alice, bob.address)).to.be.not.rejected; + + expect((await collection.getData())?.raw.sponsorship).to.deep.equal({ + Unconfirmed: bob.address, + }); }); - it('Replace collection sponsor', async () => { - const collectionId = await createCollectionExpectSuccess(); - await setCollectionSponsorExpectSuccess(collectionId, bob.address); - await setCollectionSponsorExpectSuccess(collectionId, charlie.address); + + itSub('Replace collection sponsor', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'SetCollectionSponsor-3', tokenPrefix: 'SCS'}); + await expect(collection.setSponsor(alice, bob.address)).to.be.not.rejected; + await 
expect(collection.setSponsor(alice, charlie.address)).to.be.not.rejected; + + expect((await collection.getData())?.raw.sponsorship).to.deep.equal({ + Unconfirmed: charlie.address, + }); }); - it('Collection admin add sponsor', async () => { - const collectionId = await createCollectionExpectSuccess(); - await addCollectionAdminExpectSuccess(alice, collectionId, bob.address); - await setCollectionSponsorExpectSuccess(collectionId, charlie.address, '//Bob'); + + itSub('Collection admin add sponsor', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'SetCollectionSponsor-4', tokenPrefix: 'SCS'}); + await collection.addAdmin(alice, {Substrate: bob.address}); + await expect(collection.setSponsor(bob, charlie.address)).to.be.not.rejected; + + expect((await collection.getData())?.raw.sponsorship).to.deep.equal({ + Unconfirmed: charlie.address, + }); }); }); describe('(!negative test!) integration test: ext. setCollectionSponsor():', () => { + let alice: IKeyringPair; + let bob: IKeyringPair; + before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - charlie = privateKeyWrapper('//Charlie'); + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob] = await helper.arrange.createAccounts([10n, 5n], donor); }); }); - it('(!negative test!) Add sponsor with a non-owner', async () => { - const collectionId = await createCollectionExpectSuccess(); - await setCollectionSponsorExpectFailure(collectionId, bob.address, '//Bob'); + itSub('(!negative test!) Add sponsor with a non-owner', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'SetCollectionSponsor-Neg-1', tokenPrefix: 'SCS'}); + await expect(collection.setSponsor(bob, bob.address)) + .to.be.rejectedWith(/common\.NoPermission/); }); - it('(!negative test!) Add sponsor to a collection that never existed', async () => { - // Find the collection that never existed - let collectionId = 0; - await usingApi(async (api) => { - collectionId = await getCreatedCollectionCount(api) + 1; - }); - await setCollectionSponsorExpectFailure(collectionId, bob.address); + itSub('(!negative test!) Add sponsor to a collection that never existed', async ({helper}) => { + const collectionId = (1 << 32) - 1; + await expect(helper.collection.setSponsor(alice, collectionId, bob.address)) + .to.be.rejectedWith(/common\.CollectionNotFound/); }); - it('(!negative test!) Add sponsor to a collection that was destroyed', async () => { - const collectionId = await createCollectionExpectSuccess(); - await destroyCollectionExpectSuccess(collectionId); - await setCollectionSponsorExpectFailure(collectionId, bob.address); + + itSub('(!negative test!) Add sponsor to a collection that was destroyed', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'SetCollectionSponsor-Neg-2', tokenPrefix: 'SCS'}); + await collection.burn(alice); + await expect(collection.setSponsor(alice, bob.address)) + .to.be.rejectedWith(/common\.CollectionNotFound/); }); }); diff --git a/tests/src/setMintPermission.test.ts b/tests/src/setMintPermission.test.ts deleted file mode 100644 index d5332bef14..0000000000 --- a/tests/src/setMintPermission.test.ts +++ /dev/null @@ -1,122 +0,0 @@ -// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. -// This file is part of Unique Network. 
- -// Unique Network is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Unique Network is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Unique Network. If not, see . - -import {IKeyringPair} from '@polkadot/types/types'; -import usingApi from './substrate/substrate-api'; -import { - addToAllowListExpectSuccess, - createCollectionExpectSuccess, - createItemExpectFailure, - createItemExpectSuccess, - destroyCollectionExpectSuccess, - enableAllowListExpectSuccess, - findNotExistingCollection, - setMintPermissionExpectFailure, - setMintPermissionExpectSuccess, - addCollectionAdminExpectSuccess, -} from './util/helpers'; - -describe('Integration Test setMintPermission', () => { - let alice: IKeyringPair; - let bob: IKeyringPair; - - before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - }); - }); - - it('ensure allow-listed non-privileged address can mint tokens', async () => { - await usingApi(async () => { - const collectionId = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await enableAllowListExpectSuccess(alice, collectionId); - await setMintPermissionExpectSuccess(alice, collectionId, true); - await addToAllowListExpectSuccess(alice, collectionId, bob.address); - - await createItemExpectSuccess(bob, collectionId, 'NFT'); - }); - }); - - it('can be enabled twice', async () => { - await usingApi(async () => { - const collectionId = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await setMintPermissionExpectSuccess(alice, collectionId, true); - await setMintPermissionExpectSuccess(alice, collectionId, true); - }); - }); - - it('can be disabled twice', async () => { - await usingApi(async () => { - const collectionId = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await setMintPermissionExpectSuccess(alice, collectionId, true); - await setMintPermissionExpectSuccess(alice, collectionId, false); - await setMintPermissionExpectSuccess(alice, collectionId, false); - }); - }); - - it('Collection admin success on set', async () => { - await usingApi(async () => { - const collectionId = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await addCollectionAdminExpectSuccess(alice, collectionId, bob.address); - await setMintPermissionExpectSuccess(bob, collectionId, true); - }); - }); -}); - -describe('Negative Integration Test setMintPermission', () => { - let alice: IKeyringPair; - let bob: IKeyringPair; - - before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - }); - }); - - it('fails on not existing collection', async () => { - await usingApi(async (api) => { - const nonExistingCollection = await findNotExistingCollection(api); - await setMintPermissionExpectFailure(alice, nonExistingCollection, true); - }); - }); - - it('fails on removed collection', async () => { - await usingApi(async () => { - const removedCollectionId = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await 
destroyCollectionExpectSuccess(removedCollectionId); - - await setMintPermissionExpectFailure(alice, removedCollectionId, true); - }); - }); - - it('fails when not collection owner tries to set mint status', async () => { - const collectionId = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await enableAllowListExpectSuccess(alice, collectionId); - await setMintPermissionExpectFailure(bob, collectionId, true); - }); - - it('ensure non-allow-listed non-privileged address can\'t mint tokens', async () => { - await usingApi(async () => { - const collectionId = await createCollectionExpectSuccess({mode: {type: 'NFT'}}); - await enableAllowListExpectSuccess(alice, collectionId); - await setMintPermissionExpectSuccess(alice, collectionId, true); - - await createItemExpectFailure(bob, collectionId, 'NFT'); - }); - }); -}); diff --git a/tests/src/setPermissions.test.ts b/tests/src/setPermissions.test.ts new file mode 100644 index 0000000000..9cc412dee6 --- /dev/null +++ b/tests/src/setPermissions.test.ts @@ -0,0 +1,107 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +import {IKeyringPair} from '@polkadot/types/types'; +import {itSub, usingPlaygrounds, expect} from './util'; + +describe('Integration Test: Set Permissions', () => { + let alice: IKeyringPair; + let bob: IKeyringPair; + + before(async () => { + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob] = await helper.arrange.createAccounts([10n, 10n], donor); + }); + }); + + itSub('can all be enabled twice', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'SetPermissions-1', tokenPrefix: 'SP'}); + expect((await collection.getData())?.raw.permissions.access).to.not.equal('AllowList'); + + await collection.setPermissions(alice, {access: 'AllowList', mintMode: true, nesting: {collectionAdmin: true, tokenOwner: true, restricted: [1, 2]}}); + await collection.setPermissions(alice, {access: 'AllowList', mintMode: true, nesting: {collectionAdmin: true, tokenOwner: true, restricted: [1, 2]}}); + + const permissions = (await collection.getData())?.raw.permissions; + expect(permissions).to.be.deep.equal({ + access: 'AllowList', + mintMode: true, + nesting: {collectionAdmin: true, tokenOwner: true, restricted: [1, 2]}, + }); + }); + + itSub('can all be disabled twice', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'SetPermissions-2', tokenPrefix: 'SP'}); + expect((await collection.getData())?.raw.permissions.access).to.equal('Normal'); + + await collection.setPermissions(alice, {access: 'AllowList', nesting: {collectionAdmin: false, tokenOwner: true, restricted: [1, 2]}}); + expect((await collection.getData())?.raw.permissions).to.be.deep.equal({ + access: 'AllowList', + mintMode: false, + nesting: 
{collectionAdmin: false, tokenOwner: true, restricted: [1, 2]}, + }); + + await collection.setPermissions(alice, {access: 'Normal', mintMode: false, nesting: {}}); + await collection.setPermissions(alice, {access: 'Normal', mintMode: false, nesting: {}}); + expect((await collection.getData())?.raw.permissions).to.be.deep.equal({ + access: 'Normal', + mintMode: false, + nesting: {collectionAdmin: false, tokenOwner: false, restricted: null}, + }); + }); + + itSub('collection admin can set permissions', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'SetPermissions-2', tokenPrefix: 'SP'}); + await collection.addAdmin(alice, {Substrate: bob.address}); + await collection.setPermissions(bob, {access: 'AllowList', mintMode: true}); + + expect((await collection.getData())?.raw.permissions.access).to.equal('AllowList'); + expect((await collection.getData())?.raw.permissions.mintMode).to.equal(true); + }); +}); + +describe('Negative Integration Test: Set Permissions', () => { + let alice: IKeyringPair; + let bob: IKeyringPair; + + before(async () => { + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob] = await helper.arrange.createAccounts([10n, 10n], donor); + }); + }); + + itSub('fails on not existing collection', async ({helper}) => { + const collectionId = (1 << 32) - 1; + await expect(helper.collection.setPermissions(alice, collectionId, {access: 'AllowList', mintMode: true})) + .to.be.rejectedWith(/common\.CollectionNotFound/); + }); + + itSub('fails on removed collection', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'SetPermissions-Neg-1', tokenPrefix: 'SP'}); + await collection.burn(alice); + + await expect(collection.setPermissions(alice, {access: 'AllowList', mintMode: true})) + .to.be.rejectedWith(/common\.CollectionNotFound/); + }); + + itSub('fails when non-owner tries to set permissions', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'SetPermissions-Neg-2', tokenPrefix: 'SP'}); + + await expect(collection.setPermissions(bob, {access: 'AllowList', mintMode: true})) + .to.be.rejectedWith(/common\.NoPermission/); + }); +}); \ No newline at end of file diff --git a/tests/src/setPublicAccessMode.test.ts b/tests/src/setPublicAccessMode.test.ts deleted file mode 100644 index d71a812dfe..0000000000 --- a/tests/src/setPublicAccessMode.test.ts +++ /dev/null @@ -1,125 +0,0 @@ -// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. -// This file is part of Unique Network. - -// Unique Network is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Unique Network is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Unique Network. If not, see . 
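The deleted setMintPermission.test.ts above and setPublicAccessMode.test.ts below exercised flags that the new setPermissions.test.ts now covers through a single call. A hedged sketch of that call via the raw extrinsic (same unique.setCollectionPermissions signature the deleted tests use; the wrapper function is illustrative, not part of the diff):

import {ApiPromise} from '@polkadot/api';
import {IKeyringPair} from '@polkadot/types/types';
import {executeTransaction} from './substrate/substrate-api';

// Sketch: one extrinsic covers what the two deleted suites toggled separately.
async function allowListMinting(api: ApiPromise, owner: IKeyringPair, collectionId: number) {
  const tx = api.tx.unique.setCollectionPermissions(collectionId, {
    access: 'AllowList', // formerly exercised by setPublicAccessMode.test.ts
    mintMode: true,      // formerly exercised by setMintPermission.test.ts
  });
  await executeTransaction(api, owner, tx);
}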
- -// https://unique-network.readthedocs.io/en/latest/jsapi.html#setschemaversion -import {ApiPromise} from '@polkadot/api'; -import {IKeyringPair} from '@polkadot/types/types'; -import chai from 'chai'; -import chaiAsPromised from 'chai-as-promised'; -import usingApi, {submitTransactionExpectFailAsync} from './substrate/substrate-api'; -import { - addToAllowListExpectSuccess, - createCollectionExpectSuccess, - createItemExpectSuccess, - destroyCollectionExpectSuccess, - enablePublicMintingExpectSuccess, - enableAllowListExpectSuccess, - normalizeAccountId, - addCollectionAdminExpectSuccess, - getCreatedCollectionCount, -} from './util/helpers'; - -chai.use(chaiAsPromised); -const expect = chai.expect; - -let alice: IKeyringPair; -let bob: IKeyringPair; - -describe('Integration Test setPublicAccessMode(): ', () => { - before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - }); - }); - - it('Run extrinsic with collection id parameters, set the allowlist mode for the collection', async () => { - await usingApi(async () => { - const collectionId: number = await createCollectionExpectSuccess(); - await enableAllowListExpectSuccess(alice, collectionId); - await enablePublicMintingExpectSuccess(alice, collectionId); - await addToAllowListExpectSuccess(alice, collectionId, bob.address); - await createItemExpectSuccess(bob, collectionId, 'NFT', bob.address); - }); - }); - - it('Allowlisted collection limits', async () => { - await usingApi(async (api: ApiPromise) => { - const collectionId = await createCollectionExpectSuccess(); - await enableAllowListExpectSuccess(alice, collectionId); - await enablePublicMintingExpectSuccess(alice, collectionId); - const tx = api.tx.unique.createItem(collectionId, normalizeAccountId(bob.address), 'NFT'); - await expect(submitTransactionExpectFailAsync(bob, tx)).to.be.rejected; - }); - }); -}); - -describe('Negative Integration Test ext. 
setPublicAccessMode(): ', () => { - it('Set a non-existent collection', async () => { - await usingApi(async (api: ApiPromise) => { - // tslint:disable-next-line: radix - const collectionId = await getCreatedCollectionCount(api) + 1; - const tx = api.tx.unique.setCollectionPermissions(collectionId, {access: 'AllowList'}); - await expect(submitTransactionExpectFailAsync(alice, tx)).to.be.rejected; - }); - }); - - it('Set the collection that has been deleted', async () => { - await usingApi(async (api: ApiPromise) => { - // tslint:disable-next-line: no-bitwise - const collectionId = await createCollectionExpectSuccess(); - await destroyCollectionExpectSuccess(collectionId); - const tx = api.tx.unique.setCollectionPermissions(collectionId, {access: 'AllowList'}); - await expect(submitTransactionExpectFailAsync(alice, tx)).to.be.rejected; - }); - }); - - it('Re-set the list mode already set in quantity', async () => { - await usingApi(async () => { - const collectionId: number = await createCollectionExpectSuccess(); - await enableAllowListExpectSuccess(alice, collectionId); - await enableAllowListExpectSuccess(alice, collectionId); - }); - }); - - it('Execute method not on behalf of the collection owner', async () => { - await usingApi(async (api: ApiPromise) => { - // tslint:disable-next-line: no-bitwise - const collectionId = await createCollectionExpectSuccess(); - const tx = api.tx.unique.setCollectionPermissions(collectionId, {access: 'AllowList'}); - await expect(submitTransactionExpectFailAsync(bob, tx)).to.be.rejected; - }); - }); - - it('setPublicAccessMode by collection admin', async () => { - await usingApi(async (api: ApiPromise) => { - // tslint:disable-next-line: no-bitwise - const collectionId = await createCollectionExpectSuccess(); - await addCollectionAdminExpectSuccess(alice, collectionId, bob.address); - const tx = api.tx.unique.setCollectionPermissions(collectionId, {access: 'AllowList'}); - await expect(submitTransactionExpectFailAsync(bob, tx)).to.be.not.rejected; - }); - }); -}); - -describe('Negative Integration Test ext. 
collection admin setPublicAccessMode(): ', () => { - before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - }); - }); -}); diff --git a/tests/src/substrate/get-balance.ts b/tests/src/substrate/get-balance.ts index 807e40c73e..733dede57e 100644 --- a/tests/src/substrate/get-balance.ts +++ b/tests/src/substrate/get-balance.ts @@ -19,7 +19,7 @@ import {AccountInfo} from '@polkadot/types/interfaces/system'; import promisifySubstrate from './promisify-substrate'; import {IKeyringPair} from '@polkadot/types/types'; import {submitTransactionAsync} from './substrate-api'; -import {getGenericResult} from '../util/helpers'; +import {getGenericResult} from '../deprecated-helpers/helpers'; import {expect} from 'chai'; export default async function getBalance(api: ApiPromise, accounts: string[]): Promise> { diff --git a/tests/src/substrate/substrate-api.ts b/tests/src/substrate/substrate-api.ts index 2fba5c1e18..90ecf85b5b 100644 --- a/tests/src/substrate/substrate-api.ts +++ b/tests/src/substrate/substrate-api.ts @@ -25,6 +25,8 @@ import * as defs from '../interfaces/definitions'; import privateKey from './privateKey'; import promisifySubstrate from './promisify-substrate'; +import {SilentConsole} from '../util/playgrounds/unique.dev'; + function defaultApiOptions(): ApiOptions { @@ -39,13 +41,10 @@ function defaultApiOptions(): ApiOptions { extrinsic: {}, payload: {}, }, - CheckMaintenance: { - extrinsic: {}, - payload: {}, - }, }, rpc: { unique: defs.unique.rpc, + appPromotion: defs.appPromotion.rpc, rmrk: defs.rmrk.rpc, eth: { feeHistory: { @@ -79,25 +78,8 @@ export default async function usingApi(action: (api: ApiPromise, priva const api: ApiPromise = new ApiPromise(settings); let result: T = null as unknown as T; - // TODO: Remove, this is temporary: Filter unneeded API output - // (Jaco promised it will be removed in the next version) - const consoleErr = console.error; - const consoleLog = console.log; - const consoleWarn = console.warn; - - const outFn = (printer: any) => (...args: any[]) => { - for (const arg of args) { - if (typeof arg !== 'string') - continue; - if (arg.includes('1000:: Normal connection closure' || arg === 'Normal connection closure')) - return; - } - printer(...args); - }; - - console.error = outFn(consoleErr.bind(console)); - console.log = outFn(consoleLog.bind(console)); - console.warn = outFn(consoleWarn.bind(console)); + const silentConsole = new SilentConsole(); + silentConsole.enable(); try { await promisifySubstrate(api, async () => { @@ -110,9 +92,7 @@ export default async function usingApi(action: (api: ApiPromise, priva })(); } finally { await api.disconnect(); - console.error = consoleErr; - console.log = consoleLog; - console.warn = consoleWarn; + silentConsole.disable(); } return result as T; } @@ -130,6 +110,9 @@ function getTransactionStatus(events: EventRecord[], status: ExtrinsicStatus): T if (status.isBroadcast) { return TransactionStatus.NotReady; } + if (status.isRetracted) { + return TransactionStatus.NotReady; + } if (status.isInBlock || status.isFinalized) { if(events.filter(e => e.event.data.method === 'ExtrinsicFailed').length > 0) { return TransactionStatus.Fail; @@ -168,18 +151,32 @@ export function executeTransaction(api: ApiPromise, sender: IKeyringPair, transa }); } +/** + * @deprecated use `executeTransaction` instead + */ export function submitTransactionAsync(sender: IKeyringPair, transaction: SubmittableExtrinsic): Promise { /* eslint 
no-async-promise-executor: "off" */ return new Promise(async (resolve, reject) => { try { - await transaction.signAndSend(sender, ({events = [], status}) => { + await transaction.signAndSend(sender, ({events = [], status, dispatchError}) => { const transactionStatus = getTransactionStatus(events, status); if (transactionStatus === TransactionStatus.Success) { resolve(events); } else if (transactionStatus === TransactionStatus.Fail) { - console.log(`Something went wrong with transaction. Status: ${status}`); + let moduleError = null; + + if (dispatchError) { + if (dispatchError.isModule) { + const modErr = dispatchError.asModule; + const errorMeta = dispatchError.registry.findMetaError(modErr); + + moduleError = JSON.stringify(errorMeta, null, 4); + } + } + + console.log(`Something went wrong with transaction. Status: ${status}\nModule error: ${moduleError}`); reject(events); } }); diff --git a/tests/src/transfer.nload.ts b/tests/src/transfer.nload.ts index 4d95eab428..1e22f45af0 100644 --- a/tests/src/transfer.nload.ts +++ b/tests/src/transfer.nload.ts @@ -18,10 +18,24 @@ import {ApiPromise} from '@polkadot/api'; import {IKeyringPair} from '@polkadot/types/types'; import usingApi, {submitTransactionAsync} from './substrate/substrate-api'; import waitNewBlocks from './substrate/wait-new-blocks'; -import {findUnusedAddresses} from './util/helpers'; import * as cluster from 'cluster'; import os from 'os'; +async function findUnusedAddress(api: ApiPromise, privateKeyWrapper: (account: string) => IKeyringPair, seedAddition = ''): Promise { + let bal = 0n; + let unused; + do { + const randomSeed = 'seed' + Math.floor(Math.random() * Math.floor(10000)) + seedAddition; + unused = privateKeyWrapper(`//${randomSeed}`); + bal = (await api.query.system.account(unused.address)).data.free.toBigInt(); + } while (bal !== 0n); + return unused; +} + +function findUnusedAddresses(api: ApiPromise, privateKeyWrapper: (account: string) => IKeyringPair, amount: number): Promise { + return Promise.all(new Array(amount).fill(null).map(() => findUnusedAddress(api, privateKeyWrapper, '_' + Date.now()))); +} + // Innacurate transfer fee const FEE = 10n ** 8n; diff --git a/tests/src/transfer.test.ts b/tests/src/transfer.test.ts index 2011e5c445..a084363e8b 100644 --- a/tests/src/transfer.test.ts +++ b/tests/src/transfer.test.ts @@ -14,345 +14,314 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . 
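The updated submitTransactionAsync in substrate/substrate-api.ts (a little earlier in this diff) now decodes module errors from dispatchError before logging. For reference, the same decoding in isolation, as a hedged standalone sketch (function name illustrative):

import {ApiPromise} from '@polkadot/api';
import type {DispatchError} from '@polkadot/types/interfaces';

// Sketch: turn a module-level DispatchError into a readable "section.name: docs" string
// using the registry metadata, as the logging above does with findMetaError.
function describeDispatchError(api: ApiPromise, dispatchError: DispatchError): string {
  if (dispatchError.isModule) {
    const meta = api.registry.findMetaError(dispatchError.asModule);
    return `${meta.section}.${meta.name}: ${meta.docs.join(' ')}`;
  }
  return dispatchError.toString();
}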
-import {ApiPromise} from '@polkadot/api'; import {IKeyringPair} from '@polkadot/types/types'; -import {expect} from 'chai'; -import getBalance from './substrate/get-balance'; -import {default as usingApi, submitTransactionAsync} from './substrate/substrate-api'; -import { - burnItemExpectSuccess, createCollectionExpectSuccess, createItemExpectSuccess, - destroyCollectionExpectSuccess, - findUnusedAddress, - getCreateCollectionResult, - getCreateItemResult, - transferExpectFailure, - transferExpectSuccess, - addCollectionAdminExpectSuccess, - getCreatedCollectionCount, - toSubstrateAddress, - getTokenOwner, - normalizeAccountId, - getBalance as getTokenBalance, - transferFromExpectSuccess, - transferFromExpectFail, -} from './util/helpers'; -import { - subToEth, - itWeb3, -} from './eth/util/helpers'; - -let alice: IKeyringPair; -let bob: IKeyringPair; -let charlie: IKeyringPair; +import {itEth, usingEthPlaygrounds} from './eth/util'; +import {itSub, Pallets, usingPlaygrounds, expect} from './util'; describe('Integration Test Transfer(recipient, collection_id, item_id, value)', () => { + let donor: IKeyringPair; + let alice: IKeyringPair; + let bob: IKeyringPair; + before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); + await usingPlaygrounds(async (helper, privateKey) => { + donor = await privateKey({filename: __filename}); + [alice, bob] = await helper.arrange.createAccounts([50n, 10n], donor); }); }); - it('Balance transfers and check balance', async () => { - await usingApi(async (api, privateKeyWrapper) => { - const [alicesBalanceBefore, bobsBalanceBefore] = await getBalance(api, [alice.address, bob.address]); - - const transfer = api.tx.balances.transfer(bob.address, 1n); - const events = await submitTransactionAsync(alice, transfer); - const result = getCreateItemResult(events); - // tslint:disable-next-line:no-unused-expression - expect(result.success).to.be.true; - - const [alicesBalanceAfter, bobsBalanceAfter] = await getBalance(api, [alice.address, bob.address]); - - // tslint:disable-next-line:no-unused-expression - expect(alicesBalanceAfter < alicesBalanceBefore).to.be.true; - // tslint:disable-next-line:no-unused-expression - expect(bobsBalanceAfter > bobsBalanceBefore).to.be.true; - }); + itSub('Balance transfers and check balance', async ({helper}) => { + const alicesBalanceBefore = await helper.balance.getSubstrate(alice.address); + const bobsBalanceBefore = await helper.balance.getSubstrate(bob.address); + + expect(await helper.balance.transferToSubstrate(alice, bob.address, 1n)).to.be.true; + + const alicesBalanceAfter = await helper.balance.getSubstrate(alice.address); + const bobsBalanceAfter = await helper.balance.getSubstrate(bob.address); + + expect(alicesBalanceAfter < alicesBalanceBefore).to.be.true; + expect(bobsBalanceAfter > bobsBalanceBefore).to.be.true; }); - it('Inability to pay fees error message is correct', async () => { - await usingApi(async (api, privateKeyWrapper) => { - // Find unused address - const pk = await findUnusedAddress(api, privateKeyWrapper); - - const badTransfer = api.tx.balances.transfer(bob.address, 1n); - // const events = await submitTransactionAsync(pk, badTransfer); - const badTransaction = async () => { - const events = await submitTransactionAsync(pk, badTransfer); - const result = getCreateCollectionResult(events); - // tslint:disable-next-line:no-unused-expression - expect(result.success).to.be.false; - }; - await 
expect(badTransaction()).to.be.rejectedWith('Inability to pay some fees , e.g. account balance too low'); - }); + itSub('Inability to pay fees error message is correct', async ({helper}) => { + const [zero] = await helper.arrange.createAccounts([0n], donor); + + // console.error = () => {}; + // The following operation throws an error into the console and the logs. Pay it no heed as long as the test succeeds. + await expect(helper.balance.transferToSubstrate(zero, donor.address, 1n)) + .to.be.rejectedWith('Inability to pay some fees , e.g. account balance too low'); }); - it('User can transfer owned token', async () => { - await usingApi(async (api, privateKeyWrapper) => { - // nft - const nftCollectionId = await createCollectionExpectSuccess(); - const newNftTokenId = await createItemExpectSuccess(alice, nftCollectionId, 'NFT'); - await transferExpectSuccess(nftCollectionId, newNftTokenId, alice, bob, 1, 'NFT'); - // fungible - const fungibleCollectionId = await createCollectionExpectSuccess({mode: {type: 'Fungible', decimalPoints: 0}}); - const newFungibleTokenId = await createItemExpectSuccess(alice, fungibleCollectionId, 'Fungible'); - await transferExpectSuccess(fungibleCollectionId, newFungibleTokenId, alice, bob, 1, 'Fungible'); - // reFungible - const reFungibleCollectionId = await - createCollectionExpectSuccess({mode: {type: 'ReFungible'}}); - const newReFungibleTokenId = await createItemExpectSuccess(alice, reFungibleCollectionId, 'ReFungible'); - await transferExpectSuccess( - reFungibleCollectionId, - newReFungibleTokenId, - alice, - bob, - 100, - 'ReFungible', - ); - }); + itSub('[nft] User can transfer owned token', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'Transfer-1-NFT', description: '', tokenPrefix: 'T'}); + const nft = await collection.mintToken(alice); + + await nft.transfer(alice, {Substrate: bob.address}); + expect(await nft.getOwner()).to.be.deep.equal({Substrate: bob.address}); }); - it('Collection admin can transfer owned token', async () => { - await usingApi(async (api, privateKeyWrapper) => { - // nft - const nftCollectionId = await createCollectionExpectSuccess(); - await addCollectionAdminExpectSuccess(alice, nftCollectionId, bob.address); - const newNftTokenId = await createItemExpectSuccess(bob, nftCollectionId, 'NFT', bob.address); - await transferExpectSuccess(nftCollectionId, newNftTokenId, bob, alice, 1, 'NFT'); - // fungible - const fungibleCollectionId = await createCollectionExpectSuccess({mode: {type: 'Fungible', decimalPoints: 0}}); - await addCollectionAdminExpectSuccess(alice, fungibleCollectionId, bob.address); - const newFungibleTokenId = await createItemExpectSuccess(alice, fungibleCollectionId, 'Fungible', bob.address); - await transferExpectSuccess(fungibleCollectionId, newFungibleTokenId, bob, alice, 1, 'Fungible'); - // reFungible - const reFungibleCollectionId = await createCollectionExpectSuccess({mode: {type: 'ReFungible'}}); - await addCollectionAdminExpectSuccess(alice, reFungibleCollectionId, bob.address); - const newReFungibleTokenId = await createItemExpectSuccess(bob, reFungibleCollectionId, 'ReFungible', bob.address); - await transferExpectSuccess( - reFungibleCollectionId, - newReFungibleTokenId, - bob, - alice, - 100, - 'ReFungible', - ); - }); + itSub('[fungible] User can transfer owned token', async ({helper}) => { + const collection = await helper.ft.mintCollection(alice, {name: 'Transfer-1-FT', description: '', tokenPrefix: 'T'}); + await collection.mint(alice, 10n); + + await 
collection.transfer(alice, {Substrate: bob.address}, 9n); + expect(await collection.getBalance({Substrate: bob.address})).to.be.equal(9n); + expect(await collection.getBalance({Substrate: alice.address})).to.be.equal(1n); + }); + + itSub.ifWithPallets('[refungible] User can transfer owned token', [Pallets.ReFungible], async ({helper}) => { + const collection = await helper.rft.mintCollection(alice, {name: 'Transfer-1-RFT', description: '', tokenPrefix: 'T'}); + const rft = await collection.mintToken(alice, 10n); + + await rft.transfer(alice, {Substrate: bob.address}, 9n); + expect(await rft.getBalance({Substrate: bob.address})).to.be.equal(9n); + expect(await rft.getBalance({Substrate: alice.address})).to.be.equal(1n); + }); + + itSub('[nft] Collection admin can transfer owned token', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'Transfer-2-NFT', description: '', tokenPrefix: 'T'}); + await collection.addAdmin(alice, {Substrate: bob.address}); + + const nft = await collection.mintToken(bob, {Substrate: bob.address}); + await nft.transfer(bob, {Substrate: alice.address}); + + expect(await nft.getOwner()).to.be.deep.equal({Substrate: alice.address}); + }); + + itSub('[fungible] Collection admin can transfer owned token', async ({helper}) => { + const collection = await helper.ft.mintCollection(alice, {name: 'Transfer-2-FT', description: '', tokenPrefix: 'T'}); + await collection.addAdmin(alice, {Substrate: bob.address}); + + await collection.mint(bob, 10n, {Substrate: bob.address}); + await collection.transfer(bob, {Substrate: alice.address}, 1n); + + expect(await collection.getBalance({Substrate: bob.address})).to.be.equal(9n); + expect(await collection.getBalance({Substrate: alice.address})).to.be.equal(1n); + }); + + itSub.ifWithPallets('[refungible] Collection admin can transfer owned token', [Pallets.ReFungible], async ({helper}) => { + const collection = await helper.rft.mintCollection(alice, {name: 'Transfer-2-RFT', description: '', tokenPrefix: 'T'}); + await collection.addAdmin(alice, {Substrate: bob.address}); + + const rft = await collection.mintToken(bob, 10n, {Substrate: bob.address}); + await rft.transfer(bob, {Substrate: alice.address}, 1n); + + expect(await rft.getBalance({Substrate: bob.address})).to.be.equal(9n); + expect(await rft.getBalance({Substrate: alice.address})).to.be.equal(1n); }); }); describe('Negative Integration Test Transfer(recipient, collection_id, item_id, value)', () => { + let alice: IKeyringPair; + let bob: IKeyringPair; + before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - charlie = privateKeyWrapper('//Charlie'); + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob] = await helper.arrange.createAccounts([50n, 10n], donor); }); }); - it('Transfer with not existed collection_id', async () => { - await usingApi(async (api) => { - // nft - const nftCollectionCount = await getCreatedCollectionCount(api); - await transferExpectFailure(nftCollectionCount + 1, 1, alice, bob, 1); - // fungible - const fungibleCollectionCount = await getCreatedCollectionCount(api); - await transferExpectFailure(fungibleCollectionCount + 1, 0, alice, bob, 1); - // reFungible - const reFungibleCollectionCount = await getCreatedCollectionCount(api); - await transferExpectFailure(reFungibleCollectionCount + 1, 1, alice, bob, 1); - }); + + itSub('[nft] Transfer with 
not existed collection_id', async ({helper}) => { + const collectionId = (1 << 32) - 1; + await expect(helper.nft.transferToken(alice, collectionId, 1, {Substrate: bob.address})) + .to.be.rejectedWith(/common\.CollectionNotFound/); + }); + + itSub('[fungible] Transfer with not existed collection_id', async ({helper}) => { + const collectionId = (1 << 32) - 1; + await expect(helper.ft.transfer(alice, collectionId, {Substrate: bob.address})) + .to.be.rejectedWith(/common\.CollectionNotFound/); + }); + + itSub.ifWithPallets('[refungible] Transfer with not existed collection_id', [Pallets.ReFungible], async ({helper}) => { + const collectionId = (1 << 32) - 1; + await expect(helper.rft.transferToken(alice, collectionId, 1, {Substrate: bob.address})) + .to.be.rejectedWith(/common\.CollectionNotFound/); + }); + + itSub('[nft] Transfer with deleted collection_id', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'Transfer-Neg-1-NFT', description: '', tokenPrefix: 'T'}); + const nft = await collection.mintToken(alice); + + await nft.burn(alice); + await collection.burn(alice); + + await expect(nft.transfer(alice, {Substrate: bob.address})) + .to.be.rejectedWith(/common\.CollectionNotFound/); }); - it('Transfer with deleted collection_id', async () => { - // nft - const nftCollectionId = await createCollectionExpectSuccess(); - const newNftTokenId = await createItemExpectSuccess(alice, nftCollectionId, 'NFT'); - await burnItemExpectSuccess(alice, nftCollectionId, newNftTokenId); - await destroyCollectionExpectSuccess(nftCollectionId); - await transferExpectFailure(nftCollectionId, newNftTokenId, alice, bob, 1); - // fungible - const fungibleCollectionId = await createCollectionExpectSuccess({mode: {type: 'Fungible', decimalPoints: 0}}); - const newFungibleTokenId = await createItemExpectSuccess(alice, fungibleCollectionId, 'Fungible'); - await burnItemExpectSuccess(alice, fungibleCollectionId, newFungibleTokenId, 10); - await destroyCollectionExpectSuccess(fungibleCollectionId); - await transferExpectFailure(fungibleCollectionId, newFungibleTokenId, alice, bob, 1); - // reFungible - const reFungibleCollectionId = await - createCollectionExpectSuccess({mode: {type: 'ReFungible'}}); - const newReFungibleTokenId = await createItemExpectSuccess(alice, reFungibleCollectionId, 'ReFungible'); - await burnItemExpectSuccess(alice, reFungibleCollectionId, newReFungibleTokenId, 100); - await destroyCollectionExpectSuccess(reFungibleCollectionId); - await transferExpectFailure( - reFungibleCollectionId, - newReFungibleTokenId, - alice, - bob, - 1, - ); + + itSub('[fungible] Transfer with deleted collection_id', async ({helper}) => { + const collection = await helper.ft.mintCollection(alice, {name: 'Transfer-Neg-1-FT', description: '', tokenPrefix: 'T'}); + await collection.mint(alice, 10n); + + await collection.burnTokens(alice, 10n); + await collection.burn(alice); + + await expect(collection.transfer(alice, {Substrate: bob.address})) + .to.be.rejectedWith(/common\.CollectionNotFound/); }); - it('Transfer with not existed item_id', async () => { - // nft - const nftCollectionId = await createCollectionExpectSuccess(); - await transferExpectFailure(nftCollectionId, 2, alice, bob, 1); - // fungible - const fungibleCollectionId = await createCollectionExpectSuccess({mode: {type: 'Fungible', decimalPoints: 0}}); - await transferExpectFailure(fungibleCollectionId, 2, alice, bob, 1); - // reFungible - const reFungibleCollectionId = await - createCollectionExpectSuccess({mode: 
{type: 'ReFungible'}}); - await transferExpectFailure( - reFungibleCollectionId, - 2, - alice, - bob, - 1, - ); + + itSub.ifWithPallets('[refungible] Transfer with deleted collection_id', [Pallets.ReFungible], async ({helper}) => { + const collection = await helper.rft.mintCollection(alice, {name: 'Transfer-Neg-1-RFT', description: '', tokenPrefix: 'T'}); + const rft = await collection.mintToken(alice, 10n); + + await rft.burn(alice, 10n); + await collection.burn(alice); + + await expect(rft.transfer(alice, {Substrate: bob.address})) + .to.be.rejectedWith(/common\.CollectionNotFound/); }); - it('Transfer with deleted item_id', async () => { - // nft - const nftCollectionId = await createCollectionExpectSuccess(); - const newNftTokenId = await createItemExpectSuccess(alice, nftCollectionId, 'NFT'); - await burnItemExpectSuccess(alice, nftCollectionId, newNftTokenId, 1); - await transferExpectFailure(nftCollectionId, newNftTokenId, alice, bob, 1); - // fungible - const fungibleCollectionId = await createCollectionExpectSuccess({mode: {type: 'Fungible', decimalPoints: 0}}); - const newFungibleTokenId = await createItemExpectSuccess(alice, fungibleCollectionId, 'Fungible'); - await burnItemExpectSuccess(alice, fungibleCollectionId, newFungibleTokenId, 10); - await transferExpectFailure(fungibleCollectionId, newFungibleTokenId, alice, bob, 1); - // reFungible - const reFungibleCollectionId = await - createCollectionExpectSuccess({mode: {type: 'ReFungible'}}); - const newReFungibleTokenId = await createItemExpectSuccess(alice, reFungibleCollectionId, 'ReFungible'); - await burnItemExpectSuccess(alice, reFungibleCollectionId, newReFungibleTokenId, 100); - await transferExpectFailure( - reFungibleCollectionId, - newReFungibleTokenId, - alice, - bob, - 1, - ); + + itSub('[nft] Transfer with not existed item_id', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'Transfer-Neg-2-NFT', description: '', tokenPrefix: 'T'}); + await expect(collection.transferToken(alice, 1, {Substrate: bob.address})) + .to.be.rejectedWith(/common\.TokenNotFound/); }); - it('Transfer with recipient that is not owner', async () => { - // nft - const nftCollectionId = await createCollectionExpectSuccess(); - const newNftTokenId = await createItemExpectSuccess(alice, nftCollectionId, 'NFT'); - await transferExpectFailure(nftCollectionId, newNftTokenId, charlie, bob, 1); - // fungible - const fungibleCollectionId = await createCollectionExpectSuccess({mode: {type: 'Fungible', decimalPoints: 0}}); - const newFungibleTokenId = await createItemExpectSuccess(alice, fungibleCollectionId, 'Fungible'); - await transferExpectFailure(fungibleCollectionId, newFungibleTokenId, charlie, bob, 1); - // reFungible - const reFungibleCollectionId = await - createCollectionExpectSuccess({mode: {type: 'ReFungible'}}); - const newReFungibleTokenId = await createItemExpectSuccess(alice, reFungibleCollectionId, 'ReFungible'); - await transferExpectFailure( - reFungibleCollectionId, - newReFungibleTokenId, - charlie, - bob, - 1, - ); + + itSub('[fungible] Transfer with not existed item_id', async ({helper}) => { + const collection = await helper.ft.mintCollection(alice, {name: 'Transfer-Neg-2-FT', description: '', tokenPrefix: 'T'}); + await expect(collection.transfer(alice, {Substrate: bob.address})) + .to.be.rejectedWith(/common\.TokenValueTooLow/); }); -}); -describe('Zero value transfer(From)', () => { - before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = 
privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - }); + itSub.ifWithPallets('[refungible] Transfer with not existed item_id', [Pallets.ReFungible], async ({helper}) => { + const collection = await helper.rft.mintCollection(alice, {name: 'Transfer-Neg-2-RFT', description: '', tokenPrefix: 'T'}); + await expect(collection.transferToken(alice, 1, {Substrate: bob.address})) + .to.be.rejectedWith(/common\.TokenValueTooLow/); }); - it('NFT', async () => { - await usingApi(async (api: ApiPromise) => { - const nftCollectionId = await createCollectionExpectSuccess(); - const newNftTokenId = await createItemExpectSuccess(alice, nftCollectionId, 'NFT'); + itSub('[nft] Transfer with deleted item_id', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'Transfer-Neg-3-NFT', description: '', tokenPrefix: 'T'}); + const nft = await collection.mintToken(alice); - const transferTx = api.tx.unique.transfer(normalizeAccountId(bob), nftCollectionId, newNftTokenId, 0); - await submitTransactionAsync(alice, transferTx); - const address = normalizeAccountId(await getTokenOwner(api, nftCollectionId, newNftTokenId)); + await nft.burn(alice); - expect(toSubstrateAddress(address)).to.be.equal(alice.address); - }); + await expect(nft.transfer(alice, {Substrate: bob.address})) + .to.be.rejectedWith(/common\.TokenNotFound/); }); - it('RFT', async () => { - await usingApi(async (api: ApiPromise) => { - const reFungibleCollectionId = await createCollectionExpectSuccess({mode: {type: 'ReFungible'}}); - const newReFungibleTokenId = await createItemExpectSuccess(alice, reFungibleCollectionId, 'ReFungible'); - const balanceBeforeAlice = await getTokenBalance(api, reFungibleCollectionId, normalizeAccountId(alice), newReFungibleTokenId); - const balanceBeforeBob = await getTokenBalance(api, reFungibleCollectionId, normalizeAccountId(bob), newReFungibleTokenId); + itSub('[fungible] Transfer with deleted item_id', async ({helper}) => { + const collection = await helper.ft.mintCollection(alice, {name: 'Transfer-Neg-3-FT', description: '', tokenPrefix: 'T'}); + await collection.mint(alice, 10n); - const transferTx = api.tx.unique.transfer(normalizeAccountId(bob), reFungibleCollectionId, newReFungibleTokenId, 0); - await submitTransactionAsync(alice, transferTx); + await collection.burnTokens(alice, 10n); - const balanceAfterAlice = await getTokenBalance(api, reFungibleCollectionId, normalizeAccountId(alice), newReFungibleTokenId); - const balanceAfterBob = await getTokenBalance(api, reFungibleCollectionId, normalizeAccountId(bob), newReFungibleTokenId); + await expect(collection.transfer(alice, {Substrate: bob.address})) + .to.be.rejectedWith(/common\.TokenValueTooLow/); + }); - expect((balanceBeforeAlice)).to.be.equal(balanceAfterAlice); - expect((balanceBeforeBob)).to.be.equal(balanceAfterBob); - }); + itSub.ifWithPallets('[refungible] Transfer with deleted item_id', [Pallets.ReFungible], async ({helper}) => { + const collection = await helper.rft.mintCollection(alice, {name: 'Transfer-Neg-3-RFT', description: '', tokenPrefix: 'T'}); + const rft = await collection.mintToken(alice, 10n); + + await rft.burn(alice, 10n); + + await expect(rft.transfer(alice, {Substrate: bob.address})) + .to.be.rejectedWith(/common\.TokenValueTooLow/); + }); + + itSub('[nft] Transfer with recipient that is not owner', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'Transfer-Neg-4-NFT', description: '', tokenPrefix: 'T'}); + const nft = await 
collection.mintToken(alice); + + await expect(nft.transfer(bob, {Substrate: bob.address})) + .to.be.rejectedWith(/common\.NoPermission/); + expect(await nft.getOwner()).to.be.deep.equal({Substrate: alice.address}); }); - it('Fungible', async () => { - await usingApi(async (api: ApiPromise) => { - const fungibleCollectionId = await createCollectionExpectSuccess({mode: {type: 'Fungible', decimalPoints: 0}}); - const newFungibleTokenId = await createItemExpectSuccess(alice, fungibleCollectionId, 'Fungible'); - const balanceBeforeAlice = await getTokenBalance(api, fungibleCollectionId, normalizeAccountId(alice), newFungibleTokenId); - const balanceBeforeBob = await getTokenBalance(api, fungibleCollectionId, normalizeAccountId(bob), newFungibleTokenId); + itSub('[fungible] Transfer with recipient that is not owner', async ({helper}) => { + const collection = await helper.ft.mintCollection(alice, {name: 'Transfer-Neg-4-FT', description: '', tokenPrefix: 'T'}); + await collection.mint(alice, 10n); - const transferTx = api.tx.unique.transfer(normalizeAccountId(bob), fungibleCollectionId, newFungibleTokenId, 0); - await submitTransactionAsync(alice, transferTx); + await expect(collection.transfer(bob, {Substrate: bob.address}, 9n)) + .to.be.rejectedWith(/common\.TokenValueTooLow/); + expect(await collection.getBalance({Substrate: bob.address})).to.be.equal(0n); + expect(await collection.getBalance({Substrate: alice.address})).to.be.equal(10n); + }); - const balanceAfterAlice = await getTokenBalance(api, fungibleCollectionId, normalizeAccountId(alice), newFungibleTokenId); - const balanceAfterBob = await getTokenBalance(api, fungibleCollectionId, normalizeAccountId(bob), newFungibleTokenId); + itSub.ifWithPallets('[refungible] Transfer with recipient that is not owner', [Pallets.ReFungible], async ({helper}) => { + const collection = await helper.rft.mintCollection(alice, {name: 'Transfer-1-RFT', description: '', tokenPrefix: 'T'}); + const rft = await collection.mintToken(alice, 10n); - expect((balanceBeforeAlice)).to.be.equal(balanceAfterAlice); - expect((balanceBeforeBob)).to.be.equal(balanceAfterBob); - }); + await expect(rft.transfer(bob, {Substrate: bob.address}, 9n)) + .to.be.rejectedWith(/common\.TokenValueTooLow/); + expect(await rft.getBalance({Substrate: bob.address})).to.be.equal(0n); + expect(await rft.getBalance({Substrate: alice.address})).to.be.equal(10n); }); }); describe('Transfers to self (potentially over substrate-evm boundary)', () => { - itWeb3('Transfers to self. In case of same frontend', async ({api, privateKeyWrapper}) => { - const collectionId = await createCollectionExpectSuccess({mode: {type: 'Fungible', decimalPoints: 0}}); - const aliceProxy = subToEth(alice.address); - const tokenId = await createItemExpectSuccess(alice, collectionId, 'Fungible', {Substrate: alice.address}); - await transferExpectSuccess(collectionId, tokenId, alice, {Ethereum: aliceProxy}, 10, 'Fungible'); - const balanceAliceBefore = await getTokenBalance(api, collectionId, {Ethereum: aliceProxy}, tokenId); - await transferFromExpectSuccess(collectionId, tokenId, alice, {Ethereum: aliceProxy}, {Ethereum: aliceProxy}, 10, 'Fungible'); - const balanceAliceAfter = await getTokenBalance(api, collectionId, {Ethereum: aliceProxy}, tokenId); - expect(balanceAliceBefore).to.be.eq(balanceAliceAfter); + let donor: IKeyringPair; + + before(async function() { + await usingEthPlaygrounds(async (_, privateKey) => { + donor = await privateKey({filename: __filename}); + }); + }); + + itEth('Transfers to self. 
In case of same frontend', async ({helper}) => { + const [owner] = await helper.arrange.createAccounts([10n], donor); + const collection = await helper.ft.mintCollection(owner, {}); + await collection.mint(owner, 100n); + + const ownerProxy = helper.address.substrateToEth(owner.address); + + // transfer to own proxy + await collection.transfer(owner, {Ethereum: ownerProxy}, 10n); + expect(await collection.getBalance({Substrate: owner.address})).to.be.equal(90n); + expect(await collection.getBalance({Ethereum: ownerProxy})).to.be.equal(10n); + + // transfer-from own proxy to own proxy again + await collection.transferFrom(owner, {Ethereum: ownerProxy}, {Ethereum: ownerProxy}, 5n); + expect(await collection.getBalance({Substrate: owner.address})).to.be.equal(90n); + expect(await collection.getBalance({Ethereum: ownerProxy})).to.be.equal(10n); }); - itWeb3('Transfers to self. In case of substrate-evm boundary', async ({api, privateKeyWrapper}) => { - const collectionId = await createCollectionExpectSuccess({mode: {type: 'Fungible', decimalPoints: 0}}); - const aliceProxy = subToEth(alice.address); - const tokenId = await createItemExpectSuccess(alice, collectionId, 'Fungible', {Substrate: alice.address}); - const balanceAliceBefore = await getTokenBalance(api, collectionId, normalizeAccountId(alice), tokenId); - await transferExpectSuccess(collectionId, tokenId, alice, {Ethereum: aliceProxy} , 10, 'Fungible'); - await transferFromExpectSuccess(collectionId, tokenId, alice, {Ethereum: aliceProxy}, alice, 10, 'Fungible'); - const balanceAliceAfter = await getTokenBalance(api, collectionId, normalizeAccountId(alice), tokenId); - expect(balanceAliceBefore).to.be.eq(balanceAliceAfter); + itEth('Transfers to self. In case of substrate-evm boundary', async ({helper}) => { + const [owner] = await helper.arrange.createAccounts([10n], donor); + const collection = await helper.ft.mintCollection(owner, {}); + await collection.mint(owner, 100n); + + const ownerProxy = helper.address.substrateToEth(owner.address); + + // transfer to own proxy + await collection.transfer(owner, {Ethereum: ownerProxy}, 10n); + expect(await collection.getBalance({Substrate: owner.address})).to.be.equal(90n); + expect(await collection.getBalance({Ethereum: ownerProxy})).to.be.equal(10n); + + // transfer-from own proxy to self + await collection.transferFrom(owner, {Ethereum: ownerProxy}, {Substrate: owner.address}, 5n); + expect(await collection.getBalance({Substrate: owner.address})).to.be.equal(95n); + expect(await collection.getBalance({Ethereum: ownerProxy})).to.be.equal(5n); }); - itWeb3('Transfers to self. In case of inside substrate-evm', async ({api, privateKeyWrapper}) => { - const collectionId = await createCollectionExpectSuccess({mode: {type: 'Fungible', decimalPoints: 0}}); - const tokenId = await createItemExpectSuccess(alice, collectionId, 'Fungible', {Substrate: alice.address}); - const balanceAliceBefore = await getTokenBalance(api, collectionId, normalizeAccountId(alice), tokenId); - await transferExpectSuccess(collectionId, tokenId, alice, alice , 10, 'Fungible'); - await transferFromExpectSuccess(collectionId, tokenId, alice, alice, alice, 10, 'Fungible'); - const balanceAliceAfter = await getTokenBalance(api, collectionId, normalizeAccountId(alice), tokenId); - expect(balanceAliceBefore).to.be.eq(balanceAliceAfter); + itEth('Transfers to self. 
In case of inside substrate-evm', async ({helper}) => { + const [owner] = await helper.arrange.createAccounts([10n], donor); + const collection = await helper.ft.mintCollection(owner, {}); + await collection.mint(owner, 100n); + + // transfer to self again + await collection.transfer(owner, {Substrate: owner.address}, 10n); + expect(await collection.getBalance({Substrate: owner.address})).to.be.equal(100n); + + // transfer-from self to self again + await collection.transferFrom(owner, {Substrate: owner.address}, {Substrate: owner.address}, 5n); + expect(await collection.getBalance({Substrate: owner.address})).to.be.equal(100n); }); - itWeb3('Transfers to self. In case of inside substrate-evm when not enought "Fungibles"', async ({api, privateKeyWrapper}) => { - const collectionId = await createCollectionExpectSuccess({mode: {type: 'Fungible', decimalPoints: 0}}); - const tokenId = await createItemExpectSuccess(alice, collectionId, 'Fungible', {Substrate: alice.address}); - const balanceAliceBefore = await getTokenBalance(api, collectionId, normalizeAccountId(alice), tokenId); - await transferExpectFailure(collectionId, tokenId, alice, alice , 11); - await transferFromExpectFail(collectionId, tokenId, alice, alice, alice, 11); - const balanceAliceAfter = await getTokenBalance(api, collectionId, normalizeAccountId(alice), tokenId); - expect(balanceAliceBefore).to.be.eq(balanceAliceAfter); + itEth('Transfers to self. In case of inside substrate-evm when not enought "Fungibles"', async ({helper}) => { + const [owner] = await helper.arrange.createAccounts([10n], donor); + const collection = await helper.ft.mintCollection(owner, {}); + await collection.mint(owner, 10n); + + // transfer to self again + await expect(collection.transfer(owner, {Substrate: owner.address}, 11n)) + .to.be.rejectedWith(/common\.TokenValueTooLow/); + + // transfer-from self to self again + await expect(collection.transferFrom(owner, {Substrate: owner.address}, {Substrate: owner.address}, 12n)) + .to.be.rejectedWith(/common\.TokenValueTooLow/); + expect(await collection.getBalance({Substrate: owner.address})).to.be.equal(10n); }); }); diff --git a/tests/src/transferFrom.test.ts b/tests/src/transferFrom.test.ts index 3c2dfbba22..630aa68539 100644 --- a/tests/src/transferFrom.test.ts +++ b/tests/src/transferFrom.test.ts @@ -14,27 +14,8 @@ // You should have received a copy of the GNU General Public License // along with Unique Network. If not, see . 
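// --- Editor's note: illustrative sketch, not part of the patch above. -----------------
// The "Transfers to self" tests move fungibles between a Substrate owner and its EVM
// "mirror" obtained from helper.address.substrateToEth. Below is one generic way to
// derive such a mirror with @polkadot/util-crypto; whether it matches the playgrounds
// helper byte-for-byte is an assumption, and the function names are placeholders.
import {addressToEvm, evmToAddress} from '@polkadot/util-crypto';
import {u8aToHex} from '@polkadot/util';

// Truncate the 32-byte AccountId to a 20-byte H160 (the usual Substrate -> EVM mapping).
export function substrateToEthMirror(substrateAddress: string): string {
  return u8aToHex(addressToEvm(substrateAddress));
}

// The reverse direction hashes the H160 back into a Substrate address (not a round-trip
// of the truncation above, which is lossy by construction).
export function ethToSubstrateMirror(ethAddress: string, ss58Prefix = 42): string {
  return evmToAddress(ethAddress, ss58Prefix);
}
// ---------------------------------------------------------------------------------------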
-import {ApiPromise} from '@polkadot/api'; import {IKeyringPair} from '@polkadot/types/types'; -import chai from 'chai'; -import chaiAsPromised from 'chai-as-promised'; -import {default as usingApi} from './substrate/substrate-api'; -import { - approveExpectFail, - approveExpectSuccess, - createCollectionExpectSuccess, - createFungibleItemExpectSuccess, - createItemExpectSuccess, - getAllowance, - transferFromExpectFail, - transferFromExpectSuccess, - burnItemExpectSuccess, - setCollectionLimitsExpectSuccess, - getCreatedCollectionCount, -} from './util/helpers'; - -chai.use(chaiAsPromised); -const expect = chai.expect; +import {itSub, Pallets, usingPlaygrounds, expect} from './util'; describe('Integration Test transferFrom(from, recipient, collection_id, item_id, value):', () => { let alice: IKeyringPair; @@ -42,67 +23,64 @@ describe('Integration Test transferFrom(from, recipient, collection_id, item_id, let charlie: IKeyringPair; before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - charlie = privateKeyWrapper('//Charlie'); + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob, charlie] = await helper.arrange.createAccounts([20n, 10n, 10n], donor); }); }); - it('Execute the extrinsic and check nftItemList - owner of token', async () => { - await usingApi(async () => { - // nft - const nftCollectionId = await createCollectionExpectSuccess(); - const newNftTokenId = await createItemExpectSuccess(alice, nftCollectionId, 'NFT'); - await approveExpectSuccess(nftCollectionId, newNftTokenId, alice, bob.address); - - await transferFromExpectSuccess(nftCollectionId, newNftTokenId, bob, alice, charlie, 1, 'NFT'); - - // fungible - const fungibleCollectionId = await createCollectionExpectSuccess({mode: {type: 'Fungible', decimalPoints: 0}}); - const newFungibleTokenId = await createItemExpectSuccess(alice, fungibleCollectionId, 'Fungible'); - await approveExpectSuccess(fungibleCollectionId, newFungibleTokenId, alice, bob.address); - await transferFromExpectSuccess(fungibleCollectionId, newFungibleTokenId, bob, alice, charlie, 1, 'Fungible'); - - // reFungible - const reFungibleCollectionId = await - createCollectionExpectSuccess({mode: {type: 'ReFungible'}}); - const newReFungibleTokenId = await createItemExpectSuccess(alice, reFungibleCollectionId, 'ReFungible'); - await approveExpectSuccess(reFungibleCollectionId, newReFungibleTokenId, alice, bob.address, 100); - await transferFromExpectSuccess( - reFungibleCollectionId, - newReFungibleTokenId, - bob, - alice, - charlie, - 100, - 'ReFungible', - ); - }); + itSub('[nft] Execute the extrinsic and check nftItemList - owner of token', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'TransferFrom-1', description: '', tokenPrefix: 'TF'}); + const nft = await collection.mintToken(alice); + await nft.approve(alice, {Substrate: bob.address}); + expect(await nft.isApproved({Substrate: bob.address})).to.be.true; + + await nft.transferFrom(bob, {Substrate: alice.address}, {Substrate: charlie.address}); + expect(await nft.getOwner()).to.be.deep.equal({Substrate: charlie.address}); + }); + + itSub('[fungible] Execute the extrinsic and check nftItemList - owner of token', async ({helper}) => { + const collection = await helper.ft.mintCollection(alice, {name: 'TransferFrom-2', description: '', tokenPrefix: 'TF'}); + await collection.mint(alice, 10n); + 
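// Editor's note (descriptive comment, not part of the patch): the steps below approve
// bob for 7 of alice's 10 tokens, let bob spend 6 of them via transferFrom, and then
// check that charlie holds 6, alice keeps 4, and the remaining allowance drops to 1 (7n - 6n).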
await collection.approveTokens(alice, {Substrate: bob.address}, 7n); + expect(await collection.getApprovedTokens({Substrate: alice.address}, {Substrate: bob.address})).to.be.equal(7n); + + await collection.transferFrom(bob, {Substrate: alice.address}, {Substrate: charlie.address}, 6n); + expect(await collection.getBalance({Substrate: charlie.address})).to.be.equal(6n); + expect(await collection.getBalance({Substrate: alice.address})).to.be.equal(4n); + expect(await collection.getApprovedTokens({Substrate: alice.address}, {Substrate: bob.address})).to.be.equal(1n); }); - it('Should reduce allowance if value is big', async () => { - await usingApi(async (api, privateKeyWrapper) => { - const alice = privateKeyWrapper('//Alice'); - const bob = privateKeyWrapper('//Bob'); - const charlie = privateKeyWrapper('//Charlie'); + itSub.ifWithPallets('[refungible] Execute the extrinsic and check nftItemList - owner of token', [Pallets.ReFungible], async ({helper}) => { + const collection = await helper.rft.mintCollection(alice, {name: 'TransferFrom-3', description: '', tokenPrefix: 'TF'}); + const rft = await collection.mintToken(alice, 10n); + await rft.approve(alice, {Substrate: bob.address}, 7n); + expect(await rft.getApprovedPieces({Substrate: alice.address}, {Substrate: bob.address})).to.be.equal(7n); + + await rft.transferFrom(bob, {Substrate: alice.address}, {Substrate: charlie.address}, 6n); + expect(await rft.getBalance({Substrate: charlie.address})).to.be.equal(6n); + expect(await rft.getBalance({Substrate: alice.address})).to.be.equal(4n); + expect(await rft.getApprovedPieces({Substrate: alice.address}, {Substrate: bob.address})).to.be.equal(1n); + }); - // fungible - const fungibleCollectionId = await createCollectionExpectSuccess({mode: {type: 'Fungible', decimalPoints: 0}}); - const newFungibleTokenId = await createFungibleItemExpectSuccess(alice, fungibleCollectionId, {Value: 500000n}); + itSub('Should reduce allowance if value is big', async ({helper}) => { + // fungible + const collection = await helper.ft.mintCollection(alice, {name: 'TransferFrom-4', description: '', tokenPrefix: 'TF'}); + await collection.mint(alice, 500000n); - await approveExpectSuccess(fungibleCollectionId, newFungibleTokenId, alice, bob.address, 500000n); - await transferFromExpectSuccess(fungibleCollectionId, newFungibleTokenId, bob, alice, charlie, 500000n, 'Fungible'); - expect(await getAllowance(api, fungibleCollectionId, alice.address, bob.address, newFungibleTokenId)).to.equal(0n); - }); + await collection.approveTokens(alice, {Substrate: bob.address}, 500000n); + expect(await collection.getApprovedTokens({Substrate: alice.address}, {Substrate: bob.address})).to.be.equal(500000n); + await collection.transferFrom(bob, {Substrate: alice.address}, {Substrate: charlie.address}, 500000n); + expect(await collection.getApprovedTokens({Substrate: alice.address}, {Substrate: bob.address})).to.be.equal(0n); }); - it('can be called by collection owner on non-owned item when OwnerCanTransfer == true', async () => { - const collectionId = await createCollectionExpectSuccess(); - await setCollectionLimitsExpectSuccess(alice, collectionId, {ownerCanTransfer: true}); - const itemId = await createItemExpectSuccess(alice, collectionId, 'NFT', bob.address); + itSub('can be called by collection owner on non-owned item when OwnerCanTransfer == true', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'TransferFrom-5', description: '', tokenPrefix: 'TF'}); + await 
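// Editor's note (descriptive comment, not part of the patch): with ownerCanTransfer
// enabled below, the collection owner (alice) may move a token minted to bob without
// bob granting any approval.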
collection.setLimits(alice, {ownerCanTransfer: true}); - await transferFromExpectSuccess(collectionId, itemId, alice, bob, charlie); + const nft = await collection.mintToken(alice, {Substrate: bob.address}); + await nft.transferFrom(alice, {Substrate: bob.address}, {Substrate: charlie.address}); + expect(await nft.getOwner()).to.be.deep.equal({Substrate: charlie.address}); }); }); @@ -112,218 +90,263 @@ describe('Negative Integration Test transferFrom(from, recipient, collection_id, let charlie: IKeyringPair; before(async () => { - await usingApi(async (api, privateKeyWrapper) => { - alice = privateKeyWrapper('//Alice'); - bob = privateKeyWrapper('//Bob'); - charlie = privateKeyWrapper('//Charlie'); + await usingPlaygrounds(async (helper, privateKey) => { + const donor = await privateKey({filename: __filename}); + [alice, bob, charlie] = await helper.arrange.createAccounts([50n, 10n, 10n], donor); }); }); - it('transferFrom for a collection that does not exist', async () => { - await usingApi(async (api: ApiPromise) => { - // nft - const nftCollectionCount = await getCreatedCollectionCount(api); - await approveExpectFail(nftCollectionCount + 1, 1, alice, bob); - - await transferFromExpectFail(nftCollectionCount + 1, 1, bob, alice, charlie, 1); - - // fungible - const fungibleCollectionCount = await getCreatedCollectionCount(api); - await approveExpectFail(fungibleCollectionCount + 1, 0, alice, bob); - - await transferFromExpectFail(fungibleCollectionCount + 1, 0, bob, alice, charlie, 1); - // reFungible - const reFungibleCollectionCount = await getCreatedCollectionCount(api); - await approveExpectFail(reFungibleCollectionCount + 1, 1, alice, bob); - - await transferFromExpectFail(reFungibleCollectionCount + 1, 1, bob, alice, charlie, 1); - }); + itSub('transferFrom for a collection that does not exist', async ({helper}) => { + const collectionId = (1 << 32) - 1; + await expect(helper.collection.approveToken(alice, collectionId, 0, {Substrate: bob.address}, 1n)) + .to.be.rejectedWith(/common\.CollectionNotFound/); + await expect(helper.collection.transferTokenFrom(bob, collectionId, 0, {Substrate: alice.address}, {Substrate: bob.address}, 1n)) + .to.be.rejectedWith(/common\.CollectionNotFound/); }); - /* it('transferFrom for a collection that was destroyed', async () => { - await usingApi(async (api: ApiPromise) => { + /* itSub('transferFrom for a collection that was destroyed', async ({helper}) => { this test copies approve negative test - }); }); */ - /* it('transferFrom a token that does not exist', async () => { - await usingApi(async (api: ApiPromise) => { - this test copies approve negative test - }); + /* itSub('transferFrom a token that does not exist', async ({helper}) => { + this test copies approve negative test }); */ - /* it('transferFrom a token that was deleted', async () => { - await usingApi(async (api: ApiPromise) => { - this test copies approve negative test - }); + /* itSub('transferFrom a token that was deleted', async ({helper}) => { + this test copies approve negative test }); */ - it('transferFrom for not approved address', async () => { - await usingApi(async () => { - // nft - const nftCollectionId = await createCollectionExpectSuccess(); - const newNftTokenId = await createItemExpectSuccess(alice, nftCollectionId, 'NFT'); - - await transferFromExpectFail(nftCollectionId, newNftTokenId, bob, alice, charlie, 1); - - // fungible - const fungibleCollectionId = await createCollectionExpectSuccess({mode: {type: 'Fungible', decimalPoints: 0}}); - const newFungibleTokenId 
= await createItemExpectSuccess(alice, fungibleCollectionId, 'Fungible'); - await transferFromExpectFail(fungibleCollectionId, newFungibleTokenId, bob, alice, charlie, 1); - // reFungible - const reFungibleCollectionId = await - createCollectionExpectSuccess({mode: {type: 'ReFungible'}}); - const newReFungibleTokenId = await createItemExpectSuccess(alice, reFungibleCollectionId, 'ReFungible'); - await transferFromExpectFail( - reFungibleCollectionId, - newReFungibleTokenId, - bob, - alice, - charlie, - 1, - ); - }); + itSub('[nft] transferFrom for not approved address', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'TransferFrom-Neg-1', description: '', tokenPrefix: 'TF'}); + const nft = await collection.mintToken(alice); + + await expect(nft.transferFrom(bob, {Substrate: alice.address}, {Substrate: charlie.address})) + .to.be.rejectedWith(/common\.ApprovedValueTooLow/); + expect(await nft.getOwner()).to.be.deep.equal({Substrate: alice.address}); }); - it('transferFrom incorrect token count', async () => { - await usingApi(async () => { - // nft - const nftCollectionId = await createCollectionExpectSuccess(); - const newNftTokenId = await createItemExpectSuccess(alice, nftCollectionId, 'NFT'); - await approveExpectSuccess(nftCollectionId, newNftTokenId, alice, bob.address); - - await transferFromExpectFail(nftCollectionId, newNftTokenId, bob, alice, charlie, 2); - - // fungible - const fungibleCollectionId = await createCollectionExpectSuccess({mode: {type: 'Fungible', decimalPoints: 0}}); - const newFungibleTokenId = await createItemExpectSuccess(alice, fungibleCollectionId, 'Fungible'); - await approveExpectSuccess(fungibleCollectionId, newFungibleTokenId, alice, bob.address); - await transferFromExpectFail(fungibleCollectionId, newFungibleTokenId, bob, alice, charlie, 2); - // reFungible - const reFungibleCollectionId = await - createCollectionExpectSuccess({mode: {type: 'ReFungible'}}); - const newReFungibleTokenId = await createItemExpectSuccess(alice, reFungibleCollectionId, 'ReFungible'); - await approveExpectSuccess(reFungibleCollectionId, newReFungibleTokenId, alice, bob.address); - await transferFromExpectFail( - reFungibleCollectionId, - newReFungibleTokenId, - bob, - alice, - charlie, - 2, - ); - }); + itSub('[fungible] transferFrom for not approved address', async ({helper}) => { + const collection = await helper.ft.mintCollection(alice, {name: 'TransferFrom-Neg-1', description: '', tokenPrefix: 'TF'}); + await collection.mint(alice, 10n); + + await expect(collection.transferFrom(bob, {Substrate: alice.address}, {Substrate: charlie.address}, 5n)) + .to.be.rejectedWith(/common\.ApprovedValueTooLow/); + expect(await collection.getBalance({Substrate: alice.address})).to.be.deep.equal(10n); + expect(await collection.getBalance({Substrate: bob.address})).to.be.deep.equal(0n); + expect(await collection.getBalance({Substrate: charlie.address})).to.be.deep.equal(0n); }); - it('execute transferFrom from account that is not owner of collection', async () => { - await usingApi(async (api, privateKeyWrapper) => { - const dave = privateKeyWrapper('//Dave'); - // nft - const nftCollectionId = await createCollectionExpectSuccess(); - const newNftTokenId = await createItemExpectSuccess(alice, nftCollectionId, 'NFT'); - try { - await approveExpectFail(nftCollectionId, newNftTokenId, dave, bob); - await transferFromExpectFail(nftCollectionId, newNftTokenId, dave, alice, charlie, 1); - } catch (e) { - // tslint:disable-next-line:no-unused-expression - 
expect(e).to.be.exist; - } - - // await transferFromExpectFail(nftCollectionId, newNftTokenId, Dave, Alice, Charlie, 1); - - // fungible - const fungibleCollectionId = await createCollectionExpectSuccess({mode: {type: 'Fungible', decimalPoints: 0}}); - const newFungibleTokenId = await createItemExpectSuccess(alice, fungibleCollectionId, 'Fungible'); - try { - await approveExpectFail(fungibleCollectionId, newFungibleTokenId, dave, bob); - await transferFromExpectFail(fungibleCollectionId, newFungibleTokenId, dave, alice, charlie, 1); - } catch (e) { - // tslint:disable-next-line:no-unused-expression - expect(e).to.be.exist; - } - // reFungible - const reFungibleCollectionId = await - createCollectionExpectSuccess({mode: {type: 'ReFungible'}}); - const newReFungibleTokenId = await createItemExpectSuccess(alice, reFungibleCollectionId, 'ReFungible'); - try { - await approveExpectFail(reFungibleCollectionId, newReFungibleTokenId, dave, bob); - await transferFromExpectFail(reFungibleCollectionId, newReFungibleTokenId, dave, alice, charlie, 1); - } catch (e) { - // tslint:disable-next-line:no-unused-expression - expect(e).to.be.exist; - } - }); + itSub.ifWithPallets('[refungible] transferFrom for not approved address', [Pallets.ReFungible], async({helper}) => { + const collection = await helper.rft.mintCollection(alice, {name: 'TransferFrom-Neg-3', description: '', tokenPrefix: 'TF'}); + const rft = await collection.mintToken(alice, 10n); + + await expect(rft.transferFrom(bob, {Substrate: alice.address}, {Substrate: charlie.address})) + .to.be.rejectedWith(/common\.ApprovedValueTooLow/); + expect(await rft.getBalance({Substrate: alice.address})).to.be.deep.equal(10n); + expect(await rft.getBalance({Substrate: bob.address})).to.be.deep.equal(0n); + expect(await rft.getBalance({Substrate: charlie.address})).to.be.deep.equal(0n); }); - it('transferFrom burnt token before approve NFT', async () => { - await usingApi(async () => { - // nft - const nftCollectionId = await createCollectionExpectSuccess(); - await setCollectionLimitsExpectSuccess(alice, nftCollectionId, {ownerCanTransfer: true}); - const newNftTokenId = await createItemExpectSuccess(alice, nftCollectionId, 'NFT'); - await burnItemExpectSuccess(alice, nftCollectionId, newNftTokenId, 1); - await approveExpectFail(nftCollectionId, newNftTokenId, alice, bob); - await transferFromExpectFail(nftCollectionId, newNftTokenId, bob, alice, charlie, 1); - }); + + itSub('[nft] transferFrom incorrect token count', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'TransferFrom-Neg-4', description: '', tokenPrefix: 'TF'}); + const nft = await collection.mintToken(alice); + + await nft.approve(alice, {Substrate: bob.address}); + expect(await nft.isApproved({Substrate: bob.address})).to.be.true; + + await expect(helper.collection.transferTokenFrom( + bob, + collection.collectionId, + nft.tokenId, + {Substrate: alice.address}, + {Substrate: charlie.address}, + 2n, + )).to.be.rejectedWith(/nonfungible\.NonfungibleItemsHaveNoAmount/); + expect(await nft.getOwner()).to.be.deep.equal({Substrate: alice.address}); }); - it('transferFrom burnt token before approve Fungible', async () => { - await usingApi(async () => { - const fungibleCollectionId = await createCollectionExpectSuccess({mode: {type: 'Fungible', decimalPoints: 0}}); - await setCollectionLimitsExpectSuccess(alice, fungibleCollectionId, {ownerCanTransfer: true}); - const newFungibleTokenId = await createItemExpectSuccess(alice, fungibleCollectionId, 
'Fungible'); - await burnItemExpectSuccess(alice, fungibleCollectionId, newFungibleTokenId, 10); - await approveExpectSuccess(fungibleCollectionId, newFungibleTokenId, alice, bob.address); - await transferFromExpectFail(fungibleCollectionId, newFungibleTokenId, bob, alice, charlie, 1); - }); + itSub('[fungible] transferFrom incorrect token count', async ({helper}) => { + const collection = await helper.ft.mintCollection(alice, {name: 'TransferFrom-Neg-5', description: '', tokenPrefix: 'TF'}); + await collection.mint(alice, 10n); + + await collection.approveTokens(alice, {Substrate: bob.address}, 2n); + expect(await collection.getApprovedTokens({Substrate: alice.address}, {Substrate: bob.address})).to.be.eq(2n); + + await expect(collection.transferFrom(bob, {Substrate: alice.address}, {Substrate: charlie.address}, 5n)) + .to.be.rejectedWith(/common\.ApprovedValueTooLow/); + expect(await collection.getBalance({Substrate: alice.address})).to.be.deep.equal(10n); + expect(await collection.getBalance({Substrate: bob.address})).to.be.deep.equal(0n); + expect(await collection.getBalance({Substrate: charlie.address})).to.be.deep.equal(0n); }); - it('transferFrom burnt token before approve ReFungible', async () => { - await usingApi(async () => { - const reFungibleCollectionId = await createCollectionExpectSuccess({mode: {type: 'ReFungible'}}); - await setCollectionLimitsExpectSuccess(alice, reFungibleCollectionId, {ownerCanTransfer: true}); - const newReFungibleTokenId = await createItemExpectSuccess(alice, reFungibleCollectionId, 'ReFungible'); - await burnItemExpectSuccess(alice, reFungibleCollectionId, newReFungibleTokenId, 100); - await approveExpectFail(reFungibleCollectionId, newReFungibleTokenId, alice, bob); - await transferFromExpectFail(reFungibleCollectionId, newReFungibleTokenId, bob, alice, charlie, 1); - }); + itSub.ifWithPallets('[refungible] transferFrom incorrect token count', [Pallets.ReFungible], async ({helper}) => { + const collection = await helper.rft.mintCollection(alice, {name: 'TransferFrom-Neg-6', description: '', tokenPrefix: 'TF'}); + const rft = await collection.mintToken(alice, 10n); + + await rft.approve(alice, {Substrate: bob.address}, 5n); + expect(await rft.getApprovedPieces({Substrate: alice.address}, {Substrate: bob.address})).to.be.eq(5n); + + await expect(rft.transferFrom(bob, {Substrate: alice.address}, {Substrate: charlie.address}, 7n)) + .to.be.rejectedWith(/common\.ApprovedValueTooLow/); + expect(await rft.getBalance({Substrate: alice.address})).to.be.deep.equal(10n); + expect(await rft.getBalance({Substrate: bob.address})).to.be.deep.equal(0n); + expect(await rft.getBalance({Substrate: charlie.address})).to.be.deep.equal(0n); }); - it('transferFrom burnt token after approve NFT', async () => { - await usingApi(async () => { - // nft - const nftCollectionId = await createCollectionExpectSuccess(); - const newNftTokenId = await createItemExpectSuccess(alice, nftCollectionId, 'NFT'); - await approveExpectSuccess(nftCollectionId, newNftTokenId, alice, bob.address); - await burnItemExpectSuccess(alice, nftCollectionId, newNftTokenId, 1); - await transferFromExpectFail(nftCollectionId, newNftTokenId, bob, alice, charlie, 1); - }); + itSub('[nft] execute transferFrom from account that is not owner of collection', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'TransferFrom-Neg-7', description: '', tokenPrefix: 'TF'}); + const nft = await collection.mintToken(alice); + + await expect(nft.approve(charlie, {Substrate: 
bob.address})).to.be.rejectedWith(/common\.CantApproveMoreThanOwned/); + expect(await nft.isApproved({Substrate: bob.address})).to.be.false; + + await expect(nft.transferFrom( + charlie, + {Substrate: alice.address}, + {Substrate: charlie.address}, + )).to.be.rejectedWith(/common\.ApprovedValueTooLow/); + expect(await nft.getOwner()).to.be.deep.equal({Substrate: alice.address}); }); - it('transferFrom burnt token after approve Fungible', async () => { - await usingApi(async () => { - const fungibleCollectionId = await createCollectionExpectSuccess({mode: {type: 'Fungible', decimalPoints: 0}}); - const newFungibleTokenId = await createItemExpectSuccess(alice, fungibleCollectionId, 'Fungible'); - await approveExpectSuccess(fungibleCollectionId, newFungibleTokenId, alice, bob.address); - await burnItemExpectSuccess(alice, fungibleCollectionId, newFungibleTokenId, 10); - await transferFromExpectFail(fungibleCollectionId, newFungibleTokenId, bob, alice, charlie, 1); - }); + itSub('[fungible] execute transferFrom from account that is not owner of collection', async ({helper}) => { + const collection = await helper.ft.mintCollection(alice, {name: 'TransferFrom-Neg-8', description: '', tokenPrefix: 'TF'}); + await collection.mint(alice, 10000n); + + await expect(collection.approveTokens(charlie, {Substrate: bob.address}, 1n)).to.be.rejectedWith(/common\.CantApproveMoreThanOwned/); + expect(await collection.getApprovedTokens({Substrate: alice.address}, {Substrate: bob.address})).to.be.eq(0n); + expect(await collection.getApprovedTokens({Substrate: charlie.address}, {Substrate: bob.address})).to.be.eq(0n); + + await expect(collection.transferFrom( + charlie, + {Substrate: alice.address}, + {Substrate: charlie.address}, + )).to.be.rejectedWith(/common\.ApprovedValueTooLow/); + expect(await collection.getBalance({Substrate: alice.address})).to.be.deep.equal(10000n); + expect(await collection.getBalance({Substrate: bob.address})).to.be.deep.equal(0n); + expect(await collection.getBalance({Substrate: charlie.address})).to.be.deep.equal(0n); }); - it('transferFrom burnt token after approve ReFungible', async () => { - await usingApi(async () => { - const reFungibleCollectionId = await createCollectionExpectSuccess({mode: {type: 'ReFungible'}}); - const newReFungibleTokenId = await createItemExpectSuccess(alice, reFungibleCollectionId, 'ReFungible'); - await approveExpectSuccess(reFungibleCollectionId, newReFungibleTokenId, alice, bob.address); - await burnItemExpectSuccess(alice, reFungibleCollectionId, newReFungibleTokenId, 100); - await transferFromExpectFail(reFungibleCollectionId, newReFungibleTokenId, bob, alice, charlie, 1); - }); + itSub.ifWithPallets('[refungible] execute transferFrom from account that is not owner of collection', [Pallets.ReFungible], async ({helper}) => { + const collection = await helper.rft.mintCollection(alice, {name: 'TransferFrom-Neg-9', description: '', tokenPrefix: 'TF'}); + const rft = await collection.mintToken(alice, 10000n); + + await expect(rft.approve(charlie, {Substrate: bob.address}, 1n)).to.be.rejectedWith(/common\.CantApproveMoreThanOwned/); + expect(await rft.getApprovedPieces({Substrate: alice.address}, {Substrate: bob.address})).to.be.eq(0n); + expect(await rft.getApprovedPieces({Substrate: charlie.address}, {Substrate: bob.address})).to.be.eq(0n); + + await expect(rft.transferFrom( + charlie, + {Substrate: alice.address}, + {Substrate: charlie.address}, + )).to.be.rejectedWith(/common\.ApprovedValueTooLow/); + expect(await rft.getBalance({Substrate: 
alice.address})).to.be.deep.equal(10000n); + expect(await rft.getBalance({Substrate: bob.address})).to.be.deep.equal(0n); + expect(await rft.getBalance({Substrate: charlie.address})).to.be.deep.equal(0n); + }); + + itSub('transferFrom burnt token before approve NFT', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'TransferFrom-Neg-10', description: '', tokenPrefix: 'TF'}); + await collection.setLimits(alice, {ownerCanTransfer: true}); + const nft = await collection.mintToken(alice); + + await nft.burn(alice); + await expect(nft.approve(alice, {Substrate: bob.address})).to.be.rejectedWith(/common\.TokenNotFound/); + + await expect(nft.transferFrom( + bob, + {Substrate: alice.address}, + {Substrate: charlie.address}, + )).to.be.rejectedWith(/common\.ApprovedValueTooLow/); + }); + + itSub('transferFrom burnt token before approve Fungible', async ({helper}) => { + const collection = await helper.ft.mintCollection(alice, {name: 'TransferFrom-Neg-11', description: '', tokenPrefix: 'TF'}); + await collection.setLimits(alice, {ownerCanTransfer: true}); + await collection.mint(alice, 10n); + + await collection.burnTokens(alice, 10n); + await expect(collection.approveTokens(alice, {Substrate: bob.address})).to.be.not.rejected; + + await expect(collection.transferFrom( + alice, + {Substrate: alice.address}, + {Substrate: charlie.address}, + )).to.be.rejectedWith(/common\.TokenValueTooLow/); + }); + + itSub.ifWithPallets('transferFrom burnt token before approve ReFungible', [Pallets.ReFungible], async ({helper}) => { + const collection = await helper.rft.mintCollection(alice, {name: 'TransferFrom-Neg-12', description: '', tokenPrefix: 'TF'}); + await collection.setLimits(alice, {ownerCanTransfer: true}); + const rft = await collection.mintToken(alice, 10n); + + await rft.burn(alice, 10n); + await expect(rft.approve(alice, {Substrate: bob.address})).to.be.rejectedWith(/common\.CantApproveMoreThanOwned/); + + await expect(rft.transferFrom( + alice, + {Substrate: alice.address}, + {Substrate: charlie.address}, + )).to.be.rejectedWith(/common\.TokenValueTooLow/); }); - it('fails when called by collection owner on non-owned item when OwnerCanTransfer == false', async () => { - const collectionId = await createCollectionExpectSuccess(); - const itemId = await createItemExpectSuccess(alice, collectionId, 'NFT', bob.address); - await setCollectionLimitsExpectSuccess(alice, collectionId, {ownerCanTransfer: false}); + itSub('transferFrom burnt token after approve NFT', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'TransferFrom-Neg-13', description: '', tokenPrefix: 'TF'}); + const nft = await collection.mintToken(alice); + + await nft.approve(alice, {Substrate: bob.address}); + expect(await nft.isApproved({Substrate: bob.address})).to.be.true; + + await nft.burn(alice); + + await expect(nft.transferFrom( + bob, + {Substrate: alice.address}, + {Substrate: charlie.address}, + )).to.be.rejectedWith(/common\.ApprovedValueTooLow/); + }); + + itSub('transferFrom burnt token after approve Fungible', async ({helper}) => { + const collection = await helper.ft.mintCollection(alice, {name: 'TransferFrom-Neg-14', description: '', tokenPrefix: 'TF'}); + await collection.mint(alice, 10n); + + await collection.approveTokens(alice, {Substrate: bob.address}); + expect(await collection.getApprovedTokens({Substrate: alice.address}, {Substrate: bob.address})).to.be.eq(1n); + + await collection.burnTokens(alice, 10n); + + await 
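// Editor's note (descriptive comment, not part of the patch): bob's 1-token allowance
// granted above survives the burn, but alice no longer holds any tokens, so the spend
// below fails with TokenValueTooLow rather than ApprovedValueTooLow.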
expect(collection.transferFrom( + bob, + {Substrate: alice.address}, + {Substrate: charlie.address}, + )).to.be.rejectedWith(/common\.TokenValueTooLow/); + }); + + itSub.ifWithPallets('transferFrom burnt token after approve ReFungible', [Pallets.ReFungible], async ({helper}) => { + const collection = await helper.rft.mintCollection(alice, {name: 'TransferFrom-Neg-15', description: '', tokenPrefix: 'TF'}); + const rft = await collection.mintToken(alice, 10n); + + await rft.approve(alice, {Substrate: bob.address}, 10n); + expect(await rft.getApprovedPieces({Substrate: alice.address}, {Substrate: bob.address})).to.be.eq(10n); + + await rft.burn(alice, 10n); + + await expect(rft.transferFrom( + bob, + {Substrate: alice.address}, + {Substrate: charlie.address}, + )).to.be.rejectedWith(/common\.ApprovedValueTooLow/); + }); + + itSub('fails when called by collection owner on non-owned item when OwnerCanTransfer == false', async ({helper}) => { + const collection = await helper.nft.mintCollection(alice, {name: 'TransferFrom-Neg-16', description: '', tokenPrefix: 'TF'}); + const nft = await collection.mintToken(alice, {Substrate: bob.address}); + + await collection.setLimits(alice, {ownerCanTransfer: false}); - await transferFromExpectFail(collectionId, itemId, alice, bob, charlie); + await expect(nft.transferFrom( + alice, + {Substrate: bob.address}, + {Substrate: charlie.address}, + )).to.be.rejectedWith(/common\.ApprovedValueTooLow/); }); }); diff --git a/tests/src/tx-version-presence.test.ts b/tests/src/tx-version-presence.test.ts new file mode 100644 index 0000000000..4b932af45c --- /dev/null +++ b/tests/src/tx-version-presence.test.ts @@ -0,0 +1,32 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +import {Metadata} from '@polkadot/types'; +import {itSub, usingPlaygrounds, expect} from './util'; + +let metadata: Metadata; + +describe('TxVersion is present', () => { + before(async () => { + await usingPlaygrounds(async helper => { + metadata = await helper.callRpc('api.rpc.state.getMetadata', []); + }); + }); + + itSub('Signed extension CheckTxVersion is present', async () => { + expect(metadata.asLatest.extrinsic.signedExtensions.map(se => se.identifier.toString())).to.include('CheckTxVersion'); + }); +}); diff --git a/tests/src/util/contracthelpers.ts b/tests/src/util/contracthelpers.ts deleted file mode 100644 index a32d8961d7..0000000000 --- a/tests/src/util/contracthelpers.ts +++ /dev/null @@ -1,114 +0,0 @@ -// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. -// This file is part of Unique Network. - -// Unique Network is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// Unique Network is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Unique Network. If not, see . - -import chai from 'chai'; -import chaiAsPromised from 'chai-as-promised'; -import {submitTransactionAsync, submitTransactionExpectFailAsync} from '../substrate/substrate-api'; -import fs from 'fs'; -import {Abi, CodePromise, ContractPromise as Contract} from '@polkadot/api-contract'; -import {IKeyringPair} from '@polkadot/types/types'; -import {ApiPromise} from '@polkadot/api'; - -chai.use(chaiAsPromised); -const expect = chai.expect; -import {findUnusedAddress, getGenericResult} from '../util/helpers'; - -const value = 0; -const gasLimit = '200000000000'; -const endowment = '100000000000000000'; - -/* eslint no-async-promise-executor: "off" */ -function deployContract(alice: IKeyringPair, code: CodePromise, constructor = 'default', ...args: any[]): Promise { - return new Promise(async (resolve) => { - const unsub = await (code as any) - .tx[constructor]({value: endowment, gasLimit}, ...args) - .signAndSend(alice, (result: any) => { - if (result.status.isInBlock || result.status.isFinalized) { - // here we have an additional field in the result, containing the blueprint - resolve((result as any).contract); - unsub(); - } - }); - }); -} - -async function prepareDeployer(api: ApiPromise, privateKeyWrapper: (account: string) => IKeyringPair) { - // Find unused address - const deployer = await findUnusedAddress(api, privateKeyWrapper); - - // Transfer balance to it - const alice = privateKeyWrapper('//Alice'); - const amount = BigInt(endowment) + 10n**15n; - const tx = api.tx.balances.transfer(deployer.address, amount); - await submitTransactionAsync(alice, tx); - - return deployer; -} - -export async function deployFlipper(api: ApiPromise, privateKeyWrapper: (account: string) => IKeyringPair): Promise<[Contract, IKeyringPair]> { - const metadata = JSON.parse(fs.readFileSync('./src/flipper/metadata.json').toString('utf-8')); - const abi = new Abi(metadata); - - const deployer = await prepareDeployer(api, privateKeyWrapper); - - const wasm = fs.readFileSync('./src/flipper/flipper.wasm'); - - const code = new CodePromise(api, abi, wasm); - - const contract = (await deployContract(deployer, code, 'new', true)) as Contract; - - const initialGetResponse = await getFlipValue(contract, deployer); - expect(initialGetResponse).to.be.true; - - return [contract, deployer]; -} - -export async function getFlipValue(contract: Contract, deployer: IKeyringPair) { - const result = await contract.query.get(deployer.address, value, gasLimit); - - if(!result.result.isOk) { - throw 'Failed to get flipper value'; - } - return (result.result.asOk.data[0] == 0x00) ? 
false : true; -} - -export async function toggleFlipValueExpectSuccess(sender: IKeyringPair, contract: Contract) { - const tx = contract.tx.flip(value, gasLimit); - const events = await submitTransactionAsync(sender, tx); - const result = getGenericResult(events); - - expect(result.success).to.be.true; -} - -export async function toggleFlipValueExpectFailure(sender: IKeyringPair, contract: Contract) { - const tx = contract.tx.flip(value, gasLimit); - await expect(submitTransactionExpectFailAsync(sender, tx)).to.be.rejected; -} - -export async function deployTransferContract(api: ApiPromise, privateKeyWrapper: (account: string) => IKeyringPair): Promise<[Contract, IKeyringPair]> { - const metadata = JSON.parse(fs.readFileSync('./src/transfer_contract/metadata.json').toString('utf-8')); - const abi = new Abi(metadata); - - const deployer = await prepareDeployer(api, privateKeyWrapper); - - const wasm = fs.readFileSync('./src/transfer_contract/nft_transfer.wasm'); - - const code = new CodePromise(api, abi, wasm); - - const contract = await deployContract(deployer, code); - - return [contract, deployer]; -} diff --git a/tests/src/util/globalSetup.ts b/tests/src/util/globalSetup.ts new file mode 100644 index 0000000000..b15b264b9d --- /dev/null +++ b/tests/src/util/globalSetup.ts @@ -0,0 +1,107 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +import {usingPlaygrounds, Pallets, DONOR_FUNDING, MINIMUM_DONOR_FUND} from './index'; +import * as path from 'path'; +import {promises as fs} from 'fs'; + +// This function should be called before running test suites. +const globalSetup = async (): Promise => { + await usingPlaygrounds(async (helper, privateKey) => { + try { + // 1. Wait node producing blocks + await helper.wait.newBlocks(1, 600_000); + + // 2. Create donors for test files + await fundFilenamesWithRetries(3) + .then((result) => { + if (!result) Promise.reject(); + }); + + // 3. 
Set up App Promotion admin + const missingPallets = helper.fetchMissingPalletNames([Pallets.AppPromotion]); + if (missingPallets.length === 0) { + const superuser = await privateKey('//Alice'); + const palletAddress = helper.arrange.calculatePalletAddress('appstake'); + const palletAdmin = await privateKey('//PromotionAdmin'); + const api = helper.getApi(); + await helper.signTransaction(superuser, api.tx.sudo.sudo(api.tx.appPromotion.setAdminAddress({Substrate: palletAdmin.address}))); + const nominal = helper.balance.getOneTokenNominal(); + await helper.balance.transferToSubstrate(superuser, palletAdmin.address, 1000n * nominal); + await helper.balance.transferToSubstrate(superuser, palletAddress, 1000n * nominal); + } + } catch (error) { + console.error(error); + throw error; + } + }); +}; + +async function getFiles(rootPath: string): Promise<string[]> { + const files = await fs.readdir(rootPath, {withFileTypes: true}); + const filenames: string[] = []; + for (const entry of files) { + const res = path.resolve(rootPath, entry.name); + if (entry.isDirectory()) { + filenames.push(...await getFiles(res)); + } else { + filenames.push(res); + } + } + return filenames; +} + +const fundFilenames = async () => { + await usingPlaygrounds(async (helper, privateKey) => { + const oneToken = helper.balance.getOneTokenNominal(); + const alice = await privateKey('//Alice'); + const nonce = await helper.chain.getNonce(alice.address); + const filenames = await getFiles(path.resolve(__dirname, '..')); + + // batching is actually undesirable: it just takes extra time while all the transactions succeed anyway + const batchSize = 300; + let balanceGrantedCounter = 0; + for (let b = 0; b < filenames.length; b += batchSize) { + const tx = []; + let batchBalanceGrantedCounter = 0; + for (let i = 0; batchBalanceGrantedCounter < batchSize && b + i < filenames.length; i++) { + const f = filenames[b + i]; + if (!f.endsWith('.test.ts') && !f.endsWith('seqtest.ts') || f.includes('.outdated')) continue; + const account = await privateKey({filename: f, ignoreFundsPresence: true}); + const accountBalance = await helper.balance.getSubstrate(account.address); + + if (accountBalance < MINIMUM_DONOR_FUND * oneToken) { + tx.push(helper.executeExtrinsic( + alice, + 'api.tx.balances.transfer', + [account.address, DONOR_FUNDING * oneToken], + true, + {nonce: nonce + balanceGrantedCounter++}, + ).then(() => true).catch(() => {console.error(`Transaction to ${path.basename(f)} registered as failed. Strange.`); return false;})); + batchBalanceGrantedCounter++; + } + } + + if(tx.length > 0) { + console.log(`Granting funds to ${batchBalanceGrantedCounter} filename accounts.`); + const result = await Promise.all(tx); + if (result && result.lastIndexOf(false) > -1) throw new Error('Some transactions were registered as failed, though they probably succeeded. The balances should be rechecked.'); + } + } + + if (balanceGrantedCounter == 0) console.log('No account needs additional funding.'); + }); +}; + +const fundFilenamesWithRetries = async (retriesLeft: number): Promise<boolean> => { + if (retriesLeft <= 0) return Promise.resolve(false); + return fundFilenames() + .then(() => Promise.resolve(true)) + .catch(e => { + console.error(e); + console.error(`Some transactions might have failed. ${retriesLeft > 1 ? 'Retrying...' 
: 'Something is wrong.'}\n`); + return fundFilenamesWithRetries(--retriesLeft); + }); +}; + +globalSetup().catch(() => process.exit(1)); diff --git a/tests/src/util/index.ts b/tests/src/util/index.ts new file mode 100644 index 0000000000..a76e258659 --- /dev/null +++ b/tests/src/util/index.ts @@ -0,0 +1,138 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +import * as path from 'path'; +import * as crypto from 'crypto'; +import {IKeyringPair} from '@polkadot/types/types'; +import chai from 'chai'; +import chaiAsPromised from 'chai-as-promised'; +import {Context} from 'mocha'; +import config from '../config'; +import {ChainHelperBase} from './playgrounds/unique'; +import {ILogger} from './playgrounds/types'; +import {DevUniqueHelper, SilentLogger, SilentConsole, DevMoonbeamHelper, DevMoonriverHelper, DevAcalaHelper, DevKaruraHelper, DevRelayHelper, DevWestmintHelper} from './playgrounds/unique.dev'; + +chai.use(chaiAsPromised); +export const expect = chai.expect; + +const getTestHash = (filename: string) => { + return crypto.createHash('md5').update(filename).digest('hex'); +}; + +export const getTestSeed = (filename: string) => { + return `//Alice+${getTestHash(filename)}`; +}; + +async function usingPlaygroundsGeneral(helperType: new(logger: ILogger) => T, url: string, code: (helper: T, privateKey: (seed: string | {filename: string, ignoreFundsPresence?: boolean}) => Promise) => Promise) { + const silentConsole = new SilentConsole(); + silentConsole.enable(); + + const helper = new helperType(new SilentLogger()); + + try { + await helper.connect(url); + const ss58Format = helper.chain.getChainProperties().ss58Format; + const privateKey = async (seed: string | {filename: string, ignoreFundsPresence?: boolean}) => { + if (typeof seed === 'string') { + return helper.util.fromSeed(seed, ss58Format); + } + else { + const actualSeed = getTestSeed(seed.filename); + let account = helper.util.fromSeed(actualSeed, ss58Format); + // here's to hoping that no + if (!seed.ignoreFundsPresence && ((helper as any)['balance'] == undefined || await (helper as any).balance.getSubstrate(account.address) < MINIMUM_DONOR_FUND)) { + console.warn(`${path.basename(seed.filename)}: Not enough funds present on the filename account. 
Using the default one as the donor instead.`); + account = helper.util.fromSeed('//Alice', ss58Format); + } + return account; + } + }; + await code(helper, privateKey); + } + finally { + await helper.disconnect(); + silentConsole.disable(); + } +} + +export const usingPlaygrounds = (code: (helper: DevUniqueHelper, privateKey: (seed: string | {filename: string, ignoreFundsPresence?: boolean}) => Promise) => Promise, url: string = config.substrateUrl) => { + return usingPlaygroundsGeneral(DevUniqueHelper, url, code); +}; + +export const usingWestmintPlaygrounds = async (url: string, code: (helper: DevWestmintHelper, privateKey: (seed: string) => Promise) => Promise) => { + return usingPlaygroundsGeneral(DevWestmintHelper, url, code); +}; + +export const usingRelayPlaygrounds = async (url: string, code: (helper: DevRelayHelper, privateKey: (seed: string) => Promise) => Promise) => { + return usingPlaygroundsGeneral(DevRelayHelper, url, code); +}; + +export const usingAcalaPlaygrounds = async (url: string, code: (helper: DevAcalaHelper, privateKey: (seed: string) => Promise) => Promise) => { + return usingPlaygroundsGeneral(DevAcalaHelper, url, code); +}; + +export const usingKaruraPlaygrounds = async (url: string, code: (helper: DevKaruraHelper, privateKey: (seed: string) => Promise) => Promise) => { + return usingPlaygroundsGeneral(DevAcalaHelper, url, code); +}; + +export const usingMoonbeamPlaygrounds = async (url: string, code: (helper: DevMoonbeamHelper, privateKey: (seed: string) => Promise) => Promise) => { + return usingPlaygroundsGeneral(DevMoonbeamHelper, url, code); +}; + +export const usingMoonriverPlaygrounds = async (url: string, code: (helper: DevMoonbeamHelper, privateKey: (seed: string) => Promise) => Promise) => { + return usingPlaygroundsGeneral(DevMoonriverHelper, url, code); +}; + +export const MINIMUM_DONOR_FUND = 100_000n; +export const DONOR_FUNDING = 1_000_000n; + +export enum Pallets { + Inflation = 'inflation', + RmrkCore = 'rmrkcore', + RmrkEquip = 'rmrkequip', + ReFungible = 'refungible', + Fungible = 'fungible', + NFT = 'nonfungible', + Scheduler = 'scheduler', + AppPromotion = 'apppromotion', +} + +export function requirePalletsOrSkip(test: Context, helper: DevUniqueHelper, requiredPallets: string[]) { + const missingPallets = helper.fetchMissingPalletNames(requiredPallets); + + if (missingPallets.length > 0) { + const skipMsg = `\tSkipping test '${test.test?.title}'.\n\tThe following pallets are missing:\n\t- ${missingPallets.join('\n\t- ')}`; + console.warn('\x1b[38:5:208m%s\x1b[0m', skipMsg); + test.skip(); + } +} + +export async function itSub(name: string, cb: (apis: { helper: DevUniqueHelper, privateKey: (seed: string) => Promise }) => any, opts: { only?: boolean, skip?: boolean, requiredPallets?: string[] } = {}) { + (opts.only ? it.only : + opts.skip ? 
it.skip : it)(name, async function () { + await usingPlaygrounds(async (helper, privateKey) => { + if (opts.requiredPallets) { + requirePalletsOrSkip(this, helper, opts.requiredPallets); + } + + await cb({helper, privateKey}); + }); + }); +} +export async function itSubIfWithPallet(name: string, required: string[], cb: (apis: { helper: DevUniqueHelper, privateKey: (seed: string) => Promise }) => any, opts: { only?: boolean, skip?: boolean, requiredPallets?: string[] } = {}) { + return itSub(name, cb, {requiredPallets: required, ...opts}); +} +itSub.only = (name: string, cb: (apis: { helper: DevUniqueHelper, privateKey: (seed: string) => Promise }) => any) => itSub(name, cb, {only: true}); +itSub.skip = (name: string, cb: (apis: { helper: DevUniqueHelper, privateKey: (seed: string) => Promise }) => any) => itSub(name, cb, {skip: true}); + +itSubIfWithPallet.only = (name: string, required: string[], cb: (apis: { helper: DevUniqueHelper, privateKey: (seed: string) => Promise }) => any) => itSubIfWithPallet(name, required, cb, {only: true}); +itSubIfWithPallet.skip = (name: string, required: string[], cb: (apis: { helper: DevUniqueHelper, privateKey: (seed: string) => Promise }) => any) => itSubIfWithPallet(name, required, cb, {skip: true}); +itSub.ifWithPallets = itSubIfWithPallet; + +export async function describeXCM(title: string, fn: (this: Mocha.Suite) => void, opts: {skip?: boolean} = {}) { + (process.env.RUN_XCM_TESTS && !opts.skip + ? describe + : describe.skip)(title, fn); +} + +describeXCM.skip = (name: string, fn: (this: Mocha.Suite) => void) => describeXCM(name, fn, {skip: true}); diff --git a/tests/src/util/playgrounds/types.ts b/tests/src/util/playgrounds/types.ts new file mode 100644 index 0000000000..2d5cdb8833 --- /dev/null +++ b/tests/src/util/playgrounds/types.ts @@ -0,0 +1,219 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. 
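// Illustrative usage sketch, not part of this patch: how a test file under tests/src would
// consume the `usingPlaygrounds`/`itSub`/`Pallets` helpers exported from util/index.ts above.
// The suite title, collection metadata, and variable names are made-up examples; the APIs
// themselves are the ones introduced in this diff.
import {IKeyringPair} from '@polkadot/types/types';
import {itSub, usingPlaygrounds, expect, Pallets} from './util';

describe('Example: minting with the playgrounds helpers', () => {
  let donor: IKeyringPair;

  before(async () => {
    await usingPlaygrounds(async (_helper, privateKey) => {
      // Each test file gets its own donor account, derived from the file name
      // and funded by util/globalSetup.ts.
      donor = await privateKey({filename: __filename});
    });
  });

  itSub('mints and inspects an NFT', async ({helper}) => {
    const collection = await helper.nft.mintCollection(donor, {name: 'Example', description: '', tokenPrefix: 'EX'});
    const nft = await collection.mintToken(donor);
    expect(await nft.getOwner()).to.be.deep.equal({Substrate: donor.address});
  });

  itSub.ifWithPallets('runs only when the ReFungible pallet is present', [Pallets.ReFungible], async ({helper}) => {
    const collection = await helper.rft.mintCollection(donor, {name: 'Example RFT', description: '', tokenPrefix: 'EX'});
    const rft = await collection.mintToken(donor, 10n);
    expect(await rft.getBalance({Substrate: donor.address})).to.be.deep.equal(10n);
  });
});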
+// SPDX-License-Identifier: Apache-2.0 + +import {IKeyringPair} from '@polkadot/types/types'; + +export interface IEvent { + section: string; + method: string; + index: [number, number] | string; + data: any[]; + phase: {applyExtrinsic: number} | 'Initialization', +} + +export interface ITransactionResult { + status: 'Fail' | 'Success'; + result: { + dispatchError: any, + events: { + phase: any, // {ApplyExtrinsic: number} | 'Initialization', + event: IEvent; + }[]; + }, + moduleError?: string; +} + +export interface ISubscribeBlockEventsData { + number: number; + hash: string; + timestamp: number; + events: IEvent[]; +} + +export interface ILogger { + log: (msg: any, level?: string) => void; + level: { + ERROR: 'ERROR'; + WARNING: 'WARNING'; + INFO: 'INFO'; + [key: string]: string; + } +} + +export interface IUniqueHelperLog { + executedAt: number; + executionTime: number; + type: 'extrinsic' | 'rpc'; + status: 'Fail' | 'Success'; + call: string; + params: any[]; + moduleError?: string; + dispatchError?: any; + events?: any; +} + +export interface IApiListeners { + connected?: (...args: any[]) => any; + disconnected?: (...args: any[]) => any; + error?: (...args: any[]) => any; + ready?: (...args: any[]) => any; + decorated?: (...args: any[]) => any; +} + +export interface ICrossAccountId { + Substrate?: TSubstrateAccount; + Ethereum?: TEthereumAccount; +} + +export interface ICrossAccountIdLower { + substrate?: TSubstrateAccount; + ethereum?: TEthereumAccount; +} + +export interface ICollectionLimits { + accountTokenOwnershipLimit?: number | null; + sponsoredDataSize?: number | null; + sponsoredDataRateLimit?: {blocks: number} | {sponsoringDisabled: null} | null; + tokenLimit?: number | null; + sponsorTransferTimeout?: number | null; + sponsorApproveTimeout?: number | null; + ownerCanTransfer?: boolean | null; + ownerCanDestroy?: boolean | null; + transfersEnabled?: boolean | null; +} + +export interface INestingPermissions { + tokenOwner?: boolean; + collectionAdmin?: boolean; + restricted?: number[] | null; +} + +export interface ICollectionPermissions { + access?: 'Normal' | 'AllowList'; + mintMode?: boolean; + nesting?: INestingPermissions; +} + +export interface IProperty { + key: string; + value?: string; +} + +export interface ITokenPropertyPermission { + key: string; + permission: { + mutable?: boolean; + tokenOwner?: boolean; + collectionAdmin?: boolean; + } +} + +export interface IToken { + collectionId: number; + tokenId: number; +} + +export interface IBlock { + extrinsics: IExtrinsic[] + header: { + parentHash: string, + number: number, + }; +} + +export interface IExtrinsic { + isSigned: boolean, + method: { + method: string, + section: string, + args: any[] + } +} + +export interface ICollectionCreationOptions { + name?: string | number[]; + description?: string | number[]; + tokenPrefix?: string | number[]; + mode?: { + nft?: null; + refungible?: null; + fungible?: number; + } + permissions?: ICollectionPermissions; + properties?: IProperty[]; + tokenPropertyPermissions?: ITokenPropertyPermission[]; + limits?: ICollectionLimits; + pendingSponsor?: TSubstrateAccount; +} + +export interface IChainProperties { + ss58Format: number; + tokenDecimals: number[]; + tokenSymbol: string[] +} + +export interface ISubstrateBalance { + free: bigint, + reserved: bigint, + miscFrozen: bigint, + feeFrozen: bigint +} + +export interface IStakingInfo { + block: bigint, + amount: bigint, +} + +export interface ISchedulerOptions { + priority?: number, + periodic?: { + period: number, + 
repetitions: number, + }, +} + +export interface IForeignAssetMetadata { + name?: number | Uint8Array, + symbol?: string, + decimals?: number, + minimalBalance?: bigint, +} + +export interface MoonbeamAssetInfo { + location: any, + metadata: { + name: string, + symbol: string, + decimals: number, + isFrozen: boolean, + minimalBalance: bigint, + }, + existentialDeposit: bigint, + isSufficient: boolean, + unitsPerSecond: bigint, + numAssetsWeightHint: number, +} + +export interface AcalaAssetMetadata { + name: string, + symbol: string, + decimals: number, + minimalBalance: bigint, +} + +export interface DemocracyStandardAccountVote { + balance: bigint, + vote: { + aye: boolean, + conviction: number, + }, +} + +export type TSubstrateAccount = string; +export type TEthereumAccount = string; +export type TApiAllowedListeners = 'connected' | 'disconnected' | 'error' | 'ready' | 'decorated'; +export type TUniqueNetworks = 'opal' | 'quartz' | 'unique'; +export type TSiblingNetworkds = 'moonbeam' | 'moonriver' | 'acala' | 'karura' | 'westmint'; +export type TRelayNetworks = 'rococo' | 'westend'; +export type TNetworks = TUniqueNetworks | TSiblingNetworkds | TRelayNetworks; +export type TSigner = IKeyringPair; // | 'string' diff --git a/tests/src/util/playgrounds/unique.dev.ts b/tests/src/util/playgrounds/unique.dev.ts new file mode 100644 index 0000000000..7094f9f773 --- /dev/null +++ b/tests/src/util/playgrounds/unique.dev.ts @@ -0,0 +1,473 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +import {stringToU8a} from '@polkadot/util'; +import {encodeAddress, mnemonicGenerate} from '@polkadot/util-crypto'; +import {UniqueHelper, MoonbeamHelper, ChainHelperBase, AcalaHelper, RelayHelper, WestmintHelper} from './unique'; +import {ApiPromise, Keyring, WsProvider} from '@polkadot/api'; +import * as defs from '../../interfaces/definitions'; +import {IKeyringPair} from '@polkadot/types/types'; +import {EventRecord} from '@polkadot/types/interfaces'; +import {ICrossAccountId} from './types'; +import {FrameSystemEventRecord} from '@polkadot/types/lookup'; + +export class SilentLogger { + log(_msg: any, _level: any): void { } + level = { + ERROR: 'ERROR' as const, + WARNING: 'WARNING' as const, + INFO: 'INFO' as const, + }; +} + +export class SilentConsole { + // TODO: Remove, this is temporary: Filter unneeded API output + // (Jaco promised it will be removed in the next version) + consoleErr: any; + consoleLog: any; + consoleWarn: any; + + constructor() { + this.consoleErr = console.error; + this.consoleLog = console.log; + this.consoleWarn = console.warn; + } + + enable() { + const outFn = (printer: any) => (...args: any[]) => { + for (const arg of args) { + if (typeof arg !== 'string') + continue; + if (arg.includes('1000:: Normal connection closure') || arg.includes('Not decorating unknown runtime apis:') || arg.includes('RPC methods not decorated:') || arg === 'Normal connection closure') + return; + } + printer(...args); + }; + + console.error = outFn(this.consoleErr.bind(console)); + console.log = outFn(this.consoleLog.bind(console)); + console.warn = outFn(this.consoleWarn.bind(console)); + } + + disable() { + console.error = this.consoleErr; + console.log = this.consoleLog; + console.warn = this.consoleWarn; + } +} + +export class DevUniqueHelper extends UniqueHelper { + /** + * Arrange methods for tests + */ + arrange: ArrangeGroup; + wait: WaitGroup; + admin: AdminGroup; + + constructor(logger: { log: (msg: any, level: any) => void, level: any }, 
options: {[key: string]: any} = {}) { + options.helperBase = options.helperBase ?? DevUniqueHelper; + + super(logger, options); + this.arrange = new ArrangeGroup(this); + this.wait = new WaitGroup(this); + this.admin = new AdminGroup(this); + } + + async connect(wsEndpoint: string, _listeners?: any): Promise { + const wsProvider = new WsProvider(wsEndpoint); + this.api = new ApiPromise({ + provider: wsProvider, + signedExtensions: { + ContractHelpers: { + extrinsic: {}, + payload: {}, + }, + CheckMaintenance: { + extrinsic: {}, + payload: {}, + }, + FakeTransactionFinalizer: { + extrinsic: {}, + payload: {}, + }, + }, + rpc: { + unique: defs.unique.rpc, + appPromotion: defs.appPromotion.rpc, + rmrk: defs.rmrk.rpc, + eth: { + feeHistory: { + description: 'Dummy', + params: [], + type: 'u8', + }, + maxPriorityFeePerGas: { + description: 'Dummy', + params: [], + type: 'u8', + }, + }, + }, + }); + await this.api.isReadyOrError; + this.network = await UniqueHelper.detectNetwork(this.api); + } +} + +export class DevRelayHelper extends RelayHelper {} + +export class DevWestmintHelper extends WestmintHelper { + wait: WaitGroup; + + constructor(logger: { log: (msg: any, level: any) => void, level: any }, options: {[key: string]: any} = {}) { + options.helperBase = options.helperBase ?? DevWestmintHelper; + + super(logger, options); + this.wait = new WaitGroup(this); + } +} + +export class DevMoonbeamHelper extends MoonbeamHelper { + account: MoonbeamAccountGroup; + wait: WaitGroup; + + constructor(logger: { log: (msg: any, level: any) => void, level: any }, options: {[key: string]: any} = {}) { + options.helperBase = options.helperBase ?? DevMoonbeamHelper; + + super(logger, options); + this.account = new MoonbeamAccountGroup(this); + this.wait = new WaitGroup(this); + } +} + +export class DevMoonriverHelper extends DevMoonbeamHelper {} + +export class DevAcalaHelper extends AcalaHelper { + wait: WaitGroup; + + constructor(logger: { log: (msg: any, level: any) => void, level: any }, options: {[key: string]: any} = {}) { + options.helperBase = options.helperBase ?? DevAcalaHelper; + + super(logger, options); + this.wait = new WaitGroup(this); + } +} + +export class DevKaruraHelper extends DevAcalaHelper {} + +class ArrangeGroup { + helper: DevUniqueHelper; + + constructor(helper: DevUniqueHelper) { + this.helper = helper; + } + + /** + * Generates accounts with the specified UNQ token balance + * @param balances balances for generated accounts. Each balance will be multiplied by the token nominal. 
+ * @param donor donor account for balances + * @returns array of newly created accounts + * @example const [acc1, acc2, acc3] = await createAccounts([0n, 10n, 20n], donor); + */ + createAccounts = async (balances: bigint[], donor: IKeyringPair): Promise => { + let nonce = await this.helper.chain.getNonce(donor.address); + const wait = new WaitGroup(this.helper); + const ss58Format = this.helper.chain.getChainProperties().ss58Format; + const tokenNominal = this.helper.balance.getOneTokenNominal(); + const transactions = []; + const accounts: IKeyringPair[] = []; + for (const balance of balances) { + const recipient = this.helper.util.fromSeed(mnemonicGenerate(), ss58Format); + accounts.push(recipient); + if (balance !== 0n) { + const tx = this.helper.constructApiCall('api.tx.balances.transfer', [{Id: recipient.address}, balance * tokenNominal]); + transactions.push(this.helper.signTransaction(donor, tx, {nonce}, 'account generation')); + nonce++; + } + } + + await Promise.all(transactions).catch(_e => {}); + + //#region TODO remove this region, when nonce problem will be solved + const checkBalances = async () => { + let isSuccess = true; + for (let i = 0; i < balances.length; i++) { + const balance = await this.helper.balance.getSubstrate(accounts[i].address); + if (balance !== balances[i] * tokenNominal) { + isSuccess = false; + break; + } + } + return isSuccess; + }; + + let accountsCreated = false; + const maxBlocksChecked = await this.helper.arrange.isDevNode() ? 50 : 5; + // checkBalances retry up to 5-50 blocks + for (let index = 0; index < maxBlocksChecked; index++) { + accountsCreated = await checkBalances(); + if(accountsCreated) break; + await wait.newBlocks(1); + } + + if (!accountsCreated) throw Error('Accounts generation failed'); + //#endregion + + return accounts; + }; + + // TODO combine this method and createAccounts into one + createCrowd = async (accountsToCreate: number, withBalance: bigint, donor: IKeyringPair): Promise => { + const createAsManyAsCan = async () => { + let transactions: any = []; + const accounts: IKeyringPair[] = []; + let nonce = await this.helper.chain.getNonce(donor.address); + const tokenNominal = this.helper.balance.getOneTokenNominal(); + for (let i = 0; i < accountsToCreate; i++) { + if (i === 500) { // if there are too many accounts to create + await Promise.allSettled(transactions); // wait while first 500 (should be 100 for devnode) tx will be settled + transactions = []; // + nonce = await this.helper.chain.getNonce(donor.address); // update nonce + } + const recepient = this.helper.util.fromSeed(mnemonicGenerate()); + accounts.push(recepient); + if (withBalance !== 0n) { + const tx = this.helper.constructApiCall('api.tx.balances.transfer', [{Id: recepient.address}, withBalance * tokenNominal]); + transactions.push(this.helper.signTransaction(donor, tx, {nonce}, 'account generation')); + nonce++; + } + } + + const fullfilledAccounts = []; + await Promise.allSettled(transactions); + for (const account of accounts) { + const accountBalance = await this.helper.balance.getSubstrate(account.address); + if (accountBalance === withBalance * tokenNominal) { + fullfilledAccounts.push(account); + } + } + return fullfilledAccounts; + }; + + + const crowd: IKeyringPair[] = []; + // do up to 5 retries + for (let index = 0; index < 5 && accountsToCreate !== 0; index++) { + const asManyAsCan = await createAsManyAsCan(); + crowd.push(...asManyAsCan); + accountsToCreate -= asManyAsCan.length; + } + + if (accountsToCreate !== 0) throw Error(`Crowd generation 
failed: ${accountsToCreate} accounts left`); + + return crowd; + }; + + isDevNode = async () => { + let blockNumber = (await this.helper.callRpc('api.query.system.number')).toJSON(); + if (blockNumber == 0) { + await this.helper.wait.newBlocks(1); + blockNumber = (await this.helper.callRpc('api.query.system.number')).toJSON(); + } + const block2 = await this.helper.callRpc('api.rpc.chain.getBlock', [await this.helper.callRpc('api.rpc.chain.getBlockHash', [blockNumber])]); + const block1 = await this.helper.callRpc('api.rpc.chain.getBlock', [await this.helper.callRpc('api.rpc.chain.getBlockHash', [blockNumber - 1])]); + const findCreationDate = async (block: any) => { + const humanBlock = block.toHuman(); + let date; + humanBlock.block.extrinsics.forEach((ext: any) => { + if(ext.method.section === 'timestamp') { + date = Number(ext.method.args.now.replaceAll(',', '')); + } + }); + return date; + }; + const block1date = await findCreationDate(block1); + const block2date = await findCreationDate(block2); + if(block2date! - block1date! < 9000) return true; + }; + + async calculcateFee(payer: ICrossAccountId, promise: () => Promise): Promise { + const address = payer.Substrate ? payer.Substrate : await this.helper.address.ethToSubstrate(payer.Ethereum!); + let balance = await this.helper.balance.getSubstrate(address); + + await promise(); + + balance -= await this.helper.balance.getSubstrate(address); + + return balance; + } + + calculatePalletAddress(palletId: any) { + const address = stringToU8a(('modl' + palletId).padEnd(32, '\0')); + return encodeAddress(address); + } +} + +class MoonbeamAccountGroup { + helper: MoonbeamHelper; + + keyring: Keyring; + _alithAccount: IKeyringPair; + _baltatharAccount: IKeyringPair; + _dorothyAccount: IKeyringPair; + + constructor(helper: MoonbeamHelper) { + this.helper = helper; + + this.keyring = new Keyring({type: 'ethereum'}); + const alithPrivateKey = '0x5fb92d6e98884f76de468fa3f6278f8807c48bebc13595d45af5bdc4da702133'; + const baltatharPrivateKey = '0x8075991ce870b93a8870eca0c0f91913d12f47948ca0fd25b49c6fa7cdbeee8b'; + const dorothyPrivateKey = '0x39539ab1876910bbf3a223d84a29e28f1cb4e2e456503e7e91ed39b2e7223d68'; + + this._alithAccount = this.keyring.addFromUri(alithPrivateKey, undefined, 'ethereum'); + this._baltatharAccount = this.keyring.addFromUri(baltatharPrivateKey, undefined, 'ethereum'); + this._dorothyAccount = this.keyring.addFromUri(dorothyPrivateKey, undefined, 'ethereum'); + } + + alithAccount() { + return this._alithAccount; + } + + baltatharAccount() { + return this._baltatharAccount; + } + + dorothyAccount() { + return this._dorothyAccount; + } + + create() { + return this.keyring.addFromUri(mnemonicGenerate()); + } +} + +class WaitGroup { + helper: ChainHelperBase; + + constructor(helper: ChainHelperBase) { + this.helper = helper; + } + + sleep(milliseconds: number) { + return new Promise((resolve) => setTimeout(resolve, milliseconds)); + } + + private async waitWithTimeout(promise: Promise, timeout: number) { + let isBlock = false; + promise.then(() => isBlock = true).catch(() => isBlock = true); + let totalTime = 0; + const step = 100; + while(!isBlock) { + await this.sleep(step); + totalTime += step; + if(totalTime >= timeout) throw Error('Blocks production failed'); + } + return promise; + } + + /** + * Wait for specified number of blocks + * @param blocksCount number of blocks to wait + * @returns + */ + async newBlocks(blocksCount = 1, timeout?: number): Promise { + timeout = timeout ?? 
blocksCount * 60_000; + // eslint-disable-next-line no-async-promise-executor + const promise = new Promise(async (resolve) => { + const unsubscribe = await this.helper.getApi().rpc.chain.subscribeNewHeads(() => { + if (blocksCount > 0) { + blocksCount--; + } else { + unsubscribe(); + resolve(); + } + }); + }); + await this.waitWithTimeout(promise, timeout); + return promise; + } + + async forParachainBlockNumber(blockNumber: bigint, timeout?: number) { + timeout = timeout ?? 30 * 60 * 1000; + // eslint-disable-next-line no-async-promise-executor + const promise = new Promise(async (resolve) => { + const unsubscribe = await this.helper.getApi().rpc.chain.subscribeNewHeads((data: any) => { + if (data.number.toNumber() >= blockNumber) { + unsubscribe(); + resolve(); + } + }); + }); + await this.waitWithTimeout(promise, timeout); + return promise; + } + + async forRelayBlockNumber(blockNumber: bigint, timeout?: number) { + timeout = timeout ?? 30 * 60 * 1000; + // eslint-disable-next-line no-async-promise-executor + const promise = new Promise(async (resolve) => { + const unsubscribe = await this.helper.getApi().query.parachainSystem.validationData((data: any) => { + if (data.value.relayParentNumber.toNumber() >= blockNumber) { + // @ts-ignore + unsubscribe(); + resolve(); + } + }); + }); + await this.waitWithTimeout(promise, timeout); + return promise; + } + + async event(maxBlocksToWait: number, eventSection: string, eventMethod: string) { + // eslint-disable-next-line no-async-promise-executor + const promise = new Promise(async (resolve) => { + const unsubscribe = await this.helper.getApi().rpc.chain.subscribeNewHeads(async header => { + const blockNumber = header.number.toHuman(); + const blockHash = header.hash; + const eventIdStr = `${eventSection}.${eventMethod}`; + const waitLimitStr = `wait blocks remaining: ${maxBlocksToWait}`; + + this.helper.logger.log(`[Block #${blockNumber}] Waiting for event \`${eventIdStr}\` (${waitLimitStr})`); + + const apiAt = await this.helper.getApi().at(blockHash); + const eventRecords = (await apiAt.query.system.events()) as any; + + const neededEvent = eventRecords.toArray().find((r: FrameSystemEventRecord) => { + return r.event.section == eventSection && r.event.method == eventMethod; + }); + + if (neededEvent) { + unsubscribe(); + resolve(neededEvent); + } else if (maxBlocksToWait > 0) { + maxBlocksToWait--; + } else { + this.helper.logger.log(`Event \`${eventIdStr}\` is NOT found`); + + unsubscribe(); + resolve(null); + } + }); + }); + return promise; + } +} + +class AdminGroup { + helper: UniqueHelper; + + constructor(helper: UniqueHelper) { + this.helper = helper; + } + + async payoutStakers(signer: IKeyringPair, stakersToPayout: number) { + const payoutResult = await this.helper.executeExtrinsic(signer, 'api.tx.appPromotion.payoutStakers', [stakersToPayout], true); + return payoutResult.result.events.filter(e => e.event.method === 'StakingRecalculation').map(e => { + return { + staker: e.event.data[0].toString(), + stake: e.event.data[1].toBigInt(), + payout: e.event.data[2].toBigInt(), + }; + }); + } +} diff --git a/tests/src/util/playgrounds/unique.ts b/tests/src/util/playgrounds/unique.ts new file mode 100644 index 0000000000..a980db4fde --- /dev/null +++ b/tests/src/util/playgrounds/unique.ts @@ -0,0 +1,3428 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. 
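// Illustrative usage sketch, not part of this patch: driving the ArrangeGroup and WaitGroup
// defined in unique.dev.ts above from test code. The import path assumes a caller inside
// tests/src; the account names and amounts are made-up examples.
import {usingPlaygrounds} from './util';

async function arrangeSketch() {
  await usingPlaygrounds(async (helper, privateKey) => {
    const donor = await privateKey('//Alice');
    // Wait for one freshly produced block (default timeout: 60 seconds per block).
    await helper.wait.newBlocks(1);
    // Create three throwaway accounts holding 0, 10 and 20 tokens respectively.
    const [empty, poor, rich] = await helper.arrange.createAccounts([0n, 10n, 20n], donor);
    // Measure the fee `rich` pays for a plain balance transfer
    // (note: the helper method is spelled `calculcateFee` in this patch).
    const fee = await helper.arrange.calculcateFee({Substrate: rich.address}, async () => {
      await helper.balance.transferToSubstrate(rich, poor.address, 1n);
    });
    console.log(`created ${empty.address}, ${poor.address}, ${rich.address}; transfer fee: ${fee}`);
  });
}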
+// SPDX-License-Identifier: Apache-2.0 + +/* eslint-disable @typescript-eslint/no-var-requires */ +/* eslint-disable function-call-argument-newline */ +/* eslint-disable no-prototype-builtins */ + +import {ApiPromise, WsProvider, Keyring} from '@polkadot/api'; +import {ApiInterfaceEvents, SignerOptions} from '@polkadot/api/types'; +import {encodeAddress, decodeAddress, keccakAsHex, evmToAddress, addressToEvm} from '@polkadot/util-crypto'; +import {IKeyringPair} from '@polkadot/types/types'; +import {IApiListeners, IBlock, IEvent, IChainProperties, ICollectionCreationOptions, ICollectionLimits, ICollectionPermissions, ICrossAccountId, ICrossAccountIdLower, ILogger, INestingPermissions, IProperty, IStakingInfo, ISchedulerOptions, ISubstrateBalance, IToken, ITokenPropertyPermission, ITransactionResult, IUniqueHelperLog, TApiAllowedListeners, TEthereumAccount, TSigner, TSubstrateAccount, IForeignAssetMetadata, TNetworks, MoonbeamAssetInfo, DemocracyStandardAccountVote, AcalaAssetMetadata} from './types'; + +export class CrossAccountId implements ICrossAccountId { + Substrate?: TSubstrateAccount; + Ethereum?: TEthereumAccount; + + constructor(account: ICrossAccountId) { + if (account.Substrate) this.Substrate = account.Substrate; + if (account.Ethereum) this.Ethereum = account.Ethereum; + } + + static fromKeyring(account: IKeyringPair, domain: 'Substrate' | 'Ethereum' = 'Substrate') { + switch (domain) { + case 'Substrate': return new CrossAccountId({Substrate: account.address}); + case 'Ethereum': return new CrossAccountId({Substrate: account.address}).toEthereum(); + } + } + + static fromLowerCaseKeys(address: ICrossAccountIdLower): CrossAccountId { + return new CrossAccountId({Substrate: address.substrate, Ethereum: address.ethereum}); + } + + static normalizeSubstrateAddress(address: TSubstrateAccount, ss58Format = 42): TSubstrateAccount { + return encodeAddress(decodeAddress(address), ss58Format); + } + + static withNormalizedSubstrate(address: TSubstrateAccount, ss58Format = 42): CrossAccountId { + return new CrossAccountId({Substrate: CrossAccountId.normalizeSubstrateAddress(address, ss58Format)}); + } + + withNormalizedSubstrate(ss58Format = 42): CrossAccountId { + if (this.Substrate) return CrossAccountId.withNormalizedSubstrate(this.Substrate, ss58Format); + return this; + } + + static translateSubToEth(address: TSubstrateAccount): TEthereumAccount { + return nesting.toChecksumAddress('0x' + Array.from(addressToEvm(address), i => i.toString(16).padStart(2, '0')).join('')); + } + + toEthereum(): CrossAccountId { + if (this.Substrate) return new CrossAccountId({Ethereum: CrossAccountId.translateSubToEth(this.Substrate)}); + return this; + } + + static translateEthToSub(address: TEthereumAccount, ss58Format?: number): TSubstrateAccount { + return evmToAddress(address, ss58Format); + } + + toSubstrate(ss58Format?: number): CrossAccountId { + if (this.Ethereum) return new CrossAccountId({Substrate: CrossAccountId.translateEthToSub(this.Ethereum, ss58Format)}); + return this; + } + + toLowerCase(): CrossAccountId { + if (this.Substrate) this.Substrate = this.Substrate.toLowerCase(); + if (this.Ethereum) this.Ethereum = this.Ethereum.toLowerCase(); + return this; + } +} + +const nesting = { + toChecksumAddress(address: string): string { + if (typeof address === 'undefined') return ''; + + if(!/^(0x)?[0-9a-f]{40}$/i.test(address)) throw new Error(`Given address "${address}" is not a valid Ethereum address.`); + + address = address.toLowerCase().replace(/^0x/i,''); + const addressHash = 
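// Descriptive note: this is EIP-55 checksumming. A hex digit of the address is uppercased
// whenever the corresponding nibble of keccak256(lowercased address) is 8 or higher,
// which is exactly what the loop below does.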
keccakAsHex(address).replace(/^0x/i,''); + const checksumAddress = ['0x']; + + for (let i = 0; i < address.length; i++) { + // If ith character is 8 to f then make it uppercase + if (parseInt(addressHash[i], 16) > 7) { + checksumAddress.push(address[i].toUpperCase()); + } else { + checksumAddress.push(address[i]); + } + } + return checksumAddress.join(''); + }, + tokenIdToAddress(collectionId: number, tokenId: number) { + return this.toChecksumAddress(`0xf8238ccfff8ed887463fd5e0${collectionId.toString(16).padStart(8, '0')}${tokenId.toString(16).padStart(8, '0')}`); + }, +}; + +class UniqueUtil { + static transactionStatus = { + NOT_READY: 'NotReady', + FAIL: 'Fail', + SUCCESS: 'Success', + }; + + static chainLogType = { + EXTRINSIC: 'extrinsic', + RPC: 'rpc', + }; + + static getTokenAccount(token: IToken): CrossAccountId { + return new CrossAccountId({Ethereum: this.getTokenAddress(token)}); + } + + static getTokenAddress(token: IToken): string { + return nesting.tokenIdToAddress(token.collectionId, token.tokenId); + } + + static getDefaultLogger(): ILogger { + return { + log(msg: any, level = 'INFO') { + console[level.toLocaleLowerCase() === 'error' ? 'error' : 'log'](...(Array.isArray(msg) ? msg : [msg])); + }, + level: { + ERROR: 'ERROR', + WARNING: 'WARNING', + INFO: 'INFO', + }, + }; + } + + static vec2str(arr: string[] | number[]) { + return arr.map(x => String.fromCharCode(parseInt(x.toString()))).join(''); + } + + static str2vec(string: string) { + if (typeof string !== 'string') return string; + return Array.from(string).map(x => x.charCodeAt(0)); + } + + static fromSeed(seed: string, ss58Format = 42) { + const keyring = new Keyring({type: 'sr25519', ss58Format}); + return keyring.addFromUri(seed); + } + + static extractCollectionIdFromCreationResult(creationResult: ITransactionResult): number { + if (creationResult.status !== this.transactionStatus.SUCCESS) { + throw Error('Unable to create collection!'); + } + + let collectionId = null; + creationResult.result.events.forEach(({event: {data, method, section}}) => { + if ((section === 'common') && (method === 'CollectionCreated')) { + collectionId = parseInt(data[0].toString(), 10); + } + }); + + if (collectionId === null) { + throw Error('No CollectionCreated event was found!'); + } + + return collectionId; + } + + static extractTokensFromCreationResult(creationResult: ITransactionResult): { + success: boolean, + tokens: {collectionId: number, tokenId: number, owner: CrossAccountId, amount: bigint}[], + } { + if (creationResult.status !== this.transactionStatus.SUCCESS) { + throw Error('Unable to create tokens!'); + } + let success = false; + const tokens = [] as {collectionId: number, tokenId: number, owner: CrossAccountId, amount: bigint}[]; + creationResult.result.events.forEach(({event: {data, method, section}}) => { + if (method === 'ExtrinsicSuccess') { + success = true; + } else if ((section === 'common') && (method === 'ItemCreated')) { + tokens.push({ + collectionId: parseInt(data[0].toString(), 10), + tokenId: parseInt(data[1].toString(), 10), + owner: data[2].toHuman(), + amount: data[3].toBigInt(), + }); + } + }); + return {success, tokens}; + } + + static extractTokensFromBurnResult(burnResult: ITransactionResult): { + success: boolean, + tokens: {collectionId: number, tokenId: number, owner: CrossAccountId, amount: bigint}[], + } { + if (burnResult.status !== this.transactionStatus.SUCCESS) { + throw Error('Unable to burn tokens!'); + } + let success = false; + const tokens = [] as {collectionId: number, tokenId: 
number, owner: CrossAccountId, amount: bigint}[]; + burnResult.result.events.forEach(({event: {data, method, section}}) => { + if (method === 'ExtrinsicSuccess') { + success = true; + } else if ((section === 'common') && (method === 'ItemDestroyed')) { + tokens.push({ + collectionId: parseInt(data[0].toString(), 10), + tokenId: parseInt(data[1].toString(), 10), + owner: data[2].toHuman(), + amount: data[3].toBigInt(), + }); + } + }); + return {success, tokens}; + } + + static findCollectionInEvents(events: {event: IEvent}[], collectionId: number, expectedSection: string, expectedMethod: string): boolean { + let eventId = null; + events.forEach(({event: {data, method, section}}) => { + if ((section === expectedSection) && (method === expectedMethod)) { + eventId = parseInt(data[0].toString(), 10); + } + }); + + if (eventId === null) { + throw Error(`No ${expectedMethod} event was found!`); + } + return eventId === collectionId; + } + + static isTokenTransferSuccess(events: {event: IEvent}[], collectionId: number, tokenId: number, fromAddressObj: ICrossAccountId, toAddressObj: ICrossAccountId, amount=1n) { + const normalizeAddress = (address: string | ICrossAccountId) => { + if(typeof address === 'string') return address; + const obj = {} as any; + Object.keys(address).forEach(k => { + obj[k.toLocaleLowerCase()] = address[k as 'Substrate' | 'Ethereum']; + }); + if(obj.substrate) return CrossAccountId.withNormalizedSubstrate(obj.substrate); + if(obj.ethereum) return CrossAccountId.fromLowerCaseKeys(obj).toLowerCase(); + return address; + }; + let transfer = {collectionId: null, tokenId: null, from: null, to: null, amount: 1} as any; + events.forEach(({event: {data, method, section}}) => { + if ((section === 'common') && (method === 'Transfer')) { + const hData = (data as any).toJSON(); + transfer = { + collectionId: hData[0], + tokenId: hData[1], + from: normalizeAddress(hData[2]), + to: normalizeAddress(hData[3]), + amount: BigInt(hData[4]), + }; + } + }); + let isSuccess = parseInt(collectionId.toString()) === transfer.collectionId && parseInt(tokenId.toString()) === transfer.tokenId; + isSuccess = isSuccess && JSON.stringify(normalizeAddress(fromAddressObj)) === JSON.stringify(transfer.from); + isSuccess = isSuccess && JSON.stringify(normalizeAddress(toAddressObj)) === JSON.stringify(transfer.to); + isSuccess = isSuccess && amount === transfer.amount; + return isSuccess; + } + + static bigIntToDecimals(number: bigint, decimals = 18) { + const numberStr = number.toString(); + const dotPos = numberStr.length - decimals; + + if (dotPos <= 0) { + return '0.' + '0'.repeat(Math.abs(dotPos)) + numberStr; + } else { + const intPart = numberStr.substring(0, dotPos); + const fractPart = numberStr.substring(dotPos); + return intPart + '.' 
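// Worked examples (descriptive note): bigIntToDecimals(1_500_000_000_000_000_000n) === '1.500000000000000000',
// and bigIntToDecimals(5n, 2) === '0.05' via the zero-padding branch above.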
+ fractPart; + } + } +} + +class UniqueEventHelper { + private static extractIndex(index: any): [number, number] | string { + if(index.toRawType() === '[u8;2]') return [index[0], index[1]]; + return index.toJSON(); + } + + private static extractSub(data: any, subTypes: any): {[key: string]: any} { + let obj: any = {}; + let index = 0; + + if (data.entries) { + for(const [key, value] of data.entries()) { + obj[key] = this.extractData(value, subTypes[index]); + index++; + } + } else obj = data.toJSON(); + + return obj; + } + + private static extractData(data: any, type: any): any { + if(!type) return data.toHuman(); + if (['u16', 'u32'].indexOf(type.type) > -1) return data.toNumber(); + if (['u64', 'u128', 'u256'].indexOf(type.type) > -1) return data.toBigInt(); + if(type.hasOwnProperty('sub')) return this.extractSub(data, type.sub); + return data.toHuman(); + } + + public static extractEvents(events: {event: any, phase: any}[]): IEvent[] { + const parsedEvents: IEvent[] = []; + + events.forEach((record) => { + const {event, phase} = record; + const types = event.typeDef; + + const eventData: IEvent = { + section: event.section.toString(), + method: event.method.toString(), + index: this.extractIndex(event.index), + data: [], + phase: phase.toJSON(), + }; + + event.data.forEach((val: any, index: number) => { + eventData.data.push(this.extractData(val, types[index])); + }); + + parsedEvents.push(eventData); + }); + + return parsedEvents; + } +} + +export class ChainHelperBase { + helperBase: any; + + transactionStatus = UniqueUtil.transactionStatus; + chainLogType = UniqueUtil.chainLogType; + util: typeof UniqueUtil; + eventHelper: typeof UniqueEventHelper; + logger: ILogger; + api: ApiPromise | null; + forcedNetwork: TNetworks | null; + network: TNetworks | null; + chainLog: IUniqueHelperLog[]; + children: ChainHelperBase[]; + address: AddressGroup; + chain: ChainGroup; + + constructor(logger?: ILogger, helperBase?: any) { + this.helperBase = helperBase; + + this.util = UniqueUtil; + this.eventHelper = UniqueEventHelper; + if (typeof logger == 'undefined') logger = this.util.getDefaultLogger(); + this.logger = logger; + this.api = null; + this.forcedNetwork = null; + this.network = null; + this.chainLog = []; + this.children = []; + this.address = new AddressGroup(this); + this.chain = new ChainGroup(this); + } + + clone(helperCls: ChainHelperBaseConstructor, options: {[key: string]: any} = {}) { + Object.setPrototypeOf(helperCls.prototype, this); + const newHelper = new helperCls(this.logger, options); + + newHelper.api = this.api; + newHelper.network = this.network; + newHelper.forceNetwork = this.forceNetwork; + + this.children.push(newHelper); + + return newHelper; + } + + getApi(): ApiPromise { + if(this.api === null) throw Error('API not initialized'); + return this.api; + } + + clearChainLog(): void { + this.chainLog = []; + } + + forceNetwork(value: TNetworks): void { + this.forcedNetwork = value; + } + + async connect(wsEndpoint: string, listeners?: IApiListeners) { + if (this.api !== null) throw Error('Already connected'); + const {api, network} = await ChainHelperBase.createConnection(wsEndpoint, listeners, this.forcedNetwork); + this.api = api; + this.network = network; + } + + async disconnect() { + for (const child of this.children) { + child.clearApi(); + } + + if (this.api === null) return; + await this.api.disconnect(); + this.clearApi(); + } + + clearApi() { + this.api = null; + this.network = null; + } + + static async detectNetwork(api: ApiPromise): Promise { + const spec 
= (await api.query.system.lastRuntimeUpgrade()).toJSON() as any; + const xcmChains = ['rococo', 'westend', 'westmint', 'acala', 'karura', 'moonbeam', 'moonriver']; + + if(xcmChains.indexOf(spec.specName) > -1) return spec.specName; + + if(['quartz', 'unique'].indexOf(spec.specName) > -1) return spec.specName; + return 'opal'; + } + + static async detectNetworkByWsEndpoint(wsEndpoint: string): Promise { + const api = new ApiPromise({provider: new WsProvider(wsEndpoint)}); + await api.isReady; + + const network = await this.detectNetwork(api); + + await api.disconnect(); + + return network; + } + + static async createConnection(wsEndpoint: string, listeners?: IApiListeners, network?: TNetworks | null): Promise<{ + api: ApiPromise; + network: TNetworks; + }> { + if(typeof network === 'undefined' || network === null) network = 'opal'; + const supportedRPC = { + opal: { + unique: require('@unique-nft/opal-testnet-types/definitions').unique.rpc, + }, + quartz: { + unique: require('@unique-nft/quartz-mainnet-types/definitions').unique.rpc, + }, + unique: { + unique: require('@unique-nft/unique-mainnet-types/definitions').unique.rpc, + }, + rococo: {}, + westend: {}, + moonbeam: {}, + moonriver: {}, + acala: {}, + karura: {}, + westmint: {}, + }; + if(!supportedRPC.hasOwnProperty(network)) network = await this.detectNetworkByWsEndpoint(wsEndpoint); + const rpc = supportedRPC[network]; + + // TODO: investigate how to replace rpc in runtime + // api._rpcCore.addUserInterfaces(rpc); + + const api = new ApiPromise({provider: new WsProvider(wsEndpoint), rpc}); + + await api.isReadyOrError; + + if (typeof listeners === 'undefined') listeners = {}; + for (const event of ['connected', 'disconnected', 'error', 'ready', 'decorated']) { + if (!listeners.hasOwnProperty(event) || typeof listeners[event as TApiAllowedListeners] === 'undefined') continue; + api.on(event as ApiInterfaceEvents, listeners[event as TApiAllowedListeners] as (...args: any[]) => any); + } + + return {api, network}; + } + + getTransactionStatus(data: {events: {event: IEvent}[], status: any}) { + const {events, status} = data; + if (status.isReady) { + return this.transactionStatus.NOT_READY; + } + if (status.isBroadcast) { + return this.transactionStatus.NOT_READY; + } + if (status.isInBlock || status.isFinalized) { + const errors = events.filter(e => e.event.method === 'ExtrinsicFailed'); + if (errors.length > 0) { + return this.transactionStatus.FAIL; + } + if (events.filter(e => e.event.method === 'ExtrinsicSuccess').length > 0) { + return this.transactionStatus.SUCCESS; + } + } + + return this.transactionStatus.FAIL; + } + + signTransaction(sender: TSigner, transaction: any, options: Partial | null = null, label = 'transaction') { + const sign = (callback: any) => { + if(options !== null) return transaction.signAndSend(sender, options, callback); + return transaction.signAndSend(sender, callback); + }; + // eslint-disable-next-line no-async-promise-executor + return new Promise(async (resolve, reject) => { + try { + const unsub = await sign((result: any) => { + const status = this.getTransactionStatus(result); + + if (status === this.transactionStatus.SUCCESS) { + this.logger.log(`${label} successful`); + unsub(); + resolve({result, status}); + } else if (status === this.transactionStatus.FAIL) { + let moduleError = null; + + if (result.hasOwnProperty('dispatchError')) { + const dispatchError = result['dispatchError']; + + if (dispatchError) { + if (dispatchError.isModule) { + const modErr = dispatchError.asModule; + const errorMeta 
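// Descriptive note: findMetaError resolves the raw module-error index into its metadata,
// so moduleError becomes a 'section.name' string such as 'common.ApprovedValueTooLow',
// the same form the tests above match with rejectedWith(/common\.ApprovedValueTooLow/).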
= dispatchError.registry.findMetaError(modErr); + + moduleError = `${errorMeta.section}.${errorMeta.name}`; + } else { + moduleError = dispatchError.toHuman(); + } + } else { + this.logger.log(result, this.logger.level.ERROR); + } + } + + this.logger.log(`Something went wrong with ${label}. Status: ${status}`, this.logger.level.ERROR); + unsub(); + reject({status, moduleError, result}); + } + }); + } catch (e) { + this.logger.log(e, this.logger.level.ERROR); + reject(e); + } + }); + } + + constructApiCall(apiCall: string, params: any[]) { + if(!apiCall.startsWith('api.')) throw Error(`Invalid api call: ${apiCall}`); + let call = this.getApi() as any; + for(const part of apiCall.slice(4).split('.')) { + call = call[part]; + } + return call(...params); + } + + async executeExtrinsic(sender: TSigner, extrinsic: string, params: any[], expectSuccess=true, options: Partial|null = null/*, failureMessage='expected success'*/) { + if(this.api === null) throw Error('API not initialized'); + if(!extrinsic.startsWith('api.tx.')) throw Error(`${extrinsic} is not transaction`); + + const startTime = (new Date()).getTime(); + let result: ITransactionResult; + let events: IEvent[] = []; + try { + result = await this.signTransaction(sender, this.constructApiCall(extrinsic, params), options, extrinsic) as ITransactionResult; + events = this.eventHelper.extractEvents(result.result.events); + } + catch(e) { + if(!(e as object).hasOwnProperty('status')) throw e; + result = e as ITransactionResult; + } + + const endTime = (new Date()).getTime(); + + const log = { + executedAt: endTime, + executionTime: endTime - startTime, + type: this.chainLogType.EXTRINSIC, + status: result.status, + call: extrinsic, + signer: this.getSignerAddress(sender), + params, + } as IUniqueHelperLog; + + if(result.status !== this.transactionStatus.SUCCESS) { + if (result.moduleError) log.moduleError = result.moduleError; + else if (result.result.dispatchError) log.dispatchError = result.result.dispatchError; + } + if(events.length > 0) log.events = events; + + this.chainLog.push(log); + + if(expectSuccess && result.status !== this.transactionStatus.SUCCESS) { + if (result.moduleError) throw Error(`${result.moduleError}`); + else if (result.result.dispatchError) throw Error(JSON.stringify(result.result.dispatchError)); + } + return result; + } + + async callRpc(rpc: string, params?: any[]) { + if(typeof params === 'undefined') params = []; + if(this.api === null) throw Error('API not initialized'); + if(!rpc.startsWith('api.rpc.') && !rpc.startsWith('api.query.')) throw Error(`${rpc} is not RPC call`); + + const startTime = (new Date()).getTime(); + let result; + let error = null; + const log = { + type: this.chainLogType.RPC, + call: rpc, + params, + } as IUniqueHelperLog; + + try { + result = await this.constructApiCall(rpc, params); + } + catch(e) { + error = e; + } + + const endTime = (new Date()).getTime(); + + log.executedAt = endTime; + log.status = (error === null ? 
this.transactionStatus.SUCCESS : this.transactionStatus.FAIL) as 'Fail' | 'Success'; + log.executionTime = endTime - startTime; + + this.chainLog.push(log); + + if(error !== null) throw error; + + return result; + } + + getSignerAddress(signer: IKeyringPair | string): string { + if(typeof signer === 'string') return signer; + return signer.address; + } + + fetchAllPalletNames(): string[] { + if(this.api === null) throw Error('API not initialized'); + return this.api.runtimeMetadata.asLatest.pallets.map(m => m.name.toString().toLowerCase()); + } + + fetchMissingPalletNames(requiredPallets: string[]): string[] { + const palletNames = this.fetchAllPalletNames(); + return requiredPallets.filter(p => !palletNames.includes(p)); + } +} + + +class HelperGroup { + helper: T; + + constructor(uniqueHelper: T) { + this.helper = uniqueHelper; + } +} + + +class CollectionGroup extends HelperGroup { + /** + * Get number of blocks when sponsored transaction is available. + * + * @param collectionId ID of collection + * @param tokenId ID of token + * @param addressObj address for which the sponsorship is checked + * @example await getTokenNextSponsored(1, 2, {Substrate: '5DfhbVfww7ThF8q6f3...'}); + * @returns number of blocks or null if sponsorship hasn't been set + */ + async getTokenNextSponsored(collectionId: number, tokenId: number, addressObj: ICrossAccountId): Promise { + return (await this.helper.callRpc('api.rpc.unique.nextSponsored', [collectionId, addressObj, tokenId])).toJSON(); + } + + /** + * Get the number of created collections. + * + * @returns number of created collections + */ + async getTotalCount(): Promise { + return (await this.helper.callRpc('api.rpc.unique.collectionStats')).created.toNumber(); + } + + /** + * Get information about the collection with additional data, + * including the number of tokens it contains, its administrators, + * the normalized address of the collection's owner, and decoded name and description. + * + * @param collectionId ID of collection + * @example await getData(2) + * @returns collection information object + */ + async getData(collectionId: number): Promise<{ + id: number; + name: string; + description: string; + tokensCount: number; + admins: CrossAccountId[]; + normalizedOwner: TSubstrateAccount; + raw: any + } | null> { + const collection = await this.helper.callRpc('api.rpc.unique.collectionById', [collectionId]); + const humanCollection = collection.toHuman(), collectionData = { + id: collectionId, name: null, description: null, tokensCount: 0, admins: [], + raw: humanCollection, + } as any, jsonCollection = collection.toJSON(); + if (humanCollection === null) return null; + collectionData.raw.limits = jsonCollection.limits; + collectionData.raw.permissions = jsonCollection.permissions; + collectionData.normalizedOwner = this.helper.address.normalizeSubstrate(collectionData.raw.owner); + for (const key of ['name', 'description']) { + collectionData[key] = this.helper.util.vec2str(humanCollection[key]); + } + + collectionData.tokensCount = (['RFT', 'NFT'].includes(humanCollection.mode)) + ? await this.helper[humanCollection.mode.toLocaleLowerCase() as 'nft' | 'rft'].getLastTokenId(collectionId) + : 0; + collectionData.admins = await this.getAdmins(collectionId); + + return collectionData; + } + + /** + * Get the addresses of the collection's administrators, optionally normalized. 
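+ *
+ * A minimal usage sketch (illustrative only; assumes collection 10 already exists and has at least one admin):
+ * const admins = await getAdmins(10, true); // returns admin addresses normalized to the default ss58 format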
+ * + * @param collectionId ID of collection + * @param normalize whether to normalize the addresses to the default ss58 format + * @example await getAdmins(1) + * @returns array of administrators + */ + async getAdmins(collectionId: number, normalize = false): Promise { + const admins = (await this.helper.callRpc('api.rpc.unique.adminlist', [collectionId])).toHuman(); + + return normalize + ? admins.map((address: CrossAccountId) => address.withNormalizedSubstrate()) + : admins; + } + + /** + * Get the addresses added to the collection allow-list, optionally normalized. + * @param collectionId ID of collection + * @param normalize whether to normalize the addresses to the default ss58 format + * @example await getAllowList(1) + * @returns array of allow-listed addresses + */ + async getAllowList(collectionId: number, normalize = false): Promise { + const allowListed = (await this.helper.callRpc('api.rpc.unique.allowlist', [collectionId])).toHuman(); + return normalize + ? allowListed.map((address: CrossAccountId) => address.withNormalizedSubstrate()) + : allowListed; + } + + /** + * Get the effective limits of the collection instead of null for default values + * + * @param collectionId ID of collection + * @example await getEffectiveLimits(2) + * @returns object of collection limits + */ + async getEffectiveLimits(collectionId: number): Promise { + return (await this.helper.callRpc('api.rpc.unique.effectiveCollectionLimits', [collectionId])).toJSON(); + } + + /** + * Burns the collection if the signer has sufficient permissions and collection is empty. + * + * @param signer keyring of signer + * @param collectionId ID of collection + * @example await helper.collection.burn(aliceKeyring, 3); + * @returns ```true``` if extrinsic success, otherwise ```false``` + */ + async burn(signer: TSigner, collectionId: number): Promise { + const result = await this.helper.executeExtrinsic( + signer, + 'api.tx.unique.destroyCollection', [collectionId], + true, + ); + + return this.helper.util.findCollectionInEvents(result.result.events, collectionId, 'common', 'CollectionDestroyed'); + } + + /** + * Sets the sponsor for the collection (Requires the Substrate address). Needs confirmation by the sponsor. + * + * @param signer keyring of signer + * @param collectionId ID of collection + * @param sponsorAddress Sponsor substrate address + * @example setSponsor(aliceKeyring, 10, "5DyN4Y92vZCjv38fg...") + * @returns ```true``` if extrinsic success, otherwise ```false``` + */ + async setSponsor(signer: TSigner, collectionId: number, sponsorAddress: TSubstrateAccount): Promise { + const result = await this.helper.executeExtrinsic( + signer, + 'api.tx.unique.setCollectionSponsor', [collectionId, sponsorAddress], + true, + ); + + return this.helper.util.findCollectionInEvents(result.result.events, collectionId, 'unique', 'CollectionSponsorSet'); + } + + /** + * Confirms consent to sponsor the collection on behalf of the signer. 
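+ *
+ * Illustrative two-step sponsorship flow (a sketch; assumes `aliceKeyring` owns collection 10 and `bobKeyring` is the nominated sponsor):
+ * await setSponsor(aliceKeyring, 10, bobKeyring.address); // the owner nominates the sponsor
+ * await confirmSponsorship(bobKeyring, 10); // the sponsor confirms from their own account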
+ * + * @param signer keyring of signer + * @param collectionId ID of collection + * @example confirmSponsorship(aliceKeyring, 10) + * @returns ```true``` if extrinsic success, otherwise ```false``` + */ + async confirmSponsorship(signer: TSigner, collectionId: number): Promise { + const result = await this.helper.executeExtrinsic( + signer, + 'api.tx.unique.confirmSponsorship', [collectionId], + true, + ); + + return this.helper.util.findCollectionInEvents(result.result.events, collectionId, 'unique', 'SponsorshipConfirmed'); + } + + /** + * Removes the sponsor of a collection, regardless if it consented or not. + * + * @param signer keyring of signer + * @param collectionId ID of collection + * @example removeSponsor(aliceKeyring, 10) + * @returns ```true``` if extrinsic success, otherwise ```false``` + */ + async removeSponsor(signer: TSigner, collectionId: number): Promise { + const result = await this.helper.executeExtrinsic( + signer, + 'api.tx.unique.removeCollectionSponsor', [collectionId], + true, + ); + + return this.helper.util.findCollectionInEvents(result.result.events, collectionId, 'unique', 'CollectionSponsorRemoved'); + } + + /** + * Sets the limits of the collection. At least one limit must be specified for a correct call. + * + * @param signer keyring of signer + * @param collectionId ID of collection + * @param limits collection limits object + * @example + * await setLimits( + * aliceKeyring, + * 10, + * { + * sponsorTransferTimeout: 0, + * ownerCanDestroy: false + * } + * ) + * @returns ```true``` if extrinsic success, otherwise ```false``` + */ + async setLimits(signer: TSigner, collectionId: number, limits: ICollectionLimits): Promise { + const result = await this.helper.executeExtrinsic( + signer, + 'api.tx.unique.setCollectionLimits', [collectionId, limits], + true, + ); + + return this.helper.util.findCollectionInEvents(result.result.events, collectionId, 'unique', 'CollectionLimitSet'); + } + + /** + * Changes the owner of the collection to the new Substrate address. + * + * @param signer keyring of signer + * @param collectionId ID of collection + * @param ownerAddress substrate address of new owner + * @example changeOwner(aliceKeyring, 10, "5DyN4Y92vZCjv38fg...") + * @returns ```true``` if extrinsic success, otherwise ```false``` + */ + async changeOwner(signer: TSigner, collectionId: number, ownerAddress: TSubstrateAccount): Promise { + const result = await this.helper.executeExtrinsic( + signer, + 'api.tx.unique.changeCollectionOwner', [collectionId, ownerAddress], + true, + ); + + return this.helper.util.findCollectionInEvents(result.result.events, collectionId, 'unique', 'CollectionOwnedChanged'); + } + + /** + * Adds a collection administrator. + * + * @param signer keyring of signer + * @param collectionId ID of collection + * @param adminAddressObj Administrator address (substrate or ethereum) + * @example addAdmin(aliceKeyring, 10, {Substrate: "5DyN4Y92vZCjv38fg..."}) + * @returns ```true``` if extrinsic success, otherwise ```false``` + */ + async addAdmin(signer: TSigner, collectionId: number, adminAddressObj: ICrossAccountId): Promise { + const result = await this.helper.executeExtrinsic( + signer, + 'api.tx.unique.addCollectionAdmin', [collectionId, adminAddressObj], + true, + ); + + return this.helper.util.findCollectionInEvents(result.result.events, collectionId, 'unique', 'CollectionAdminAdded'); + } + + /** + * Removes a collection administrator. 
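+ *
+ * A short sketch (illustrative; assumes `aliceKeyring` owns collection 10 and the address below was previously added via addAdmin):
+ * await removeAdmin(aliceKeyring, 10, {Substrate: "5DyN4Y92vZCjv38fg..."});
+ * const admins = await getAdmins(10); // the removed address should no longer be listed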
+ * + * @param signer keyring of signer + * @param collectionId ID of collection + * @param adminAddressObj Administrator address (substrate or ethereum) + * @example removeAdmin(aliceKeyring, 10, {Substrate: "5DyN4Y92vZCjv38fg..."}) + * @returns ```true``` if extrinsic success, otherwise ```false``` + */ + async removeAdmin(signer: TSigner, collectionId: number, adminAddressObj: ICrossAccountId): Promise { + const result = await this.helper.executeExtrinsic( + signer, + 'api.tx.unique.removeCollectionAdmin', [collectionId, adminAddressObj], + true, + ); + + return this.helper.util.findCollectionInEvents(result.result.events, collectionId, 'unique', 'CollectionAdminRemoved'); + } + + /** + * Check if user is in allow list. + * + * @param collectionId ID of collection + * @param user Account to check + * @example await getAdmins(1) + * @returns is user in allow list + */ + async allowed(collectionId: number, user: ICrossAccountId): Promise { + return (await this.helper.callRpc('api.rpc.unique.allowed', [collectionId, user])).toJSON(); + } + + /** + * Adds an address to allow list + * @param signer keyring of signer + * @param collectionId ID of collection + * @param addressObj address to add to the allow list + * @returns ```true``` if extrinsic success, otherwise ```false``` + */ + async addToAllowList(signer: TSigner, collectionId: number, addressObj: ICrossAccountId): Promise { + const result = await this.helper.executeExtrinsic( + signer, + 'api.tx.unique.addToAllowList', [collectionId, addressObj], + true, + ); + + return this.helper.util.findCollectionInEvents(result.result.events, collectionId, 'unique', 'AllowListAddressAdded'); + } + + /** + * Removes an address from allow list + * + * @param signer keyring of signer + * @param collectionId ID of collection + * @param addressObj address to remove from the allow list + * @returns ```true``` if extrinsic success, otherwise ```false``` + */ + async removeFromAllowList(signer: TSigner, collectionId: number, addressObj: ICrossAccountId): Promise { + const result = await this.helper.executeExtrinsic( + signer, + 'api.tx.unique.removeFromAllowList', [collectionId, addressObj], + true, + ); + + return this.helper.util.findCollectionInEvents(result.result.events, collectionId, 'unique', 'AllowListAddressRemoved'); + } + + /** + * Sets onchain permissions for selected collection. + * + * @param signer keyring of signer + * @param collectionId ID of collection + * @param permissions collection permissions object + * @example setPermissions(aliceKeyring, 10, {access:'AllowList', mintMode: true, nesting: {collectionAdmin: true, tokenOwner: true}}); + * @returns ```true``` if extrinsic success, otherwise ```false``` + */ + async setPermissions(signer: TSigner, collectionId: number, permissions: ICollectionPermissions): Promise { + const result = await this.helper.executeExtrinsic( + signer, + 'api.tx.unique.setCollectionPermissions', [collectionId, permissions], + true, + ); + + return this.helper.util.findCollectionInEvents(result.result.events, collectionId, 'unique', 'CollectionPermissionSet'); + } + + /** + * Enables nesting for selected collection. If `restricted` set, you can nest only tokens from specified collections. 
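+ *
+ * A hedged sketch of a restricted setup (illustrative; assumes `aliceKeyring` owns collection 10, collection 11 exists,
+ * and that `restricted` accepts a list of collection IDs as described above):
+ * await enableNesting(aliceKeyring, 10, {tokenOwner: true, restricted: [11]}); // only tokens from collection 11 may be nested, by the token owner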
+ * + * @param signer keyring of signer + * @param collectionId ID of collection + * @param permissions nesting permissions object + * @example enableNesting(aliceKeyring, 10, {collectionAdmin: true, tokenOwner: true}); + * @returns ```true``` if extrinsic success, otherwise ```false``` + */ + async enableNesting(signer: TSigner, collectionId: number, permissions: INestingPermissions): Promise { + return await this.setPermissions(signer, collectionId, {nesting: permissions}); + } + + /** + * Disables nesting for selected collection. + * + * @param signer keyring of signer + * @param collectionId ID of collection + * @example disableNesting(aliceKeyring, 10); + * @returns ```true``` if extrinsic success, otherwise ```false``` + */ + async disableNesting(signer: TSigner, collectionId: number): Promise { + return await this.setPermissions(signer, collectionId, {nesting: {tokenOwner: false, collectionAdmin: false}}); + } + + /** + * Sets onchain properties to the collection. + * + * @param signer keyring of signer + * @param collectionId ID of collection + * @param properties array of property objects + * @example setProperties(aliceKeyring, 10, [{key: "gender", value: "male"}]); + * @returns ```true``` if extrinsic success, otherwise ```false``` + */ + async setProperties(signer: TSigner, collectionId: number, properties: IProperty[]): Promise { + const result = await this.helper.executeExtrinsic( + signer, + 'api.tx.unique.setCollectionProperties', [collectionId, properties], + true, + ); + + return this.helper.util.findCollectionInEvents(result.result.events, collectionId, 'common', 'CollectionPropertySet'); + } + + /** + * Get collection properties. + * + * @param collectionId ID of collection + * @param propertyKeys optionally filter the returned properties to only these keys + * @example getProperties(1219, ['location', 'date', 'time', 'isParadise']); + * @returns array of key-value pairs + */ + async getProperties(collectionId: number, propertyKeys?: string[] | null): Promise { + return (await this.helper.callRpc('api.rpc.unique.collectionProperties', [collectionId, propertyKeys])).toHuman(); + } + + async getCollectionOptions(collectionId: number) { + return (await this.helper.callRpc('api.rpc.unique.collectionById', [collectionId])).toHuman(); + } + + /** + * Deletes onchain properties from the collection. + * + * @param signer keyring of signer + * @param collectionId ID of collection + * @param propertyKeys array of property keys to delete + * @example deleteProperties(aliceKeyring, 10, ["gender", "age"]); + * @returns ```true``` if extrinsic success, otherwise ```false``` + */ + async deleteProperties(signer: TSigner, collectionId: number, propertyKeys: string[]): Promise { + const result = await this.helper.executeExtrinsic( + signer, + 'api.tx.unique.deleteCollectionProperties', [collectionId, propertyKeys], + true, + ); + + return this.helper.util.findCollectionInEvents(result.result.events, collectionId, 'common', 'CollectionPropertyDeleted'); + } + + /** + * Changes the owner of the token. + * + * @param signer keyring of signer + * @param collectionId ID of collection + * @param tokenId ID of token + * @param addressObj address of a new owner + * @param amount amount of tokens to be transfered. 
For NFT must be set to 1n
+ * @example transferToken(aliceKeyring, 10, 5, {Substrate: "5DyN4Y92vZCjv38fg..."})
+ * @returns ```true``` if the transfer succeeded, otherwise ```false```
+ */
+ async transferToken(signer: TSigner, collectionId: number, tokenId: number, addressObj: ICrossAccountId, amount=1n): Promise<boolean> {
+ const result = await this.helper.executeExtrinsic(
+ signer,
+ 'api.tx.unique.transfer', [addressObj, collectionId, tokenId, amount],
+ true, // `Unable to transfer token #${tokenId} from collection #${collectionId}`,
+ );
+
+ return this.helper.util.isTokenTransferSuccess(result.result.events, collectionId, tokenId, {Substrate: typeof signer === 'string' ? signer : signer.address}, addressObj, amount);
+ }
+
+ /**
+ *
+ * Change ownership of a token (or some of its pieces) on behalf of the owner.
+ *
+ * @param signer keyring of signer
+ * @param collectionId ID of collection
+ * @param tokenId ID of token
+ * @param fromAddressObj address on behalf of which the token will be sent
+ * @param toAddressObj new token owner
+ * @param amount amount of tokens to be transferred. For NFT must be set to 1n
+ * @example transferTokenFrom(aliceKeyring, 10, 5, {Substrate: "5DyN4Y92vZCjv38fg"}, {Ethereum: "0x9F0583DbB85..."})
+ * @returns ```true``` if the transfer succeeded, otherwise ```false```
+ */
+ async transferTokenFrom(signer: TSigner, collectionId: number, tokenId: number, fromAddressObj: ICrossAccountId, toAddressObj: ICrossAccountId, amount=1n): Promise<boolean> {
+ const result = await this.helper.executeExtrinsic(
+ signer,
+ 'api.tx.unique.transferFrom', [fromAddressObj, toAddressObj, collectionId, tokenId, amount],
+ true, // `Unable to transfer token #${tokenId} from collection #${collectionId}`,
+ );
+ return this.helper.util.isTokenTransferSuccess(result.result.events, collectionId, tokenId, fromAddressObj, toAddressObj, amount);
+ }
+
+ /**
+ *
+ * Destroys a concrete instance of NFT/RFT or burns a specified amount of fungible tokens.
+ *
+ * @param signer keyring of signer
+ * @param collectionId ID of collection
+ * @param tokenId ID of token
+ * @param amount amount of tokens to be burned. For NFT must be set to 1n
+ * @example burnToken(aliceKeyring, 10, 5);
+ * @returns ```true``` if the extrinsic is successful, otherwise ```false```
+ */
+ async burnToken(signer: TSigner, collectionId: number, tokenId: number, amount=1n): Promise<boolean> {
+ const burnResult = await this.helper.executeExtrinsic(
+ signer,
+ 'api.tx.unique.burnItem', [collectionId, tokenId, amount],
+ true, // `Unable to burn token for ${label}`,
+ );
+ const burnedTokens = this.helper.util.extractTokensFromBurnResult(burnResult);
+ if (burnedTokens.tokens.length > 1) throw Error('Burned multiple tokens');
+ return burnedTokens.success;
+ }
+
+ /**
+ * Destroys a concrete instance of NFT on behalf of the owner.
+ *
+ * @param signer keyring of signer
+ * @param collectionId ID of collection
+ * @param tokenId ID of token
+ * @param fromAddressObj address on behalf of which the token will be burnt
+ * @param amount amount of tokens to be burned.
For NFT must be set to 1n + * @example burnTokenFrom(aliceKeyring, 10, {Substrate: "5DyN4Y92vZCjv38fg..."}, 5, {Ethereum: "0x9F0583DbB85..."}) + * @returns ```true``` if extrinsic success, otherwise ```false``` + */ + async burnTokenFrom(signer: TSigner, collectionId: number, tokenId: number, fromAddressObj: ICrossAccountId, amount=1n): Promise { + const burnResult = await this.helper.executeExtrinsic( + signer, + 'api.tx.unique.burnFrom', [collectionId, fromAddressObj, tokenId, amount], + true, // `Unable to burn token from for ${label}`, + ); + const burnedTokens = this.helper.util.extractTokensFromBurnResult(burnResult); + return burnedTokens.success && burnedTokens.tokens.length > 0; + } + + /** + * Set, change, or remove approved address to transfer the ownership of the NFT. + * + * @param signer keyring of signer + * @param collectionId ID of collection + * @param tokenId ID of token + * @param toAddressObj Substrate or Ethereum address which gets approved use of the signer's tokens + * @param amount amount of token to be approved. For NFT must be set to 1n + * @returns ```true``` if extrinsic success, otherwise ```false``` + */ + async approveToken(signer: IKeyringPair, collectionId: number, tokenId: number, toAddressObj: ICrossAccountId, amount=1n) { + const approveResult = await this.helper.executeExtrinsic( + signer, + 'api.tx.unique.approve', [toAddressObj, collectionId, tokenId, amount], + true, // `Unable to approve token for ${label}`, + ); + + return this.helper.util.findCollectionInEvents(approveResult.result.events, collectionId, 'common', 'Approved'); + } + + /** + * Get the amount of token pieces approved to transfer or burn. Normally 0. + * + * @param collectionId ID of collection + * @param tokenId ID of token + * @param toAccountObj address which is approved to use token pieces + * @param fromAccountObj address which may have allowed the use of its owned tokens + * @example getTokenApprovedPieces(10, 5, {Substrate: "5DyN4Y92vZCjv38fg..."}, {Substrate: "5ERZNF88Mm7UGfPP3mdG..."}) + * @returns number of approved to transfer pieces + */ + async getTokenApprovedPieces(collectionId: number, tokenId: number, toAccountObj: ICrossAccountId, fromAccountObj: ICrossAccountId): Promise { + return (await this.helper.callRpc('api.rpc.unique.allowance', [collectionId, fromAccountObj, toAccountObj, tokenId])).toBigInt(); + } + + /** + * Get the last created token ID in a collection + * + * @param collectionId ID of collection + * @example getLastTokenId(10); + * @returns id of the last created token + */ + async getLastTokenId(collectionId: number): Promise { + return (await this.helper.callRpc('api.rpc.unique.lastTokenId', [collectionId])).toNumber(); + } + + /** + * Check if token exists + * + * @param collectionId ID of collection + * @param tokenId ID of token + * @example doesTokenExist(10, 20); + * @returns true if the token exists, otherwise false + */ + async doesTokenExist(collectionId: number, tokenId: number): Promise { + return (await this.helper.callRpc('api.rpc.unique.tokenExists', [collectionId, tokenId])).toJSON(); + } +} + +class NFTnRFT extends CollectionGroup { + /** + * Get tokens owned by account + * + * @param collectionId ID of collection + * @param addressObj tokens owner + * @example getTokensByAddress(10, {Substrate: "5DyN4Y92vZCjv38fg..."}) + * @returns array of token ids owned by account + */ + async getTokensByAddress(collectionId: number, addressObj: ICrossAccountId): Promise { + return (await this.helper.callRpc('api.rpc.unique.accountTokens', 
[collectionId, addressObj])).toJSON(); + } + + /** + * Get token data + * + * @param collectionId ID of collection + * @param tokenId ID of token + * @param propertyKeys optionally filter the token properties to only these keys + * @param blockHashAt optionally query the data at some block with this hash + * @example getToken(10, 5); + * @returns human readable token data + */ + async getToken(collectionId: number, tokenId: number, propertyKeys: string[] = [], blockHashAt?: string): Promise<{ + properties: IProperty[]; + owner: CrossAccountId; + normalizedOwner: CrossAccountId; + }| null> { + let tokenData; + if(typeof blockHashAt === 'undefined') { + tokenData = await this.helper.callRpc('api.rpc.unique.tokenData', [collectionId, tokenId]); + } + else { + if(propertyKeys.length == 0) { + const collection = (await this.helper.callRpc('api.rpc.unique.collectionById', [collectionId])).toHuman(); + if(!collection) return null; + propertyKeys = collection.tokenPropertyPermissions.map((x: ITokenPropertyPermission) => x.key); + } + tokenData = await this.helper.callRpc('api.rpc.unique.tokenData', [collectionId, tokenId, propertyKeys, blockHashAt]); + } + tokenData = tokenData.toHuman(); + if (tokenData === null || tokenData.owner === null) return null; + const owner = {} as any; + for (const key of Object.keys(tokenData.owner)) { + owner[key.toLocaleLowerCase()] = key.toLocaleLowerCase() == 'substrate' + ? CrossAccountId.normalizeSubstrateAddress(tokenData.owner[key]) + : tokenData.owner[key]; + } + tokenData.normalizedOwner = CrossAccountId.fromLowerCaseKeys(owner); + return tokenData; + } + + /** + * Set permissions to change token properties + * + * @param signer keyring of signer + * @param collectionId ID of collection + * @param permissions permissions to change a property by the collection admin or token owner + * @example setTokenPropertyPermissions( + * aliceKeyring, 10, [{key: "gender", permission: {tokenOwner: true, mutable: true, collectionAdmin: true}}] + * ) + * @returns true if extrinsic success otherwise false + */ + async setTokenPropertyPermissions(signer: TSigner, collectionId: number, permissions: ITokenPropertyPermission[]): Promise { + const result = await this.helper.executeExtrinsic( + signer, + 'api.tx.unique.setTokenPropertyPermissions', [collectionId, permissions], + true, + ); + + return this.helper.util.findCollectionInEvents(result.result.events, collectionId, 'common', 'PropertyPermissionSet'); + } + + /** + * Get token property permissions. + * + * @param collectionId ID of collection + * @param propertyKeys optionally filter the returned property permissions to only these keys + * @example getPropertyPermissions(1219, ['location', 'date', 'time', 'isParadise']); + * @returns array of key-permission pairs + */ + async getPropertyPermissions(collectionId: number, propertyKeys: string[] | null = null): Promise { + return (await this.helper.callRpc('api.rpc.unique.propertyPermissions', [collectionId, ...(propertyKeys === null ? [] : [propertyKeys])])).toHuman(); + } + + /** + * Set token properties + * + * @param signer keyring of signer + * @param collectionId ID of collection + * @param tokenId ID of token + * @param properties key-value pairs of metadata which to add to a token. 
Keys must be permitted in the collection + * @example setTokenProperties(aliceKeyring, 10, 5, [{key: "gender", value: "female"}, {key: "age", value: "23"}]) + * @returns ```true``` if extrinsic success, otherwise ```false``` + */ + async setTokenProperties(signer: TSigner, collectionId: number, tokenId: number, properties: IProperty[]): Promise { + const result = await this.helper.executeExtrinsic( + signer, + 'api.tx.unique.setTokenProperties', [collectionId, tokenId, properties], + true, + ); + + return this.helper.util.findCollectionInEvents(result.result.events, collectionId, 'common', 'TokenPropertySet'); + } + + /** + * Get properties, metadata assigned to a token. + * + * @param collectionId ID of collection + * @param tokenId ID of token + * @param propertyKeys optionally filter the returned properties to only these keys + * @example getTokenProperties(1219, ['location', 'date', 'time', 'isParadise']); + * @returns array of key-value pairs + */ + async getTokenProperties(collectionId: number, tokenId: number, propertyKeys?: string[] | null): Promise { + return (await this.helper.callRpc('api.rpc.unique.tokenProperties', [collectionId, tokenId, propertyKeys])).toHuman(); + } + + /** + * Delete the provided properties of a token + * @param signer keyring of signer + * @param collectionId ID of collection + * @param tokenId ID of token + * @param propertyKeys property keys to be deleted + * @example deleteTokenProperties(aliceKeyring, 10, 5, ["gender", "age"]) + * @returns ```true``` if extrinsic success, otherwise ```false``` + */ + async deleteTokenProperties(signer: TSigner, collectionId: number, tokenId: number, propertyKeys: string[]): Promise { + const result = await this.helper.executeExtrinsic( + signer, + 'api.tx.unique.deleteTokenProperties', [collectionId, tokenId, propertyKeys], + true, + ); + + return this.helper.util.findCollectionInEvents(result.result.events, collectionId, 'common', 'TokenPropertyDeleted'); + } + + /** + * Mint new collection + * + * @param signer keyring of signer + * @param collectionOptions basic collection options and properties + * @param mode NFT or RFT type of a collection + * @example mintCollection(aliceKeyring, {name: 'New', description: "New collection", tokenPrefix: "NEW"}, "NFT") + * @returns object of the created collection + */ + async mintCollection(signer: TSigner, collectionOptions: ICollectionCreationOptions, mode: 'NFT' | 'RFT'): Promise { + collectionOptions = JSON.parse(JSON.stringify(collectionOptions)) as ICollectionCreationOptions; // Clone object + collectionOptions.mode = (mode === 'NFT') ? 
{nft: null} : {refungible: null}; + for (const key of ['name', 'description', 'tokenPrefix']) { + if (typeof collectionOptions[key as 'name' | 'description' | 'tokenPrefix'] === 'string') collectionOptions[key as 'name' | 'description' | 'tokenPrefix'] = this.helper.util.str2vec(collectionOptions[key as 'name' | 'description' | 'tokenPrefix'] as string); + } + const creationResult = await this.helper.executeExtrinsic( + signer, + 'api.tx.unique.createCollectionEx', [collectionOptions], + true, // errorLabel, + ); + return this.getCollectionObject(this.helper.util.extractCollectionIdFromCreationResult(creationResult)); + } + + getCollectionObject(_collectionId: number): any { + return null; + } + + getTokenObject(_collectionId: number, _tokenId: number): any { + return null; + } +} + + +class NFTGroup extends NFTnRFT { + /** + * Get collection object + * @param collectionId ID of collection + * @example getCollectionObject(2); + * @returns instance of UniqueNFTCollection + */ + getCollectionObject(collectionId: number): UniqueNFTCollection { + return new UniqueNFTCollection(collectionId, this.helper); + } + + /** + * Get token object + * @param collectionId ID of collection + * @param tokenId ID of token + * @example getTokenObject(10, 5); + * @returns instance of UniqueNFTToken + */ + getTokenObject(collectionId: number, tokenId: number): UniqueNFToken { + return new UniqueNFToken(tokenId, this.getCollectionObject(collectionId)); + } + + /** + * Get token's owner + * @param collectionId ID of collection + * @param tokenId ID of token + * @param blockHashAt optionally query the data at the block with this hash + * @example getTokenOwner(10, 5); + * @returns Address in CrossAccountId format, e.g. {Substrate: "5DnSF6RRjwteE3BrCj..."} + */ + async getTokenOwner(collectionId: number, tokenId: number, blockHashAt?: string): Promise { + let owner; + if (typeof blockHashAt === 'undefined') { + owner = await this.helper.callRpc('api.rpc.unique.tokenOwner', [collectionId, tokenId]); + } else { + owner = await this.helper.callRpc('api.rpc.unique.tokenOwner', [collectionId, tokenId, blockHashAt]); + } + return CrossAccountId.fromLowerCaseKeys(owner.toJSON()); + } + + /** + * Is token approved to transfer + * @param collectionId ID of collection + * @param tokenId ID of token + * @param toAccountObj address to be approved + * @returns ```true``` if extrinsic success, otherwise ```false``` + */ + async isTokenApproved(collectionId: number, tokenId: number, toAccountObj: ICrossAccountId): Promise { + return (await this.getTokenApprovedPieces(collectionId, tokenId, toAccountObj, await this.getTokenOwner(collectionId, tokenId))) === 1n; + } + + /** + * Changes the owner of the token. + * + * @param signer keyring of signer + * @param collectionId ID of collection + * @param tokenId ID of token + * @param addressObj address of a new owner + * @example transferToken(aliceKeyring, 10, 5, {Substrate: "5DyN4Y92vZCjv38fg..."}) + * @returns ```true``` if extrinsic success, otherwise ```false``` + */ + async transferToken(signer: TSigner, collectionId: number, tokenId: number, addressObj: ICrossAccountId): Promise { + return await super.transferToken(signer, collectionId, tokenId, addressObj, 1n); + } + + /** + * + * Change ownership of a NFT on behalf of the owner. 
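+ *
+ * Typical approve-then-transfer sketch (illustrative; `ownerKeyring`, `bobKeyring` and `charlieAddress` are assumed placeholders):
+ * await approveToken(ownerKeyring, 10, 5, {Substrate: bobKeyring.address}); // owner approves the operator
+ * await transferTokenFrom(bobKeyring, 10, 5, {Substrate: ownerKeyring.address}, {Substrate: charlieAddress});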
+ * + * @param signer keyring of signer + * @param collectionId ID of collection + * @param tokenId ID of token + * @param fromAddressObj address on behalf of which the token will be sent + * @param toAddressObj new token owner + * @example transferTokenFrom(aliceKeyring, 10, 5, {Substrate: "5DyN4Y92vZCjv38fg..."}, {Ethereum: "0x9F0583DbB85..."}) + * @returns ```true``` if extrinsic success, otherwise ```false``` + */ + async transferTokenFrom(signer: TSigner, collectionId: number, tokenId: number, fromAddressObj: ICrossAccountId, toAddressObj: ICrossAccountId): Promise { + return await super.transferTokenFrom(signer, collectionId, tokenId, fromAddressObj, toAddressObj, 1n); + } + + /** + * Recursively find the address that owns the token + * @param collectionId ID of collection + * @param tokenId ID of token + * @param blockHashAt + * @example getTokenTopmostOwner(10, 5); + * @returns address in CrossAccountId format, e.g. {Substrate: "5DyN4Y92vZCjv38fg..."} + */ + async getTokenTopmostOwner(collectionId: number, tokenId: number, blockHashAt?: string): Promise { + let owner; + if (typeof blockHashAt === 'undefined') { + owner = await this.helper.callRpc('api.rpc.unique.topmostTokenOwner', [collectionId, tokenId]); + } else { + owner = await this.helper.callRpc('api.rpc.unique.topmostTokenOwner', [collectionId, tokenId, blockHashAt]); + } + + if (owner === null) return null; + + return owner.toHuman(); + } + + /** + * Get tokens nested in the provided token + * @param collectionId ID of collection + * @param tokenId ID of token + * @param blockHashAt optionally query the data at the block with this hash + * @example getTokenChildren(10, 5); + * @returns tokens whose depth of nesting is <= 5 + */ + async getTokenChildren(collectionId: number, tokenId: number, blockHashAt?: string): Promise { + let children; + if(typeof blockHashAt === 'undefined') { + children = await this.helper.callRpc('api.rpc.unique.tokenChildren', [collectionId, tokenId]); + } else { + children = await this.helper.callRpc('api.rpc.unique.tokenChildren', [collectionId, tokenId, blockHashAt]); + } + + return children.toJSON().map((x: any) => { + return {collectionId: x.collection, tokenId: x.token}; + }); + } + + /** + * Nest one token into another + * @param signer keyring of signer + * @param tokenObj token to be nested + * @param rootTokenObj token to be parent + * @example nestToken(aliceKeyring, {collectionId: 10, tokenId: 5}, {collectionId: 10, tokenId: 4}); + * @returns ```true``` if extrinsic success, otherwise ```false``` + */ + async nestToken(signer: TSigner, tokenObj: IToken, rootTokenObj: IToken): Promise { + const rootTokenAddress = this.helper.util.getTokenAccount(rootTokenObj); + const result = await this.transferToken(signer, tokenObj.collectionId, tokenObj.tokenId, rootTokenAddress); + if(!result) { + throw Error('Unable to nest token!'); + } + return result; + } + + /** + * Remove token from nested state + * @param signer keyring of signer + * @param tokenObj token to unnest + * @param rootTokenObj parent of a token + * @param toAddressObj address of a new token owner + * @example unnestToken(aliceKeyring, {collectionId: 10, tokenId: 5}, {collectionId: 10, tokenId: 4}, {Substrate: "5DyN4Y92vZCjv38fg..."}); + * @returns ```true``` if extrinsic success, otherwise ```false``` + */ + async unnestToken(signer: TSigner, tokenObj: IToken, rootTokenObj: IToken, toAddressObj: ICrossAccountId): Promise { + const rootTokenAddress = this.helper.util.getTokenAccount(rootTokenObj); + const result = await 
this.transferTokenFrom(signer, tokenObj.collectionId, tokenObj.tokenId, rootTokenAddress, toAddressObj); + if(!result) { + throw Error('Unable to unnest token!'); + } + return result; + } + + /** + * Mint new collection + * @param signer keyring of signer + * @param collectionOptions Collection options + * @example + * mintCollection(aliceKeyring, { + * name: 'New', + * description: 'New collection', + * tokenPrefix: 'NEW', + * }) + * @returns object of the created collection + */ + async mintCollection(signer: TSigner, collectionOptions: ICollectionCreationOptions = {}): Promise { + return await super.mintCollection(signer, collectionOptions, 'NFT') as UniqueNFTCollection; + } + + /** + * Mint new token + * @param signer keyring of signer + * @param data token data + * @returns created token object + */ + async mintToken(signer: TSigner, data: { collectionId: number; owner: ICrossAccountId | string; properties?: IProperty[]; }): Promise { + const creationResult = await this.helper.executeExtrinsic( + signer, + 'api.tx.unique.createItem', [data.collectionId, (typeof data.owner === 'string') ? {Substrate: data.owner} : data.owner, { + nft: { + properties: data.properties, + }, + }], + true, + ); + const createdTokens = this.helper.util.extractTokensFromCreationResult(creationResult); + if (createdTokens.tokens.length > 1) throw Error('Minted multiple tokens'); + if (createdTokens.tokens.length < 1) throw Error('No tokens minted'); + return this.getTokenObject(data.collectionId, createdTokens.tokens[0].tokenId); + } + + /** + * Mint multiple NFT tokens + * @param signer keyring of signer + * @param collectionId ID of collection + * @param tokens array of tokens with owner and properties + * @example + * mintMultipleTokens(aliceKeyring, 10, [{ + * owner: {Substrate: "5DyN4Y92vZCjv38fg..."}, + * properties: [{key: "gender", value: "male"},{key: "age", value: "45"}], + * },{ + * owner: {Ethereum: "0x9F0583DbB855d..."}, + * properties: [{key: "gender", value: "female"},{key: "age", value: "22"}], + * }]); + * @returns ```true``` if extrinsic success, otherwise ```false``` + */ + async mintMultipleTokens(signer: TSigner, collectionId: number, tokens: {owner: ICrossAccountId, properties?: IProperty[]}[]): Promise { + const creationResult = await this.helper.executeExtrinsic( + signer, + 'api.tx.unique.createMultipleItemsEx', [collectionId, {NFT: tokens}], + true, + ); + const collection = this.getCollectionObject(collectionId); + return this.helper.util.extractTokensFromCreationResult(creationResult).tokens.map((x: IToken) => collection.getTokenObject(x.tokenId)); + } + + /** + * Mint multiple NFT tokens with one owner + * @param signer keyring of signer + * @param collectionId ID of collection + * @param owner tokens owner + * @param tokens array of tokens with owner and properties + * @example + * mintMultipleTokensWithOneOwner(aliceKeyring, 10, "5DyN4Y92vZCjv38fg...", [{ + * properties: [{ + * key: "gender", + * value: "female", + * },{ + * key: "age", + * value: "33", + * }], + * }]); + * @returns array of newly created tokens + */ + async mintMultipleTokensWithOneOwner(signer: TSigner, collectionId: number, owner: ICrossAccountId, tokens: {properties?: IProperty[]}[]): Promise { + const rawTokens = []; + for (const token of tokens) { + const raw = {NFT: {properties: token.properties}}; + rawTokens.push(raw); + } + const creationResult = await this.helper.executeExtrinsic( + signer, + 'api.tx.unique.createMultipleItems', [collectionId, owner, rawTokens], + true, + ); + const collection = 
this.getCollectionObject(collectionId);
+ return this.helper.util.extractTokensFromCreationResult(creationResult).tokens.map((x: IToken) => collection.getTokenObject(x.tokenId));
+ }
+
+ /**
+ * Set, change, or remove approved address to transfer the ownership of the NFT.
+ *
+ * @param signer keyring of signer
+ * @param collectionId ID of collection
+ * @param tokenId ID of token
+ * @param toAddressObj address to approve
+ * @example approveToken(aliceKeyring, 10, 5, {Substrate: "5DyN4Y92vZCjv38fg..."})
+ * @returns ```true``` if extrinsic success, otherwise ```false```
+ */
+ async approveToken(signer: IKeyringPair, collectionId: number, tokenId: number, toAddressObj: ICrossAccountId) {
+ return super.approveToken(signer, collectionId, tokenId, toAddressObj, 1n);
+ }
+}
+
+
+class RFTGroup extends NFTnRFT {
+ /**
+ * Get collection object
+ * @param collectionId ID of collection
+ * @example getCollectionObject(2);
+ * @returns instance of UniqueRFTCollection
+ */
+ getCollectionObject(collectionId: number): UniqueRFTCollection {
+ return new UniqueRFTCollection(collectionId, this.helper);
+ }
+
+ /**
+ * Get token object
+ * @param collectionId ID of collection
+ * @param tokenId ID of token
+ * @example getTokenObject(10, 5);
+ * @returns instance of UniqueRFToken
+ */
+ getTokenObject(collectionId: number, tokenId: number): UniqueRFToken {
+ return new UniqueRFToken(tokenId, this.getCollectionObject(collectionId));
+ }
+
+ /**
+ * Get top 10 token owners with the largest number of pieces
+ * @param collectionId ID of collection
+ * @param tokenId ID of token
+ * @example getTokenTop10Owners(10, 5);
+ * @returns array of top 10 owners
+ */
+ async getTokenTop10Owners(collectionId: number, tokenId: number): Promise<CrossAccountId[]> {
+ return (await this.helper.callRpc('api.rpc.unique.tokenOwners', [collectionId, tokenId])).toJSON().map(CrossAccountId.fromLowerCaseKeys);
+ }
+
+ /**
+ * Get number of pieces owned by address
+ * @param collectionId ID of collection
+ * @param tokenId ID of token
+ * @param addressObj address of the token owner
+ * @example getTokenBalance(10, 5, {Substrate: "5DyN4Y92vZCjv38fg..."});
+ * @returns number of pieces owned by address
+ */
+ async getTokenBalance(collectionId: number, tokenId: number, addressObj: ICrossAccountId): Promise<bigint> {
+ return (await this.helper.callRpc('api.rpc.unique.balance', [collectionId, addressObj, tokenId])).toBigInt();
+ }
+
+ /**
+ * Transfer pieces of token to another address
+ * @param signer keyring of signer
+ * @param collectionId ID of collection
+ * @param tokenId ID of token
+ * @param addressObj address of a new owner
+ * @param amount number of pieces to be transferred
+ * @example transferToken(aliceKeyring, 10, 5, {Substrate: "5DyN4Y92vZCjv38fg..."}, 2000n)
+ * @returns ```true``` if extrinsic success, otherwise ```false```
+ */
+ async transferToken(signer: TSigner, collectionId: number, tokenId: number, addressObj: ICrossAccountId, amount=1n): Promise<boolean> {
+ return await super.transferToken(signer, collectionId, tokenId, addressObj, amount);
+ }
+
+ /**
+ * Change ownership of some pieces of RFT on behalf of the owner.
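+ *
+ * Sketch of a partial transfer (illustrative; `ownerKeyring` and `bobKeyring` are assumed placeholders, and token 5 of collection 10 has at least 2000 pieces):
+ * await approveToken(ownerKeyring, 10, 5, {Substrate: bobKeyring.address}, 2000n); // owner approves 2000 pieces
+ * await transferTokenFrom(bobKeyring, 10, 5, {Substrate: ownerKeyring.address}, {Substrate: bobKeyring.address}, 2000n);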
+ * @param signer keyring of signer + * @param collectionId ID of collection + * @param tokenId ID of token + * @param fromAddressObj address on behalf of which the token will be sent + * @param toAddressObj new token owner + * @param amount number of pieces to be transfered + * @example transferTokenFrom(aliceKeyring, 10, 5, {Substrate: "5DyN4Y92vZCjv38fg..."}, {Substrate: "5DfhbVfww7ThF8q6f3i..."}, 2000n) + * @returns ```true``` if extrinsic success, otherwise ```false``` + */ + async transferTokenFrom(signer: TSigner, collectionId: number, tokenId: number, fromAddressObj: ICrossAccountId, toAddressObj: ICrossAccountId, amount=1n): Promise { + return await super.transferTokenFrom(signer, collectionId, tokenId, fromAddressObj, toAddressObj, amount); + } + + /** + * Mint new collection + * @param signer keyring of signer + * @param collectionOptions Collection options + * @example + * mintCollection(aliceKeyring, { + * name: 'New', + * description: 'New collection', + * tokenPrefix: 'NEW', + * }) + * @returns object of the created collection + */ + async mintCollection(signer: TSigner, collectionOptions: ICollectionCreationOptions = {}): Promise { + return await super.mintCollection(signer, collectionOptions, 'RFT') as UniqueRFTCollection; + } + + /** + * Mint new token + * @param signer keyring of signer + * @param data token data + * @example mintToken(aliceKeyring, {collectionId: 10, owner: {Substrate: '5GHoZe9c73RYbVzq...'}, pieces: 10000n}); + * @returns created token object + */ + async mintToken(signer: TSigner, data: { collectionId: number; owner: ICrossAccountId | string; pieces: bigint; properties?: IProperty[]; }): Promise { + const creationResult = await this.helper.executeExtrinsic( + signer, + 'api.tx.unique.createItem', [data.collectionId, (typeof data.owner === 'string') ? 
{Substrate: data.owner} : data.owner, { + refungible: { + pieces: data.pieces, + properties: data.properties, + }, + }], + true, + ); + const createdTokens = this.helper.util.extractTokensFromCreationResult(creationResult); + if (createdTokens.tokens.length > 1) throw Error('Minted multiple tokens'); + if (createdTokens.tokens.length < 1) throw Error('No tokens minted'); + return this.getTokenObject(data.collectionId, createdTokens.tokens[0].tokenId); + } + + async mintMultipleTokens(signer: TSigner, collectionId: number, tokens: {owner: ICrossAccountId, pieces: bigint, properties?: IProperty[]}[]): Promise { + throw Error('Not implemented'); + const creationResult = await this.helper.executeExtrinsic( + signer, + 'api.tx.unique.createMultipleItemsEx', [collectionId, {RefungibleMultipleOwners: tokens}], + true, // `Unable to mint RFT tokens for ${label}`, + ); + const collection = this.getCollectionObject(collectionId); + return this.helper.util.extractTokensFromCreationResult(creationResult).tokens.map((x: IToken) => collection.getTokenObject(x.tokenId)); + } + + /** + * Mint multiple RFT tokens with one owner + * @param signer keyring of signer + * @param collectionId ID of collection + * @param owner tokens owner + * @param tokens array of tokens with properties and pieces + * @example mintMultipleTokensWithOneOwner(aliceKeyring, 10, {Substrate: "5GHoZe9c73RYbVzq..."}, [{pieces: 100000n, properties: [{key: "gender", value: "male"}]}]); + * @returns array of newly created RFT tokens + */ + async mintMultipleTokensWithOneOwner(signer: TSigner, collectionId: number, owner: ICrossAccountId, tokens: {pieces: bigint, properties?: IProperty[]}[]): Promise { + const rawTokens = []; + for (const token of tokens) { + const raw = {ReFungible: {pieces: token.pieces, properties: token.properties}}; + rawTokens.push(raw); + } + const creationResult = await this.helper.executeExtrinsic( + signer, + 'api.tx.unique.createMultipleItems', [collectionId, owner, rawTokens], + true, + ); + const collection = this.getCollectionObject(collectionId); + return this.helper.util.extractTokensFromCreationResult(creationResult).tokens.map((x: IToken) => collection.getTokenObject(x.tokenId)); + } + + /** + * Destroys a concrete instance of RFT. + * @param signer keyring of signer + * @param collectionId ID of collection + * @param tokenId ID of token + * @param amount number of pieces to be burnt + * @example burnToken(aliceKeyring, 10, 5); + * @returns ```true``` if the extrinsic is successful, otherwise ```false``` + */ + async burnToken(signer: IKeyringPair, collectionId: number, tokenId: number, amount=1n): Promise { + return await super.burnToken(signer, collectionId, tokenId, amount); + } + + /** + * Destroys a concrete instance of RFT on behalf of the owner. + * @param signer keyring of signer + * @param collectionId ID of collection + * @param tokenId ID of token + * @param fromAddressObj address on behalf of which the token will be burnt + * @param amount number of pieces to be burnt + * @example burnTokenFrom(aliceKeyring, 10, 5, {Substrate: "5DyN4Y92vZCjv38fg..."}, 2n) + * @returns ```true``` if extrinsic success, otherwise ```false``` + */ + async burnTokenFrom(signer: IKeyringPair, collectionId: number, tokenId: number, fromAddressObj: ICrossAccountId, amount=1n): Promise { + return await super.burnTokenFrom(signer, collectionId, tokenId, fromAddressObj, amount); + } + + /** + * Set, change, or remove approved address to transfer the ownership of the RFT. 
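+ *
+ * Sketch of checking the resulting allowance (illustrative; assumes `aliceKeyring` owns the pieces and the spender address below exists):
+ * await approveToken(aliceKeyring, 10, 5, {Substrate: "5GHoZe9c73RYbVzq..."}, 10000n);
+ * const allowance = await getTokenApprovedPieces(10, 5, {Substrate: "5GHoZe9c73RYbVzq..."}, {Substrate: aliceKeyring.address});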
+ *
+ * @param signer keyring of signer
+ * @param collectionId ID of collection
+ * @param tokenId ID of token
+ * @param toAddressObj address to approve
+ * @param amount number of pieces to be approved
+ * @example approveToken(aliceKeyring, 10, 5, {Substrate: "5GHoZe9c73RYbVzq..."}, 10000n);
+ * @returns ```true``` if the approval succeeded, otherwise ```false```
+ */
+ async approveToken(signer: IKeyringPair, collectionId: number, tokenId: number, toAddressObj: ICrossAccountId, amount=1n) {
+ return super.approveToken(signer, collectionId, tokenId, toAddressObj, amount);
+ }
+
+ /**
+ * Get total number of pieces
+ * @param collectionId ID of collection
+ * @param tokenId ID of token
+ * @example getTokenTotalPieces(10, 5);
+ * @returns number of pieces
+ */
+ async getTokenTotalPieces(collectionId: number, tokenId: number): Promise<bigint> {
+ return (await this.helper.callRpc('api.rpc.unique.totalPieces', [collectionId, tokenId])).unwrap().toBigInt();
+ }
+
+ /**
+ * Change number of token pieces. Signer must be the owner of all token pieces.
+ * @param signer keyring of signer
+ * @param collectionId ID of collection
+ * @param tokenId ID of token
+ * @param amount new number of pieces
+ * @example repartitionToken(aliceKeyring, 10, 5, 12345n);
+ * @returns ```true``` if the repartition succeeded, otherwise ```false```
+ */
+ async repartitionToken(signer: TSigner, collectionId: number, tokenId: number, amount: bigint): Promise<boolean> {
+ const currentAmount = await this.getTokenTotalPieces(collectionId, tokenId);
+ const repartitionResult = await this.helper.executeExtrinsic(
+ signer,
+ 'api.tx.unique.repartition', [collectionId, tokenId, amount],
+ true,
+ );
+ if(currentAmount < amount) return this.helper.util.findCollectionInEvents(repartitionResult.result.events, collectionId, 'common', 'ItemCreated');
+ return this.helper.util.findCollectionInEvents(repartitionResult.result.events, collectionId, 'common', 'ItemDestroyed');
+ }
+}
+
+
+class FTGroup extends CollectionGroup {
+ /**
+ * Get collection object
+ * @param collectionId ID of collection
+ * @example getCollectionObject(2);
+ * @returns instance of UniqueFTCollection
+ */
+ getCollectionObject(collectionId: number): UniqueFTCollection {
+ return new UniqueFTCollection(collectionId, this.helper);
+ }
+
+ /**
+ * Mint new fungible collection
+ * @param signer keyring of signer
+ * @param collectionOptions Collection options
+ * @param decimalPoints number of token decimals
+ * @example
+ * mintCollection(aliceKeyring, {
+ * name: 'New',
+ * description: 'New collection',
+ * tokenPrefix: 'NEW',
+ * }, 18)
+ * @returns newly created fungible collection
+ */
+ async mintCollection(signer: TSigner, collectionOptions: ICollectionCreationOptions = {}, decimalPoints = 0): Promise<UniqueFTCollection> {
+ collectionOptions = JSON.parse(JSON.stringify(collectionOptions)) as ICollectionCreationOptions; // Clone object
+ if(collectionOptions.tokenPropertyPermissions) throw Error('Fungible collections have no tokenPropertyPermissions');
+ collectionOptions.mode = {fungible: decimalPoints};
+ for (const key of ['name', 'description', 'tokenPrefix']) {
+ if (typeof collectionOptions[key as 'name' | 'description' | 'tokenPrefix'] === 'string') collectionOptions[key as 'name' | 'description' | 'tokenPrefix'] = this.helper.util.str2vec(collectionOptions[key as 'name' | 'description' | 'tokenPrefix'] as string);
+ }
+ const creationResult = await this.helper.executeExtrinsic(
+ signer,
+ 'api.tx.unique.createCollectionEx', [collectionOptions],
+ true,
+ );
+ return
this.getCollectionObject(this.helper.util.extractCollectionIdFromCreationResult(creationResult));
+ }
+
+ /**
+ * Mint tokens
+ * @param signer keyring of signer
+ * @param collectionId ID of collection
+ * @param amount amount of tokens to be minted
+ * @param owner address owner of new tokens
+ * @example mintTokens(aliceKeyring, 10, 1000n, {Substrate: "5GHoZe9c73RYbVzq"});
+ * @returns ```true``` if extrinsic success, otherwise ```false```
+ */
+ async mintTokens(signer: TSigner, collectionId: number, amount: bigint, owner: ICrossAccountId | string): Promise<boolean> {
+ const creationResult = await this.helper.executeExtrinsic(
+ signer,
+ 'api.tx.unique.createItem', [collectionId, (typeof owner === 'string') ? {Substrate: owner} : owner, {
+ fungible: {
+ value: amount,
+ },
+ }],
+ true, // `Unable to mint fungible tokens for ${label}`,
+ );
+ return this.helper.util.findCollectionInEvents(creationResult.result.events, collectionId, 'common', 'ItemCreated');
+ }
+
+ /**
+ * Mint multiple Fungible tokens with one owner
+ * @param signer keyring of signer
+ * @param collectionId ID of collection
+ * @param tokens array of amounts to mint (each as ```{value: bigint}```)
+ * @param owner tokens owner
+ * @returns ```true``` if extrinsic success, otherwise ```false```
+ */
+ async mintMultipleTokensWithOneOwner(signer: TSigner, collectionId: number, tokens: {value: bigint}[], owner: ICrossAccountId): Promise<boolean> {
+ const rawTokens = [];
+ for (const token of tokens) {
+ const raw = {Fungible: {Value: token.value}};
+ rawTokens.push(raw);
+ }
+ const creationResult = await this.helper.executeExtrinsic(
+ signer,
+ 'api.tx.unique.createMultipleItems', [collectionId, owner, rawTokens],
+ true,
+ );
+ return this.helper.util.findCollectionInEvents(creationResult.result.events, collectionId, 'common', 'ItemCreated');
+ }
+
+ /**
+ * Get the top 10 owners with the largest balance for the Fungible collection
+ * @param collectionId ID of collection
+ * @example getTop10Owners(10);
+ * @returns array of ```ICrossAccountId```
+ */
+ async getTop10Owners(collectionId: number): Promise<CrossAccountId[]> {
+ return (await this.helper.callRpc('api.rpc.unique.tokenOwners', [collectionId, 0])).toJSON().map(CrossAccountId.fromLowerCaseKeys);
+ }
+
+ /**
+ * Get account balance
+ * @param collectionId ID of collection
+ * @param addressObj address of owner
+ * @example getBalance(10, {Substrate: "5GHoZe9c73RYbVzq..."})
+ * @returns amount of fungible tokens owned by address
+ */
+ async getBalance(collectionId: number, addressObj: ICrossAccountId): Promise<bigint> {
+ return (await this.helper.callRpc('api.rpc.unique.balance', [collectionId, addressObj, 0])).toBigInt();
+ }
+
+ /**
+ * Transfer tokens to address
+ * @param signer keyring of signer
+ * @param collectionId ID of collection
+ * @param toAddressObj recipient address
+ * @param amount amount of tokens to be sent
+ * @example transfer(aliceKeyring, 10, {Substrate: "5GHoZe9c73RYbVzq..."}, 1000n);
+ * @returns ```true``` if extrinsic success, otherwise ```false```
+ */
+ async transfer(signer: TSigner, collectionId: number, toAddressObj: ICrossAccountId, amount=1n) {
+ return await super.transferToken(signer, collectionId, 0, toAddressObj, amount);
+ }
+
+ /**
+ * Transfer some tokens on behalf of the owner.
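+ *
+ * Approve-then-transferFrom sketch for fungible collections (illustrative; `ownerKeyring` and `bobKeyring` are assumed placeholders):
+ * await approveTokens(ownerKeyring, 10, {Substrate: bobKeyring.address}, 10000n);
+ * await transferFrom(bobKeyring, 10, {Substrate: ownerKeyring.address}, {Substrate: bobKeyring.address}, 10000n);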
+ * @param signer keyring of signer + * @param collectionId ID of collection + * @param fromAddressObj address on behalf of which tokens will be sent + * @param toAddressObj address where token to be sent + * @param amount number of tokens to be sent + * @example transferFrom(aliceKeyring, 10, {Substrate: "5GHoZe9c73RYbVzq..."}, {Substrate: "5DfhbVfww7ThF8q6f3ij..."}, 10000n); + * @returns ```true``` if extrinsic success, otherwise ```false``` + */ + async transferFrom(signer: TSigner, collectionId: number, fromAddressObj: ICrossAccountId, toAddressObj: ICrossAccountId, amount=1n) { + return await super.transferTokenFrom(signer, collectionId, 0, fromAddressObj, toAddressObj, amount); + } + + /** + * Destroy some amount of tokens + * @param signer keyring of signer + * @param collectionId ID of collection + * @param amount amount of tokens to be destroyed + * @example burnTokens(aliceKeyring, 10, 1000n); + * @returns ```true``` if extrinsic success, otherwise ```false``` + */ + async burnTokens(signer: IKeyringPair, collectionId: number, amount=1n): Promise { + return await super.burnToken(signer, collectionId, 0, amount); + } + + /** + * Burn some tokens on behalf of the owner. + * @param signer keyring of signer + * @param collectionId ID of collection + * @param fromAddressObj address on behalf of which tokens will be burnt + * @param amount amount of tokens to be burnt + * @example burnTokensFrom(aliceKeyring, 10, {Substrate: "5GHoZe9c73RYbVzq..."}, 1000n); + * @returns ```true``` if extrinsic success, otherwise ```false``` + */ + async burnTokensFrom(signer: IKeyringPair, collectionId: number, fromAddressObj: ICrossAccountId, amount=1n): Promise { + return await super.burnTokenFrom(signer, collectionId, 0, fromAddressObj, amount); + } + + /** + * Get total collection supply + * @param collectionId + * @returns + */ + async getTotalPieces(collectionId: number): Promise { + return (await this.helper.callRpc('api.rpc.unique.totalPieces', [collectionId, 0])).unwrap().toBigInt(); + } + + /** + * Set, change, or remove approved address to transfer tokens. 
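+ *
+ * Sketch of verifying the allowance afterwards (illustrative; assumes `aliceKeyring` holds tokens of fungible collection 10):
+ * await approveTokens(aliceKeyring, 10, {Substrate: "5GHoZe9c73RYbVzq..."}, 1000n);
+ * const approved = await getApprovedTokens(10, {Substrate: aliceKeyring.address}, {Substrate: "5GHoZe9c73RYbVzq..."});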
+ * + * @param signer keyring of signer + * @param collectionId ID of collection + * @param toAddressObj address to be approved + * @param amount amount of tokens to be approved + * @example approveTokens(aliceKeyring, 10, {Substrate: "5GHoZe9c73RYbVzq..."}, 1000n) + * @returns ```true``` if extrinsic success, otherwise ```false``` + */ + async approveTokens(signer: IKeyringPair, collectionId: number, toAddressObj: ICrossAccountId, amount=1n) { + return super.approveToken(signer, collectionId, 0, toAddressObj, amount); + } + + /** + * Get amount of fungible tokens approved to transfer + * @param collectionId ID of collection + * @param fromAddressObj owner of tokens + * @param toAddressObj the address approved for the transfer of tokens on behalf of the owner + * @returns number of tokens approved for the transfer + */ + async getApprovedTokens(collectionId: number, fromAddressObj: ICrossAccountId, toAddressObj: ICrossAccountId) { + return super.getTokenApprovedPieces(collectionId, 0, toAddressObj, fromAddressObj); + } +} + + +class ChainGroup extends HelperGroup { + /** + * Get system properties of a chain + * @example getChainProperties(); + * @returns ss58Format, token decimals, and token symbol + */ + getChainProperties(): IChainProperties { + const properties = (this.helper.getApi() as any).registry.getChainProperties().toJSON(); + return { + ss58Format: properties.ss58Format.toJSON(), + tokenDecimals: properties.tokenDecimals.toJSON(), + tokenSymbol: properties.tokenSymbol.toJSON(), + }; + } + + /** + * Get chain header + * @example getLatestBlockNumber(); + * @returns the number of the last block + */ + async getLatestBlockNumber(): Promise { + return (await this.helper.callRpc('api.rpc.chain.getHeader')).number.toNumber(); + } + + /** + * Get block hash by block number + * @param blockNumber number of block + * @example getBlockHashByNumber(12345); + * @returns hash of a block + */ + async getBlockHashByNumber(blockNumber: number): Promise { + const blockHash = (await this.helper.callRpc('api.rpc.chain.getBlockHash', [blockNumber])).toJSON(); + if(blockHash === '0x0000000000000000000000000000000000000000000000000000000000000000') return null; + return blockHash; + } + + // TODO add docs + async getBlock(blockHashOrNumber: string | number): Promise { + const blockHash = typeof blockHashOrNumber === 'string' ? 
blockHashOrNumber : await this.getBlockHashByNumber(blockHashOrNumber); + if (!blockHash) return null; + return (await this.helper.callRpc('api.rpc.chain.getBlock', [blockHash])).toHuman().block; + } + + /** + * Get account nonce + * @param address substrate address + * @example getNonce("5GrwvaEF5zXb26Fz..."); + * @returns number, account's nonce + */ + async getNonce(address: TSubstrateAccount): Promise { + return (await this.helper.callRpc('api.query.system.account', [address])).nonce.toNumber(); + } +} + +class SubstrateBalanceGroup extends HelperGroup { + /** + * Get substrate address balance + * @param address substrate address + * @example getSubstrate("5GrwvaEF5zXb26Fz...") + * @returns amount of tokens on address + */ + async getSubstrate(address: TSubstrateAccount): Promise { + return (await this.helper.callRpc('api.query.system.account', [address])).data.free.toBigInt(); + } + + /** + * Transfer tokens to substrate address + * @param signer keyring of signer + * @param address substrate address of a recipient + * @param amount amount of tokens to be transfered + * @example transferToSubstrate(aliceKeyring, "5GrwvaEF5zXb26Fz...", 100_000_000_000n); + * @returns ```true``` if extrinsic success, otherwise ```false``` + */ + async transferToSubstrate(signer: TSigner, address: TSubstrateAccount, amount: bigint | string): Promise { + const result = await this.helper.executeExtrinsic(signer, 'api.tx.balances.transfer', [address, amount], true/*, `Unable to transfer balance from ${this.helper.getSignerAddress(signer)} to ${address}`*/); + + let transfer = {from: null, to: null, amount: 0n} as any; + result.result.events.forEach(({event: {data, method, section}}) => { + if ((section === 'balances') && (method === 'Transfer')) { + transfer = { + from: this.helper.address.normalizeSubstrate(data[0]), + to: this.helper.address.normalizeSubstrate(data[1]), + amount: BigInt(data[2]), + }; + } + }); + const isSuccess = this.helper.address.normalizeSubstrate(typeof signer === 'string' ? 
signer : signer.address) === transfer.from + && this.helper.address.normalizeSubstrate(address) === transfer.to + && BigInt(amount) === transfer.amount; + return isSuccess; + } + + /** + * Get full substrate balance including free, miscFrozen, feeFrozen, and reserved + * @param address substrate address + * @returns + */ + async getSubstrateFull(address: TSubstrateAccount): Promise { + const accountInfo = (await this.helper.callRpc('api.query.system.account', [address])).data; + return {free: accountInfo.free.toBigInt(), miscFrozen: accountInfo.miscFrozen.toBigInt(), feeFrozen: accountInfo.feeFrozen.toBigInt(), reserved: accountInfo.reserved.toBigInt()}; + } +} + +class EthereumBalanceGroup extends HelperGroup { + /** + * Get ethereum address balance + * @param address ethereum address + * @example getEthereum("0x9F0583DbB855d...") + * @returns amount of tokens on address + */ + async getEthereum(address: TEthereumAccount): Promise { + return (await this.helper.callRpc('api.rpc.eth.getBalance', [address])).toBigInt(); + } + + /** + * Transfer tokens to address + * @param signer keyring of signer + * @param address Ethereum address of a recipient + * @param amount amount of tokens to be transfered + * @example transferToEthereum(alithKeyring, "0x9F0583DbB855d...", 100_000_000_000n); + * @returns ```true``` if extrinsic success, otherwise ```false``` + */ + async transferToEthereum(signer: TSigner, address: TEthereumAccount, amount: bigint | string): Promise { + const result = await this.helper.executeExtrinsic(signer, 'api.tx.balances.transfer', [address, amount], true); + + let transfer = {from: null, to: null, amount: 0n} as any; + result.result.events.forEach(({event: {data, method, section}}) => { + if ((section === 'balances') && (method === 'Transfer')) { + transfer = { + from: data[0].toString(), + to: data[1].toString(), + amount: BigInt(data[2]), + }; + } + }); + const isSuccess = (typeof signer === 'string' ? signer : signer.address) === transfer.from + && address === transfer.to + && BigInt(amount) === transfer.amount; + return isSuccess; + } +} + +class BalanceGroup extends HelperGroup { + subBalanceGroup: SubstrateBalanceGroup; + ethBalanceGroup: EthereumBalanceGroup; + + constructor(helper: T) { + super(helper); + this.subBalanceGroup = new SubstrateBalanceGroup(helper); + this.ethBalanceGroup = new EthereumBalanceGroup(helper); + } + + getCollectionCreationPrice(): bigint { + return 2n * this.getOneTokenNominal(); + } + /** + * Representation of the native token in the smallest unit - one OPAL (OPL), QUARTZ (QTZ), or UNIQUE (UNQ). + * @example getOneTokenNominal() + * @returns ```BigInt``` representation of the native token in the smallest unit, e.g. ```1_000_000_000_000_000_000n``` for QTZ. 
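 *
 * A usage sketch (illustrative only; assumes the 18-decimal QTZ case mentioned above):
 * const fiveTokens = 5n * helper.balance.getOneTokenNominal(); // 5_000_000_000_000_000_000n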
+ */ + getOneTokenNominal(): bigint { + const chainProperties = this.helper.chain.getChainProperties(); + return 10n ** BigInt((chainProperties.tokenDecimals || [18])[0]); + } + + /** + * Get substrate address balance + * @param address substrate address + * @example getSubstrate("5GrwvaEF5zXb26Fz...") + * @returns amount of tokens on address + */ + async getSubstrate(address: TSubstrateAccount): Promise { + return this.subBalanceGroup.getSubstrate(address); + } + + /** + * Get full substrate balance including free, miscFrozen, feeFrozen, and reserved + * @param address substrate address + * @returns + */ + async getSubstrateFull(address: TSubstrateAccount): Promise { + return this.subBalanceGroup.getSubstrateFull(address); + } + + /** + * Get ethereum address balance + * @param address ethereum address + * @example getEthereum("0x9F0583DbB855d...") + * @returns amount of tokens on address + */ + async getEthereum(address: TEthereumAccount): Promise { + return this.ethBalanceGroup.getEthereum(address); + } + + /** + * Transfer tokens to substrate address + * @param signer keyring of signer + * @param address substrate address of a recipient + * @param amount amount of tokens to be transfered + * @example transferToSubstrate(aliceKeyring, "5GrwvaEF5zXb26Fz...", 100_000_000_000n); + * @returns ```true``` if extrinsic success, otherwise ```false``` + */ + async transferToSubstrate(signer: TSigner, address: TSubstrateAccount, amount: bigint | string): Promise { + return this.subBalanceGroup.transferToSubstrate(signer, address, amount); + } +} + +class AddressGroup extends HelperGroup { + /** + * Normalizes the address to the specified ss58 format, by default ```42```. + * @param address substrate address + * @param ss58Format format for address conversion, by default ```42``` + * @example normalizeSubstrate("unjKJQJrRd238pkUZZvzDQrfKuM39zBSnQ5zjAGAGcdRhaJTx") // returns 5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY + * @returns substrate address converted to normalized (i.e., starting with 5) or specified explicitly representation + */ + normalizeSubstrate(address: TSubstrateAccount, ss58Format = 42): TSubstrateAccount { + return CrossAccountId.normalizeSubstrateAddress(address, ss58Format); + } + + /** + * Get address in the connected chain format + * @param address substrate address + * @example normalizeSubstrateToChainFormat("5GrwvaEF5zXb26Fz...") // returns unjKJQJrRd238pkUZZ... for Unique Network + * @returns address in chain format + */ + normalizeSubstrateToChainFormat(address: TSubstrateAccount): TSubstrateAccount { + return this.normalizeSubstrate(address, this.helper.chain.getChainProperties().ss58Format); + } + + /** + * Get substrate mirror of an ethereum address + * @param ethAddress ethereum address + * @param toChainFormat false for normalized account + * @example ethToSubstrate('0x9F0583DbB855d...') + * @returns substrate mirror of a provided ethereum address + */ + ethToSubstrate(ethAddress: TEthereumAccount, toChainFormat=false): TSubstrateAccount { + return CrossAccountId.translateEthToSub(ethAddress, toChainFormat ? 
this.helper.chain.getChainProperties().ss58Format : undefined); + } + + /** + * Get ethereum mirror of a substrate address + * @param subAddress substrate account + * @example substrateToEth("5DnSF6RRjwteE3BrC...") + * @returns ethereum mirror of a provided substrate address + */ + substrateToEth(subAddress: TSubstrateAccount): TEthereumAccount { + return CrossAccountId.translateSubToEth(subAddress); + } + + paraSiblingSovereignAccount(paraid: number) { + // We are getting a *sibling* parachain sovereign account, + // so we need a sibling prefix: encoded(b"sibl") == 0x7369626c + const siblingPrefix = '0x7369626c'; + + const encodedParaId = this.helper.getApi().createType('u32', paraid).toHex(true).substring(2); + const suffix = '000000000000000000000000000000000000000000000000'; + + return siblingPrefix + encodedParaId + suffix; + } +} + +class StakingGroup extends HelperGroup { + /** + * Stake tokens for App Promotion + * @param signer keyring of signer + * @param amountToStake amount of tokens to stake + * @param label extra label for log + * @returns + */ + async stake(signer: TSigner, amountToStake: bigint, label?: string): Promise { + if(typeof label === 'undefined') label = `${signer.address} amount: ${amountToStake}`; + const _stakeResult = await this.helper.executeExtrinsic( + signer, 'api.tx.appPromotion.stake', + [amountToStake], true, + ); + // TODO extract info from stakeResult + return true; + } + + /** + * Unstake tokens for App Promotion + * @param signer keyring of signer + * @param amountToUnstake amount of tokens to unstake + * @param label extra label for log + * @returns block number where balances will be unlocked + */ + async unstake(signer: TSigner, label?: string): Promise { + if(typeof label === 'undefined') label = `${signer.address}`; + const _unstakeResult = await this.helper.executeExtrinsic( + signer, 'api.tx.appPromotion.unstake', + [], true, + ); + // TODO extract block number fron events + return 1; + } + + /** + * Get total staked amount for address + * @param address substrate or ethereum address + * @returns total staked amount + */ + async getTotalStaked(address?: ICrossAccountId): Promise { + if (address) return (await this.helper.callRpc('api.rpc.appPromotion.totalStaked', [address])).toBigInt(); + return (await this.helper.callRpc('api.rpc.appPromotion.totalStaked')).toBigInt(); + } + + /** + * Get total staked per block + * @param address substrate or ethereum address + * @returns array of stakes. `block` – the number of the block in which the stake was made. `amount` - the number of tokens staked in the block + */ + async getTotalStakedPerBlock(address: ICrossAccountId): Promise { + const rawTotalStakerdPerBlock = await this.helper.callRpc('api.rpc.appPromotion.totalStakedPerBlock', [address]); + return rawTotalStakerdPerBlock.map(([block, amount]: any[]) => { + return { + block: block.toBigInt(), + amount: amount.toBigInt(), + }; + }); + } + + /** + * Get total pending unstake amount for address + * @param address substrate or ethereum address + * @returns total pending unstake amount + */ + async getPendingUnstake(address: ICrossAccountId): Promise { + return (await this.helper.callRpc('api.rpc.appPromotion.pendingUnstake', [address])).toBigInt(); + } + + /** + * Get pending unstake amount per block for address + * @param address substrate or ethereum address + * @returns array of pending stakes. `block` – the number of the block in which the unstake was made. 
`amount` - the number of tokens unstaked in the block + */ + async getPendingUnstakePerBlock(address: ICrossAccountId): Promise { + const rawUnstakedPerBlock = await this.helper.callRpc('api.rpc.appPromotion.pendingUnstakePerBlock', [address]); + const result = rawUnstakedPerBlock.map(([block, amount]: any[]) => { + return { + block: block.toBigInt(), + amount: amount.toBigInt(), + }; + }); + return result; + } +} + +class SchedulerGroup extends HelperGroup { + constructor(helper: UniqueHelper) { + super(helper); + } + + async cancelScheduled(signer: TSigner, scheduledId: string) { + return this.helper.executeExtrinsic( + signer, + 'api.tx.scheduler.cancelNamed', + [scheduledId], + true, + ); + } + + async changePriority(signer: TSigner, scheduledId: string, priority: number) { + return this.helper.executeExtrinsic( + signer, + 'api.tx.scheduler.changeNamedPriority', + [scheduledId, priority], + true, + ); + } + + scheduleAt( + scheduledId: string, + executionBlockNumber: number, + options: ISchedulerOptions = {}, + ) { + return this.schedule('scheduleNamed', scheduledId, executionBlockNumber, options); + } + + scheduleAfter( + scheduledId: string, + blocksBeforeExecution: number, + options: ISchedulerOptions = {}, + ) { + return this.schedule('scheduleNamedAfter', scheduledId, blocksBeforeExecution, options); + } + + schedule( + scheduleFn: 'scheduleNamed' | 'scheduleNamedAfter', + scheduledId: string, + blocksNum: number, + options: ISchedulerOptions = {}, + ) { + // eslint-disable-next-line @typescript-eslint/naming-convention + const ScheduledHelperType = ScheduledUniqueHelper(this.helper.helperBase); + return this.helper.clone(ScheduledHelperType, { + scheduleFn, + scheduledId, + blocksNum, + options, + }) as T; + } +} + +class ForeignAssetsGroup extends HelperGroup { + async register(signer: TSigner, ownerAddress: TSubstrateAccount, location: any, metadata: IForeignAssetMetadata) { + await this.helper.executeExtrinsic( + signer, + 'api.tx.foreignAssets.registerForeignAsset', + [ownerAddress, location, metadata], + true, + ); + } + + async update(signer: TSigner, foreignAssetId: number, location: any, metadata: IForeignAssetMetadata) { + await this.helper.executeExtrinsic( + signer, + 'api.tx.foreignAssets.updateForeignAsset', + [foreignAssetId, location, metadata], + true, + ); + } +} + +class XcmGroup extends HelperGroup { + palletName: string; + + constructor(helper: T, palletName: string) { + super(helper); + + this.palletName = palletName; + } + + async limitedReserveTransferAssets(signer: TSigner, destination: any, beneficiary: any, assets: any, feeAssetItem: number, weightLimit: number) { + await this.helper.executeExtrinsic(signer, `api.tx.${this.palletName}.limitedReserveTransferAssets`, [destination, beneficiary, assets, feeAssetItem, {Limited: weightLimit}], true); + } +} + +class XTokensGroup extends HelperGroup { + async transfer(signer: TSigner, currencyId: any, amount: bigint, destination: any, destWeight: number) { + await this.helper.executeExtrinsic(signer, 'api.tx.xTokens.transfer', [currencyId, amount, destination, destWeight], true); + } + + async transferMultiasset(signer: TSigner, asset: any, destination: any, destWeight: number) { + await this.helper.executeExtrinsic(signer, 'api.tx.xTokens.transferMultiasset', [asset, destination, destWeight], true); + } + + async transferMulticurrencies(signer: TSigner, currencies: any[], feeItem: number, destLocation: any, destWeight: number) { + await this.helper.executeExtrinsic(signer, 
'api.tx.xTokens.transferMulticurrencies', [currencies, feeItem, destLocation, destWeight], true); + } +} + +class TokensGroup extends HelperGroup { + async accounts(address: string, currencyId: any) { + const {free} = (await this.helper.callRpc('api.query.tokens.accounts', [address, currencyId])).toJSON() as any; + return BigInt(free); + } +} + +class AssetsGroup extends HelperGroup { + async create(signer: TSigner, assetId: number, admin: string, minimalBalance: bigint) { + await this.helper.executeExtrinsic(signer, 'api.tx.assets.create', [assetId, admin, minimalBalance], true); + } + + async setMetadata(signer: TSigner, assetId: number, name: string, symbol: string, decimals: number) { + await this.helper.executeExtrinsic(signer, 'api.tx.assets.setMetadata', [assetId, name, symbol, decimals], true); + } + + async mint(signer: TSigner, assetId: number, beneficiary: string, amount: bigint) { + await this.helper.executeExtrinsic(signer, 'api.tx.assets.mint', [assetId, beneficiary, amount], true); + } + + async account(assetId: string | number, address: string) { + const accountAsset = ( + await this.helper.callRpc('api.query.assets.account', [assetId, address]) + ).toJSON()! as any; + + if (accountAsset !== null) { + return BigInt(accountAsset['balance']); + } else { + return null; + } + } +} + +class AcalaAssetRegistryGroup extends HelperGroup { + async registerForeignAsset(signer: TSigner, destination: any, metadata: AcalaAssetMetadata) { + await this.helper.executeExtrinsic(signer, 'api.tx.assetRegistry.registerForeignAsset', [destination, metadata], true); + } +} + +class MoonbeamAssetManagerGroup extends HelperGroup { + makeRegisterForeignAssetProposal(assetInfo: MoonbeamAssetInfo) { + const apiPrefix = 'api.tx.assetManager.'; + + const registerTx = this.helper.constructApiCall( + apiPrefix + 'registerForeignAsset', + [assetInfo.location, assetInfo.metadata, assetInfo.existentialDeposit, assetInfo.isSufficient], + ); + + const setUnitsTx = this.helper.constructApiCall( + apiPrefix + 'setAssetUnitsPerSecond', + [assetInfo.location, assetInfo.unitsPerSecond, assetInfo.numAssetsWeightHint], + ); + + const batchCall = this.helper.getApi().tx.utility.batchAll([registerTx, setUnitsTx]); + const encodedProposal = batchCall?.method.toHex() || ''; + return encodedProposal; + } + + async assetTypeId(location: any) { + return await this.helper.callRpc('api.query.assetManager.assetTypeId', [location]); + } +} + +class MoonbeamDemocracyGroup extends HelperGroup { + async notePreimage(signer: TSigner, encodedProposal: string) { + await this.helper.executeExtrinsic(signer, 'api.tx.democracy.notePreimage', [encodedProposal], true); + } + + externalProposeMajority(proposalHash: string) { + return this.helper.constructApiCall('api.tx.democracy.externalProposeMajority', [proposalHash]); + } + + fastTrack(proposalHash: string, votingPeriod: number, delayPeriod: number) { + return this.helper.constructApiCall('api.tx.democracy.fastTrack', [proposalHash, votingPeriod, delayPeriod]); + } + + async referendumVote(signer: TSigner, referendumIndex: number, accountVote: DemocracyStandardAccountVote) { + await this.helper.executeExtrinsic(signer, 'api.tx.democracy.vote', [referendumIndex, {Standard: accountVote}], true); + } +} + +class MoonbeamCollectiveGroup extends HelperGroup { + collective: string; + + constructor(helper: MoonbeamHelper, collective: string) { + super(helper); + + this.collective = collective; + } + + async propose(signer: TSigner, threshold: number, proposalHash: string, lengthBound: 
number) { + await this.helper.executeExtrinsic(signer, `api.tx.${this.collective}.propose`, [threshold, proposalHash, lengthBound], true); + } + + async vote(signer: TSigner, proposalHash: string, proposalIndex: number, approve: boolean) { + await this.helper.executeExtrinsic(signer, `api.tx.${this.collective}.vote`, [proposalHash, proposalIndex, approve], true); + } + + async close(signer: TSigner, proposalHash: string, proposalIndex: number, weightBound: number, lengthBound: number) { + await this.helper.executeExtrinsic(signer, `api.tx.${this.collective}.close`, [proposalHash, proposalIndex, weightBound, lengthBound], true); + } + + async proposalCount() { + return Number(await this.helper.callRpc(`api.query.${this.collective}.proposalCount`, [])); + } +} + +export type ChainHelperBaseConstructor = new(...args: any[]) => ChainHelperBase; +export type UniqueHelperConstructor = new(...args: any[]) => UniqueHelper; + +export class UniqueHelper extends ChainHelperBase { + balance: BalanceGroup; + collection: CollectionGroup; + nft: NFTGroup; + rft: RFTGroup; + ft: FTGroup; + staking: StakingGroup; + scheduler: SchedulerGroup; + foreignAssets: ForeignAssetsGroup; + xcm: XcmGroup; + xTokens: XTokensGroup; + tokens: TokensGroup; + + constructor(logger?: ILogger, options: {[key: string]: any} = {}) { + super(logger, options.helperBase ?? UniqueHelper); + + this.balance = new BalanceGroup(this); + this.collection = new CollectionGroup(this); + this.nft = new NFTGroup(this); + this.rft = new RFTGroup(this); + this.ft = new FTGroup(this); + this.staking = new StakingGroup(this); + this.scheduler = new SchedulerGroup(this); + this.foreignAssets = new ForeignAssetsGroup(this); + this.xcm = new XcmGroup(this, 'polkadotXcm'); + this.xTokens = new XTokensGroup(this); + this.tokens = new TokensGroup(this); + } + + getSudo() { + // eslint-disable-next-line @typescript-eslint/naming-convention + const SudoHelperType = SudoHelper(this.helperBase); + return this.clone(SudoHelperType) as T; + } +} + +export class XcmChainHelper extends ChainHelperBase { + async connect(wsEndpoint: string, _listeners?: any): Promise { + const wsProvider = new WsProvider(wsEndpoint); + this.api = new ApiPromise({ + provider: wsProvider, + }); + await this.api.isReadyOrError; + this.network = await UniqueHelper.detectNetwork(this.api); + } +} + +export class RelayHelper extends XcmChainHelper { + xcm: XcmGroup; + + constructor(logger?: ILogger, options: {[key: string]: any} = {}) { + super(logger, options.helperBase ?? RelayHelper); + + this.xcm = new XcmGroup(this, 'xcmPallet'); + } +} + +export class WestmintHelper extends XcmChainHelper { + balance: SubstrateBalanceGroup; + xcm: XcmGroup; + assets: AssetsGroup; + xTokens: XTokensGroup; + + constructor(logger?: ILogger, options: {[key: string]: any} = {}) { + super(logger, options.helperBase ?? WestmintHelper); + + this.balance = new SubstrateBalanceGroup(this); + this.xcm = new XcmGroup(this, 'polkadotXcm'); + this.assets = new AssetsGroup(this); + this.xTokens = new XTokensGroup(this); + } +} + +export class MoonbeamHelper extends XcmChainHelper { + balance: EthereumBalanceGroup; + assetManager: MoonbeamAssetManagerGroup; + assets: AssetsGroup; + xTokens: XTokensGroup; + democracy: MoonbeamDemocracyGroup; + collective: { + council: MoonbeamCollectiveGroup, + techCommittee: MoonbeamCollectiveGroup, + }; + + constructor(logger?: ILogger, options: {[key: string]: any} = {}) { + super(logger, options.helperBase ?? 
MoonbeamHelper); + + this.balance = new EthereumBalanceGroup(this); + this.assetManager = new MoonbeamAssetManagerGroup(this); + this.assets = new AssetsGroup(this); + this.xTokens = new XTokensGroup(this); + this.democracy = new MoonbeamDemocracyGroup(this); + this.collective = { + council: new MoonbeamCollectiveGroup(this, 'councilCollective'), + techCommittee: new MoonbeamCollectiveGroup(this, 'techCommitteeCollective'), + }; + } +} + +export class AcalaHelper extends XcmChainHelper { + balance: SubstrateBalanceGroup; + assetRegistry: AcalaAssetRegistryGroup; + xTokens: XTokensGroup; + tokens: TokensGroup; + + constructor(logger?: ILogger, options: {[key: string]: any} = {}) { + super(logger, options.helperBase ?? AcalaHelper); + + this.balance = new SubstrateBalanceGroup(this); + this.assetRegistry = new AcalaAssetRegistryGroup(this); + this.xTokens = new XTokensGroup(this); + this.tokens = new TokensGroup(this); + } + + getSudo() { + // eslint-disable-next-line @typescript-eslint/naming-convention + const SudoHelperType = SudoHelper(this.helperBase); + return this.clone(SudoHelperType) as T; + } +} + +// eslint-disable-next-line @typescript-eslint/naming-convention +function ScheduledUniqueHelper(Base: T) { + return class extends Base { + scheduleFn: 'scheduleNamed' | 'scheduleNamedAfter'; + scheduledId: string; + blocksNum: number; + options: ISchedulerOptions; + + constructor(...args: any[]) { + const logger = args[0] as ILogger; + const options = args[1] as { + scheduleFn: 'scheduleNamed' | 'scheduleNamedAfter', + scheduledId: string, + blocksNum: number, + options: ISchedulerOptions + }; + + super(logger); + + this.scheduleFn = options.scheduleFn; + this.scheduledId = options.scheduledId; + this.blocksNum = options.blocksNum; + this.options = options.options; + } + + executeExtrinsic(sender: IKeyringPair, scheduledExtrinsic: string, scheduledParams: any[], expectSuccess?: boolean): Promise { + const scheduledTx = this.constructApiCall(scheduledExtrinsic, scheduledParams); + const extrinsic = 'api.tx.scheduler.' + this.scheduleFn; + + return super.executeExtrinsic( + sender, + extrinsic, + [ + this.scheduledId, + this.blocksNum, + this.options.periodic ? [this.options.periodic.period, this.options.periodic.repetitions] : null, + this.options.priority ?? 
null, + {Value: scheduledTx}, + ], + expectSuccess, + ); + } + }; +} + +// eslint-disable-next-line @typescript-eslint/naming-convention +function SudoHelper(Base: T) { + return class extends Base { + constructor(...args: any[]) { + super(...args); + } + + executeExtrinsic ( + sender: IKeyringPair, + extrinsic: string, + params: any[], + expectSuccess?: boolean, + ): Promise { + const call = this.constructApiCall(extrinsic, params); + + return super.executeExtrinsic( + sender, + 'api.tx.sudo.sudo', + [call], + expectSuccess, + ); + } + }; +} + +export class UniqueBaseCollection { + helper: UniqueHelper; + collectionId: number; + + constructor(collectionId: number, uniqueHelper: UniqueHelper) { + this.collectionId = collectionId; + this.helper = uniqueHelper; + } + + async getData() { + return await this.helper.collection.getData(this.collectionId); + } + + async getLastTokenId() { + return await this.helper.collection.getLastTokenId(this.collectionId); + } + + async doesTokenExist(tokenId: number) { + return await this.helper.collection.doesTokenExist(this.collectionId, tokenId); + } + + async getAdmins() { + return await this.helper.collection.getAdmins(this.collectionId); + } + + async getAllowList() { + return await this.helper.collection.getAllowList(this.collectionId); + } + + async getEffectiveLimits() { + return await this.helper.collection.getEffectiveLimits(this.collectionId); + } + + async getProperties(propertyKeys?: string[] | null) { + return await this.helper.collection.getProperties(this.collectionId, propertyKeys); + } + + async getTokenNextSponsored(tokenId: number, addressObj: ICrossAccountId) { + return await this.helper.collection.getTokenNextSponsored(this.collectionId, tokenId, addressObj); + } + + async getOptions() { + return await this.helper.collection.getCollectionOptions(this.collectionId); + } + + async setSponsor(signer: TSigner, sponsorAddress: TSubstrateAccount) { + return await this.helper.collection.setSponsor(signer, this.collectionId, sponsorAddress); + } + + async confirmSponsorship(signer: TSigner) { + return await this.helper.collection.confirmSponsorship(signer, this.collectionId); + } + + async removeSponsor(signer: TSigner) { + return await this.helper.collection.removeSponsor(signer, this.collectionId); + } + + async setLimits(signer: TSigner, limits: ICollectionLimits) { + return await this.helper.collection.setLimits(signer, this.collectionId, limits); + } + + async changeOwner(signer: TSigner, ownerAddress: TSubstrateAccount) { + return await this.helper.collection.changeOwner(signer, this.collectionId, ownerAddress); + } + + async addAdmin(signer: TSigner, adminAddressObj: ICrossAccountId) { + return await this.helper.collection.addAdmin(signer, this.collectionId, adminAddressObj); + } + + async addToAllowList(signer: TSigner, addressObj: ICrossAccountId) { + return await this.helper.collection.addToAllowList(signer, this.collectionId, addressObj); + } + + async removeFromAllowList(signer: TSigner, addressObj: ICrossAccountId) { + return await this.helper.collection.removeFromAllowList(signer, this.collectionId, addressObj); + } + + async removeAdmin(signer: TSigner, adminAddressObj: ICrossAccountId) { + return await this.helper.collection.removeAdmin(signer, this.collectionId, adminAddressObj); + } + + async setProperties(signer: TSigner, properties: IProperty[]) { + return await this.helper.collection.setProperties(signer, this.collectionId, properties); + } + + async deleteProperties(signer: TSigner, propertyKeys: string[]) { + return await 
this.helper.collection.deleteProperties(signer, this.collectionId, propertyKeys); + } + + async setPermissions(signer: TSigner, permissions: ICollectionPermissions) { + return await this.helper.collection.setPermissions(signer, this.collectionId, permissions); + } + + async enableNesting(signer: TSigner, permissions: INestingPermissions) { + return await this.helper.collection.enableNesting(signer, this.collectionId, permissions); + } + + async disableNesting(signer: TSigner) { + return await this.helper.collection.disableNesting(signer, this.collectionId); + } + + async burn(signer: TSigner) { + return await this.helper.collection.burn(signer, this.collectionId); + } + + scheduleAt( + scheduledId: string, + executionBlockNumber: number, + options: ISchedulerOptions = {}, + ) { + const scheduledHelper = this.helper.scheduler.scheduleAt(scheduledId, executionBlockNumber, options); + return new UniqueBaseCollection(this.collectionId, scheduledHelper); + } + + scheduleAfter( + scheduledId: string, + blocksBeforeExecution: number, + options: ISchedulerOptions = {}, + ) { + const scheduledHelper = this.helper.scheduler.scheduleAfter(scheduledId, blocksBeforeExecution, options); + return new UniqueBaseCollection(this.collectionId, scheduledHelper); + } + + getSudo() { + return new UniqueBaseCollection(this.collectionId, this.helper.getSudo()); + } +} + + +export class UniqueNFTCollection extends UniqueBaseCollection { + getTokenObject(tokenId: number) { + return new UniqueNFToken(tokenId, this); + } + + async getTokensByAddress(addressObj: ICrossAccountId) { + return await this.helper.nft.getTokensByAddress(this.collectionId, addressObj); + } + + async getToken(tokenId: number, blockHashAt?: string) { + return await this.helper.nft.getToken(this.collectionId, tokenId, [], blockHashAt); + } + + async getTokenOwner(tokenId: number, blockHashAt?: string) { + return await this.helper.nft.getTokenOwner(this.collectionId, tokenId, blockHashAt); + } + + async getTokenTopmostOwner(tokenId: number, blockHashAt?: string) { + return await this.helper.nft.getTokenTopmostOwner(this.collectionId, tokenId, blockHashAt); + } + + async getTokenChildren(tokenId: number, blockHashAt?: string) { + return await this.helper.nft.getTokenChildren(this.collectionId, tokenId, blockHashAt); + } + + async getPropertyPermissions(propertyKeys: string[] | null = null) { + return await this.helper.nft.getPropertyPermissions(this.collectionId, propertyKeys); + } + + async getTokenProperties(tokenId: number, propertyKeys?: string[] | null) { + return await this.helper.nft.getTokenProperties(this.collectionId, tokenId, propertyKeys); + } + + async transferToken(signer: TSigner, tokenId: number, addressObj: ICrossAccountId) { + return await this.helper.nft.transferToken(signer, this.collectionId, tokenId, addressObj); + } + + async transferTokenFrom(signer: TSigner, tokenId: number, fromAddressObj: ICrossAccountId, toAddressObj: ICrossAccountId) { + return await this.helper.nft.transferTokenFrom(signer, this.collectionId, tokenId, fromAddressObj, toAddressObj); + } + + async approveToken(signer: TSigner, tokenId: number, toAddressObj: ICrossAccountId) { + return await this.helper.nft.approveToken(signer, this.collectionId, tokenId, toAddressObj); + } + + async isTokenApproved(tokenId: number, toAddressObj: ICrossAccountId) { + return await this.helper.nft.isTokenApproved(this.collectionId, tokenId, toAddressObj); + } + + async mintToken(signer: TSigner, owner: ICrossAccountId = {Substrate: signer.address}, properties?: 
IProperty[]) { + return await this.helper.nft.mintToken(signer, {collectionId: this.collectionId, owner, properties}); + } + + async mintMultipleTokens(signer: TSigner, tokens: {owner: ICrossAccountId, properties?: IProperty[]}[]) { + return await this.helper.nft.mintMultipleTokens(signer, this.collectionId, tokens); + } + + async burnToken(signer: TSigner, tokenId: number) { + return await this.helper.nft.burnToken(signer, this.collectionId, tokenId); + } + + async burnTokenFrom(signer: TSigner, tokenId: number, fromAddressObj: ICrossAccountId) { + return await this.helper.nft.burnTokenFrom(signer, this.collectionId, tokenId, fromAddressObj); + } + + async setTokenProperties(signer: TSigner, tokenId: number, properties: IProperty[]) { + return await this.helper.nft.setTokenProperties(signer, this.collectionId, tokenId, properties); + } + + async deleteTokenProperties(signer: TSigner, tokenId: number, propertyKeys: string[]) { + return await this.helper.nft.deleteTokenProperties(signer, this.collectionId, tokenId, propertyKeys); + } + + async setTokenPropertyPermissions(signer: TSigner, permissions: ITokenPropertyPermission[]) { + return await this.helper.nft.setTokenPropertyPermissions(signer, this.collectionId, permissions); + } + + async nestToken(signer: TSigner, tokenId: number, toTokenObj: IToken) { + return await this.helper.nft.nestToken(signer, {collectionId: this.collectionId, tokenId}, toTokenObj); + } + + async unnestToken(signer: TSigner, tokenId: number, fromTokenObj: IToken, toAddressObj: ICrossAccountId) { + return await this.helper.nft.unnestToken(signer, {collectionId: this.collectionId, tokenId}, fromTokenObj, toAddressObj); + } + + scheduleAt( + scheduledId: string, + executionBlockNumber: number, + options: ISchedulerOptions = {}, + ) { + const scheduledHelper = this.helper.scheduler.scheduleAt(scheduledId, executionBlockNumber, options); + return new UniqueNFTCollection(this.collectionId, scheduledHelper); + } + + scheduleAfter( + scheduledId: string, + blocksBeforeExecution: number, + options: ISchedulerOptions = {}, + ) { + const scheduledHelper = this.helper.scheduler.scheduleAfter(scheduledId, blocksBeforeExecution, options); + return new UniqueNFTCollection(this.collectionId, scheduledHelper); + } + + getSudo() { + return new UniqueNFTCollection(this.collectionId, this.helper.getSudo()); + } +} + + +export class UniqueRFTCollection extends UniqueBaseCollection { + getTokenObject(tokenId: number) { + return new UniqueRFToken(tokenId, this); + } + + async getToken(tokenId: number, blockHashAt?: string) { + return await this.helper.rft.getToken(this.collectionId, tokenId, [], blockHashAt); + } + + async getTokensByAddress(addressObj: ICrossAccountId) { + return await this.helper.rft.getTokensByAddress(this.collectionId, addressObj); + } + + async getTop10TokenOwners(tokenId: number) { + return await this.helper.rft.getTokenTop10Owners(this.collectionId, tokenId); + } + + async getTokenBalance(tokenId: number, addressObj: ICrossAccountId) { + return await this.helper.rft.getTokenBalance(this.collectionId, tokenId, addressObj); + } + + async getTokenTotalPieces(tokenId: number) { + return await this.helper.rft.getTokenTotalPieces(this.collectionId, tokenId); + } + + async getTokenApprovedPieces(tokenId: number, fromAddressObj: ICrossAccountId, toAddressObj: ICrossAccountId) { + return await this.helper.rft.getTokenApprovedPieces(this.collectionId, tokenId, toAddressObj, fromAddressObj); + } + + async getPropertyPermissions(propertyKeys: string[] | null = null) { + 
return await this.helper.rft.getPropertyPermissions(this.collectionId, propertyKeys); + } + + async getTokenProperties(tokenId: number, propertyKeys?: string[] | null) { + return await this.helper.rft.getTokenProperties(this.collectionId, tokenId, propertyKeys); + } + + async transferToken(signer: TSigner, tokenId: number, addressObj: ICrossAccountId, amount=1n) { + return await this.helper.rft.transferToken(signer, this.collectionId, tokenId, addressObj, amount); + } + + async transferTokenFrom(signer: TSigner, tokenId: number, fromAddressObj: ICrossAccountId, toAddressObj: ICrossAccountId, amount=1n) { + return await this.helper.rft.transferTokenFrom(signer, this.collectionId, tokenId, fromAddressObj, toAddressObj, amount); + } + + async approveToken(signer: TSigner, tokenId: number, toAddressObj: ICrossAccountId, amount=1n) { + return await this.helper.rft.approveToken(signer, this.collectionId, tokenId, toAddressObj, amount); + } + + async repartitionToken(signer: TSigner, tokenId: number, amount: bigint) { + return await this.helper.rft.repartitionToken(signer, this.collectionId, tokenId, amount); + } + + async mintToken(signer: TSigner, pieces=1n, owner: ICrossAccountId = {Substrate: signer.address}, properties?: IProperty[]) { + return await this.helper.rft.mintToken(signer, {collectionId: this.collectionId, owner, pieces, properties}); + } + + async mintMultipleTokens(signer: TSigner, tokens: {pieces: bigint, owner: ICrossAccountId, properties?: IProperty[]}[]) { + return await this.helper.rft.mintMultipleTokens(signer, this.collectionId, tokens); + } + + async burnToken(signer: TSigner, tokenId: number, amount=1n) { + return await this.helper.rft.burnToken(signer, this.collectionId, tokenId, amount); + } + + async burnTokenFrom(signer: TSigner, tokenId: number, fromAddressObj: ICrossAccountId, amount=1n) { + return await this.helper.rft.burnTokenFrom(signer, this.collectionId, tokenId, fromAddressObj, amount); + } + + async setTokenProperties(signer: TSigner, tokenId: number, properties: IProperty[]) { + return await this.helper.rft.setTokenProperties(signer, this.collectionId, tokenId, properties); + } + + async deleteTokenProperties(signer: TSigner, tokenId: number, propertyKeys: string[]) { + return await this.helper.rft.deleteTokenProperties(signer, this.collectionId, tokenId, propertyKeys); + } + + async setTokenPropertyPermissions(signer: TSigner, permissions: ITokenPropertyPermission[]) { + return await this.helper.rft.setTokenPropertyPermissions(signer, this.collectionId, permissions); + } + + scheduleAt( + scheduledId: string, + executionBlockNumber: number, + options: ISchedulerOptions = {}, + ) { + const scheduledHelper = this.helper.scheduler.scheduleAt(scheduledId, executionBlockNumber, options); + return new UniqueRFTCollection(this.collectionId, scheduledHelper); + } + + scheduleAfter( + scheduledId: string, + blocksBeforeExecution: number, + options: ISchedulerOptions = {}, + ) { + const scheduledHelper = this.helper.scheduler.scheduleAfter(scheduledId, blocksBeforeExecution, options); + return new UniqueRFTCollection(this.collectionId, scheduledHelper); + } + + getSudo() { + return new UniqueRFTCollection(this.collectionId, this.helper.getSudo()); + } +} + + +export class UniqueFTCollection extends UniqueBaseCollection { + async getBalance(addressObj: ICrossAccountId) { + return await this.helper.ft.getBalance(this.collectionId, addressObj); + } + + async getTotalPieces() { + return await this.helper.ft.getTotalPieces(this.collectionId); + } + + async 
getApprovedTokens(fromAddressObj: ICrossAccountId, toAddressObj: ICrossAccountId) { + return await this.helper.ft.getApprovedTokens(this.collectionId, fromAddressObj, toAddressObj); + } + + async getTop10Owners() { + return await this.helper.ft.getTop10Owners(this.collectionId); + } + + async mint(signer: TSigner, amount=1n, owner: ICrossAccountId = {Substrate: signer.address}) { + return await this.helper.ft.mintTokens(signer, this.collectionId, amount, owner); + } + + async mintWithOneOwner(signer: TSigner, tokens: {value: bigint}[], owner: ICrossAccountId = {Substrate: signer.address}) { + return await this.helper.ft.mintMultipleTokensWithOneOwner(signer, this.collectionId, tokens, owner); + } + + async transfer(signer: TSigner, toAddressObj: ICrossAccountId, amount=1n) { + return await this.helper.ft.transfer(signer, this.collectionId, toAddressObj, amount); + } + + async transferFrom(signer: TSigner, fromAddressObj: ICrossAccountId, toAddressObj: ICrossAccountId, amount=1n) { + return await this.helper.ft.transferFrom(signer, this.collectionId, fromAddressObj, toAddressObj, amount); + } + + async burnTokens(signer: TSigner, amount=1n) { + return await this.helper.ft.burnTokens(signer, this.collectionId, amount); + } + + async burnTokensFrom(signer: TSigner, fromAddressObj: ICrossAccountId, amount=1n) { + return await this.helper.ft.burnTokensFrom(signer, this.collectionId, fromAddressObj, amount); + } + + async approveTokens(signer: TSigner, toAddressObj: ICrossAccountId, amount=1n) { + return await this.helper.ft.approveTokens(signer, this.collectionId, toAddressObj, amount); + } + + scheduleAt( + scheduledId: string, + executionBlockNumber: number, + options: ISchedulerOptions = {}, + ) { + const scheduledHelper = this.helper.scheduler.scheduleAt(scheduledId, executionBlockNumber, options); + return new UniqueFTCollection(this.collectionId, scheduledHelper); + } + + scheduleAfter( + scheduledId: string, + blocksBeforeExecution: number, + options: ISchedulerOptions = {}, + ) { + const scheduledHelper = this.helper.scheduler.scheduleAfter(scheduledId, blocksBeforeExecution, options); + return new UniqueFTCollection(this.collectionId, scheduledHelper); + } + + getSudo() { + return new UniqueFTCollection(this.collectionId, this.helper.getSudo()); + } +} + + +export class UniqueBaseToken { + collection: UniqueNFTCollection | UniqueRFTCollection; + collectionId: number; + tokenId: number; + + constructor(tokenId: number, collection: UniqueNFTCollection | UniqueRFTCollection) { + this.collection = collection; + this.collectionId = collection.collectionId; + this.tokenId = tokenId; + } + + async getNextSponsored(addressObj: ICrossAccountId) { + return await this.collection.getTokenNextSponsored(this.tokenId, addressObj); + } + + async getProperties(propertyKeys?: string[] | null) { + return await this.collection.getTokenProperties(this.tokenId, propertyKeys); + } + + async setProperties(signer: TSigner, properties: IProperty[]) { + return await this.collection.setTokenProperties(signer, this.tokenId, properties); + } + + async deleteProperties(signer: TSigner, propertyKeys: string[]) { + return await this.collection.deleteTokenProperties(signer, this.tokenId, propertyKeys); + } + + async doesExist() { + return await this.collection.doesTokenExist(this.tokenId); + } + + nestingAccount() { + return this.collection.helper.util.getTokenAccount(this); + } + + scheduleAt( + scheduledId: string, + executionBlockNumber: number, + options: ISchedulerOptions = {}, + ) { + const scheduledCollection = 
this.collection.scheduleAt(scheduledId, executionBlockNumber, options); + return new UniqueBaseToken(this.tokenId, scheduledCollection); + } + + scheduleAfter( + scheduledId: string, + blocksBeforeExecution: number, + options: ISchedulerOptions = {}, + ) { + const scheduledCollection = this.collection.scheduleAfter(scheduledId, blocksBeforeExecution, options); + return new UniqueBaseToken(this.tokenId, scheduledCollection); + } + + getSudo() { + return new UniqueBaseToken(this.tokenId, this.collection.getSudo()); + } +} + + +export class UniqueNFToken extends UniqueBaseToken { + collection: UniqueNFTCollection; + + constructor(tokenId: number, collection: UniqueNFTCollection) { + super(tokenId, collection); + this.collection = collection; + } + + async getData(blockHashAt?: string) { + return await this.collection.getToken(this.tokenId, blockHashAt); + } + + async getOwner(blockHashAt?: string) { + return await this.collection.getTokenOwner(this.tokenId, blockHashAt); + } + + async getTopmostOwner(blockHashAt?: string) { + return await this.collection.getTokenTopmostOwner(this.tokenId, blockHashAt); + } + + async getChildren(blockHashAt?: string) { + return await this.collection.getTokenChildren(this.tokenId, blockHashAt); + } + + async nest(signer: TSigner, toTokenObj: IToken) { + return await this.collection.nestToken(signer, this.tokenId, toTokenObj); + } + + async unnest(signer: TSigner, fromTokenObj: IToken, toAddressObj: ICrossAccountId) { + return await this.collection.unnestToken(signer, this.tokenId, fromTokenObj, toAddressObj); + } + + async transfer(signer: TSigner, addressObj: ICrossAccountId) { + return await this.collection.transferToken(signer, this.tokenId, addressObj); + } + + async transferFrom(signer: TSigner, fromAddressObj: ICrossAccountId, toAddressObj: ICrossAccountId) { + return await this.collection.transferTokenFrom(signer, this.tokenId, fromAddressObj, toAddressObj); + } + + async approve(signer: TSigner, toAddressObj: ICrossAccountId) { + return await this.collection.approveToken(signer, this.tokenId, toAddressObj); + } + + async isApproved(toAddressObj: ICrossAccountId) { + return await this.collection.isTokenApproved(this.tokenId, toAddressObj); + } + + async burn(signer: TSigner) { + return await this.collection.burnToken(signer, this.tokenId); + } + + async burnFrom(signer: TSigner, fromAddressObj: ICrossAccountId) { + return await this.collection.burnTokenFrom(signer, this.tokenId, fromAddressObj); + } + + scheduleAt( + scheduledId: string, + executionBlockNumber: number, + options: ISchedulerOptions = {}, + ) { + const scheduledCollection = this.collection.scheduleAt(scheduledId, executionBlockNumber, options); + return new UniqueNFToken(this.tokenId, scheduledCollection); + } + + scheduleAfter( + scheduledId: string, + blocksBeforeExecution: number, + options: ISchedulerOptions = {}, + ) { + const scheduledCollection = this.collection.scheduleAfter(scheduledId, blocksBeforeExecution, options); + return new UniqueNFToken(this.tokenId, scheduledCollection); + } + + getSudo() { + return new UniqueNFToken(this.tokenId, this.collection.getSudo()); + } +} + +export class UniqueRFToken extends UniqueBaseToken { + collection: UniqueRFTCollection; + + constructor(tokenId: number, collection: UniqueRFTCollection) { + super(tokenId, collection); + this.collection = collection; + } + + async getData(blockHashAt?: string) { + return await this.collection.getToken(this.tokenId, blockHashAt); + } + + async getTop10Owners() { + return await 
this.collection.getTop10TokenOwners(this.tokenId); + } + + async getBalance(addressObj: ICrossAccountId) { + return await this.collection.getTokenBalance(this.tokenId, addressObj); + } + + async getTotalPieces() { + return await this.collection.getTokenTotalPieces(this.tokenId); + } + + async getApprovedPieces(fromAddressObj: ICrossAccountId, toAccountObj: ICrossAccountId) { + return await this.collection.getTokenApprovedPieces(this.tokenId, fromAddressObj, toAccountObj); + } + + async transfer(signer: TSigner, addressObj: ICrossAccountId, amount=1n) { + return await this.collection.transferToken(signer, this.tokenId, addressObj, amount); + } + + async transferFrom(signer: TSigner, fromAddressObj: ICrossAccountId, toAddressObj: ICrossAccountId, amount=1n) { + return await this.collection.transferTokenFrom(signer, this.tokenId, fromAddressObj, toAddressObj, amount); + } + + async approve(signer: TSigner, toAddressObj: ICrossAccountId, amount=1n) { + return await this.collection.approveToken(signer, this.tokenId, toAddressObj, amount); + } + + async repartition(signer: TSigner, amount: bigint) { + return await this.collection.repartitionToken(signer, this.tokenId, amount); + } + + async burn(signer: TSigner, amount=1n) { + return await this.collection.burnToken(signer, this.tokenId, amount); + } + + async burnFrom(signer: TSigner, fromAddressObj: ICrossAccountId, amount=1n) { + return await this.collection.burnTokenFrom(signer, this.tokenId, fromAddressObj, amount); + } + + scheduleAt( + scheduledId: string, + executionBlockNumber: number, + options: ISchedulerOptions = {}, + ) { + const scheduledCollection = this.collection.scheduleAt(scheduledId, executionBlockNumber, options); + return new UniqueRFToken(this.tokenId, scheduledCollection); + } + + scheduleAfter( + scheduledId: string, + blocksBeforeExecution: number, + options: ISchedulerOptions = {}, + ) { + const scheduledCollection = this.collection.scheduleAfter(scheduledId, blocksBeforeExecution, options); + return new UniqueRFToken(this.tokenId, scheduledCollection); + } + + getSudo() { + return new UniqueRFToken(this.tokenId, this.collection.getSudo()); + } +} diff --git a/tests/src/util/util.ts b/tests/src/util/util.ts deleted file mode 100644 index cb644a9333..0000000000 --- a/tests/src/util/util.ts +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. -// This file is part of Unique Network. - -// Unique Network is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Unique Network is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Unique Network. If not, see . 
- -export function strToUTF16(str: string): any { - const buf: number[] = []; - for (let i=0, strLen=str.length; i < strLen; i++) { - buf.push(str.charCodeAt(i)); - } - return buf; -} - -export function utf16ToStr(buf: number[]): string { - let str = ''; - for (let i=0, strLen=buf.length; i < strLen; i++) { - if (buf[i] != 0) str += String.fromCharCode(buf[i]); - else break; - } - return str; -} - -export function hexToStr(buf: string): string { - let str = ''; - let hexStart = buf.indexOf('0x'); - if (hexStart < 0) hexStart = 0; - else hexStart = 2; - for (let i=hexStart, strLen=buf.length; i < strLen; i+=2) { - const ch = buf[i] + buf[i+1]; - const num = parseInt(ch, 16); - if (num != 0) str += String.fromCharCode(num); - else break; - } - return str; -} diff --git a/tests/src/xcm/xcmOpal.test.ts b/tests/src/xcm/xcmOpal.test.ts new file mode 100644 index 0000000000..5ba3c4c44c --- /dev/null +++ b/tests/src/xcm/xcmOpal.test.ts @@ -0,0 +1,400 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +import {IKeyringPair} from '@polkadot/types/types'; +import config from '../config'; +import {itSub, expect, describeXCM, usingPlaygrounds, usingWestmintPlaygrounds, usingRelayPlaygrounds} from '../util'; + +const STATEMINE_CHAIN = 1000; +const UNIQUE_CHAIN = 2095; + +const relayUrl = config.relayUrl; +const westmintUrl = config.westmintUrl; + +const STATEMINE_PALLET_INSTANCE = 50; +const ASSET_ID = 100; +const ASSET_METADATA_DECIMALS = 18; +const ASSET_METADATA_NAME = 'USDT'; +const ASSET_METADATA_DESCRIPTION = 'USDT'; +const ASSET_METADATA_MINIMAL_BALANCE = 1n; + +const WESTMINT_DECIMALS = 12; + +const TRANSFER_AMOUNT = 1_000_000_000_000_000_000n; + +// 10,000.00 (ten thousands) USDT +const ASSET_AMOUNT = 1_000_000_000_000_000_000_000n; + +describeXCM('[XCM] Integration test: Exchanging USDT with Westmint', () => { + let alice: IKeyringPair; + let bob: IKeyringPair; + + let balanceStmnBefore: bigint; + let balanceStmnAfter: bigint; + + let balanceOpalBefore: bigint; + let balanceOpalAfter: bigint; + let balanceOpalFinal: bigint; + + let balanceBobBefore: bigint; + let balanceBobAfter: bigint; + let balanceBobFinal: bigint; + + let balanceBobRelayTokenBefore: bigint; + let balanceBobRelayTokenAfter: bigint; + + + before(async () => { + await usingPlaygrounds(async (_helper, privateKey) => { + alice = await privateKey('//Alice'); + bob = await privateKey('//Bob'); // funds donor + }); + + await usingWestmintPlaygrounds(westmintUrl, async (helper) => { + // 350.00 (three hundred fifty) DOT + const fundingAmount = 3_500_000_000_000n; + + await helper.assets.create(alice, ASSET_ID, alice.address, ASSET_METADATA_MINIMAL_BALANCE); + await helper.assets.setMetadata(alice, ASSET_ID, ASSET_METADATA_NAME, ASSET_METADATA_DESCRIPTION, ASSET_METADATA_DECIMALS); + await helper.assets.mint(alice, ASSET_ID, alice.address, ASSET_AMOUNT); + + // 
funding parachain sovereing account (Parachain: 2095) + const parachainSovereingAccount = helper.address.paraSiblingSovereignAccount(UNIQUE_CHAIN); + await helper.balance.transferToSubstrate(bob, parachainSovereingAccount, fundingAmount); + }); + + + await usingPlaygrounds(async (helper) => { + const location = { + V1: { + parents: 1, + interior: {X3: [ + { + Parachain: STATEMINE_CHAIN, + }, + { + PalletInstance: STATEMINE_PALLET_INSTANCE, + }, + { + GeneralIndex: ASSET_ID, + }, + ]}, + }, + }; + + const metadata = + { + name: ASSET_ID, + symbol: ASSET_METADATA_NAME, + decimals: ASSET_METADATA_DECIMALS, + minimalBalance: ASSET_METADATA_MINIMAL_BALANCE, + }; + await helper.getSudo().foreignAssets.register(alice, alice.address, location, metadata); + balanceOpalBefore = await helper.balance.getSubstrate(alice.address); + }); + + + // Providing the relay currency to the unique sender account + await usingRelayPlaygrounds(relayUrl, async (helper) => { + const destination = { + V1: { + parents: 0, + interior: {X1: { + Parachain: UNIQUE_CHAIN, + }, + }, + }}; + + const beneficiary = { + V1: { + parents: 0, + interior: {X1: { + AccountId32: { + network: 'Any', + id: alice.addressRaw, + }, + }}, + }, + }; + + const assets = { + V1: [ + { + id: { + Concrete: { + parents: 0, + interior: 'Here', + }, + }, + fun: { + Fungible: 50_000_000_000_000_000n, + }, + }, + ], + }; + + const feeAssetItem = 0; + const weightLimit = 5_000_000_000; + + await helper.xcm.limitedReserveTransferAssets(alice, destination, beneficiary, assets, feeAssetItem, weightLimit); + }); + + }); + + itSub('Should connect and send USDT from Westmint to Opal', async ({helper}) => { + await usingWestmintPlaygrounds(westmintUrl, async (helper) => { + const dest = { + V1: { + parents: 1, + interior: {X1: { + Parachain: UNIQUE_CHAIN, + }, + }, + }}; + + const beneficiary = { + V1: { + parents: 0, + interior: {X1: { + AccountId32: { + network: 'Any', + id: alice.addressRaw, + }, + }}, + }, + }; + + const assets = { + V1: [ + { + id: { + Concrete: { + parents: 0, + interior: { + X2: [ + { + PalletInstance: STATEMINE_PALLET_INSTANCE, + }, + { + GeneralIndex: ASSET_ID, + }, + ]}, + }, + }, + fun: { + Fungible: TRANSFER_AMOUNT, + }, + }, + ], + }; + + const feeAssetItem = 0; + const weightLimit = 5000000000; + + balanceStmnBefore = await helper.balance.getSubstrate(alice.address); + await helper.xcm.limitedReserveTransferAssets(alice, dest, beneficiary, assets, feeAssetItem, weightLimit); + + balanceStmnAfter = await helper.balance.getSubstrate(alice.address); + + // common good parachain take commission in it native token + console.log( + 'Opal to Westmint transaction fees on Westmint: %s WND', + helper.util.bigIntToDecimals(balanceStmnBefore - balanceStmnAfter, WESTMINT_DECIMALS), + ); + expect(balanceStmnBefore > balanceStmnAfter).to.be.true; + + }); + + + // ensure that asset has been delivered + await helper.wait.newBlocks(3); + + // expext collection id will be with id 1 + const free = await helper.ft.getBalance(1, {Substrate: alice.address}); + + balanceOpalAfter = await helper.balance.getSubstrate(alice.address); + + // commission has not paid in USDT token + expect(free == TRANSFER_AMOUNT).to.be.true; + console.log( + 'Opal to Westmint transaction fees on Opal: %s USDT', + helper.util.bigIntToDecimals(TRANSFER_AMOUNT - free), + ); + // ... 
and parachain native token + expect(balanceOpalAfter == balanceOpalBefore).to.be.true; + console.log( + 'Opal to Westmint transaction fees on Opal: %s WND', + helper.util.bigIntToDecimals(balanceOpalAfter - balanceOpalBefore, WESTMINT_DECIMALS), + ); + }); + + itSub('Should connect and send USDT from Unique to Statemine back', async ({helper}) => { + const destination = { + V1: { + parents: 1, + interior: {X2: [ + { + Parachain: STATEMINE_CHAIN, + }, + { + AccountId32: { + network: 'Any', + id: alice.addressRaw, + }, + }, + ]}, + }, + }; + + const currencies: [any, bigint][] = [ + [ + { + ForeignAssetId: 0, + }, + //10_000_000_000_000_000n, + TRANSFER_AMOUNT, + ], + [ + { + NativeAssetId: 'Parent', + }, + 400_000_000_000_000n, + ], + ]; + + const feeItem = 1; + const destWeight = 500000000000; + + await helper.xTokens.transferMulticurrencies(alice, currencies, feeItem, destination, destWeight); + + // the commission has been paid in parachain native token + balanceOpalFinal = await helper.balance.getSubstrate(alice.address); + expect(balanceOpalAfter > balanceOpalFinal).to.be.true; + + await usingWestmintPlaygrounds(westmintUrl, async (helper) => { + await helper.wait.newBlocks(3); + + // The USDT token never paid fees. Its amount not changed from begin value. + // Also check that xcm transfer has been succeeded + expect((await helper.assets.account(ASSET_ID, alice.address))! == ASSET_AMOUNT).to.be.true; + }); + }); + + itSub('Should connect and send Relay token to Unique', async ({helper}) => { + const TRANSFER_AMOUNT_RELAY = 50_000_000_000_000_000n; + + balanceBobBefore = await helper.balance.getSubstrate(bob.address); + balanceBobRelayTokenBefore = await helper.tokens.accounts(bob.address, {NativeAssetId: 'Parent'}); + + // Providing the relay currency to the unique sender account + await usingRelayPlaygrounds(relayUrl, async (helper) => { + const destination = { + V1: { + parents: 0, + interior: {X1: { + Parachain: UNIQUE_CHAIN, + }, + }, + }}; + + const beneficiary = { + V1: { + parents: 0, + interior: {X1: { + AccountId32: { + network: 'Any', + id: bob.addressRaw, + }, + }}, + }, + }; + + const assets = { + V1: [ + { + id: { + Concrete: { + parents: 0, + interior: 'Here', + }, + }, + fun: { + Fungible: TRANSFER_AMOUNT_RELAY, + }, + }, + ], + }; + + const feeAssetItem = 0; + const weightLimit = 5_000_000_000; + + await helper.xcm.limitedReserveTransferAssets(bob, destination, beneficiary, assets, feeAssetItem, weightLimit); + }); + + await helper.wait.newBlocks(3); + + balanceBobAfter = await helper.balance.getSubstrate(bob.address); + balanceBobRelayTokenAfter = await helper.tokens.accounts(bob.address, {NativeAssetId: 'Parent'}); + + const wndFee = balanceBobRelayTokenAfter - TRANSFER_AMOUNT_RELAY - balanceBobRelayTokenBefore; + console.log( + 'Relay (Westend) to Opal transaction fees: %s OPL', + helper.util.bigIntToDecimals(balanceBobAfter - balanceBobBefore), + ); + console.log( + 'Relay (Westend) to Opal transaction fees: %s WND', + helper.util.bigIntToDecimals(wndFee, WESTMINT_DECIMALS), + ); + expect(balanceBobBefore == balanceBobAfter).to.be.true; + expect(balanceBobRelayTokenBefore < balanceBobRelayTokenAfter).to.be.true; + }); + + itSub('Should connect and send Relay token back', async ({helper}) => { + const destination = { + V1: { + parents: 1, + interior: {X2: [ + { + Parachain: STATEMINE_CHAIN, + }, + { + AccountId32: { + network: 'Any', + id: bob.addressRaw, + }, + }, + ]}, + }, + }; + + const currencies: any = [ + [ + { + NativeAssetId: 'Parent', + }, + 
50_000_000_000_000_000n, + ], + ]; + + const feeItem = 0; + const destWeight = 500000000000; + + await helper.xTokens.transferMulticurrencies(bob, currencies, feeItem, destination, destWeight); + + balanceBobFinal = await helper.balance.getSubstrate(bob.address); + console.log('Relay (Westend) to Opal transaction fees: %s OPL', balanceBobAfter - balanceBobFinal); + }); +}); diff --git a/tests/src/xcm/xcmQuartz.test.ts b/tests/src/xcm/xcmQuartz.test.ts new file mode 100644 index 0000000000..eeb8291251 --- /dev/null +++ b/tests/src/xcm/xcmQuartz.test.ts @@ -0,0 +1,595 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . + +import {IKeyringPair} from '@polkadot/types/types'; +import {blake2AsHex} from '@polkadot/util-crypto'; +import config from '../config'; +import {XcmV2TraitsOutcome, XcmV2TraitsError} from '../interfaces'; +import {itSub, expect, describeXCM, usingPlaygrounds, usingKaruraPlaygrounds, usingRelayPlaygrounds, usingMoonriverPlaygrounds} from '../util'; + +const QUARTZ_CHAIN = 2095; +const KARURA_CHAIN = 2000; +const MOONRIVER_CHAIN = 2023; + +const relayUrl = config.relayUrl; +const karuraUrl = config.karuraUrl; +const moonriverUrl = config.moonriverUrl; + +const KARURA_DECIMALS = 12; + +const TRANSFER_AMOUNT = 2000000000000000000000000n; + +describeXCM('[XCM] Integration test: Exchanging tokens with Karura', () => { + let alice: IKeyringPair; + let randomAccount: IKeyringPair; + + let balanceQuartzTokenInit: bigint; + let balanceQuartzTokenMiddle: bigint; + let balanceQuartzTokenFinal: bigint; + let balanceKaruraTokenInit: bigint; + let balanceKaruraTokenMiddle: bigint; + let balanceKaruraTokenFinal: bigint; + let balanceQuartzForeignTokenInit: bigint; + let balanceQuartzForeignTokenMiddle: bigint; + let balanceQuartzForeignTokenFinal: bigint; + + before(async () => { + await usingPlaygrounds(async (helper, privateKey) => { + alice = await privateKey('//Alice'); + [randomAccount] = await helper.arrange.createAccounts([0n], alice); + }); + + await usingKaruraPlaygrounds(karuraUrl, async (helper) => { + const destination = { + V0: { + X2: [ + 'Parent', + { + Parachain: QUARTZ_CHAIN, + }, + ], + }, + }; + + const metadata = { + name: 'QTZ', + symbol: 'QTZ', + decimals: 18, + minimalBalance: 1n, + }; + + await helper.getSudo().assetRegistry.registerForeignAsset(alice, destination, metadata); + await helper.balance.transferToSubstrate(alice, randomAccount.address, 10000000000000n); + balanceKaruraTokenInit = await helper.balance.getSubstrate(randomAccount.address); + balanceQuartzForeignTokenInit = await helper.tokens.accounts(randomAccount.address, {ForeignAsset: 0}); + }); + + await usingPlaygrounds(async (helper) => { + await helper.balance.transferToSubstrate(alice, randomAccount.address, 10n * TRANSFER_AMOUNT); + balanceQuartzTokenInit = await helper.balance.getSubstrate(randomAccount.address); + }); + }); + + 
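// Reserve-transfer QTZ from Quartz to Karura: this leg is expected to pay fees in QTZ on the Quartz side, + // while Karura credits the full TRANSFER_AMOUNT as {ForeignAsset: 0} and charges no KAR fee. + 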
itSub('Should connect and send QTZ to Karura', async ({helper}) => { + const destination = { + V0: { + X2: [ + 'Parent', + { + Parachain: KARURA_CHAIN, + }, + ], + }, + }; + + const beneficiary = { + V0: { + X1: { + AccountId32: { + network: 'Any', + id: randomAccount.addressRaw, + }, + }, + }, + }; + + const assets = { + V1: [ + { + id: { + Concrete: { + parents: 0, + interior: 'Here', + }, + }, + fun: { + Fungible: TRANSFER_AMOUNT, + }, + }, + ], + }; + + const feeAssetItem = 0; + const weightLimit = 5000000000; + + await helper.xcm.limitedReserveTransferAssets(randomAccount, destination, beneficiary, assets, feeAssetItem, weightLimit); + balanceQuartzTokenMiddle = await helper.balance.getSubstrate(randomAccount.address); + + const qtzFees = balanceQuartzTokenInit - balanceQuartzTokenMiddle - TRANSFER_AMOUNT; + console.log('[Quartz -> Karura] transaction fees on Quartz: %s QTZ', helper.util.bigIntToDecimals(qtzFees)); + expect(qtzFees > 0n).to.be.true; + + await usingKaruraPlaygrounds(karuraUrl, async (helper) => { + await helper.wait.newBlocks(3); + balanceQuartzForeignTokenMiddle = await helper.tokens.accounts(randomAccount.address, {ForeignAsset: 0}); + balanceKaruraTokenMiddle = await helper.balance.getSubstrate(randomAccount.address); + + const karFees = balanceKaruraTokenInit - balanceKaruraTokenMiddle; + const qtzIncomeTransfer = balanceQuartzForeignTokenMiddle - balanceQuartzForeignTokenInit; + + console.log( + '[Quartz -> Karura] transaction fees on Karura: %s KAR', + helper.util.bigIntToDecimals(karFees, KARURA_DECIMALS), + ); + console.log('[Quartz -> Karura] income %s QTZ', helper.util.bigIntToDecimals(qtzIncomeTransfer)); + expect(karFees == 0n).to.be.true; + expect(qtzIncomeTransfer == TRANSFER_AMOUNT).to.be.true; + }); + }); + + itSub('Should connect to Karura and send QTZ back', async ({helper}) => { + await usingKaruraPlaygrounds(karuraUrl, async (helper) => { + const destination = { + V1: { + parents: 1, + interior: { + X2: [ + {Parachain: QUARTZ_CHAIN}, + { + AccountId32: { + network: 'Any', + id: randomAccount.addressRaw, + }, + }, + ], + }, + }, + }; + + const id = { + ForeignAsset: 0, + }; + + const destWeight = 50000000; + + await helper.xTokens.transfer(randomAccount, id, TRANSFER_AMOUNT, destination, destWeight); + balanceKaruraTokenFinal = await helper.balance.getSubstrate(randomAccount.address); + balanceQuartzForeignTokenFinal = await helper.tokens.accounts(randomAccount.address, id); + + const karFees = balanceKaruraTokenMiddle - balanceKaruraTokenFinal; + const qtzOutcomeTransfer = balanceQuartzForeignTokenMiddle - balanceQuartzForeignTokenFinal; + + console.log( + '[Karura -> Quartz] transaction fees on Karura: %s KAR', + helper.util.bigIntToDecimals(karFees, KARURA_DECIMALS), + ); + console.log('[Karura -> Quartz] outcome %s QTZ', helper.util.bigIntToDecimals(qtzOutcomeTransfer)); + + expect(karFees > 0).to.be.true; + expect(qtzOutcomeTransfer == TRANSFER_AMOUNT).to.be.true; + }); + + await helper.wait.newBlocks(3); + + balanceQuartzTokenFinal = await helper.balance.getSubstrate(randomAccount.address); + const actuallyDelivered = balanceQuartzTokenFinal - balanceQuartzTokenMiddle; + expect(actuallyDelivered > 0).to.be.true; + + console.log('[Karura -> Quartz] actually delivered %s QTZ', helper.util.bigIntToDecimals(actuallyDelivered)); + + const qtzFees = TRANSFER_AMOUNT - actuallyDelivered; + console.log('[Karura -> Quartz] transaction fees on Quartz: %s QTZ', helper.util.bigIntToDecimals(qtzFees)); + expect(qtzFees == 0n).to.be.true; + }); +}); + +// 
These tests are relevant only when the foreign asset pallet is disabled +describeXCM('[XCM] Integration test: Quartz rejects non-native tokens', () => { + let alice: IKeyringPair; + + before(async () => { + await usingPlaygrounds(async (_helper, privateKey) => { + alice = await privateKey('//Alice'); + }); + }); + + itSub('Quartz rejects tokens from the Relay', async ({helper}) => { + await usingRelayPlaygrounds(relayUrl, async (helper) => { + const destination = { + V1: { + parents: 0, + interior: {X1: { + Parachain: QUARTZ_CHAIN, + }, + }, + }}; + + const beneficiary = { + V1: { + parents: 0, + interior: {X1: { + AccountId32: { + network: 'Any', + id: alice.addressRaw, + }, + }}, + }, + }; + + const assets = { + V1: [ + { + id: { + Concrete: { + parents: 0, + interior: 'Here', + }, + }, + fun: { + Fungible: 50_000_000_000_000_000n, + }, + }, + ], + }; + + const feeAssetItem = 0; + const weightLimit = 5_000_000_000; + + await helper.xcm.limitedReserveTransferAssets(alice, destination, beneficiary, assets, feeAssetItem, weightLimit); + }); + + const maxWaitBlocks = 3; + + const dmpQueueExecutedDownward = await helper.wait.event(maxWaitBlocks, 'dmpQueue', 'ExecutedDownward'); + + expect( + dmpQueueExecutedDownward != null, + '[Relay] dmpQueue.ExecutedDownward event is expected', + ).to.be.true; + + const event = dmpQueueExecutedDownward!.event; + const outcome = event.data[1] as XcmV2TraitsOutcome; + + expect( + outcome.isIncomplete, + '[Relay] The outcome of the XCM should be `Incomplete`', + ).to.be.true; + + const incomplete = outcome.asIncomplete; + expect( + incomplete[1].toString() == 'AssetNotFound', + '[Relay] The XCM error should be `AssetNotFound`', + ).to.be.true; + }); + + itSub('Quartz rejects KAR tokens from Karura', async ({helper}) => { + await usingKaruraPlaygrounds(karuraUrl, async (helper) => { + const destination = { + V1: { + parents: 1, + interior: { + X2: [ + {Parachain: QUARTZ_CHAIN}, + { + AccountId32: { + network: 'Any', + id: alice.addressRaw, + }, + }, + ], + }, + }, + }; + + const id = { + Token: 'KAR', + }; + + const destWeight = 50000000; + + await helper.xTokens.transfer(alice, id, 100_000_000_000n, destination, destWeight); + }); + + const maxWaitBlocks = 3; + + const xcmpQueueFailEvent = await helper.wait.event(maxWaitBlocks, 'xcmpQueue', 'Fail'); + + expect( + xcmpQueueFailEvent != null, + '[Karura] xcmpQueue.FailEvent event is expected', + ).to.be.true; + + const event = xcmpQueueFailEvent!.event; + const outcome = event.data[1] as XcmV2TraitsError; + + expect( + outcome.isUntrustedReserveLocation, + '[Karura] The XCM error should be `UntrustedReserveLocation`', + ).to.be.true; + }); +}); + +describeXCM('[XCM] Integration test: Exchanging QTZ with Moonriver', () => { + // Quartz constants + let quartzDonor: IKeyringPair; + let quartzAssetLocation; + + let randomAccountQuartz: IKeyringPair; + let randomAccountMoonriver: IKeyringPair; + + // Moonriver constants + let assetId: string; + + const councilVotingThreshold = 2; + const technicalCommitteeThreshold = 2; + const votingPeriod = 3; + const delayPeriod = 0; + + const quartzAssetMetadata = { + name: 'xcQuartz', + symbol: 'xcQTZ', + decimals: 18, + isFrozen: false, + minimalBalance: 1n, + }; + + let balanceQuartzTokenInit: bigint; + let balanceQuartzTokenMiddle: bigint; + let balanceQuartzTokenFinal: bigint; + let balanceForeignQtzTokenInit: bigint; + let balanceForeignQtzTokenMiddle: bigint; + let balanceForeignQtzTokenFinal: bigint; + let balanceMovrTokenInit: bigint; + let balanceMovrTokenMiddle: 
bigint; + let balanceMovrTokenFinal: bigint; + + before(async () => { + await usingPlaygrounds(async (helper, privateKey) => { + quartzDonor = await privateKey('//Alice'); + [randomAccountQuartz] = await helper.arrange.createAccounts([0n], quartzDonor); + + balanceForeignQtzTokenInit = 0n; + }); + + await usingMoonriverPlaygrounds(moonriverUrl, async (helper) => { + const alithAccount = helper.account.alithAccount(); + const baltatharAccount = helper.account.baltatharAccount(); + const dorothyAccount = helper.account.dorothyAccount(); + + randomAccountMoonriver = helper.account.create(); + + // >>> Sponsoring Dorothy >>> + console.log('Sponsoring Dorothy.......'); + await helper.balance.transferToEthereum(alithAccount, dorothyAccount.address, 11_000_000_000_000_000_000n); + console.log('Sponsoring Dorothy.......DONE'); + // <<< Sponsoring Dorothy <<< + + quartzAssetLocation = { + XCM: { + parents: 1, + interior: {X1: {Parachain: QUARTZ_CHAIN}}, + }, + }; + const existentialDeposit = 1n; + const isSufficient = true; + const unitsPerSecond = 1n; + const numAssetsWeightHint = 0; + + const encodedProposal = helper.assetManager.makeRegisterForeignAssetProposal({ + location: quartzAssetLocation, + metadata: quartzAssetMetadata, + existentialDeposit, + isSufficient, + unitsPerSecond, + numAssetsWeightHint, + }); + const proposalHash = blake2AsHex(encodedProposal); + + console.log('Encoded proposal for registerForeignAsset & setAssetUnitsPerSecond is %s', encodedProposal); + console.log('Encoded length %d', encodedProposal.length); + console.log('Encoded proposal hash for batch utility after schedule is %s', proposalHash); + + // >>> Note motion preimage >>> + console.log('Note motion preimage.......'); + await helper.democracy.notePreimage(baltatharAccount, encodedProposal); + console.log('Note motion preimage.......DONE'); + // <<< Note motion preimage <<< + + // >>> Propose external motion through council >>> + console.log('Propose external motion through council.......'); + const externalMotion = helper.democracy.externalProposeMajority(proposalHash); + const encodedMotion = externalMotion?.method.toHex() || ''; + const motionHash = blake2AsHex(encodedMotion); + console.log('Motion hash is %s', motionHash); + + await helper.collective.council.propose(baltatharAccount, councilVotingThreshold, externalMotion, externalMotion.encodedLength); + + const councilProposalIdx = await helper.collective.council.proposalCount() - 1; + await helper.collective.council.vote(dorothyAccount, motionHash, councilProposalIdx, true); + await helper.collective.council.vote(baltatharAccount, motionHash, councilProposalIdx, true); + + await helper.collective.council.close(dorothyAccount, motionHash, councilProposalIdx, 1_000_000_000, externalMotion.encodedLength); + console.log('Propose external motion through council.......DONE'); + // <<< Propose external motion through council <<< + + // >>> Fast track proposal through technical committee >>> + console.log('Fast track proposal through technical committee.......'); + const fastTrack = helper.democracy.fastTrack(proposalHash, votingPeriod, delayPeriod); + const encodedFastTrack = fastTrack?.method.toHex() || ''; + const fastTrackHash = blake2AsHex(encodedFastTrack); + console.log('FastTrack hash is %s', fastTrackHash); + + await helper.collective.techCommittee.propose(alithAccount, technicalCommitteeThreshold, fastTrack, fastTrack.encodedLength); + + const techProposalIdx = await helper.collective.techCommittee.proposalCount() - 1; + await 
helper.collective.techCommittee.vote(baltatharAccount, fastTrackHash, techProposalIdx, true); + await helper.collective.techCommittee.vote(alithAccount, fastTrackHash, techProposalIdx, true); + + await helper.collective.techCommittee.close(baltatharAccount, fastTrackHash, techProposalIdx, 1_000_000_000, fastTrack.encodedLength); + console.log('Fast track proposal through technical committee.......DONE'); + // <<< Fast track proposal through technical committee <<< + + // >>> Referendum voting >>> + console.log('Referendum voting.......'); + await helper.democracy.referendumVote(dorothyAccount, 0, { + balance: 10_000_000_000_000_000_000n, + vote: {aye: true, conviction: 1}, + }); + console.log('Referendum voting.......DONE'); + // <<< Referendum voting <<< + + // >>> Acquire Quartz AssetId Info on Moonriver >>> + console.log('Acquire Quartz AssetId Info on Moonriver.......'); + + // Wait for the democracy execute + await helper.wait.newBlocks(5); + + assetId = (await helper.assetManager.assetTypeId(quartzAssetLocation)).toString(); + + console.log('QTZ asset ID is %s', assetId); + console.log('Acquire Quartz AssetId Info on Moonriver.......DONE'); + // >>> Acquire Quartz AssetId Info on Moonriver >>> + + // >>> Sponsoring random Account >>> + console.log('Sponsoring random Account.......'); + await helper.balance.transferToEthereum(baltatharAccount, randomAccountMoonriver.address, 11_000_000_000_000_000_000n); + console.log('Sponsoring random Account.......DONE'); + // <<< Sponsoring random Account <<< + + balanceMovrTokenInit = await helper.balance.getEthereum(randomAccountMoonriver.address); + }); + + await usingPlaygrounds(async (helper) => { + await helper.balance.transferToSubstrate(quartzDonor, randomAccountQuartz.address, 10n * TRANSFER_AMOUNT); + balanceQuartzTokenInit = await helper.balance.getSubstrate(randomAccountQuartz.address); + }); + }); + + itSub('Should connect and send QTZ to Moonriver', async ({helper}) => { + const currencyId = { + NativeAssetId: 'Here', + }; + const dest = { + V1: { + parents: 1, + interior: { + X2: [ + {Parachain: MOONRIVER_CHAIN}, + {AccountKey20: {network: 'Any', key: randomAccountMoonriver.address}}, + ], + }, + }, + }; + const amount = TRANSFER_AMOUNT; + const destWeight = 850000000; + + await helper.xTokens.transfer(randomAccountQuartz, currencyId, amount, dest, destWeight); + + balanceQuartzTokenMiddle = await helper.balance.getSubstrate(randomAccountQuartz.address); + expect(balanceQuartzTokenMiddle < balanceQuartzTokenInit).to.be.true; + + const transactionFees = balanceQuartzTokenInit - balanceQuartzTokenMiddle - TRANSFER_AMOUNT; + console.log('[Quartz -> Moonriver] transaction fees on Quartz: %s QTZ', helper.util.bigIntToDecimals(transactionFees)); + expect(transactionFees > 0).to.be.true; + + await usingMoonriverPlaygrounds(moonriverUrl, async (helper) => { + await helper.wait.newBlocks(3); + + balanceMovrTokenMiddle = await helper.balance.getEthereum(randomAccountMoonriver.address); + + const movrFees = balanceMovrTokenInit - balanceMovrTokenMiddle; + console.log('[Quartz -> Moonriver] transaction fees on Moonriver: %s MOVR',helper.util.bigIntToDecimals(movrFees)); + expect(movrFees == 0n).to.be.true; + + balanceForeignQtzTokenMiddle = (await helper.assets.account(assetId, randomAccountMoonriver.address))!; // BigInt(qtzRandomAccountAsset['balance']); + const qtzIncomeTransfer = balanceForeignQtzTokenMiddle - balanceForeignQtzTokenInit; + console.log('[Quartz -> Moonriver] income %s QTZ', helper.util.bigIntToDecimals(qtzIncomeTransfer)); + 
expect(qtzIncomeTransfer == TRANSFER_AMOUNT).to.be.true; + }); + }); + + itSub('Should connect to Moonriver and send QTZ back', async ({helper}) => { + await usingMoonriverPlaygrounds(moonriverUrl, async (helper) => { + const asset = { + V1: { + id: { + Concrete: { + parents: 1, + interior: { + X1: {Parachain: QUARTZ_CHAIN}, + }, + }, + }, + fun: { + Fungible: TRANSFER_AMOUNT, + }, + }, + }; + const destination = { + V1: { + parents: 1, + interior: { + X2: [ + {Parachain: QUARTZ_CHAIN}, + {AccountId32: {network: 'Any', id: randomAccountQuartz.addressRaw}}, + ], + }, + }, + }; + const destWeight = 50000000; + + await helper.xTokens.transferMultiasset(randomAccountMoonriver, asset, destination, destWeight); + + balanceMovrTokenFinal = await helper.balance.getEthereum(randomAccountMoonriver.address); + + const movrFees = balanceMovrTokenMiddle - balanceMovrTokenFinal; + console.log('[Moonriver -> Quartz] transaction fees on Moonriver: %s MOVR', helper.util.bigIntToDecimals(movrFees)); + expect(movrFees > 0).to.be.true; + + const qtzRandomAccountAsset = await helper.assets.account(assetId, randomAccountMoonriver.address); + + expect(qtzRandomAccountAsset).to.be.null; + + balanceForeignQtzTokenFinal = 0n; + + const qtzOutcomeTransfer = balanceForeignQtzTokenMiddle - balanceForeignQtzTokenFinal; + console.log('[Quartz -> Moonriver] outcome %s QTZ', helper.util.bigIntToDecimals(qtzOutcomeTransfer)); + expect(qtzOutcomeTransfer == TRANSFER_AMOUNT).to.be.true; + }); + + await helper.wait.newBlocks(3); + + balanceQuartzTokenFinal = await helper.balance.getSubstrate(randomAccountQuartz.address); + const actuallyDelivered = balanceQuartzTokenFinal - balanceQuartzTokenMiddle; + expect(actuallyDelivered > 0).to.be.true; + + console.log('[Moonriver -> Quartz] actually delivered %s QTZ', helper.util.bigIntToDecimals(actuallyDelivered)); + + const qtzFees = TRANSFER_AMOUNT - actuallyDelivered; + console.log('[Moonriver -> Quartz] transaction fees on Quartz: %s QTZ', helper.util.bigIntToDecimals(qtzFees)); + expect(qtzFees == 0n).to.be.true; + }); +}); diff --git a/tests/src/xcm/xcmUnique.test.ts b/tests/src/xcm/xcmUnique.test.ts new file mode 100644 index 0000000000..7ab7f9d960 --- /dev/null +++ b/tests/src/xcm/xcmUnique.test.ts @@ -0,0 +1,600 @@ +// Copyright 2019-2022 Unique Network (Gibraltar) Ltd. +// This file is part of Unique Network. + +// Unique Network is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Unique Network is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Unique Network. If not, see . 
+ +import {IKeyringPair} from '@polkadot/types/types'; +import {blake2AsHex} from '@polkadot/util-crypto'; +import config from '../config'; +import {XcmV2TraitsError, XcmV2TraitsOutcome} from '../interfaces'; +import {itSub, expect, describeXCM, usingPlaygrounds, usingAcalaPlaygrounds, usingRelayPlaygrounds, usingMoonbeamPlaygrounds} from '../util'; + +const UNIQUE_CHAIN = 2037; +const ACALA_CHAIN = 2000; +const MOONBEAM_CHAIN = 2004; + +const relayUrl = config.relayUrl; +const acalaUrl = config.acalaUrl; +const moonbeamUrl = config.moonbeamUrl; + +const ACALA_DECIMALS = 12; + +const TRANSFER_AMOUNT = 2000000000000000000000000n; + +describeXCM('[XCM] Integration test: Exchanging tokens with Acala', () => { + let alice: IKeyringPair; + let randomAccount: IKeyringPair; + + let balanceUniqueTokenInit: bigint; + let balanceUniqueTokenMiddle: bigint; + let balanceUniqueTokenFinal: bigint; + let balanceAcalaTokenInit: bigint; + let balanceAcalaTokenMiddle: bigint; + let balanceAcalaTokenFinal: bigint; + let balanceUniqueForeignTokenInit: bigint; + let balanceUniqueForeignTokenMiddle: bigint; + let balanceUniqueForeignTokenFinal: bigint; + + before(async () => { + await usingPlaygrounds(async (helper, privateKey) => { + alice = await privateKey('//Alice'); + [randomAccount] = await helper.arrange.createAccounts([0n], alice); + }); + + await usingAcalaPlaygrounds(acalaUrl, async (helper) => { + const destination = { + V0: { + X2: [ + 'Parent', + { + Parachain: UNIQUE_CHAIN, + }, + ], + }, + }; + + const metadata = { + name: 'UNQ', + symbol: 'UNQ', + decimals: 18, + minimalBalance: 1n, + }; + + await helper.getSudo().assetRegistry.registerForeignAsset(alice, destination, metadata); + await helper.balance.transferToSubstrate(alice, randomAccount.address, 10000000000000n); + balanceAcalaTokenInit = await helper.balance.getSubstrate(randomAccount.address); + balanceUniqueForeignTokenInit = await helper.tokens.accounts(randomAccount.address, {ForeignAsset: 0}); + }); + + await usingPlaygrounds(async (helper) => { + await helper.balance.transferToSubstrate(alice, randomAccount.address, 10n * TRANSFER_AMOUNT); + balanceUniqueTokenInit = await helper.balance.getSubstrate(randomAccount.address); + }); + }); + + itSub('Should connect and send UNQ to Acala', async ({helper}) => { + + const destination = { + V0: { + X2: [ + 'Parent', + { + Parachain: ACALA_CHAIN, + }, + ], + }, + }; + + const beneficiary = { + V0: { + X1: { + AccountId32: { + network: 'Any', + id: randomAccount.addressRaw, + }, + }, + }, + }; + + const assets = { + V1: [ + { + id: { + Concrete: { + parents: 0, + interior: 'Here', + }, + }, + fun: { + Fungible: TRANSFER_AMOUNT, + }, + }, + ], + }; + + const feeAssetItem = 0; + const weightLimit = 5000000000; + + await helper.xcm.limitedReserveTransferAssets(randomAccount, destination, beneficiary, assets, feeAssetItem, weightLimit); + + balanceUniqueTokenMiddle = await helper.balance.getSubstrate(randomAccount.address); + + const unqFees = balanceUniqueTokenInit - balanceUniqueTokenMiddle - TRANSFER_AMOUNT; + console.log('[Unique -> Acala] transaction fees on Unique: %s UNQ', helper.util.bigIntToDecimals(unqFees)); + expect(unqFees > 0n).to.be.true; + + await usingAcalaPlaygrounds(acalaUrl, async (helper) => { + await helper.wait.newBlocks(3); + + balanceUniqueForeignTokenMiddle = await helper.tokens.accounts(randomAccount.address, {ForeignAsset: 0}); + balanceAcalaTokenMiddle = await helper.balance.getSubstrate(randomAccount.address); + + const acaFees = balanceAcalaTokenInit - 
balanceAcalaTokenMiddle; + const unqIncomeTransfer = balanceUniqueForeignTokenMiddle - balanceUniqueForeignTokenInit; + + console.log( + '[Unique -> Acala] transaction fees on Acala: %s ACA', + helper.util.bigIntToDecimals(acaFees, ACALA_DECIMALS), + ); + console.log('[Unique -> Acala] income %s UNQ', helper.util.bigIntToDecimals(unqIncomeTransfer)); + expect(acaFees == 0n).to.be.true; + expect(unqIncomeTransfer == TRANSFER_AMOUNT).to.be.true; + }); + }); + + itSub('Should connect to Acala and send UNQ back', async ({helper}) => { + await usingAcalaPlaygrounds(acalaUrl, async (helper) => { + const destination = { + V1: { + parents: 1, + interior: { + X2: [ + {Parachain: UNIQUE_CHAIN}, + { + AccountId32: { + network: 'Any', + id: randomAccount.addressRaw, + }, + }, + ], + }, + }, + }; + + const id = { + ForeignAsset: 0, + }; + + const destWeight = 50000000; + + await helper.xTokens.transfer(randomAccount, id, TRANSFER_AMOUNT, destination, destWeight); + + balanceAcalaTokenFinal = await helper.balance.getSubstrate(randomAccount.address); + balanceUniqueForeignTokenFinal = await helper.tokens.accounts(randomAccount.address, id); + + const acaFees = balanceAcalaTokenMiddle - balanceAcalaTokenFinal; + const unqOutcomeTransfer = balanceUniqueForeignTokenMiddle - balanceUniqueForeignTokenFinal; + + console.log( + '[Acala -> Unique] transaction fees on Acala: %s ACA', + helper.util.bigIntToDecimals(acaFees, ACALA_DECIMALS), + ); + console.log('[Acala -> Unique] outcome %s UNQ', helper.util.bigIntToDecimals(unqOutcomeTransfer)); + + expect(acaFees > 0).to.be.true; + expect(unqOutcomeTransfer == TRANSFER_AMOUNT).to.be.true; + }); + + await helper.wait.newBlocks(3); + + balanceUniqueTokenFinal = await helper.balance.getSubstrate(randomAccount.address); + const actuallyDelivered = balanceUniqueTokenFinal - balanceUniqueTokenMiddle; + expect(actuallyDelivered > 0).to.be.true; + + console.log('[Acala -> Unique] actually delivered %s UNQ', helper.util.bigIntToDecimals(actuallyDelivered)); + + const unqFees = TRANSFER_AMOUNT - actuallyDelivered; + console.log('[Acala -> Unique] transaction fees on Unique: %s UNQ', helper.util.bigIntToDecimals(unqFees)); + expect(unqFees == 0n).to.be.true; + }); +}); + +// These tests are relevant only when the foreign asset pallet is disabled +describeXCM('[XCM] Integration test: Unique rejects non-native tokens', () => { + let alice: IKeyringPair; + + before(async () => { + await usingPlaygrounds(async (_helper, privateKey) => { + alice = await privateKey('//Alice'); + }); + }); + + itSub('Unique rejects tokens from the Relay', async ({helper}) => { + await usingRelayPlaygrounds(relayUrl, async (helper) => { + const destination = { + V1: { + parents: 0, + interior: {X1: { + Parachain: UNIQUE_CHAIN, + }, + }, + }}; + + const beneficiary = { + V1: { + parents: 0, + interior: {X1: { + AccountId32: { + network: 'Any', + id: alice.addressRaw, + }, + }}, + }, + }; + + const assets = { + V1: [ + { + id: { + Concrete: { + parents: 0, + interior: 'Here', + }, + }, + fun: { + Fungible: 50_000_000_000_000_000n, + }, + }, + ], + }; + + const feeAssetItem = 0; + const weightLimit = 5_000_000_000; + + await helper.xcm.limitedReserveTransferAssets(alice, destination, beneficiary, assets, feeAssetItem, weightLimit); + }); + + const maxWaitBlocks = 3; + + const dmpQueueExecutedDownward = await helper.wait.event(maxWaitBlocks, 'dmpQueue', 'ExecutedDownward'); + + expect( + dmpQueueExecutedDownward != null, + '[Relay] dmpQueue.ExecutedDownward event is expected', + ).to.be.true; + + const 
event = dmpQueueExecutedDownward!.event; + const outcome = event.data[1] as XcmV2TraitsOutcome; + + expect( + outcome.isIncomplete, + '[Relay] The outcome of the XCM should be `Incomplete`', + ).to.be.true; + + const incomplete = outcome.asIncomplete; + expect( + incomplete[1].toString() == 'AssetNotFound', + '[Relay] The XCM error should be `AssetNotFound`', + ).to.be.true; + }); + + itSub('Unique rejects ACA tokens from Acala', async ({helper}) => { + await usingAcalaPlaygrounds(acalaUrl, async (helper) => { + const destination = { + V1: { + parents: 1, + interior: { + X2: [ + {Parachain: UNIQUE_CHAIN}, + { + AccountId32: { + network: 'Any', + id: alice.addressRaw, + }, + }, + ], + }, + }, + }; + + const id = { + Token: 'ACA', + }; + + const destWeight = 50000000; + + await helper.xTokens.transfer(alice, id, 100_000_000_000n, destination, destWeight); + }); + + const maxWaitBlocks = 3; + + const xcmpQueueFailEvent = await helper.wait.event(maxWaitBlocks, 'xcmpQueue', 'Fail'); + + expect( + xcmpQueueFailEvent != null, + '[Acala] xcmpQueue.FailEvent event is expected', + ).to.be.true; + + const event = xcmpQueueFailEvent!.event; + const outcome = event.data[1] as XcmV2TraitsError; + + expect( + outcome.isUntrustedReserveLocation, + '[Acala] The XCM error should be `UntrustedReserveLocation`', + ).to.be.true; + }); +}); + +describeXCM('[XCM] Integration test: Exchanging UNQ with Moonbeam', () => { + // Unique constants + let uniqueDonor: IKeyringPair; + let uniqueAssetLocation; + + let randomAccountUnique: IKeyringPair; + let randomAccountMoonbeam: IKeyringPair; + + // Moonbeam constants + let assetId: string; + + const councilVotingThreshold = 2; + const technicalCommitteeThreshold = 2; + const votingPeriod = 3; + const delayPeriod = 0; + + const uniqueAssetMetadata = { + name: 'xcUnique', + symbol: 'xcUNQ', + decimals: 18, + isFrozen: false, + minimalBalance: 1n, + }; + + let balanceUniqueTokenInit: bigint; + let balanceUniqueTokenMiddle: bigint; + let balanceUniqueTokenFinal: bigint; + let balanceForeignUnqTokenInit: bigint; + let balanceForeignUnqTokenMiddle: bigint; + let balanceForeignUnqTokenFinal: bigint; + let balanceGlmrTokenInit: bigint; + let balanceGlmrTokenMiddle: bigint; + let balanceGlmrTokenFinal: bigint; + + before(async () => { + await usingPlaygrounds(async (helper, privateKey) => { + uniqueDonor = await privateKey('//Alice'); + [randomAccountUnique] = await helper.arrange.createAccounts([0n], uniqueDonor); + + balanceForeignUnqTokenInit = 0n; + }); + + await usingMoonbeamPlaygrounds(moonbeamUrl, async (helper) => { + const alithAccount = helper.account.alithAccount(); + const baltatharAccount = helper.account.baltatharAccount(); + const dorothyAccount = helper.account.dorothyAccount(); + + randomAccountMoonbeam = helper.account.create(); + + // >>> Sponsoring Dorothy >>> + console.log('Sponsoring Dorothy.......'); + await helper.balance.transferToEthereum(alithAccount, dorothyAccount.address, 11_000_000_000_000_000_000n); + console.log('Sponsoring Dorothy.......DONE'); + // <<< Sponsoring Dorothy <<< + + uniqueAssetLocation = { + XCM: { + parents: 1, + interior: {X1: {Parachain: UNIQUE_CHAIN}}, + }, + }; + const existentialDeposit = 1n; + const isSufficient = true; + const unitsPerSecond = 1n; + const numAssetsWeightHint = 0; + + const encodedProposal = helper.assetManager.makeRegisterForeignAssetProposal({ + location: uniqueAssetLocation, + metadata: uniqueAssetMetadata, + existentialDeposit, + isSufficient, + unitsPerSecond, + numAssetsWeightHint, + }); + const 
proposalHash = blake2AsHex(encodedProposal); + + console.log('Encoded proposal for registerForeignAsset & setAssetUnitsPerSecond is %s', encodedProposal); + console.log('Encoded length %d', encodedProposal.length); + console.log('Encoded proposal hash for batch utility after schedule is %s', proposalHash); + + // >>> Note motion preimage >>> + console.log('Note motion preimage.......'); + await helper.democracy.notePreimage(baltatharAccount, encodedProposal); + console.log('Note motion preimage.......DONE'); + // <<< Note motion preimage <<< + + // >>> Propose external motion through council >>> + console.log('Propose external motion through council.......'); + const externalMotion = helper.democracy.externalProposeMajority(proposalHash); + const encodedMotion = externalMotion?.method.toHex() || ''; + const motionHash = blake2AsHex(encodedMotion); + console.log('Motion hash is %s', motionHash); + + await helper.collective.council.propose(baltatharAccount, councilVotingThreshold, externalMotion, externalMotion.encodedLength); + + const councilProposalIdx = await helper.collective.council.proposalCount() - 1; + await helper.collective.council.vote(dorothyAccount, motionHash, councilProposalIdx, true); + await helper.collective.council.vote(baltatharAccount, motionHash, councilProposalIdx, true); + + await helper.collective.council.close(dorothyAccount, motionHash, councilProposalIdx, 1_000_000_000, externalMotion.encodedLength); + console.log('Propose external motion through council.......DONE'); + // <<< Propose external motion through council <<< + + // >>> Fast track proposal through technical committee >>> + console.log('Fast track proposal through technical committee.......'); + const fastTrack = helper.democracy.fastTrack(proposalHash, votingPeriod, delayPeriod); + const encodedFastTrack = fastTrack?.method.toHex() || ''; + const fastTrackHash = blake2AsHex(encodedFastTrack); + console.log('FastTrack hash is %s', fastTrackHash); + + await helper.collective.techCommittee.propose(alithAccount, technicalCommitteeThreshold, fastTrack, fastTrack.encodedLength); + + const techProposalIdx = await helper.collective.techCommittee.proposalCount() - 1; + await helper.collective.techCommittee.vote(baltatharAccount, fastTrackHash, techProposalIdx, true); + await helper.collective.techCommittee.vote(alithAccount, fastTrackHash, techProposalIdx, true); + + await helper.collective.techCommittee.close(baltatharAccount, fastTrackHash, techProposalIdx, 1_000_000_000, fastTrack.encodedLength); + console.log('Fast track proposal through technical committee.......DONE'); + // <<< Fast track proposal through technical committee <<< + + // >>> Referendum voting >>> + console.log('Referendum voting.......'); + await helper.democracy.referendumVote(dorothyAccount, 0, { + balance: 10_000_000_000_000_000_000n, + vote: {aye: true, conviction: 1}, + }); + console.log('Referendum voting.......DONE'); + // <<< Referendum voting <<< + + // >>> Acquire Unique AssetId Info on Moonbeam >>> + console.log('Acquire Unique AssetId Info on Moonbeam.......'); + + // Wait for the democracy execute + await helper.wait.newBlocks(5); + + assetId = (await helper.assetManager.assetTypeId(uniqueAssetLocation)).toString(); + + console.log('UNQ asset ID is %s', assetId); + console.log('Acquire Unique AssetId Info on Moonbeam.......DONE'); + // >>> Acquire Unique AssetId Info on Moonbeam >>> + + // >>> Sponsoring random Account >>> + console.log('Sponsoring random Account.......'); + await 
helper.balance.transferToEthereum(baltatharAccount, randomAccountMoonbeam.address, 11_000_000_000_000_000_000n); + console.log('Sponsoring random Account.......DONE'); + // <<< Sponsoring random Account <<< + + balanceGlmrTokenInit = await helper.balance.getEthereum(randomAccountMoonbeam.address); + }); + + await usingPlaygrounds(async (helper) => { + await helper.balance.transferToSubstrate(uniqueDonor, randomAccountUnique.address, 10n * TRANSFER_AMOUNT); + balanceUniqueTokenInit = await helper.balance.getSubstrate(randomAccountUnique.address); + }); + }); + + itSub('Should connect and send UNQ to Moonbeam', async ({helper}) => { + const currencyId = { + NativeAssetId: 'Here', + }; + const dest = { + V1: { + parents: 1, + interior: { + X2: [ + {Parachain: MOONBEAM_CHAIN}, + {AccountKey20: {network: 'Any', key: randomAccountMoonbeam.address}}, + ], + }, + }, + }; + const amount = TRANSFER_AMOUNT; + const destWeight = 850000000; + + await helper.xTokens.transfer(randomAccountUnique, currencyId, amount, dest, destWeight); + + balanceUniqueTokenMiddle = await helper.balance.getSubstrate(randomAccountUnique.address); + expect(balanceUniqueTokenMiddle < balanceUniqueTokenInit).to.be.true; + + const transactionFees = balanceUniqueTokenInit - balanceUniqueTokenMiddle - TRANSFER_AMOUNT; + console.log('[Unique -> Moonbeam] transaction fees on Unique: %s UNQ', helper.util.bigIntToDecimals(transactionFees)); + expect(transactionFees > 0).to.be.true; + + await usingMoonbeamPlaygrounds(moonbeamUrl, async (helper) => { + await helper.wait.newBlocks(3); + + balanceGlmrTokenMiddle = await helper.balance.getEthereum(randomAccountMoonbeam.address); + + const glmrFees = balanceGlmrTokenInit - balanceGlmrTokenMiddle; + console.log('[Unique -> Moonbeam] transaction fees on Moonbeam: %s GLMR', helper.util.bigIntToDecimals(glmrFees)); + expect(glmrFees == 0n).to.be.true; + + balanceForeignUnqTokenMiddle = (await helper.assets.account(assetId, randomAccountMoonbeam.address))!; + + const unqIncomeTransfer = balanceForeignUnqTokenMiddle - balanceForeignUnqTokenInit; + console.log('[Unique -> Moonbeam] income %s UNQ', helper.util.bigIntToDecimals(unqIncomeTransfer)); + expect(unqIncomeTransfer == TRANSFER_AMOUNT).to.be.true; + }); + }); + + itSub('Should connect to Moonbeam and send UNQ back', async ({helper}) => { + await usingMoonbeamPlaygrounds(moonbeamUrl, async (helper) => { + const asset = { + V1: { + id: { + Concrete: { + parents: 1, + interior: { + X1: {Parachain: UNIQUE_CHAIN}, + }, + }, + }, + fun: { + Fungible: TRANSFER_AMOUNT, + }, + }, + }; + const destination = { + V1: { + parents: 1, + interior: { + X2: [ + {Parachain: UNIQUE_CHAIN}, + {AccountId32: {network: 'Any', id: randomAccountUnique.addressRaw}}, + ], + }, + }, + }; + const destWeight = 50000000; + + await helper.xTokens.transferMultiasset(randomAccountMoonbeam, asset, destination, destWeight); + + balanceGlmrTokenFinal = await helper.balance.getEthereum(randomAccountMoonbeam.address); + + const glmrFees = balanceGlmrTokenMiddle - balanceGlmrTokenFinal; + console.log('[Moonbeam -> Unique] transaction fees on Moonbeam: %s GLMR', helper.util.bigIntToDecimals(glmrFees)); + expect(glmrFees > 0).to.be.true; + + const unqRandomAccountAsset = await helper.assets.account(assetId, randomAccountMoonbeam.address); + + expect(unqRandomAccountAsset).to.be.null; + + balanceForeignUnqTokenFinal = 0n; + + const unqOutcomeTransfer = balanceForeignUnqTokenMiddle - balanceForeignUnqTokenFinal; + console.log('[Unique -> Moonbeam] outcome %s UNQ', 
helper.util.bigIntToDecimals(unqOutcomeTransfer)); + expect(unqOutcomeTransfer == TRANSFER_AMOUNT).to.be.true; + }); + + await helper.wait.newBlocks(3); + + balanceUniqueTokenFinal = await helper.balance.getSubstrate(randomAccountUnique.address); + const actuallyDelivered = balanceUniqueTokenFinal - balanceUniqueTokenMiddle; + expect(actuallyDelivered > 0).to.be.true; + + console.log('[Moonbeam -> Unique] actually delivered %s UNQ', helper.util.bigIntToDecimals(actuallyDelivered)); + + const unqFees = TRANSFER_AMOUNT - actuallyDelivered; + console.log('[Moonbeam -> Unique] transaction fees on Unique: %s UNQ', helper.util.bigIntToDecimals(unqFees)); + expect(unqFees == 0n).to.be.true; + }); +}); diff --git a/tests/tsconfig.json b/tests/tsconfig.json index c2eb63e9e1..72b7de8ca0 100644 --- a/tests/tsconfig.json +++ b/tests/tsconfig.json @@ -21,6 +21,9 @@ "./src/**/*", "./src/interfaces/*.ts" ], + "exclude": [ + "./src/.outdated" + ], "lib": [ "es2017" ], diff --git a/tests/update_types.sh b/tests/update_types.sh index e007e7b6fb..4afdc7afcd 100755 --- a/tests/update_types.sh +++ b/tests/update_types.sh @@ -12,8 +12,3 @@ rsync -ar --exclude .gitignore src/interfaces/ unique-types-js for file in unique-types-js/augment-* unique-types-js/**/types.ts unique-types-js/registry.ts; do sed -i '1s;^;//@ts-nocheck\n;' $file done - -pushd unique-types-js -git add . -git commit -m "chore: regenerate types" -popd diff --git a/tests/yarn.lock b/tests/yarn.lock index 558263f676..157cfc567e 100644 --- a/tests/yarn.lock +++ b/tests/yarn.lock @@ -10,150 +10,155 @@ "@jridgewell/gen-mapping" "^0.1.0" "@jridgewell/trace-mapping" "^0.3.9" -"@babel/code-frame@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.16.7.tgz#44416b6bd7624b998f5b1af5d470856c40138789" - integrity sha512-iAXqUn8IIeBTNd72xsFlgaXHkMBMt6y4HJp1tIaK465CWLT/fG1aqB7ykr95gHHmlBdGbFeWWfyB4NJJ0nmeIg== +"@babel/code-frame@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.18.6.tgz#3b25d38c89600baa2dcc219edfa88a74eb2c427a" + integrity sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q== dependencies: - "@babel/highlight" "^7.16.7" + "@babel/highlight" "^7.18.6" -"@babel/compat-data@^7.17.10": - version "7.17.10" - resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.17.10.tgz#711dc726a492dfc8be8220028b1b92482362baab" - integrity sha512-GZt/TCsG70Ms19gfZO1tM4CVnXsPgEPBCpJu+Qz3L0LUDsY5nZqFZglIoPC1kIYOtNBZlrnFT+klg12vFGZXrw== +"@babel/compat-data@^7.19.3": + version "7.19.4" + resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.19.4.tgz#95c86de137bf0317f3a570e1b6e996b427299747" + integrity sha512-CHIGpJcUQ5lU9KrPHTjBMhVwQG6CQjxfg36fGXl3qk/Gik1WwWachaXFuo0uCWJT/mStOKtcbFJCaVLihC1CMw== -"@babel/core@^7.18.2": - version "7.18.2" - resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.18.2.tgz#87b2fcd7cce9becaa7f5acebdc4f09f3dd19d876" - integrity sha512-A8pri1YJiC5UnkdrWcmfZTJTV85b4UXTAfImGmCfYmax4TR9Cw8sDS0MOk++Gp2mE/BefVJ5nwy5yzqNJbP/DQ== +"@babel/core@^7.19.3": + version "7.19.6" + resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.19.6.tgz#7122ae4f5c5a37c0946c066149abd8e75f81540f" + integrity sha512-D2Ue4KHpc6Ys2+AxpIx1BZ8+UegLLLE2p3KJEuJRKmokHOtl49jQ5ny1773KsGLZs8MQvBidAF6yWUJxRqtKtg== dependencies: "@ampproject/remapping" "^2.1.0" - "@babel/code-frame" "^7.16.7" - "@babel/generator" "^7.18.2" - "@babel/helper-compilation-targets" "^7.18.2" - 
"@babel/helper-module-transforms" "^7.18.0" - "@babel/helpers" "^7.18.2" - "@babel/parser" "^7.18.0" - "@babel/template" "^7.16.7" - "@babel/traverse" "^7.18.2" - "@babel/types" "^7.18.2" + "@babel/code-frame" "^7.18.6" + "@babel/generator" "^7.19.6" + "@babel/helper-compilation-targets" "^7.19.3" + "@babel/helper-module-transforms" "^7.19.6" + "@babel/helpers" "^7.19.4" + "@babel/parser" "^7.19.6" + "@babel/template" "^7.18.10" + "@babel/traverse" "^7.19.6" + "@babel/types" "^7.19.4" convert-source-map "^1.7.0" debug "^4.1.0" gensync "^1.0.0-beta.2" json5 "^2.2.1" semver "^6.3.0" -"@babel/generator@^7.18.2": - version "7.18.2" - resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.18.2.tgz#33873d6f89b21efe2da63fe554460f3df1c5880d" - integrity sha512-W1lG5vUwFvfMd8HVXqdfbuG7RuaSrTCCD8cl8fP8wOivdbtbIg2Db3IWUcgvfxKbbn6ZBGYRW/Zk1MIwK49mgw== +"@babel/generator@^7.19.6": + version "7.19.6" + resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.19.6.tgz#9e481a3fe9ca6261c972645ae3904ec0f9b34a1d" + integrity sha512-oHGRUQeoX1QrKeJIKVe0hwjGqNnVYsM5Nep5zo0uE0m42sLH+Fsd2pStJ5sRM1bNyTUUoz0pe2lTeMJrb/taTA== dependencies: - "@babel/types" "^7.18.2" - "@jridgewell/gen-mapping" "^0.3.0" + "@babel/types" "^7.19.4" + "@jridgewell/gen-mapping" "^0.3.2" jsesc "^2.5.1" -"@babel/helper-compilation-targets@^7.18.2": - version "7.18.2" - resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.18.2.tgz#67a85a10cbd5fc7f1457fec2e7f45441dc6c754b" - integrity sha512-s1jnPotJS9uQnzFtiZVBUxe67CuBa679oWFHpxYYnTpRL/1ffhyX44R9uYiXoa/pLXcY9H2moJta0iaanlk/rQ== +"@babel/helper-compilation-targets@^7.19.3": + version "7.19.3" + resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.19.3.tgz#a10a04588125675d7c7ae299af86fa1b2ee038ca" + integrity sha512-65ESqLGyGmLvgR0mst5AdW1FkNlj9rQsCKduzEoEPhBCDFGXvz2jW6bXFG6i0/MrV2s7hhXjjb2yAzcPuQlLwg== dependencies: - "@babel/compat-data" "^7.17.10" - "@babel/helper-validator-option" "^7.16.7" - browserslist "^4.20.2" + "@babel/compat-data" "^7.19.3" + "@babel/helper-validator-option" "^7.18.6" + browserslist "^4.21.3" semver "^6.3.0" -"@babel/helper-environment-visitor@^7.16.7", "@babel/helper-environment-visitor@^7.18.2": - version "7.18.2" - resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.18.2.tgz#8a6d2dedb53f6bf248e31b4baf38739ee4a637bd" - integrity sha512-14GQKWkX9oJzPiQQ7/J36FTXcD4kSp8egKjO9nINlSKiHITRA9q/R74qu8S9xlc/b/yjsJItQUeeh3xnGN0voQ== - -"@babel/helper-function-name@^7.17.9": - version "7.17.9" - resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.17.9.tgz#136fcd54bc1da82fcb47565cf16fd8e444b1ff12" - integrity sha512-7cRisGlVtiVqZ0MW0/yFB4atgpGLWEHUVYnb448hZK4x+vih0YO5UoS11XIYtZYqHd0dIPMdUSv8q5K4LdMnIg== - dependencies: - "@babel/template" "^7.16.7" - "@babel/types" "^7.17.0" - -"@babel/helper-hoist-variables@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.16.7.tgz#86bcb19a77a509c7b77d0e22323ef588fa58c246" - integrity sha512-m04d/0Op34H5v7pbZw6pSKP7weA6lsMvfiIAMeIvkY/R4xQtBSMFEigu9QTZ2qB/9l22vsxtM8a+Q8CzD255fg== - dependencies: - "@babel/types" "^7.16.7" - -"@babel/helper-module-imports@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.16.7.tgz#25612a8091a999704461c8a222d0efec5d091437" - integrity 
sha512-LVtS6TqjJHFc+nYeITRo6VLXve70xmq7wPhWTqDJusJEgGmkAACWwMiTNrvfoQo6hEhFwAIixNkvB0jPXDL8Wg== - dependencies: - "@babel/types" "^7.16.7" - -"@babel/helper-module-transforms@^7.18.0": - version "7.18.0" - resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.18.0.tgz#baf05dec7a5875fb9235bd34ca18bad4e21221cd" - integrity sha512-kclUYSUBIjlvnzN2++K9f2qzYKFgjmnmjwL4zlmU5f8ZtzgWe8s0rUPSTGy2HmK4P8T52MQsS+HTQAgZd3dMEA== - dependencies: - "@babel/helper-environment-visitor" "^7.16.7" - "@babel/helper-module-imports" "^7.16.7" - "@babel/helper-simple-access" "^7.17.7" - "@babel/helper-split-export-declaration" "^7.16.7" - "@babel/helper-validator-identifier" "^7.16.7" - "@babel/template" "^7.16.7" - "@babel/traverse" "^7.18.0" - "@babel/types" "^7.18.0" - -"@babel/helper-simple-access@^7.17.7": - version "7.18.2" - resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.18.2.tgz#4dc473c2169ac3a1c9f4a51cfcd091d1c36fcff9" - integrity sha512-7LIrjYzndorDY88MycupkpQLKS1AFfsVRm2k/9PtKScSy5tZq0McZTj+DiMRynboZfIqOKvo03pmhTaUgiD6fQ== - dependencies: - "@babel/types" "^7.18.2" - -"@babel/helper-split-export-declaration@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.16.7.tgz#0b648c0c42da9d3920d85ad585f2778620b8726b" - integrity sha512-xbWoy/PFoxSWazIToT9Sif+jJTlrMcndIsaOKvTA6u7QEo7ilkRZpjew18/W3c7nm8fXdUDXh02VXTbZ0pGDNw== - dependencies: - "@babel/types" "^7.16.7" - -"@babel/helper-validator-identifier@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.16.7.tgz#e8c602438c4a8195751243da9031d1607d247cad" - integrity sha512-hsEnFemeiW4D08A5gUAZxLBTXpZ39P+a+DGDsHw1yxqyQ/jzFEnxf5uTEGp+3bzAbNOxU1paTgYS4ECU/IgfDw== - -"@babel/helper-validator-option@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.16.7.tgz#b203ce62ce5fe153899b617c08957de860de4d23" - integrity sha512-TRtenOuRUVo9oIQGPC5G9DgK4743cdxvtOw0weQNpZXaS16SCBi5MNjZF8vba3ETURjZpTbVn7Vvcf2eAwFozQ== - -"@babel/helpers@^7.18.2": - version "7.18.2" - resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.18.2.tgz#970d74f0deadc3f5a938bfa250738eb4ac889384" - integrity sha512-j+d+u5xT5utcQSzrh9p+PaJX94h++KN+ng9b9WEJq7pkUPAd61FGqhjuUEdfknb3E/uDBb7ruwEeKkIxNJPIrg== - dependencies: - "@babel/template" "^7.16.7" - "@babel/traverse" "^7.18.2" - "@babel/types" "^7.18.2" - -"@babel/highlight@^7.16.7": - version "7.17.12" - resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.17.12.tgz#257de56ee5afbd20451ac0a75686b6b404257351" - integrity sha512-7yykMVF3hfZY2jsHZEEgLc+3x4o1O+fYyULu11GynEUQNwB6lua+IIQn1FiJxNucd5UlyJryrwsOh8PL9Sn8Qg== - dependencies: - "@babel/helper-validator-identifier" "^7.16.7" +"@babel/helper-environment-visitor@^7.18.9": + version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.18.9.tgz#0c0cee9b35d2ca190478756865bb3528422f51be" + integrity sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg== + +"@babel/helper-function-name@^7.19.0": + version "7.19.0" + resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.19.0.tgz#941574ed5390682e872e52d3f38ce9d1bef4648c" + integrity 
sha512-WAwHBINyrpqywkUH0nTnNgI5ina5TFn85HKS0pbPDfxFfhyR/aNQEn4hGi1P1JyT//I0t4OgXUlofzWILRvS5w== + dependencies: + "@babel/template" "^7.18.10" + "@babel/types" "^7.19.0" + +"@babel/helper-hoist-variables@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.18.6.tgz#d4d2c8fb4baeaa5c68b99cc8245c56554f926678" + integrity sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-module-imports@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.18.6.tgz#1e3ebdbbd08aad1437b428c50204db13c5a3ca6e" + integrity sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-module-transforms@^7.19.6": + version "7.19.6" + resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.19.6.tgz#6c52cc3ac63b70952d33ee987cbee1c9368b533f" + integrity sha512-fCmcfQo/KYr/VXXDIyd3CBGZ6AFhPFy1TfSEJ+PilGVlQT6jcbqtHAM4C1EciRqMza7/TpOUZliuSH+U6HAhJw== + dependencies: + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-module-imports" "^7.18.6" + "@babel/helper-simple-access" "^7.19.4" + "@babel/helper-split-export-declaration" "^7.18.6" + "@babel/helper-validator-identifier" "^7.19.1" + "@babel/template" "^7.18.10" + "@babel/traverse" "^7.19.6" + "@babel/types" "^7.19.4" + +"@babel/helper-simple-access@^7.19.4": + version "7.19.4" + resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.19.4.tgz#be553f4951ac6352df2567f7daa19a0ee15668e7" + integrity sha512-f9Xq6WqBFqaDfbCzn2w85hwklswz5qsKlh7f08w4Y9yhJHpnNC0QemtSkK5YyOY8kPGvyiwdzZksGUhnGdaUIg== + dependencies: + "@babel/types" "^7.19.4" + +"@babel/helper-split-export-declaration@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.18.6.tgz#7367949bc75b20c6d5a5d4a97bba2824ae8ef075" + integrity sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-string-parser@^7.19.4": + version "7.19.4" + resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.19.4.tgz#38d3acb654b4701a9b77fb0615a96f775c3a9e63" + integrity sha512-nHtDoQcuqFmwYNYPz3Rah5ph2p8PFeFCsZk9A/48dPc/rGocJ5J3hAAZ7pb76VWX3fZKu+uEr/FhH5jLx7umrw== + +"@babel/helper-validator-identifier@^7.18.6", "@babel/helper-validator-identifier@^7.19.1": + version "7.19.1" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.19.1.tgz#7eea834cf32901ffdc1a7ee555e2f9c27e249ca2" + integrity sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w== + +"@babel/helper-validator-option@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.18.6.tgz#bf0d2b5a509b1f336099e4ff36e1a63aa5db4db8" + integrity sha512-XO7gESt5ouv/LRJdrVjkShckw6STTaB7l9BrpBaAHDeF5YZT+01PCwmR0SJHnkW6i8OwW/EVWRShfi4j2x+KQw== + +"@babel/helpers@^7.19.4": + version "7.19.4" + resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.19.4.tgz#42154945f87b8148df7203a25c31ba9a73be46c5" + integrity 
sha512-G+z3aOx2nfDHwX/kyVii5fJq+bgscg89/dJNWpYeKeBv3v9xX8EIabmx1k6u9LS04H7nROFVRVK+e3k0VHp+sw== + dependencies: + "@babel/template" "^7.18.10" + "@babel/traverse" "^7.19.4" + "@babel/types" "^7.19.4" + +"@babel/highlight@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.18.6.tgz#81158601e93e2563795adcbfbdf5d64be3f2ecdf" + integrity sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g== + dependencies: + "@babel/helper-validator-identifier" "^7.18.6" chalk "^2.0.0" js-tokens "^4.0.0" -"@babel/parser@^7.16.7", "@babel/parser@^7.18.0": - version "7.18.4" - resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.18.4.tgz#6774231779dd700e0af29f6ad8d479582d7ce5ef" - integrity sha512-FDge0dFazETFcxGw/EXzOkN8uJp0PC7Qbm+Pe9T+av2zlBpOgunFHkQPPn+eRuClU73JF+98D531UgayY89tow== +"@babel/parser@^7.18.10", "@babel/parser@^7.19.6": + version "7.19.6" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.19.6.tgz#b923430cb94f58a7eae8facbffa9efd19130e7f8" + integrity sha512-h1IUp81s2JYJ3mRkdxJgs4UvmSsRvDrx5ICSJbPvtWYv5i1nTBGcBpnog+89rAFMwvvru6E5NUHdBe01UeSzYA== -"@babel/register@^7.17.7": - version "7.17.7" - resolved "https://registry.yarnpkg.com/@babel/register/-/register-7.17.7.tgz#5eef3e0f4afc07e25e847720e7b987ae33f08d0b" - integrity sha512-fg56SwvXRifootQEDQAu1mKdjh5uthPzdO0N6t358FktfL4XjAVXuH58ULoiW8mesxiOgNIrxiImqEwv0+hRRA== +"@babel/register@^7.18.9": + version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/register/-/register-7.18.9.tgz#1888b24bc28d5cc41c412feb015e9ff6b96e439c" + integrity sha512-ZlbnXDcNYHMR25ITwwNKT88JiaukkdVj/nG7r3wnuXkOTHc60Uy05PwMCPre0hSkY68E6zK3xz+vUJSP2jWmcw== dependencies: clone-deep "^4.0.1" find-cache-dir "^2.0.0" @@ -161,44 +166,45 @@ pirates "^4.0.5" source-map-support "^0.5.16" -"@babel/runtime@^7.17.9", "@babel/runtime@^7.18.3": - version "7.18.3" - resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.18.3.tgz#c7b654b57f6f63cf7f8b418ac9ca04408c4579f4" - integrity sha512-38Y8f7YUhce/K7RMwTp7m0uCumpv9hZkitCbBClqQIow1qSbCvGkcegKOXpEWCQLfWmevgRiWokZ1GkpfhbZug== +"@babel/runtime@^7.18.9", "@babel/runtime@^7.19.4": + version "7.19.4" + resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.19.4.tgz#a42f814502ee467d55b38dd1c256f53a7b885c78" + integrity sha512-EXpLCrk55f+cYqmHsSR+yD/0gAIMxxA9QK9lnQWzhMCvt+YmoBN7Zx94s++Kv0+unHk39vxNO8t+CMA2WSS3wA== dependencies: regenerator-runtime "^0.13.4" -"@babel/template@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.16.7.tgz#8d126c8701fde4d66b264b3eba3d96f07666d155" - integrity sha512-I8j/x8kHUrbYRTUxXrrMbfCa7jxkE7tZre39x3kjr9hvI82cK1FfqLygotcWN5kdPGWcLdWMHpSBavse5tWw3w== - dependencies: - "@babel/code-frame" "^7.16.7" - "@babel/parser" "^7.16.7" - "@babel/types" "^7.16.7" - -"@babel/traverse@^7.18.0", "@babel/traverse@^7.18.2": - version "7.18.2" - resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.18.2.tgz#b77a52604b5cc836a9e1e08dca01cba67a12d2e8" - integrity sha512-9eNwoeovJ6KH9zcCNnENY7DMFwTU9JdGCFtqNLfUAqtUHRCOsTOqWoffosP8vKmNYeSBUv3yVJXjfd8ucwOjUA== - dependencies: - "@babel/code-frame" "^7.16.7" - "@babel/generator" "^7.18.2" - "@babel/helper-environment-visitor" "^7.18.2" - "@babel/helper-function-name" "^7.17.9" - "@babel/helper-hoist-variables" "^7.16.7" - "@babel/helper-split-export-declaration" "^7.16.7" - "@babel/parser" "^7.18.0" - "@babel/types" "^7.18.2" +"@babel/template@^7.18.10": + version "7.18.10" + 
resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.18.10.tgz#6f9134835970d1dbf0835c0d100c9f38de0c5e71" + integrity sha512-TI+rCtooWHr3QJ27kJxfjutghu44DLnasDMwpDqCXVTal9RLp3RSYNh4NdBrRP2cQAoG9A8juOQl6P6oZG4JxA== + dependencies: + "@babel/code-frame" "^7.18.6" + "@babel/parser" "^7.18.10" + "@babel/types" "^7.18.10" + +"@babel/traverse@^7.19.4", "@babel/traverse@^7.19.6": + version "7.19.6" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.19.6.tgz#7b4c865611df6d99cb131eec2e8ac71656a490dc" + integrity sha512-6l5HrUCzFM04mfbG09AagtYyR2P0B71B1wN7PfSPiksDPz2k5H9CBC1tcZpz2M8OxbKTPccByoOJ22rUKbpmQQ== + dependencies: + "@babel/code-frame" "^7.18.6" + "@babel/generator" "^7.19.6" + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-function-name" "^7.19.0" + "@babel/helper-hoist-variables" "^7.18.6" + "@babel/helper-split-export-declaration" "^7.18.6" + "@babel/parser" "^7.19.6" + "@babel/types" "^7.19.4" debug "^4.1.0" globals "^11.1.0" -"@babel/types@^7.16.7", "@babel/types@^7.17.0", "@babel/types@^7.18.0", "@babel/types@^7.18.2": - version "7.18.4" - resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.18.4.tgz#27eae9b9fd18e9dccc3f9d6ad051336f307be354" - integrity sha512-ThN1mBcMq5pG/Vm2IcBmPPfyPXbd8S02rS+OBIDENdufvqC7Z/jHPCv9IcP01277aKtDI8g/2XysBN4hA8niiw== +"@babel/types@^7.18.10", "@babel/types@^7.18.6", "@babel/types@^7.19.0", "@babel/types@^7.19.4": + version "7.19.4" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.19.4.tgz#0dd5c91c573a202d600490a35b33246fed8a41c7" + integrity sha512-M5LK7nAeS6+9j7hAq+b3fQs+pNfUtTGq+yFFfHnauFA8zQtLRfmuipmsKDKKLuyG+wC8ABW43A153YNawNTEtw== dependencies: - "@babel/helper-validator-identifier" "^7.16.7" + "@babel/helper-string-parser" "^7.19.4" + "@babel/helper-validator-identifier" "^7.19.1" to-fast-properties "^2.0.0" "@cspotcode/source-map-support@^0.8.0": @@ -208,14 +214,14 @@ dependencies: "@jridgewell/trace-mapping" "0.3.9" -"@eslint/eslintrc@^1.3.0": - version "1.3.0" - resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-1.3.0.tgz#29f92c30bb3e771e4a2048c95fa6855392dfac4f" - integrity sha512-UWW0TMTmk2d7hLcWD1/e2g5HDM/HQ3csaLSqXCfqwh4uNDuNqlaKWXmEsL4Cs41Z0KnILNvwbHAah3C2yt06kw== +"@eslint/eslintrc@^1.3.3": + version "1.3.3" + resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-1.3.3.tgz#2b044ab39fdfa75b4688184f9e573ce3c5b0ff95" + integrity sha512-uj3pT6Mg+3t39fvLrj8iuCIJ38zKO9FpGtJ4BBJebJhEwjoT+KLVNCcHT5QC9NGRIEi7fZ0ZR8YRb884auB4Lg== dependencies: ajv "^6.12.4" debug "^4.3.2" - espree "^9.3.2" + espree "^9.4.0" globals "^13.15.0" ignore "^5.2.0" import-fresh "^3.2.1" @@ -224,12 +230,12 @@ strip-json-comments "^3.1.1" "@ethereumjs/common@^2.5.0", "@ethereumjs/common@^2.6.4": - version "2.6.4" - resolved "https://registry.yarnpkg.com/@ethereumjs/common/-/common-2.6.4.tgz#1b3cdd3aa4ee3b0ca366756fc35e4a03022a01cc" - integrity sha512-RDJh/R/EAr+B7ZRg5LfJ0BIpf/1LydFgYdvZEuTraojCbVypO2sQ+QnpP5u2wJf9DASyooKqu8O4FJEWUV6NXw== + version "2.6.5" + resolved "https://registry.yarnpkg.com/@ethereumjs/common/-/common-2.6.5.tgz#0a75a22a046272579d91919cb12d84f2756e8d30" + integrity sha512-lRyVQOeCDaIVtgfbowla32pzeDv2Obr8oR8Put5RdUBNRGr1VGPGQNGP6elWIpgK3YdpzqTOh4GyUGOureVeeA== dependencies: crc-32 "^1.2.0" - ethereumjs-util "^7.1.4" + ethereumjs-util "^7.1.5" "@ethereumjs/tx@^3.3.2": version "3.5.2" @@ -239,191 +245,197 @@ "@ethereumjs/common" "^2.6.4" ethereumjs-util "^7.1.5" -"@ethersproject/abi@5.0.7": - version "5.0.7" - resolved 
"https://registry.yarnpkg.com/@ethersproject/abi/-/abi-5.0.7.tgz#79e52452bd3ca2956d0e1c964207a58ad1a0ee7b" - integrity sha512-Cqktk+hSIckwP/W8O47Eef60VwmoSC/L3lY0+dIBhQPCNn9E4V7rwmm2aFrNRRDJfFlGuZ1khkQUOc3oBX+niw== - dependencies: - "@ethersproject/address" "^5.0.4" - "@ethersproject/bignumber" "^5.0.7" - "@ethersproject/bytes" "^5.0.4" - "@ethersproject/constants" "^5.0.4" - "@ethersproject/hash" "^5.0.4" - "@ethersproject/keccak256" "^5.0.3" - "@ethersproject/logger" "^5.0.5" - "@ethersproject/properties" "^5.0.3" - "@ethersproject/strings" "^5.0.4" - -"@ethersproject/abstract-provider@^5.6.1": - version "5.6.1" - resolved "https://registry.yarnpkg.com/@ethersproject/abstract-provider/-/abstract-provider-5.6.1.tgz#02ddce150785caf0c77fe036a0ebfcee61878c59" - integrity sha512-BxlIgogYJtp1FS8Muvj8YfdClk3unZH0vRMVX791Z9INBNT/kuACZ9GzaY1Y4yFq+YSy6/w4gzj3HCRKrK9hsQ== - dependencies: - "@ethersproject/bignumber" "^5.6.2" - "@ethersproject/bytes" "^5.6.1" - "@ethersproject/logger" "^5.6.0" - "@ethersproject/networks" "^5.6.3" - "@ethersproject/properties" "^5.6.0" - "@ethersproject/transactions" "^5.6.2" - "@ethersproject/web" "^5.6.1" - -"@ethersproject/abstract-signer@^5.6.2": - version "5.6.2" - resolved "https://registry.yarnpkg.com/@ethersproject/abstract-signer/-/abstract-signer-5.6.2.tgz#491f07fc2cbd5da258f46ec539664713950b0b33" - integrity sha512-n1r6lttFBG0t2vNiI3HoWaS/KdOt8xyDjzlP2cuevlWLG6EX0OwcKLyG/Kp/cuwNxdy/ous+R/DEMdTUwWQIjQ== - dependencies: - "@ethersproject/abstract-provider" "^5.6.1" - "@ethersproject/bignumber" "^5.6.2" - "@ethersproject/bytes" "^5.6.1" - "@ethersproject/logger" "^5.6.0" - "@ethersproject/properties" "^5.6.0" - -"@ethersproject/address@^5.0.4", "@ethersproject/address@^5.6.1": - version "5.6.1" - resolved "https://registry.yarnpkg.com/@ethersproject/address/-/address-5.6.1.tgz#ab57818d9aefee919c5721d28cd31fd95eff413d" - integrity sha512-uOgF0kS5MJv9ZvCz7x6T2EXJSzotiybApn4XlOgoTX0xdtyVIJ7pF+6cGPxiEq/dpBiTfMiw7Yc81JcwhSYA0Q== - dependencies: - "@ethersproject/bignumber" "^5.6.2" - "@ethersproject/bytes" "^5.6.1" - "@ethersproject/keccak256" "^5.6.1" - "@ethersproject/logger" "^5.6.0" - "@ethersproject/rlp" "^5.6.1" - -"@ethersproject/base64@^5.6.1": - version "5.6.1" - resolved "https://registry.yarnpkg.com/@ethersproject/base64/-/base64-5.6.1.tgz#2c40d8a0310c9d1606c2c37ae3092634b41d87cb" - integrity sha512-qB76rjop6a0RIYYMiB4Eh/8n+Hxu2NIZm8S/Q7kNo5pmZfXhHGHmS4MinUainiBC54SCyRnwzL+KZjj8zbsSsw== - dependencies: - "@ethersproject/bytes" "^5.6.1" - -"@ethersproject/bignumber@^5.0.7", "@ethersproject/bignumber@^5.6.2": - version "5.6.2" - resolved "https://registry.yarnpkg.com/@ethersproject/bignumber/-/bignumber-5.6.2.tgz#72a0717d6163fab44c47bcc82e0c550ac0315d66" - integrity sha512-v7+EEUbhGqT3XJ9LMPsKvXYHFc8eHxTowFCG/HgJErmq4XHJ2WR7aeyICg3uTOAQ7Icn0GFHAohXEhxQHq4Ubw== - dependencies: - "@ethersproject/bytes" "^5.6.1" - "@ethersproject/logger" "^5.6.0" +"@ethersproject/abi@^5.6.3": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/abi/-/abi-5.7.0.tgz#b3f3e045bbbeed1af3947335c247ad625a44e449" + integrity sha512-351ktp42TiRcYB3H1OP8yajPeAQstMW/yCFokj/AthP9bLHzQFPlOrxOcwYEDkUAICmOHljvN4K39OMTMUa9RA== + dependencies: + "@ethersproject/address" "^5.7.0" + "@ethersproject/bignumber" "^5.7.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/constants" "^5.7.0" + "@ethersproject/hash" "^5.7.0" + "@ethersproject/keccak256" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/properties" "^5.7.0" + "@ethersproject/strings" "^5.7.0" + 
+"@ethersproject/abstract-provider@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/abstract-provider/-/abstract-provider-5.7.0.tgz#b0a8550f88b6bf9d51f90e4795d48294630cb9ef" + integrity sha512-R41c9UkchKCpAqStMYUpdunjo3pkEvZC3FAwZn5S5MGbXoMQOHIdHItezTETxAO5bevtMApSyEhn9+CHcDsWBw== + dependencies: + "@ethersproject/bignumber" "^5.7.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/networks" "^5.7.0" + "@ethersproject/properties" "^5.7.0" + "@ethersproject/transactions" "^5.7.0" + "@ethersproject/web" "^5.7.0" + +"@ethersproject/abstract-signer@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/abstract-signer/-/abstract-signer-5.7.0.tgz#13f4f32117868452191a4649723cb086d2b596b2" + integrity sha512-a16V8bq1/Cz+TGCkE2OPMTOUDLS3grCpdjoJCYNnVBbdYEMSgKrU0+B90s8b6H+ByYTBZN7a3g76jdIJi7UfKQ== + dependencies: + "@ethersproject/abstract-provider" "^5.7.0" + "@ethersproject/bignumber" "^5.7.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/properties" "^5.7.0" + +"@ethersproject/address@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/address/-/address-5.7.0.tgz#19b56c4d74a3b0a46bfdbb6cfcc0a153fc697f37" + integrity sha512-9wYhYt7aghVGo758POM5nqcOMaE168Q6aRLJZwUmiqSrAungkG74gSSeKEIR7ukixesdRZGPgVqme6vmxs1fkA== + dependencies: + "@ethersproject/bignumber" "^5.7.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/keccak256" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/rlp" "^5.7.0" + +"@ethersproject/base64@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/base64/-/base64-5.7.0.tgz#ac4ee92aa36c1628173e221d0d01f53692059e1c" + integrity sha512-Dr8tcHt2mEbsZr/mwTPIQAf3Ai0Bks/7gTw9dSqk1mQvhW3XvRlmDJr/4n+wg1JmCl16NZue17CDh8xb/vZ0sQ== + dependencies: + "@ethersproject/bytes" "^5.7.0" + +"@ethersproject/bignumber@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/bignumber/-/bignumber-5.7.0.tgz#e2f03837f268ba655ffba03a57853e18a18dc9c2" + integrity sha512-n1CAdIHRWjSucQO3MC1zPSVgV/6dy/fjL9pMrPP9peL+QxEg9wOsVqwD4+818B6LUEtaXzVHQiuivzRoxPxUGw== + dependencies: + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/logger" "^5.7.0" bn.js "^5.2.1" -"@ethersproject/bytes@^5.0.4", "@ethersproject/bytes@^5.6.1": - version "5.6.1" - resolved "https://registry.yarnpkg.com/@ethersproject/bytes/-/bytes-5.6.1.tgz#24f916e411f82a8a60412344bf4a813b917eefe7" - integrity sha512-NwQt7cKn5+ZE4uDn+X5RAXLp46E1chXoaMmrxAyA0rblpxz8t58lVkrHXoRIn0lz1joQElQ8410GqhTqMOwc6g== - dependencies: - "@ethersproject/logger" "^5.6.0" - -"@ethersproject/constants@^5.0.4", "@ethersproject/constants@^5.6.1": - version "5.6.1" - resolved "https://registry.yarnpkg.com/@ethersproject/constants/-/constants-5.6.1.tgz#e2e974cac160dd101cf79fdf879d7d18e8cb1370" - integrity sha512-QSq9WVnZbxXYFftrjSjZDUshp6/eKp6qrtdBtUCm0QxCV5z1fG/w3kdlcsjMCQuQHUnAclKoK7XpXMezhRDOLg== - dependencies: - "@ethersproject/bignumber" "^5.6.2" - -"@ethersproject/hash@^5.0.4": - version "5.6.1" - resolved "https://registry.yarnpkg.com/@ethersproject/hash/-/hash-5.6.1.tgz#224572ea4de257f05b4abf8ae58b03a67e99b0f4" - integrity sha512-L1xAHurbaxG8VVul4ankNX5HgQ8PNCTrnVXEiFnE9xoRnaUcgfD12tZINtDinSllxPLCtGwguQxJ5E6keE84pA== - dependencies: - "@ethersproject/abstract-signer" "^5.6.2" - "@ethersproject/address" "^5.6.1" - "@ethersproject/bignumber" "^5.6.2" - "@ethersproject/bytes" "^5.6.1" - "@ethersproject/keccak256" "^5.6.1" - 
"@ethersproject/logger" "^5.6.0" - "@ethersproject/properties" "^5.6.0" - "@ethersproject/strings" "^5.6.1" - -"@ethersproject/keccak256@^5.0.3", "@ethersproject/keccak256@^5.6.1": - version "5.6.1" - resolved "https://registry.yarnpkg.com/@ethersproject/keccak256/-/keccak256-5.6.1.tgz#b867167c9b50ba1b1a92bccdd4f2d6bd168a91cc" - integrity sha512-bB7DQHCTRDooZZdL3lk9wpL0+XuG3XLGHLh3cePnybsO3V0rdCAOQGpn/0R3aODmnTOOkCATJiD2hnL+5bwthA== - dependencies: - "@ethersproject/bytes" "^5.6.1" +"@ethersproject/bytes@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/bytes/-/bytes-5.7.0.tgz#a00f6ea8d7e7534d6d87f47188af1148d71f155d" + integrity sha512-nsbxwgFXWh9NyYWo+U8atvmMsSdKJprTcICAkvbBffT75qDocbuggBU0SJiVK2MuTrp0q+xvLkTnGMPK1+uA9A== + dependencies: + "@ethersproject/logger" "^5.7.0" + +"@ethersproject/constants@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/constants/-/constants-5.7.0.tgz#df80a9705a7e08984161f09014ea012d1c75295e" + integrity sha512-DHI+y5dBNvkpYUMiRQyxRBYBefZkJfo70VUkUAsRjcPs47muV9evftfZ0PJVCXYbAiCgght0DtcF9srFQmIgWA== + dependencies: + "@ethersproject/bignumber" "^5.7.0" + +"@ethersproject/hash@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/hash/-/hash-5.7.0.tgz#eb7aca84a588508369562e16e514b539ba5240a7" + integrity sha512-qX5WrQfnah1EFnO5zJv1v46a8HW0+E5xuBBDTwMFZLuVTx0tbU2kkx15NqdjxecrLGatQN9FGQKpb1FKdHCt+g== + dependencies: + "@ethersproject/abstract-signer" "^5.7.0" + "@ethersproject/address" "^5.7.0" + "@ethersproject/base64" "^5.7.0" + "@ethersproject/bignumber" "^5.7.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/keccak256" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/properties" "^5.7.0" + "@ethersproject/strings" "^5.7.0" + +"@ethersproject/keccak256@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/keccak256/-/keccak256-5.7.0.tgz#3186350c6e1cd6aba7940384ec7d6d9db01f335a" + integrity sha512-2UcPboeL/iW+pSg6vZ6ydF8tCnv3Iu/8tUmLLzWWGzxWKFFqOBQFLo6uLUv6BDrLgCDfN28RJ/wtByx+jZ4KBg== + dependencies: + "@ethersproject/bytes" "^5.7.0" js-sha3 "0.8.0" -"@ethersproject/logger@^5.0.5", "@ethersproject/logger@^5.6.0": - version "5.6.0" - resolved "https://registry.yarnpkg.com/@ethersproject/logger/-/logger-5.6.0.tgz#d7db1bfcc22fd2e4ab574cba0bb6ad779a9a3e7a" - integrity sha512-BiBWllUROH9w+P21RzoxJKzqoqpkyM1pRnEKG69bulE9TSQD8SAIvTQqIMZmmCO8pUNkgLP1wndX1gKghSpBmg== +"@ethersproject/logger@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/logger/-/logger-5.7.0.tgz#6ce9ae168e74fecf287be17062b590852c311892" + integrity sha512-0odtFdXu/XHtjQXJYA3u9G0G8btm0ND5Cu8M7i5vhEcE8/HmF4Lbdqanwyv4uQTr2tx6b7fQRmgLrsnpQlmnig== -"@ethersproject/networks@^5.6.3": - version "5.6.3" - resolved "https://registry.yarnpkg.com/@ethersproject/networks/-/networks-5.6.3.tgz#3ee3ab08f315b433b50c99702eb32e0cf31f899f" - integrity sha512-QZxRH7cA5Ut9TbXwZFiCyuPchdWi87ZtVNHWZd0R6YFgYtes2jQ3+bsslJ0WdyDe0i6QumqtoYqvY3rrQFRZOQ== +"@ethersproject/networks@^5.7.0": + version "5.7.1" + resolved "https://registry.yarnpkg.com/@ethersproject/networks/-/networks-5.7.1.tgz#118e1a981d757d45ccea6bb58d9fd3d9db14ead6" + integrity sha512-n/MufjFYv3yFcUyfhnXotyDlNdFb7onmkSy8aQERi2PjNcnWQ66xXxa3XlS8nCcA8aJKJjIIMNJTC7tu80GwpQ== dependencies: - "@ethersproject/logger" "^5.6.0" + "@ethersproject/logger" "^5.7.0" -"@ethersproject/properties@^5.0.3", "@ethersproject/properties@^5.6.0": - version "5.6.0" - resolved 
"https://registry.yarnpkg.com/@ethersproject/properties/-/properties-5.6.0.tgz#38904651713bc6bdd5bdd1b0a4287ecda920fa04" - integrity sha512-szoOkHskajKePTJSZ46uHUWWkbv7TzP2ypdEK6jGMqJaEt2sb0jCgfBo0gH0m2HBpRixMuJ6TBRaQCF7a9DoCg== +"@ethersproject/properties@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/properties/-/properties-5.7.0.tgz#a6e12cb0439b878aaf470f1902a176033067ed30" + integrity sha512-J87jy8suntrAkIZtecpxEPxY//szqr1mlBaYlQ0r4RCaiD2hjheqF9s1LVE8vVuJCXisjIP+JgtK/Do54ej4Sw== dependencies: - "@ethersproject/logger" "^5.6.0" + "@ethersproject/logger" "^5.7.0" -"@ethersproject/rlp@^5.6.1": - version "5.6.1" - resolved "https://registry.yarnpkg.com/@ethersproject/rlp/-/rlp-5.6.1.tgz#df8311e6f9f24dcb03d59a2bac457a28a4fe2bd8" - integrity sha512-uYjmcZx+DKlFUk7a5/W9aQVaoEC7+1MOBgNtvNg13+RnuUwT4F0zTovC0tmay5SmRslb29V1B7Y5KCri46WhuQ== +"@ethersproject/rlp@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/rlp/-/rlp-5.7.0.tgz#de39e4d5918b9d74d46de93af80b7685a9c21304" + integrity sha512-rBxzX2vK8mVF7b0Tol44t5Tb8gomOHkj5guL+HhzQ1yBh/ydjGnpw6at+X6Iw0Kp3OzzzkcKp8N9r0W4kYSs9w== dependencies: - "@ethersproject/bytes" "^5.6.1" - "@ethersproject/logger" "^5.6.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/logger" "^5.7.0" -"@ethersproject/signing-key@^5.6.2": - version "5.6.2" - resolved "https://registry.yarnpkg.com/@ethersproject/signing-key/-/signing-key-5.6.2.tgz#8a51b111e4d62e5a62aee1da1e088d12de0614a3" - integrity sha512-jVbu0RuP7EFpw82vHcL+GP35+KaNruVAZM90GxgQnGqB6crhBqW/ozBfFvdeImtmb4qPko0uxXjn8l9jpn0cwQ== +"@ethersproject/signing-key@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/signing-key/-/signing-key-5.7.0.tgz#06b2df39411b00bc57c7c09b01d1e41cf1b16ab3" + integrity sha512-MZdy2nL3wO0u7gkB4nA/pEf8lu1TlFswPNmy8AiYkfKTdO6eXBJyUdmHO/ehm/htHw9K/qF8ujnTyUAD+Ry54Q== dependencies: - "@ethersproject/bytes" "^5.6.1" - "@ethersproject/logger" "^5.6.0" - "@ethersproject/properties" "^5.6.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/properties" "^5.7.0" bn.js "^5.2.1" elliptic "6.5.4" hash.js "1.1.7" -"@ethersproject/strings@^5.0.4", "@ethersproject/strings@^5.6.1": - version "5.6.1" - resolved "https://registry.yarnpkg.com/@ethersproject/strings/-/strings-5.6.1.tgz#dbc1b7f901db822b5cafd4ebf01ca93c373f8952" - integrity sha512-2X1Lgk6Jyfg26MUnsHiT456U9ijxKUybz8IM1Vih+NJxYtXhmvKBcHOmvGqpFSVJ0nQ4ZCoIViR8XlRw1v/+Cw== - dependencies: - "@ethersproject/bytes" "^5.6.1" - "@ethersproject/constants" "^5.6.1" - "@ethersproject/logger" "^5.6.0" - -"@ethersproject/transactions@^5.0.0-beta.135", "@ethersproject/transactions@^5.6.2": - version "5.6.2" - resolved "https://registry.yarnpkg.com/@ethersproject/transactions/-/transactions-5.6.2.tgz#793a774c01ced9fe7073985bb95a4b4e57a6370b" - integrity sha512-BuV63IRPHmJvthNkkt9G70Ullx6AcM+SDc+a8Aw/8Yew6YwT51TcBKEp1P4oOQ/bP25I18JJr7rcFRgFtU9B2Q== - dependencies: - "@ethersproject/address" "^5.6.1" - "@ethersproject/bignumber" "^5.6.2" - "@ethersproject/bytes" "^5.6.1" - "@ethersproject/constants" "^5.6.1" - "@ethersproject/keccak256" "^5.6.1" - "@ethersproject/logger" "^5.6.0" - "@ethersproject/properties" "^5.6.0" - "@ethersproject/rlp" "^5.6.1" - "@ethersproject/signing-key" "^5.6.2" - -"@ethersproject/web@^5.6.1": - version "5.6.1" - resolved "https://registry.yarnpkg.com/@ethersproject/web/-/web-5.6.1.tgz#6e2bd3ebadd033e6fe57d072db2b69ad2c9bdf5d" - integrity 
sha512-/vSyzaQlNXkO1WV+RneYKqCJwualcUdx/Z3gseVovZP0wIlOFcCE1hkRhKBH8ImKbGQbMl9EAAyJFrJu7V0aqA== - dependencies: - "@ethersproject/base64" "^5.6.1" - "@ethersproject/bytes" "^5.6.1" - "@ethersproject/logger" "^5.6.0" - "@ethersproject/properties" "^5.6.0" - "@ethersproject/strings" "^5.6.1" - -"@humanwhocodes/config-array@^0.9.2": - version "0.9.5" - resolved "https://registry.yarnpkg.com/@humanwhocodes/config-array/-/config-array-0.9.5.tgz#2cbaf9a89460da24b5ca6531b8bbfc23e1df50c7" - integrity sha512-ObyMyWxZiCu/yTisA7uzx81s40xR2fD5Cg/2Kq7G02ajkNubJf6BopgDTmDyc3U7sXpNKM8cYOw7s7Tyr+DnCw== +"@ethersproject/strings@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/strings/-/strings-5.7.0.tgz#54c9d2a7c57ae8f1205c88a9d3a56471e14d5ed2" + integrity sha512-/9nu+lj0YswRNSH0NXYqrh8775XNyEdUQAuf3f+SmOrnVewcJ5SBNAjF7lpgehKi4abvNNXyf+HX86czCdJ8Mg== + dependencies: + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/constants" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + +"@ethersproject/transactions@^5.6.2", "@ethersproject/transactions@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/transactions/-/transactions-5.7.0.tgz#91318fc24063e057885a6af13fdb703e1f993d3b" + integrity sha512-kmcNicCp1lp8qanMTC3RIikGgoJ80ztTyvtsFvCYpSCfkjhD0jZ2LOrnbcuxuToLIUYYf+4XwD1rP+B/erDIhQ== + dependencies: + "@ethersproject/address" "^5.7.0" + "@ethersproject/bignumber" "^5.7.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/constants" "^5.7.0" + "@ethersproject/keccak256" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/properties" "^5.7.0" + "@ethersproject/rlp" "^5.7.0" + "@ethersproject/signing-key" "^5.7.0" + +"@ethersproject/web@^5.7.0": + version "5.7.1" + resolved "https://registry.yarnpkg.com/@ethersproject/web/-/web-5.7.1.tgz#de1f285b373149bee5928f4eb7bcb87ee5fbb4ae" + integrity sha512-Gueu8lSvyjBWL4cYsWsjh6MtMwM0+H4HvqFPZfB6dV8ctbP9zFAO73VG1cMWae0FLPCtz0peKPpZY8/ugJJX2w== + dependencies: + "@ethersproject/base64" "^5.7.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/properties" "^5.7.0" + "@ethersproject/strings" "^5.7.0" + +"@humanwhocodes/config-array@^0.10.5": + version "0.10.7" + resolved "https://registry.yarnpkg.com/@humanwhocodes/config-array/-/config-array-0.10.7.tgz#6d53769fd0c222767e6452e8ebda825c22e9f0dc" + integrity sha512-MDl6D6sBsaV452/QSdX+4CXIjZhIcI0PELsxUjk4U828yd58vk3bTIvk/6w5FY+4hIy9sLW0sfrV7K7Kc++j/w== dependencies: "@humanwhocodes/object-schema" "^1.2.1" debug "^4.1.1" minimatch "^3.0.4" +"@humanwhocodes/module-importer@^1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz#af5b2691a22b44be847b0ca81641c5fb6ad0172c" + integrity sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA== + "@humanwhocodes/object-schema@^1.2.1": version "1.2.1" resolved "https://registry.yarnpkg.com/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz#b520529ec21d8e5945a1851dfd1c32e94e39ff45" @@ -437,29 +449,29 @@ "@jridgewell/set-array" "^1.0.0" "@jridgewell/sourcemap-codec" "^1.4.10" -"@jridgewell/gen-mapping@^0.3.0": - version "0.3.1" - resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.1.tgz#cf92a983c83466b8c0ce9124fadeaf09f7c66ea9" - integrity sha512-GcHwniMlA2z+WFPWuY8lp3fsza0I8xPFMWL5+n8LYyP6PSvPrXf4+n8stDHZY2DM0zy9sVkRDy1jDI4XGzYVqg== +"@jridgewell/gen-mapping@^0.3.2": + version "0.3.2" + resolved 
"https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz#c1aedc61e853f2bb9f5dfe6d4442d3b565b253b9" + integrity sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A== dependencies: - "@jridgewell/set-array" "^1.0.0" + "@jridgewell/set-array" "^1.0.1" "@jridgewell/sourcemap-codec" "^1.4.10" "@jridgewell/trace-mapping" "^0.3.9" -"@jridgewell/resolve-uri@^3.0.3": - version "3.0.7" - resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.0.7.tgz#30cd49820a962aff48c8fffc5cd760151fca61fe" - integrity sha512-8cXDaBBHOr2pQ7j77Y6Vp5VDT2sIqWyWQ56TjEq4ih/a4iST3dItRe8Q9fp0rrIl9DoKhWQtUQz/YpOxLkXbNA== +"@jridgewell/resolve-uri@3.1.0", "@jridgewell/resolve-uri@^3.0.3": + version "3.1.0" + resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz#2203b118c157721addfe69d47b70465463066d78" + integrity sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w== -"@jridgewell/set-array@^1.0.0": - version "1.1.1" - resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.1.tgz#36a6acc93987adcf0ba50c66908bd0b70de8afea" - integrity sha512-Ct5MqZkLGEXTVmQYbGtx9SVqD2fqwvdubdps5D3djjAkgkKwT918VNOz65pEHFaYTeWcukmJmH5SwsA9Tn2ObQ== +"@jridgewell/set-array@^1.0.0", "@jridgewell/set-array@^1.0.1": + version "1.1.2" + resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.2.tgz#7c6cf998d6d20b914c0a55a91ae928ff25965e72" + integrity sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw== -"@jridgewell/sourcemap-codec@^1.4.10": - version "1.4.13" - resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.13.tgz#b6461fb0c2964356c469e115f504c95ad97ab88c" - integrity sha512-GryiOJmNcWbovBxTfZSF71V/mXbgcV3MewDe3kIMCLyIh5e7SKAeUZs+rMnJ8jkMolZ/4/VsdBmMrw3l+VdZ3w== +"@jridgewell/sourcemap-codec@1.4.14", "@jridgewell/sourcemap-codec@^1.4.10": + version "1.4.14" + resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz#add4c98d341472a289190b424efbdb096991bb24" + integrity sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw== "@jridgewell/trace-mapping@0.3.9": version "0.3.9" @@ -470,22 +482,22 @@ "@jridgewell/sourcemap-codec" "^1.4.10" "@jridgewell/trace-mapping@^0.3.9": - version "0.3.13" - resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.13.tgz#dcfe3e95f224c8fe97a87a5235defec999aa92ea" - integrity sha512-o1xbKhp9qnIAoHJSWd6KlCZfqslL4valSF81H8ImioOAxluWYWOpWkpyktY2vnt4tbrX9XYaxovq6cgowaJp2w== + version "0.3.17" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.17.tgz#793041277af9073b0951a7fe0f0d8c4c98c36985" + integrity sha512-MCNzAp77qzKca9+W/+I0+sEpaUnZoeasnghNeVc41VZCEKaCH73Vq3BZZ/SzWIgrqE4H4ceI+p+b6C0mHf9T4g== dependencies: - "@jridgewell/resolve-uri" "^3.0.3" - "@jridgewell/sourcemap-codec" "^1.4.10" + "@jridgewell/resolve-uri" "3.1.0" + "@jridgewell/sourcemap-codec" "1.4.14" -"@noble/hashes@1.0.0": - version "1.0.0" - resolved "https://registry.yarnpkg.com/@noble/hashes/-/hashes-1.0.0.tgz#d5e38bfbdaba174805a4e649f13be9a9ed3351ae" - integrity sha512-DZVbtY62kc3kkBtMHqwCOfXrT/hnoORy5BJ4+HU1IR59X0KWAOqsfzQPcUl/lQLlG7qXbe/fZ3r/emxtAl+sqg== +"@noble/hashes@1.1.3": + version "1.1.3" + resolved "https://registry.yarnpkg.com/@noble/hashes/-/hashes-1.1.3.tgz#360afc77610e0a61f3417e497dcf36862e4f8111" + integrity 
sha512-CE0FCR57H2acVI5UOzIGSSIYxZ6v/HOhDR0Ro9VLyhnzLwx0o8W1mmgaqlEUx4049qJDlIBRztv5k+MM8vbO3A== -"@noble/secp256k1@1.5.5": - version "1.5.5" - resolved "https://registry.yarnpkg.com/@noble/secp256k1/-/secp256k1-1.5.5.tgz#315ab5745509d1a8c8e90d0bdf59823ccf9bcfc3" - integrity sha512-sZ1W6gQzYnu45wPrWx8D3kwI2/U29VYTx9OjbDAd7jwRItJ0cSTMPRL/C8AWZFn9kWFLQGqEXVEE86w4Z8LpIQ== +"@noble/secp256k1@1.7.0": + version "1.7.0" + resolved "https://registry.yarnpkg.com/@noble/secp256k1/-/secp256k1-1.7.0.tgz#d15357f7c227e751d90aa06b05a0e5cf993ba8c1" + integrity sha512-kbacwGSsH/CTout0ZnZWxnW1B+jH/7r/WAAKLBtrRJ/+CUH7lgmQzl3GTrQua3SGKWNSDsS6lmjnDpIJ5Dxyaw== "@nodelib/fs.scandir@2.1.5": version "2.1.5" @@ -508,453 +520,442 @@ "@nodelib/fs.scandir" "2.1.5" fastq "^1.6.0" -"@polkadot/api-augment@8.7.2-15": - version "8.7.2-15" - resolved "https://registry.yarnpkg.com/@polkadot/api-augment/-/api-augment-8.7.2-15.tgz#a141d3cd595a39e7e2965330268b5eb92bdd5849" - integrity sha512-QGXosX6p0RFYNhWepZCIaRiyCvHnVt5Pb6U7/77UxIszgGRHfHFDsYr4v5bGiaRTOj/E8moc2Ufi/+VgOiG9sw== - dependencies: - "@babel/runtime" "^7.18.3" - "@polkadot/api-base" "8.7.2-15" - "@polkadot/rpc-augment" "8.7.2-15" - "@polkadot/types" "8.7.2-15" - "@polkadot/types-augment" "8.7.2-15" - "@polkadot/types-codec" "8.7.2-15" - "@polkadot/util" "^9.4.1" - -"@polkadot/api-base@8.7.2-15": - version "8.7.2-15" - resolved "https://registry.yarnpkg.com/@polkadot/api-base/-/api-base-8.7.2-15.tgz#c909d3bf0fbfb3cc46ca7067199e36e72b959bdb" - integrity sha512-HXdtaqbpnfFbOazjI9CPSYM37S4mzhxUs8hLMKrWqpHL//at4tiMa5dRyev9VSKeE6gqeqCT9JTBvEAZ9eNR6Q== - dependencies: - "@babel/runtime" "^7.18.3" - "@polkadot/rpc-core" "8.7.2-15" - "@polkadot/types" "8.7.2-15" - "@polkadot/util" "^9.4.1" - rxjs "^7.5.5" - -"@polkadot/api-contract@8.7.2-15": - version "8.7.2-15" - resolved "https://registry.yarnpkg.com/@polkadot/api-contract/-/api-contract-8.7.2-15.tgz#687706fb4bd33c4a88187db3a269292f6e559892" - integrity sha512-Pr1Nm5zBpW9foCKm/Q6hIT5KHCeFVE8EFSfHBgjbitYpFOGnz19kduEpa0vxIcfq2WVXcVPTQ2eqjGtHoThNqA== - dependencies: - "@babel/runtime" "^7.18.3" - "@polkadot/api" "8.7.2-15" - "@polkadot/types" "8.7.2-15" - "@polkadot/types-codec" "8.7.2-15" - "@polkadot/types-create" "8.7.2-15" - "@polkadot/util" "^9.4.1" - "@polkadot/util-crypto" "^9.4.1" - rxjs "^7.5.5" - -"@polkadot/api-derive@8.7.2-15": - version "8.7.2-15" - resolved "https://registry.yarnpkg.com/@polkadot/api-derive/-/api-derive-8.7.2-15.tgz#b29f24d435c036c9bf5624d18a9d93196cf2c4f4" - integrity sha512-0R3M9LFKoQ0d7elIDQjPKuV5EAHTtkU/72Lgxw2GYStsOqcnfFNomfLoLMuk8Xy4ETUAp/Kq1eMJpvsY6hSTtA== - dependencies: - "@babel/runtime" "^7.18.3" - "@polkadot/api" "8.7.2-15" - "@polkadot/api-augment" "8.7.2-15" - "@polkadot/api-base" "8.7.2-15" - "@polkadot/rpc-core" "8.7.2-15" - "@polkadot/types" "8.7.2-15" - "@polkadot/types-codec" "8.7.2-15" - "@polkadot/util" "^9.4.1" - "@polkadot/util-crypto" "^9.4.1" - rxjs "^7.5.5" - -"@polkadot/api@8.7.2-15": - version "8.7.2-15" - resolved "https://registry.yarnpkg.com/@polkadot/api/-/api-8.7.2-15.tgz#c7ede416e4d277c227fc93fdfdc4d27634935d08" - integrity sha512-tzEUWsXIPzPbnpn/3LTGtJ7SXzMgCJ/da5d9q0UH3vsx1gDEjuZEWXOeSYLHgbqQSgwPukvMVuGtRjcC+A/WZQ== - dependencies: - "@babel/runtime" "^7.18.3" - "@polkadot/api-augment" "8.7.2-15" - "@polkadot/api-base" "8.7.2-15" - "@polkadot/api-derive" "8.7.2-15" - "@polkadot/keyring" "^9.4.1" - "@polkadot/rpc-augment" "8.7.2-15" - "@polkadot/rpc-core" "8.7.2-15" - "@polkadot/rpc-provider" "8.7.2-15" - "@polkadot/types" "8.7.2-15" - 
"@polkadot/types-augment" "8.7.2-15" - "@polkadot/types-codec" "8.7.2-15" - "@polkadot/types-create" "8.7.2-15" - "@polkadot/types-known" "8.7.2-15" - "@polkadot/util" "^9.4.1" - "@polkadot/util-crypto" "^9.4.1" +"@polkadot/api-augment@9.5.2": + version "9.5.2" + resolved "https://registry.yarnpkg.com/@polkadot/api-augment/-/api-augment-9.5.2.tgz#55168dd112517028fea5f2ab9c54ea627e43ac3a" + integrity sha512-dH6QMY8Z3zI6CrgSU3eSe6f0KWDb5PYGztg/FXGPrjh7Vjic7syWZ1LD6zaHJAFWDp80BEdEXfqr4lConrCKGg== + dependencies: + "@babel/runtime" "^7.19.4" + "@polkadot/api-base" "9.5.2" + "@polkadot/rpc-augment" "9.5.2" + "@polkadot/types" "9.5.2" + "@polkadot/types-augment" "9.5.2" + "@polkadot/types-codec" "9.5.2" + "@polkadot/util" "^10.1.11" + +"@polkadot/api-base@9.5.2": + version "9.5.2" + resolved "https://registry.yarnpkg.com/@polkadot/api-base/-/api-base-9.5.2.tgz#ac0a6b5546a54bcc753ac55c9f033caa9f8b4e5c" + integrity sha512-BBsH9SLB1FHgjdiU32cZX1puL3Eh8IjOJHjRsO/5SdttciQhF5g/u/m/mM/55qnlXmffI9s2Jre18G0XtVU9Aw== + dependencies: + "@babel/runtime" "^7.19.4" + "@polkadot/rpc-core" "9.5.2" + "@polkadot/types" "9.5.2" + "@polkadot/util" "^10.1.11" + rxjs "^7.5.7" + +"@polkadot/api-derive@9.5.2": + version "9.5.2" + resolved "https://registry.yarnpkg.com/@polkadot/api-derive/-/api-derive-9.5.2.tgz#c0412cfc13fa71f93b315d126b12b5ab38e6438c" + integrity sha512-kWn12dlqfIES1trNLd3O1i2qa4T97v/co1VMCgVstICwCt3+mGZgpxkMqQqPiWHagKEVeBNoAn+h8eOiQlbujA== + dependencies: + "@babel/runtime" "^7.19.4" + "@polkadot/api" "9.5.2" + "@polkadot/api-augment" "9.5.2" + "@polkadot/api-base" "9.5.2" + "@polkadot/rpc-core" "9.5.2" + "@polkadot/types" "9.5.2" + "@polkadot/types-codec" "9.5.2" + "@polkadot/util" "^10.1.11" + "@polkadot/util-crypto" "^10.1.11" + rxjs "^7.5.7" + +"@polkadot/api@9.5.2": + version "9.5.2" + resolved "https://registry.yarnpkg.com/@polkadot/api/-/api-9.5.2.tgz#cef83928e47c393fbebf2788bc86841b6ab37a41" + integrity sha512-iEF/E8vQan3fHmIEl3bX7Yn/1jQLlvSDwPOxiQdj4tIcF36HX6vCbkdhQKRif0CNYES58TA9EKFiCNg81k+kXw== + dependencies: + "@babel/runtime" "^7.19.4" + "@polkadot/api-augment" "9.5.2" + "@polkadot/api-base" "9.5.2" + "@polkadot/api-derive" "9.5.2" + "@polkadot/keyring" "^10.1.11" + "@polkadot/rpc-augment" "9.5.2" + "@polkadot/rpc-core" "9.5.2" + "@polkadot/rpc-provider" "9.5.2" + "@polkadot/types" "9.5.2" + "@polkadot/types-augment" "9.5.2" + "@polkadot/types-codec" "9.5.2" + "@polkadot/types-create" "9.5.2" + "@polkadot/types-known" "9.5.2" + "@polkadot/util" "^10.1.11" + "@polkadot/util-crypto" "^10.1.11" eventemitter3 "^4.0.7" - rxjs "^7.5.5" - -"@polkadot/keyring@^9.4.1": - version "9.4.1" - resolved "https://registry.yarnpkg.com/@polkadot/keyring/-/keyring-9.4.1.tgz#4bc8d1c1962756841742abac0d7e4ef233d9c2a9" - integrity sha512-op6Tj8E9GHeZYvEss38FRUrX+GlBj6qiwF4BlFrAvPqjPnRn8TT9NhRLroiCwvxeNg3uMtEF/5xB+vvdI0I6qw== - dependencies: - "@babel/runtime" "^7.18.3" - "@polkadot/util" "9.4.1" - "@polkadot/util-crypto" "9.4.1" - -"@polkadot/networks@9.4.1", "@polkadot/networks@^9.4.1": - version "9.4.1" - resolved "https://registry.yarnpkg.com/@polkadot/networks/-/networks-9.4.1.tgz#acdf3d64421ce0e3d3ba68797fc29a28ee40c185" - integrity sha512-ibH8bZ2/XMXv0XEsP1fGOqNnm2mg1rHo5kHXSJ3QBcZJFh1+xkI4Ovl2xrFfZ+SYATA3Wsl5R6knqimk2EqyJQ== - dependencies: - "@babel/runtime" "^7.18.3" - "@polkadot/util" "9.4.1" - "@substrate/ss58-registry" "^1.22.0" - -"@polkadot/rpc-augment@8.7.2-15": - version "8.7.2-15" - resolved 
"https://registry.yarnpkg.com/@polkadot/rpc-augment/-/rpc-augment-8.7.2-15.tgz#6175126968dfb79ba5549b03cac8c3860666e72b" - integrity sha512-IgfkR9CHT8jDuGYkb75DBFu+yJNW32+vOt3oS0sf57VqkHketSq9rD3mtZD37V/21Q4a17yrqKQOte7mMl9kcg== - dependencies: - "@babel/runtime" "^7.18.3" - "@polkadot/rpc-core" "8.7.2-15" - "@polkadot/types" "8.7.2-15" - "@polkadot/types-codec" "8.7.2-15" - "@polkadot/util" "^9.4.1" - -"@polkadot/rpc-core@8.7.2-15": - version "8.7.2-15" - resolved "https://registry.yarnpkg.com/@polkadot/rpc-core/-/rpc-core-8.7.2-15.tgz#827a31adf833fb866cb5f39dbd86c5f0b44d63a4" - integrity sha512-yGmpESOmGyzY7+D3yUxbKToz/eP/q8vDyOGajLnHn12TcnjgbAfMdc4xdU6cQex+mSsPwS0YQFuPrPXGloCOHA== - dependencies: - "@babel/runtime" "^7.18.3" - "@polkadot/rpc-augment" "8.7.2-15" - "@polkadot/rpc-provider" "8.7.2-15" - "@polkadot/types" "8.7.2-15" - "@polkadot/util" "^9.4.1" - rxjs "^7.5.5" - -"@polkadot/rpc-provider@8.7.2-15": - version "8.7.2-15" - resolved "https://registry.yarnpkg.com/@polkadot/rpc-provider/-/rpc-provider-8.7.2-15.tgz#99dd30085284442265225e0f12aef3849b7bfe44" - integrity sha512-EwgBnUIpGhEfSanDXVviQQ784HYD3DWUPdv9pIvn9qnCZPk7o+MGPvKW73A+XbQpPV9j8tAGnVsSnbDuoSVp1g== - dependencies: - "@babel/runtime" "^7.18.3" - "@polkadot/keyring" "^9.4.1" - "@polkadot/types" "8.7.2-15" - "@polkadot/types-support" "8.7.2-15" - "@polkadot/util" "^9.4.1" - "@polkadot/util-crypto" "^9.4.1" - "@polkadot/x-fetch" "^9.4.1" - "@polkadot/x-global" "^9.4.1" - "@polkadot/x-ws" "^9.4.1" - "@substrate/connect" "0.7.5" + rxjs "^7.5.7" + +"@polkadot/keyring@^10.1.11": + version "10.1.11" + resolved "https://registry.yarnpkg.com/@polkadot/keyring/-/keyring-10.1.11.tgz#a3fed011b0c8826ea2097e04f7189e9be66fbf98" + integrity sha512-Nv8cZaOA/KbdslDMTklJ58+y+UPpic3+oMQoozuq48Ccjv7WeW2BX47XM/RNE8nYFg6EHa6Whfm4IFaFb8s7ag== + dependencies: + "@babel/runtime" "^7.19.4" + "@polkadot/util" "10.1.11" + "@polkadot/util-crypto" "10.1.11" + +"@polkadot/networks@10.1.11", "@polkadot/networks@^10.1.11": + version "10.1.11" + resolved "https://registry.yarnpkg.com/@polkadot/networks/-/networks-10.1.11.tgz#96a5d6c80228f4beada9154cca0f60a63198e7f4" + integrity sha512-4FfOVETXwh6PL6wd6fYJMkRSQKm+xUw3vR5rHqcAnB696FpMFPPErc6asgZ9lYMyzNJRY3yG86HQpFhtCv1nGA== + dependencies: + "@babel/runtime" "^7.19.4" + "@polkadot/util" "10.1.11" + "@substrate/ss58-registry" "^1.33.0" + +"@polkadot/rpc-augment@9.5.2": + version "9.5.2" + resolved "https://registry.yarnpkg.com/@polkadot/rpc-augment/-/rpc-augment-9.5.2.tgz#739cc3ed2f86f4318432e38381a2cc780dc64f1e" + integrity sha512-QAcunC7p/T4xy6e4m0Q1c9tiVYxnm+S9o10tmtx0K4qXzrc/4I2/tsw3nEGi3BzJhvMpFondSQGcJ3gyLwpmVA== + dependencies: + "@babel/runtime" "^7.19.4" + "@polkadot/rpc-core" "9.5.2" + "@polkadot/types" "9.5.2" + "@polkadot/types-codec" "9.5.2" + "@polkadot/util" "^10.1.11" + +"@polkadot/rpc-core@9.5.2": + version "9.5.2" + resolved "https://registry.yarnpkg.com/@polkadot/rpc-core/-/rpc-core-9.5.2.tgz#1a00868038b6c07fe8f58bd0a6cc9519d14001cc" + integrity sha512-4PbNz0GEp3FXYOnsS7mDHZy9DNVBOl56fq8vs09rLkEkrrvGkHmCvabEEWL7OPbwBzwzsCxdgI+IdkVTUKXPkQ== + dependencies: + "@babel/runtime" "^7.19.4" + "@polkadot/rpc-augment" "9.5.2" + "@polkadot/rpc-provider" "9.5.2" + "@polkadot/types" "9.5.2" + "@polkadot/util" "^10.1.11" + rxjs "^7.5.7" + +"@polkadot/rpc-provider@9.5.2": + version "9.5.2" + resolved "https://registry.yarnpkg.com/@polkadot/rpc-provider/-/rpc-provider-9.5.2.tgz#3e38ea4c3639180f12270b6fe8cbcabf728aaf1d" + integrity 
sha512-Sn2jfvAsvQcl35o0up8JR/XbDMS/3YVDEN2sFuzXtiD77W2njukItbZT+BolfAW+biAUs3bNomump5k/YLiLKg== + dependencies: + "@babel/runtime" "^7.19.4" + "@polkadot/keyring" "^10.1.11" + "@polkadot/types" "9.5.2" + "@polkadot/types-support" "9.5.2" + "@polkadot/util" "^10.1.11" + "@polkadot/util-crypto" "^10.1.11" + "@polkadot/x-fetch" "^10.1.11" + "@polkadot/x-global" "^10.1.11" + "@polkadot/x-ws" "^10.1.11" + "@substrate/connect" "0.7.14" eventemitter3 "^4.0.7" mock-socket "^9.1.5" - nock "^13.2.6" - -"@polkadot/ts@0.4.22": - version "0.4.22" - resolved "https://registry.yarnpkg.com/@polkadot/ts/-/ts-0.4.22.tgz#f97f6a2134fda700d79ddd03ff39b96de384438d" - integrity sha512-iEo3iaWxCnLiQOYhoXu9pCnBuG9QdCCBfMJoVLgO+66dFnfjnXIc0gb6wEcTFPpJRc1QmC8JP+3xJauQ0pXwOQ== - dependencies: - "@types/chrome" "^0.0.171" - -"@polkadot/typegen@8.7.2-15": - version "8.7.2-15" - resolved "https://registry.yarnpkg.com/@polkadot/typegen/-/typegen-8.7.2-15.tgz#06e9d054db1c63d9862186429a8017b2b80bce2a" - integrity sha512-NC8Ticirh20k1Co17D8cqQawIJ8W9HWDuq6oDyEMT4XkeBbZ1hQRO9JBO14neWDJmYJBhlUotP65jgjs8D5bMw== - dependencies: - "@babel/core" "^7.18.2" - "@babel/register" "^7.17.7" - "@babel/runtime" "^7.18.3" - "@polkadot/api" "8.7.2-15" - "@polkadot/api-augment" "8.7.2-15" - "@polkadot/rpc-augment" "8.7.2-15" - "@polkadot/rpc-provider" "8.7.2-15" - "@polkadot/types" "8.7.2-15" - "@polkadot/types-augment" "8.7.2-15" - "@polkadot/types-codec" "8.7.2-15" - "@polkadot/types-create" "8.7.2-15" - "@polkadot/types-support" "8.7.2-15" - "@polkadot/util" "^9.4.1" - "@polkadot/x-ws" "^9.4.1" + nock "^13.2.9" + +"@polkadot/typegen@9.5.2": + version "9.5.2" + resolved "https://registry.yarnpkg.com/@polkadot/typegen/-/typegen-9.5.2.tgz#b4f3b5eca69c70cc496c8cd3b7804df32282c336" + integrity sha512-DIiicI3VzbqkfjthvHhLYCaElkaKB/qM+P0mGDmb3+NgttJQsH2Sqy/zsT/mjr07hAB1gXf4dhCmj0QQBiR1og== + dependencies: + "@babel/core" "^7.19.3" + "@babel/register" "^7.18.9" + "@babel/runtime" "^7.19.4" + "@polkadot/api" "9.5.2" + "@polkadot/api-augment" "9.5.2" + "@polkadot/rpc-augment" "9.5.2" + "@polkadot/rpc-provider" "9.5.2" + "@polkadot/types" "9.5.2" + "@polkadot/types-augment" "9.5.2" + "@polkadot/types-codec" "9.5.2" + "@polkadot/types-create" "9.5.2" + "@polkadot/types-support" "9.5.2" + "@polkadot/util" "^10.1.11" + "@polkadot/util-crypto" "^10.1.11" + "@polkadot/x-ws" "^10.1.11" handlebars "^4.7.7" websocket "^1.0.34" - yargs "^17.5.1" - -"@polkadot/types-augment@8.7.2-15": - version "8.7.2-15" - resolved "https://registry.yarnpkg.com/@polkadot/types-augment/-/types-augment-8.7.2-15.tgz#7ab077a1a31190ad17183196efb1da065c0d0bcd" - integrity sha512-th1jVBDqpyQVB2gCNzo/HV0dIeNinjyPla01BFdhQ5mDKYXJ8fugsLCk5oKUPpItBrj+5NWCgynVvCwm0YJw3g== - dependencies: - "@babel/runtime" "^7.18.3" - "@polkadot/types" "8.7.2-15" - "@polkadot/types-codec" "8.7.2-15" - "@polkadot/util" "^9.4.1" - -"@polkadot/types-codec@8.7.2-15": - version "8.7.2-15" - resolved "https://registry.yarnpkg.com/@polkadot/types-codec/-/types-codec-8.7.2-15.tgz#6afa4ff45dc7afb9250f283f70a40be641367941" - integrity sha512-k8t7/Ern7sY4ZKQc5cYY3h1bg7/GAEaTPmKz094DhPJmEhi3NNgeJ4uyeB/JYCo5GbxXQG6W2M021s582urjMw== - dependencies: - "@babel/runtime" "^7.18.3" - "@polkadot/util" "^9.4.1" - -"@polkadot/types-create@8.7.2-15": - version "8.7.2-15" - resolved "https://registry.yarnpkg.com/@polkadot/types-create/-/types-create-8.7.2-15.tgz#106a11eb71dc2743b140d8640a3b3e7fc5ccf10e" - integrity sha512-xB9jAJ3XQh/U05b+X77m5TPh4N9oBwwpePkAmLhovTSOSeobj7qeUKrZqccs0BSxJnJPlLwrwuusjeTtTfZCHw== - dependencies: 
- "@babel/runtime" "^7.18.3" - "@polkadot/types-codec" "8.7.2-15" - "@polkadot/util" "^9.4.1" - -"@polkadot/types-known@8.7.2-15": - version "8.7.2-15" - resolved "https://registry.yarnpkg.com/@polkadot/types-known/-/types-known-8.7.2-15.tgz#171b8d3963a5c38d46f98a7c14be59033f9a4da8" - integrity sha512-c5YuuauPCu70chDnV7Fphh7SbAQl8JWj+PoY37I5BACCNFxtUx5KnP93BChiD0QxcHs2QqD6RdjW6O7cVRUKfA== - dependencies: - "@babel/runtime" "^7.18.3" - "@polkadot/networks" "^9.4.1" - "@polkadot/types" "8.7.2-15" - "@polkadot/types-codec" "8.7.2-15" - "@polkadot/types-create" "8.7.2-15" - "@polkadot/util" "^9.4.1" - -"@polkadot/types-support@8.7.2-15": - version "8.7.2-15" - resolved "https://registry.yarnpkg.com/@polkadot/types-support/-/types-support-8.7.2-15.tgz#2d726e3d5615383ca97db3f32ee21e2aad077fcb" - integrity sha512-Tl6xm9r/uqrKQK1OUdi5X9MaTgplBYPj3tY9677ZPV7QGYWt0Uz912u9fC2v0PGNReDXtzvrlgvk0aoErwzF5Q== - dependencies: - "@babel/runtime" "^7.18.3" - "@polkadot/util" "^9.4.1" - -"@polkadot/types@8.7.2-15": - version "8.7.2-15" - resolved "https://registry.yarnpkg.com/@polkadot/types/-/types-8.7.2-15.tgz#5b25b6b76c916637a1d15133b5880a73079e65bc" - integrity sha512-KfJKzk6/Ta8vZVJH8+xYYPvd9SD+4fdl4coGgKuPGYZFsjDGnYvAX4ls6/WKby51JK5s24sqaUP3vZisIgh4wA== - dependencies: - "@babel/runtime" "^7.18.3" - "@polkadot/keyring" "^9.4.1" - "@polkadot/types-augment" "8.7.2-15" - "@polkadot/types-codec" "8.7.2-15" - "@polkadot/types-create" "8.7.2-15" - "@polkadot/util" "^9.4.1" - "@polkadot/util-crypto" "^9.4.1" - rxjs "^7.5.5" - -"@polkadot/util-crypto@9.4.1", "@polkadot/util-crypto@^9.4.1": - version "9.4.1" - resolved "https://registry.yarnpkg.com/@polkadot/util-crypto/-/util-crypto-9.4.1.tgz#af50d9b3e3fcf9760ee8eb262b1cc61614c21d98" - integrity sha512-V6xMOjdd8Kt/QmXlcDYM4WJDAmKuH4vWSlIcMmkFHnwH/NtYVdYIDZswLQHKL8gjLijPfVTHpWaJqNFhGpZJEg== - dependencies: - "@babel/runtime" "^7.18.3" - "@noble/hashes" "1.0.0" - "@noble/secp256k1" "1.5.5" - "@polkadot/networks" "9.4.1" - "@polkadot/util" "9.4.1" - "@polkadot/wasm-crypto" "^6.1.1" - "@polkadot/x-bigint" "9.4.1" - "@polkadot/x-randomvalues" "9.4.1" - "@scure/base" "1.0.0" + yargs "^17.6.0" + +"@polkadot/types-augment@9.5.2": + version "9.5.2" + resolved "https://registry.yarnpkg.com/@polkadot/types-augment/-/types-augment-9.5.2.tgz#d9e77756b0e36455d708f5af8265ef011ddf8d91" + integrity sha512-LDJdv/84sECwA0R5lK85/orxjoozJe3+2jeLjRiKr8S6qm9XRfz0wLCSF866kpSGBZ4B1dYBUhzjoSu95y2Jug== + dependencies: + "@babel/runtime" "^7.19.4" + "@polkadot/types" "9.5.2" + "@polkadot/types-codec" "9.5.2" + "@polkadot/util" "^10.1.11" + +"@polkadot/types-codec@9.5.2": + version "9.5.2" + resolved "https://registry.yarnpkg.com/@polkadot/types-codec/-/types-codec-9.5.2.tgz#345c38ccef17651b8cabd159a42810893b5e7e44" + integrity sha512-FJPjE3ceTGTcadeC8d5C+aSR8SLKuQrXKIBmMNBky+WwzEo0vufRqxFWcPLxAOEeeUPgBXS967tP15+UU4psGA== + dependencies: + "@babel/runtime" "^7.19.4" + "@polkadot/util" "^10.1.11" + "@polkadot/x-bigint" "^10.1.11" + +"@polkadot/types-create@9.5.2": + version "9.5.2" + resolved "https://registry.yarnpkg.com/@polkadot/types-create/-/types-create-9.5.2.tgz#a85dcb794ea11e5d528baa34b65e57cfafc905cf" + integrity sha512-YbplL8K0LqUEHoV3FgZ5B83oVV67KGbLXsWHVVaUZBPsmtXJXrbBfSyJgl/80I2n4lXEBmg3sFAYMbaSTvL05A== + dependencies: + "@babel/runtime" "^7.19.4" + "@polkadot/types-codec" "9.5.2" + "@polkadot/util" "^10.1.11" + +"@polkadot/types-known@9.5.2": + version "9.5.2" + resolved 
"https://registry.yarnpkg.com/@polkadot/types-known/-/types-known-9.5.2.tgz#a71fd08932b1643bbf346321472ed48ab1ade215" + integrity sha512-iNaGOF6dGiTvy3Ns8Z7WNjYD1SGnZiapDAKPH4brPuJqMpN6/FxYpfPSSOKx+IJEamsdINcaggb87eWyPxH8CA== + dependencies: + "@babel/runtime" "^7.19.4" + "@polkadot/networks" "^10.1.11" + "@polkadot/types" "9.5.2" + "@polkadot/types-codec" "9.5.2" + "@polkadot/types-create" "9.5.2" + "@polkadot/util" "^10.1.11" + +"@polkadot/types-support@9.5.2": + version "9.5.2" + resolved "https://registry.yarnpkg.com/@polkadot/types-support/-/types-support-9.5.2.tgz#f2990d19cbd78c24e5b7116466fb1d89f93a8ca7" + integrity sha512-Zdbl5fvGQjUkyE1r67vhyPEqLUwlZ35GCnkoobY9MgN6gylhSjNue/shpG4uGsEjWVQL7GkFkrPiwtzDArVilg== + dependencies: + "@babel/runtime" "^7.19.4" + "@polkadot/util" "^10.1.11" + +"@polkadot/types@9.5.2": + version "9.5.2" + resolved "https://registry.yarnpkg.com/@polkadot/types/-/types-9.5.2.tgz#33ab2caea08f084141a01038adbe53ed69ab7d9c" + integrity sha512-6C5xzOrMK+fu0JMOlSO+8dPDhpwKPOaKMv3v5BMvBEWtDNKM81/QQoAoYT7DSVXq/V16icSFxPs9IWC+6Qq5ag== + dependencies: + "@babel/runtime" "^7.19.4" + "@polkadot/keyring" "^10.1.11" + "@polkadot/types-augment" "9.5.2" + "@polkadot/types-codec" "9.5.2" + "@polkadot/types-create" "9.5.2" + "@polkadot/util" "^10.1.11" + "@polkadot/util-crypto" "^10.1.11" + rxjs "^7.5.7" + +"@polkadot/util-crypto@10.1.11", "@polkadot/util-crypto@^10.1.11": + version "10.1.11" + resolved "https://registry.yarnpkg.com/@polkadot/util-crypto/-/util-crypto-10.1.11.tgz#e59bdc8e1e2bd98a115e2e2ed45461e68a14a48c" + integrity sha512-wG63frIMAR5T/HXGM0SFNzZZdk7qDBsfLXfn6PIZiXCCCsdEYPzS5WltB7fkhicYpbePJ7VgdCAddj1l4IcGyg== + dependencies: + "@babel/runtime" "^7.19.4" + "@noble/hashes" "1.1.3" + "@noble/secp256k1" "1.7.0" + "@polkadot/networks" "10.1.11" + "@polkadot/util" "10.1.11" + "@polkadot/wasm-crypto" "^6.3.1" + "@polkadot/x-bigint" "10.1.11" + "@polkadot/x-randomvalues" "10.1.11" + "@scure/base" "1.1.1" ed2curve "^0.3.0" tweetnacl "^1.0.3" -"@polkadot/util@9.4.1", "@polkadot/util@^9.4.1": - version "9.4.1" - resolved "https://registry.yarnpkg.com/@polkadot/util/-/util-9.4.1.tgz#49446e88b1231b0716bf6b4eb4818145f08a1294" - integrity sha512-z0HcnIe3zMWyK1s09wQIwc1M8gDKygSF9tDAbC8H9KDeIRZB2ldhwWEFx/1DJGOgFFrmRfkxeC6dcDpfzQhFow== - dependencies: - "@babel/runtime" "^7.18.3" - "@polkadot/x-bigint" "9.4.1" - "@polkadot/x-global" "9.4.1" - "@polkadot/x-textdecoder" "9.4.1" - "@polkadot/x-textencoder" "9.4.1" - "@types/bn.js" "^5.1.0" +"@polkadot/util@10.1.11", "@polkadot/util@^10.1.11": + version "10.1.11" + resolved "https://registry.yarnpkg.com/@polkadot/util/-/util-10.1.11.tgz#22bcdabbd7a0d266417f6569cc655f516d371a82" + integrity sha512-6m51lw6g6ilqO/k4BQY7rD0lYM9NCnC4FiM7CEEUc7j8q86qxdcZ88zdNldkhNsTIQnfmCtkK3GRzZW6VYrbUw== + dependencies: + "@babel/runtime" "^7.19.4" + "@polkadot/x-bigint" "10.1.11" + "@polkadot/x-global" "10.1.11" + "@polkadot/x-textdecoder" "10.1.11" + "@polkadot/x-textencoder" "10.1.11" + "@types/bn.js" "^5.1.1" bn.js "^5.2.1" - ip-regex "^4.3.0" -"@polkadot/wasm-bridge@6.1.1": - version "6.1.1" - resolved "https://registry.yarnpkg.com/@polkadot/wasm-bridge/-/wasm-bridge-6.1.1.tgz#9342f2b3c139df72fa45c8491b348f8ebbfa57fa" - integrity sha512-Cy0k00VCu+HWxie+nn9GWPlSPdiZl8Id8ulSGA2FKET0jIbffmOo4e1E2FXNucfR1UPEpqov5BCF9T5YxEXZDg== +"@polkadot/wasm-bridge@6.3.1": + version "6.3.1" + resolved "https://registry.yarnpkg.com/@polkadot/wasm-bridge/-/wasm-bridge-6.3.1.tgz#439fa78e80947a7cb695443e1f64b25c30bb1487" + integrity 
sha512-1TYkHsb9AEFhU9uZj3biEnN2yKQNzdrwSjiTvfCYnt97pnEkKsZI6cku+YPZQv5w/x9CQa5Yua9e2DVVZSivGA== dependencies: - "@babel/runtime" "^7.17.9" - -"@polkadot/wasm-crypto-asmjs@6.1.1": - version "6.1.1" - resolved "https://registry.yarnpkg.com/@polkadot/wasm-crypto-asmjs/-/wasm-crypto-asmjs-6.1.1.tgz#6d09045679120b43fbfa435b29c3690d1f788ebb" - integrity sha512-gG4FStVumkyRNH7WcTB+hn3EEwCssJhQyi4B1BOUt+eYYmw9xJdzIhqjzSd9b/yF2e5sRaAzfnMj2srGufsE6A== - dependencies: - "@babel/runtime" "^7.17.9" - -"@polkadot/wasm-crypto-init@6.1.1": - version "6.1.1" - resolved "https://registry.yarnpkg.com/@polkadot/wasm-crypto-init/-/wasm-crypto-init-6.1.1.tgz#73731071bea9b4e22b380d75099da9dc683fadf5" - integrity sha512-rbBm/9FOOUjISL4gGNokjcKy2X+Af6Chaet4zlabatpImtPIAK26B2UUBGoaRUnvl/w6K3+GwBL4LuBC+CvzFw== - dependencies: - "@babel/runtime" "^7.17.9" - "@polkadot/wasm-bridge" "6.1.1" - "@polkadot/wasm-crypto-asmjs" "6.1.1" - "@polkadot/wasm-crypto-wasm" "6.1.1" - -"@polkadot/wasm-crypto-wasm@6.1.1": - version "6.1.1" - resolved "https://registry.yarnpkg.com/@polkadot/wasm-crypto-wasm/-/wasm-crypto-wasm-6.1.1.tgz#3fdc8f1280710e4d68112544b2473e811c389a2a" - integrity sha512-zkz5Ct4KfTBT+YNEA5qbsHhTV58/FAxDave8wYIOaW4TrBnFPPs+J0WBWlGFertgIhPkvjFnQC/xzRyhet9prg== - dependencies: - "@babel/runtime" "^7.17.9" - "@polkadot/wasm-util" "6.1.1" - -"@polkadot/wasm-crypto@^6.1.1": - version "6.1.1" - resolved "https://registry.yarnpkg.com/@polkadot/wasm-crypto/-/wasm-crypto-6.1.1.tgz#8e2c2d64d24eeaa78eb0b74ea1c438b7bc704176" - integrity sha512-hv9RCbMYtgjCy7+FKZFnO2Afu/whax9sk6udnZqGRBRiwaNagtyliWZGrKNGvaXMIO0VyaY4jWUwSzUgPrLu1A== - dependencies: - "@babel/runtime" "^7.17.9" - "@polkadot/wasm-bridge" "6.1.1" - "@polkadot/wasm-crypto-asmjs" "6.1.1" - "@polkadot/wasm-crypto-init" "6.1.1" - "@polkadot/wasm-crypto-wasm" "6.1.1" - "@polkadot/wasm-util" "6.1.1" - -"@polkadot/wasm-util@6.1.1": - version "6.1.1" - resolved "https://registry.yarnpkg.com/@polkadot/wasm-util/-/wasm-util-6.1.1.tgz#58a566aba68f90d2a701c78ad49a1a9521b17f5b" - integrity sha512-DgpLoFXMT53UKcfZ8eT2GkJlJAOh89AWO+TP6a6qeZQpvXVe5f1yR45WQpkZlgZyUP+/19+kY56GK0pQxfslqg== - dependencies: - "@babel/runtime" "^7.17.9" - -"@polkadot/x-bigint@9.4.1": - version "9.4.1" - resolved "https://registry.yarnpkg.com/@polkadot/x-bigint/-/x-bigint-9.4.1.tgz#0a7c6b5743a6fb81ab6a1c3a48a584e774c37910" - integrity sha512-KlbXboegENoyrpjj+eXfY13vsqrXgk4620zCAUhKNH622ogdvAepHbY/DpV6w0FLEC6MwN9zd5cRuDBEXVeWiw== - dependencies: - "@babel/runtime" "^7.18.3" - "@polkadot/x-global" "9.4.1" - -"@polkadot/x-fetch@^9.4.1": - version "9.4.1" - resolved "https://registry.yarnpkg.com/@polkadot/x-fetch/-/x-fetch-9.4.1.tgz#92802d3880db826a90bf1be90174a9fc73fc044a" - integrity sha512-CZFPZKgy09TOF5pOFRVVhGrAaAPdSMyrUSKwdO2I8DzdIE1tmjnol50dlnZja5t8zTD0n1uIY1H4CEWwc5NF/g== - dependencies: - "@babel/runtime" "^7.18.3" - "@polkadot/x-global" "9.4.1" - "@types/node-fetch" "^2.6.1" - node-fetch "^2.6.7" - -"@polkadot/x-global@9.4.1", "@polkadot/x-global@^9.4.1": - version "9.4.1" - resolved "https://registry.yarnpkg.com/@polkadot/x-global/-/x-global-9.4.1.tgz#3bd44862ea2b7e0fb2de766dfa4d56bb46d19e17" - integrity sha512-eN4oZeRdIKQeUPNN7OtH5XeYp349d8V9+gW6W0BmCfB2lTg8TDlG1Nj+Cyxpjl9DNF5CiKudTq72zr0dDSRbwA== - dependencies: - "@babel/runtime" "^7.18.3" - -"@polkadot/x-randomvalues@9.4.1": - version "9.4.1" - resolved "https://registry.yarnpkg.com/@polkadot/x-randomvalues/-/x-randomvalues-9.4.1.tgz#ab995b3a22aee6bffc18490e636e1a7409f36a15" - integrity 
sha512-TLOQw3JNPgCrcq9WO2ipdeG8scsSreu3m9hwj3n7nX/QKlVzSf4G5bxJo5TW1dwcUdHwBuVox+3zgCmo+NPh+Q== - dependencies: - "@babel/runtime" "^7.18.3" - "@polkadot/x-global" "9.4.1" - -"@polkadot/x-textdecoder@9.4.1": - version "9.4.1" - resolved "https://registry.yarnpkg.com/@polkadot/x-textdecoder/-/x-textdecoder-9.4.1.tgz#1d891b82f4192d92dd373d14ea4b5654d0130484" - integrity sha512-yLulcgVASFUBJqrvS6Ssy0ko9teAfbu1ajH0r3Jjnqkpmmz2DJ1CS7tAktVa7THd4GHPGeKAVfxl+BbV/LZl+w== - dependencies: - "@babel/runtime" "^7.18.3" - "@polkadot/x-global" "9.4.1" - -"@polkadot/x-textencoder@9.4.1": - version "9.4.1" - resolved "https://registry.yarnpkg.com/@polkadot/x-textencoder/-/x-textencoder-9.4.1.tgz#09c47727d7713884cf82fd773e478487fe39d479" - integrity sha512-/47wa31jBa43ULqMO60vzcJigTG+ZAGNcyT5r6hFLrQzRzc8nIBjIOD8YWtnKM92r9NvlNv2wJhdamqyU0mntg== - dependencies: - "@babel/runtime" "^7.18.3" - "@polkadot/x-global" "9.4.1" - -"@polkadot/x-ws@^9.4.1": - version "9.4.1" - resolved "https://registry.yarnpkg.com/@polkadot/x-ws/-/x-ws-9.4.1.tgz#c48f2ef3e80532f4b366b57b6661429b46a16155" - integrity sha512-zQjVxXgHsBVn27u4bjY01cFO6XWxgv2b3MMOpNHTKTAs8SLEmFf0LcT7fBShimyyudyTeJld5pHApJ4qp1OXxA== + "@babel/runtime" "^7.18.9" + +"@polkadot/wasm-crypto-asmjs@6.3.1": + version "6.3.1" + resolved "https://registry.yarnpkg.com/@polkadot/wasm-crypto-asmjs/-/wasm-crypto-asmjs-6.3.1.tgz#e8f469c9cf4a7709c8131a96f857291953f3e30a" + integrity sha512-zbombRfA5v/mUWQQhgg2YwaxhRmxRIrvskw65x+lruax3b6xPBFDs7yplopiJU3r8h2pTgQvX/DUksvqz2TCRQ== + dependencies: + "@babel/runtime" "^7.18.9" + +"@polkadot/wasm-crypto-init@6.3.1": + version "6.3.1" + resolved "https://registry.yarnpkg.com/@polkadot/wasm-crypto-init/-/wasm-crypto-init-6.3.1.tgz#b590220c53c94b9a54d5dc236d0cbe943db76706" + integrity sha512-9yaUBcu+snwjJLmPPGl3cyGRQ1afyFGm16qzTM0sgG/ZCfUlK4uk8KWZe+sBUKgoxb2oXY7Y4WklKgQI1YBdfw== + dependencies: + "@babel/runtime" "^7.18.9" + "@polkadot/wasm-bridge" "6.3.1" + "@polkadot/wasm-crypto-asmjs" "6.3.1" + "@polkadot/wasm-crypto-wasm" "6.3.1" + +"@polkadot/wasm-crypto-wasm@6.3.1": + version "6.3.1" + resolved "https://registry.yarnpkg.com/@polkadot/wasm-crypto-wasm/-/wasm-crypto-wasm-6.3.1.tgz#67f720e7f9694fef096abe9d60abbac02e032383" + integrity sha512-idSlzKGVzCfeCMRHsacRvqwojSaTadFxL/Dbls4z1thvfa3U9Ku0d2qVtlwg7Hj+tYWDiuP8Kygs+6bQwfs0XA== + dependencies: + "@babel/runtime" "^7.18.9" + "@polkadot/wasm-util" "6.3.1" + +"@polkadot/wasm-crypto@^6.3.1": + version "6.3.1" + resolved "https://registry.yarnpkg.com/@polkadot/wasm-crypto/-/wasm-crypto-6.3.1.tgz#63f5798aca2b2ff0696f190e6862d9781d8f280c" + integrity sha512-OO8h0qeVkqp4xYZaRVl4iuWOEtq282pNBHDKb6SOJuI2g59eWGcKh4EQU9Me2VP6qzojIqptrkrVt7KQXC68gA== + dependencies: + "@babel/runtime" "^7.18.9" + "@polkadot/wasm-bridge" "6.3.1" + "@polkadot/wasm-crypto-asmjs" "6.3.1" + "@polkadot/wasm-crypto-init" "6.3.1" + "@polkadot/wasm-crypto-wasm" "6.3.1" + "@polkadot/wasm-util" "6.3.1" + +"@polkadot/wasm-util@6.3.1": + version "6.3.1" + resolved "https://registry.yarnpkg.com/@polkadot/wasm-util/-/wasm-util-6.3.1.tgz#439ebb68a436317af388ed6438b8f879df3afcda" + integrity sha512-12oAv5J7Yoc9m6jixrSaQCxpOkWOyzHx3DMC8qmLjRiwdBWxqLmImOVRVnFsbaxqSbhBIHRuJphVxWE+GZETDg== + dependencies: + "@babel/runtime" "^7.18.9" + +"@polkadot/x-bigint@10.1.11", "@polkadot/x-bigint@^10.1.11": + version "10.1.11" + resolved "https://registry.yarnpkg.com/@polkadot/x-bigint/-/x-bigint-10.1.11.tgz#7d62ce10cccd55b86a415342db95b9feeb099776" + integrity 
sha512-TC4KZ+ni/SJhcf/LIwD49C/kwvACu0nCchETNO+sAfJ7COXZwHDUJXVXmwN5PgkQxwsWsKKuJmzR/Fi1bgMWnQ== + dependencies: + "@babel/runtime" "^7.19.4" + "@polkadot/x-global" "10.1.11" + +"@polkadot/x-fetch@^10.1.11": + version "10.1.11" + resolved "https://registry.yarnpkg.com/@polkadot/x-fetch/-/x-fetch-10.1.11.tgz#8f579bb166096c977acff91a40b3848fb5581900" + integrity sha512-WtyUr9itVD9BLnxCUloJ1iwrXOY/lnlEShEYKHcSm6MIHtbJolePd3v1+o5mOX+bdDbHXhPZnH8anCCqDNDRqg== + dependencies: + "@babel/runtime" "^7.19.4" + "@polkadot/x-global" "10.1.11" + "@types/node-fetch" "^2.6.2" + node-fetch "^3.2.10" + +"@polkadot/x-global@10.1.11", "@polkadot/x-global@^10.1.11": + version "10.1.11" + resolved "https://registry.yarnpkg.com/@polkadot/x-global/-/x-global-10.1.11.tgz#37dda3ef1cebfd14c68c69279ae6521957817866" + integrity sha512-bWz5gdcELy6+xfr27R1GE5MPX4nfVlchzHQH+DR6OBbSi9g/PeycQAvFB6IkTmP+YEbNNtIpxnSP37zoUaG3xw== + dependencies: + "@babel/runtime" "^7.19.4" + +"@polkadot/x-randomvalues@10.1.11": + version "10.1.11" + resolved "https://registry.yarnpkg.com/@polkadot/x-randomvalues/-/x-randomvalues-10.1.11.tgz#f9e088f8b400770d3e53ba9e0c0f0d464047f89e" + integrity sha512-V2V37f5hoM5B32eCpGw87Lwstin2+ArXhOZ8ENKncbQLXzbF9yTODueDoA5Vt0MJCs2CDP9cyiCYykcanqVkxg== + dependencies: + "@babel/runtime" "^7.19.4" + "@polkadot/x-global" "10.1.11" + +"@polkadot/x-textdecoder@10.1.11": + version "10.1.11" + resolved "https://registry.yarnpkg.com/@polkadot/x-textdecoder/-/x-textdecoder-10.1.11.tgz#314c79e27545a41fe0494a26196bf2dff5cfcb5d" + integrity sha512-QZqie04SR6pAj260PaLBfZUGXWKI357t4ROVJhpaj06qc1zrk1V8Mwkr49+WzjAPFEOqo70HWnzXmPNCH4dQiw== + dependencies: + "@babel/runtime" "^7.19.4" + "@polkadot/x-global" "10.1.11" + +"@polkadot/x-textencoder@10.1.11": + version "10.1.11" + resolved "https://registry.yarnpkg.com/@polkadot/x-textencoder/-/x-textencoder-10.1.11.tgz#23b18b3ffbc649572728aa37d7787432bb3a03b5" + integrity sha512-UX+uV9AbDID81waaG/NvTkkf7ZNVW7HSHaddgbWjQEVW2Ex4ByccBarY5jEi6cErEPKfzCamKhgXflu0aV9LWw== + dependencies: + "@babel/runtime" "^7.19.4" + "@polkadot/x-global" "10.1.11" + +"@polkadot/x-ws@^10.1.11": + version "10.1.11" + resolved "https://registry.yarnpkg.com/@polkadot/x-ws/-/x-ws-10.1.11.tgz#7431ad72064d56519d4293278f03ae97b9ea9271" + integrity sha512-EUbL/R1A/NxYf6Rnb1M7U9yeTuo5r4y2vcQllE5aBLaQ0cFnRykHzlmZlVX1E7O5uy3lYVdxWC7sNgxItIWkWA== dependencies: - "@babel/runtime" "^7.18.3" - "@polkadot/x-global" "9.4.1" + "@babel/runtime" "^7.19.4" + "@polkadot/x-global" "10.1.11" "@types/websocket" "^1.0.5" websocket "^1.0.34" -"@scure/base@1.0.0": - version "1.0.0" - resolved "https://registry.yarnpkg.com/@scure/base/-/base-1.0.0.tgz#109fb595021de285f05a7db6806f2f48296fcee7" - integrity sha512-gIVaYhUsy+9s58m/ETjSJVKHhKTBMmcRb9cEV5/5dwvfDlfORjKrFsDeDHWRrm6RjcPvCLZFwGJjAjLj1gg4HA== +"@scure/base@1.1.1": + version "1.1.1" + resolved "https://registry.yarnpkg.com/@scure/base/-/base-1.1.1.tgz#ebb651ee52ff84f420097055f4bf46cfba403938" + integrity sha512-ZxOhsSyxYwLJj3pLZCefNitxsj093tb2vq90mp2txoYeBqbcjDjqFhyM8eUjq/uFm6zJ+mUuqxlS2FkuSY1MTA== -"@sindresorhus/is@^0.14.0": - version "0.14.0" - resolved "https://registry.yarnpkg.com/@sindresorhus/is/-/is-0.14.0.tgz#9fb3a3cf3132328151f353de4632e01e52102bea" - integrity sha512-9NET910DNaIPngYnLLPeg+Ogzqsi9uM4mSboU5y6p8S5DzMTVEsJZrawi+BoDNUVBa2DhJqQYUFvMDfgU062LQ== +"@sindresorhus/is@^4.0.0", "@sindresorhus/is@^4.6.0": + version "4.6.0" + resolved "https://registry.yarnpkg.com/@sindresorhus/is/-/is-4.6.0.tgz#3c7c9c46e678feefe7a2e5bb609d3dbd665ffb3f" + integrity 
sha512-t09vSN3MdfsyCHoFcTRCH/iUtG7OJ0CsjzB8cjAmKc/va/kIgeDI/TxsigdncE/4be734m0cvIYwNaV4i2XqAw== -"@substrate/connect-extension-protocol@^1.0.0": - version "1.0.0" - resolved "https://registry.yarnpkg.com/@substrate/connect-extension-protocol/-/connect-extension-protocol-1.0.0.tgz#d452beda84b3ebfcf0e88592a4695e729a91e858" - integrity sha512-nFVuKdp71hMd/MGlllAOh+a2hAqt8m6J2G0aSsS/RcALZexxF9jodbFc62ni8RDtJboeOfXAHhenYOANvJKPIg== +"@substrate/connect-extension-protocol@^1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@substrate/connect-extension-protocol/-/connect-extension-protocol-1.0.1.tgz#fa5738039586c648013caa6a0c95c43265dbe77d" + integrity sha512-161JhCC1csjH3GE5mPLEd7HbWtwNSPJBg3p1Ksz9SFlTzj/bgEwudiRN2y5i0MoLGCIJRYKyKGMxVnd29PzNjg== -"@substrate/connect@0.7.5": - version "0.7.5" - resolved "https://registry.yarnpkg.com/@substrate/connect/-/connect-0.7.5.tgz#8d868ed905df25c87ff9bad9fa8db6d4137012c9" - integrity sha512-sdAZ6IGuTNxRGlH/O+6IaXvkYzZFwMK03VbQMgxUzry9dz1+JzyaNf8iOTVHxhMIUZc0h0E90JQz/hNiUYPlUw== +"@substrate/connect@0.7.14": + version "0.7.14" + resolved "https://registry.yarnpkg.com/@substrate/connect/-/connect-0.7.14.tgz#c090e952e9cdd93185a94d24fbc424ea20fe7bbe" + integrity sha512-uW5uBmihpivshmmmw+rsg7qOV0KqVSep4rWOXFMP8aFQinvmqw4JqxP21og4H/7JZxttYUBFQVsdtXHGKJ0aVQ== dependencies: - "@substrate/connect-extension-protocol" "^1.0.0" - "@substrate/smoldot-light" "0.6.16" + "@substrate/connect-extension-protocol" "^1.0.1" + "@substrate/smoldot-light" "0.6.34" eventemitter3 "^4.0.7" -"@substrate/smoldot-light@0.6.16": - version "0.6.16" - resolved "https://registry.yarnpkg.com/@substrate/smoldot-light/-/smoldot-light-0.6.16.tgz#04ec70cf1df285431309fe5704d3b2dd701faa0b" - integrity sha512-Ej0ZdNPTW0EXbp45gv/5Kt/JV+c9cmRZRYAXg+EALxXPm0hW9h2QdVLm61A2PAskOGptW4wnJ1WzzruaenwAXQ== +"@substrate/smoldot-light@0.6.34": + version "0.6.34" + resolved "https://registry.yarnpkg.com/@substrate/smoldot-light/-/smoldot-light-0.6.34.tgz#273dba622102281fd0fdb0e375198bff2ec584c3" + integrity sha512-+HK9MaJ0HelJmpf4YYR+salJ7dhVBltmhGlyz5l8OXS9DW18fe0Z2wxEo8P5kX9CUxlCXEb8J9JBRQAYBPHbwQ== dependencies: - buffer "^6.0.1" pako "^2.0.4" - websocket "^1.0.32" + ws "^8.8.1" -"@substrate/ss58-registry@^1.22.0": - version "1.22.0" - resolved "https://registry.yarnpkg.com/@substrate/ss58-registry/-/ss58-registry-1.22.0.tgz#d115bc5dcab8c0f5800e05e4ef265949042b13ec" - integrity sha512-IKqrPY0B3AeIXEc5/JGgEhPZLy+SmVyQf+k0SIGcNSTqt1GLI3gQFEOFwSScJdem+iYZQUrn6YPPxC3TpdSC3A== +"@substrate/ss58-registry@^1.33.0": + version "1.33.0" + resolved "https://registry.yarnpkg.com/@substrate/ss58-registry/-/ss58-registry-1.33.0.tgz#b93218fc86405769716b02f0ce5e61df221b37ae" + integrity sha512-DztMuMcEfu+tJrtIQIIp5gO8/XJZ8N8UwPObDCSNgrp7trtSkPJAUFB9qXaReXtN9UvTcVBMTWk6VPfFi04Wkg== -"@szmarczak/http-timer@^1.1.2": - version "1.1.2" - resolved "https://registry.yarnpkg.com/@szmarczak/http-timer/-/http-timer-1.1.2.tgz#b1665e2c461a2cd92f4c1bbf50d5454de0d4b421" - integrity sha512-XIB2XbzHTN6ieIjfIMV9hlVcfPU26s2vafYWQcZHWXHOxiaRZYEDKEwdl129Zyg50+foYV2jCgtrqSA6qNuNSA== +"@szmarczak/http-timer@^4.0.5": + version "4.0.6" + resolved "https://registry.yarnpkg.com/@szmarczak/http-timer/-/http-timer-4.0.6.tgz#b4a914bb62e7c272d4e5989fe4440f812ab1d807" + integrity sha512-4BAffykYOgO+5nzBWYwE3W90sBgLJoUPRWWcL8wlyiM8IB8ipJz3UMJ9KXQd1RKQXpKp8Tutn80HZtWsu2u76w== + dependencies: + defer-to-connect "^2.0.0" + +"@szmarczak/http-timer@^5.0.1": + version "5.0.1" + resolved 
"https://registry.yarnpkg.com/@szmarczak/http-timer/-/http-timer-5.0.1.tgz#c7c1bf1141cdd4751b0399c8fc7b8b664cd5be3a" + integrity sha512-+PmQX0PiAYPMeVYe237LJAYvOMYW1j2rH5YROyS3b4CTVJum34HfRvKvAzozHAQG0TnHNdUfY9nCeUyRAs//cw== dependencies: - defer-to-connect "^1.0.1" + defer-to-connect "^2.0.1" "@tsconfig/node10@^1.0.7": - version "1.0.8" - resolved "https://registry.yarnpkg.com/@tsconfig/node10/-/node10-1.0.8.tgz#c1e4e80d6f964fbecb3359c43bd48b40f7cadad9" - integrity sha512-6XFfSQmMgq0CFLY1MslA/CPUfhIL919M1rMsa5lP2P097N2Wd1sSX0tx1u4olM16fLNhtHZpRhedZJphNJqmZg== + version "1.0.9" + resolved "https://registry.yarnpkg.com/@tsconfig/node10/-/node10-1.0.9.tgz#df4907fc07a886922637b15e02d4cebc4c0021b2" + integrity sha512-jNsYVVxU8v5g43Erja32laIDHXeoNvFEpX33OK4d6hljo3jDhCBDhx5dhCCTMWUojscpAagGiRkBKxpdl9fxqA== "@tsconfig/node12@^1.0.7": - version "1.0.9" - resolved "https://registry.yarnpkg.com/@tsconfig/node12/-/node12-1.0.9.tgz#62c1f6dee2ebd9aead80dc3afa56810e58e1a04c" - integrity sha512-/yBMcem+fbvhSREH+s14YJi18sp7J9jpuhYByADT2rypfajMZZN4WQ6zBGgBKp53NKmqI36wFYDb3yaMPurITw== + version "1.0.11" + resolved "https://registry.yarnpkg.com/@tsconfig/node12/-/node12-1.0.11.tgz#ee3def1f27d9ed66dac6e46a295cffb0152e058d" + integrity sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag== "@tsconfig/node14@^1.0.0": - version "1.0.1" - resolved "https://registry.yarnpkg.com/@tsconfig/node14/-/node14-1.0.1.tgz#95f2d167ffb9b8d2068b0b235302fafd4df711f2" - integrity sha512-509r2+yARFfHHE7T6Puu2jjkoycftovhXRqW328PDXTVGKihlb1P8Z9mMZH04ebyajfRY7dedfGynlrFHJUQCg== + version "1.0.3" + resolved "https://registry.yarnpkg.com/@tsconfig/node14/-/node14-1.0.3.tgz#e4386316284f00b98435bf40f72f75a09dabf6c1" + integrity sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow== "@tsconfig/node16@^1.0.2": - version "1.0.2" - resolved "https://registry.yarnpkg.com/@tsconfig/node16/-/node16-1.0.2.tgz#423c77877d0569db20e1fc80885ac4118314010e" - integrity sha512-eZxlbI8GZscaGS7kkc/trHTT5xgrjH3/1n2JDwusC9iahPKWMRvRjJSAN5mCXviuTGQ/lHnhvv8Q1YTpnfz9gA== + version "1.0.3" + resolved "https://registry.yarnpkg.com/@tsconfig/node16/-/node16-1.0.3.tgz#472eaab5f15c1ffdd7f8628bd4c4f753995ec79e" + integrity sha512-yOlFc+7UtL/89t2ZhjPvvB/DeAr3r+Dq58IgzsFkOAvVC6NMJXmCGjbptdXdR9qsX7pKcTL+s87FtYREi2dEEQ== -"@types/bn.js@^4.11.5": - version "4.11.6" - resolved "https://registry.yarnpkg.com/@types/bn.js/-/bn.js-4.11.6.tgz#c306c70d9358aaea33cd4eda092a742b9505967c" - integrity sha512-pqr857jrp2kPuO9uRjZ3PwnJTjoQy+fcdxvBTvHm6dkmEL9q+hDD/2j/0ELOBPtPnS8LjCX0gI9nbl8lVkadpg== +"@types/bn.js@^5.1.0", "@types/bn.js@^5.1.1": + version "5.1.1" + resolved "https://registry.yarnpkg.com/@types/bn.js/-/bn.js-5.1.1.tgz#b51e1b55920a4ca26e9285ff79936bbdec910682" + integrity sha512-qNrYbZqMx0uJAfKnKclPh+dTwK33KfLHYqtyODwd5HnXOjnkhc4qgn3BrK6RWyGZm5+sIFE7Q7Vz6QQtJB7w7g== dependencies: "@types/node" "*" -"@types/bn.js@^5.1.0": - version "5.1.0" - resolved "https://registry.yarnpkg.com/@types/bn.js/-/bn.js-5.1.0.tgz#32c5d271503a12653c62cf4d2b45e6eab8cebc68" - integrity sha512-QSSVYj7pYFN49kW77o2s9xTCwZ8F2xLbjLLSEVh8D2F4JUhZtPAGOFLTD+ffqksBx/u4cE/KImFjyhqCjn/LIA== +"@types/cacheable-request@^6.0.1", "@types/cacheable-request@^6.0.2": + version "6.0.2" + resolved "https://registry.yarnpkg.com/@types/cacheable-request/-/cacheable-request-6.0.2.tgz#c324da0197de0a98a2312156536ae262429ff6b9" + integrity sha512-B3xVo+dlKM6nnKTcmm5ZtY/OL8bOAOd2Olee9M1zft65ox50OzjEHW91sDiU9j6cvW8Ejg1/Qkf4xd2kugApUA== 
dependencies: + "@types/http-cache-semantics" "*" + "@types/keyv" "*" "@types/node" "*" + "@types/responselike" "*" "@types/chai-as-promised@^7.1.5": version "7.1.5" @@ -963,58 +964,52 @@ dependencies: "@types/chai" "*" -"@types/chai@*", "@types/chai@^4.3.1": - version "4.3.1" - resolved "https://registry.yarnpkg.com/@types/chai/-/chai-4.3.1.tgz#e2c6e73e0bdeb2521d00756d099218e9f5d90a04" - integrity sha512-/zPMqDkzSZ8t3VtxOa4KPq7uzzW978M9Tvh+j7GHKuo6k6GTLxPJ4J5gE5cjfJ26pnXst0N5Hax8Sr0T2Mi9zQ== - -"@types/chrome@^0.0.171": - version "0.0.171" - resolved "https://registry.yarnpkg.com/@types/chrome/-/chrome-0.0.171.tgz#6ee9aca52fabbe645372088fcc86b33cff33fcba" - integrity sha512-CnCwFKI3COygib3DNJrCjePeoU2OCDGGbUcmftXtQ3loMABsLgwpG8z+LxV4kjQJFzmJDqOyhCSsbY9yyEfapQ== - dependencies: - "@types/filesystem" "*" - "@types/har-format" "*" - -"@types/filesystem@*": - version "0.0.32" - resolved "https://registry.yarnpkg.com/@types/filesystem/-/filesystem-0.0.32.tgz#307df7cc084a2293c3c1a31151b178063e0a8edf" - integrity sha512-Yuf4jR5YYMR2DVgwuCiP11s0xuVRyPKmz8vo6HBY3CGdeMj8af93CFZX+T82+VD1+UqHOxTq31lO7MI7lepBtQ== +"@types/chai-like@^1.1.1": + version "1.1.1" + resolved "https://registry.yarnpkg.com/@types/chai-like/-/chai-like-1.1.1.tgz#c454039b0a2f92664fb5b7b7a2a66c3358783ae7" + integrity sha512-s46EZsupBuVhLn66DbRee5B0SELLmL4nFXVrBiV29BxLGm9Sh7Bful623j3AfiQRu2zAP4cnlZ3ETWB3eWc4bA== dependencies: - "@types/filewriter" "*" + "@types/chai" "*" -"@types/filewriter@*": - version "0.0.29" - resolved "https://registry.yarnpkg.com/@types/filewriter/-/filewriter-0.0.29.tgz#a48795ecadf957f6c0d10e0c34af86c098fa5bee" - integrity sha512-BsPXH/irW0ht0Ji6iw/jJaK8Lj3FJemon2gvEqHKpCdDCeemHa+rI3WBGq5z7cDMZgoLjY40oninGxqk+8NzNQ== +"@types/chai@*", "@types/chai@^4.3.3": + version "4.3.3" + resolved "https://registry.yarnpkg.com/@types/chai/-/chai-4.3.3.tgz#3c90752792660c4b562ad73b3fbd68bf3bc7ae07" + integrity sha512-hC7OMnszpxhZPduX+m+nrx+uFoLkWOMiR4oa/AZF3MuSETYTZmFfJAHqZEM8MVlvfG7BEUcgvtwoCTxBp6hm3g== -"@types/har-format@*": - version "1.2.8" - resolved "https://registry.yarnpkg.com/@types/har-format/-/har-format-1.2.8.tgz#e6908b76d4c88be3db642846bb8b455f0bfb1c4e" - integrity sha512-OP6L9VuZNdskgNN3zFQQ54ceYD8OLq5IbqO4VK91ORLfOm7WdT/CiT/pHEBSQEqCInJ2y3O6iCm/zGtPElpgJQ== +"@types/http-cache-semantics@*": + version "4.0.1" + resolved "https://registry.yarnpkg.com/@types/http-cache-semantics/-/http-cache-semantics-4.0.1.tgz#0ea7b61496902b95890dc4c3a116b60cb8dae812" + integrity sha512-SZs7ekbP8CN0txVG2xVRH6EgKmEm31BOxA07vkFaETzZz1xh+cbt8BcI0slpymvwhx5dlFnQG2rTlPVQn+iRPQ== "@types/json-schema@^7.0.9": version "7.0.11" resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.11.tgz#d421b6c527a3037f7c84433fd2c4229e016863d3" integrity sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ== -"@types/mocha@^9.1.1": - version "9.1.1" - resolved "https://registry.yarnpkg.com/@types/mocha/-/mocha-9.1.1.tgz#e7c4f1001eefa4b8afbd1eee27a237fee3bf29c4" - integrity sha512-Z61JK7DKDtdKTWwLeElSEBcWGRLY8g95ic5FoQqI9CMx0ns/Ghep3B4DfcEimiKMvtamNVULVNKEsiwV3aQmXw== +"@types/keyv@*": + version "4.2.0" + resolved "https://registry.yarnpkg.com/@types/keyv/-/keyv-4.2.0.tgz#65b97868ab757906f2dbb653590d7167ad023fa0" + integrity sha512-xoBtGl5R9jeKUhc8ZqeYaRDx04qqJ10yhhXYGmJ4Jr8qKpvMsDQQrNUvF/wUJ4klOtmJeJM+p2Xo3zp9uaC3tw== + dependencies: + keyv "*" + +"@types/mocha@^10.0.0": + version "10.0.0" + resolved 
"https://registry.yarnpkg.com/@types/mocha/-/mocha-10.0.0.tgz#3d9018c575f0e3f7386c1de80ee66cc21fbb7a52" + integrity sha512-rADY+HtTOA52l9VZWtgQfn4p+UDVM2eDVkMZT1I6syp0YKxW2F9v+0pbRZLsvskhQv/vMb6ZfCay81GHbz5SHg== -"@types/node-fetch@^2.6.1": - version "2.6.1" - resolved "https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-2.6.1.tgz#8f127c50481db65886800ef496f20bbf15518975" - integrity sha512-oMqjURCaxoSIsHSr1E47QHzbmzNR5rK8McHuNb11BOM9cHcIK3Avy0s/b2JlXHoQGTYS3NsvWzV1M0iK7l0wbA== +"@types/node-fetch@^2.6.2": + version "2.6.2" + resolved "https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-2.6.2.tgz#d1a9c5fd049d9415dce61571557104dec3ec81da" + integrity sha512-DHqhlq5jeESLy19TYhLakJ07kNumXWjcDdxXsLUMJZ6ue8VZJj4kLPQVE/2mdHh3xZziNF1xppu5lwmS53HR+A== dependencies: "@types/node" "*" form-data "^3.0.0" -"@types/node@*", "@types/node@^17.0.35": - version "17.0.41" - resolved "https://registry.yarnpkg.com/@types/node/-/node-17.0.41.tgz#1607b2fd3da014ae5d4d1b31bc792a39348dfb9b" - integrity sha512-xA6drNNeqb5YyV5fO3OAEsnXLfO7uF0whiOfPTz5AeDo8KeZFmODKnvwPymMNO8qE/an8pVY/O50tig2SQCrGw== +"@types/node@*", "@types/node@^18.11.2": + version "18.11.2" + resolved "https://registry.yarnpkg.com/@types/node/-/node-18.11.2.tgz#c59b7641832531264fda3f1ba610362dc9a7dfc8" + integrity sha512-BWN3M23gLO2jVG8g/XHIRFWiiV4/GckeFIqbU/C4V3xpoBBWSMk4OZomouN0wCkfQFPqgZikyLr7DOYDysIkkw== "@types/node@^12.12.6": version "12.20.55" @@ -1028,6 +1023,13 @@ dependencies: "@types/node" "*" +"@types/responselike@*", "@types/responselike@^1.0.0": + version "1.0.0" + resolved "https://registry.yarnpkg.com/@types/responselike/-/responselike-1.0.0.tgz#251f4fe7d154d2bad125abe1b429b23afd262e29" + integrity sha512-85Y2BjiufFzaMIlvJDvTTB8Fxl2xfLo4HgmHzVBz08w4wDePCTjYw66PdrolO0kzli3yam/YCgRufyo1DdQVTA== + dependencies: + "@types/node" "*" + "@types/secp256k1@^4.0.1": version "4.0.3" resolved "https://registry.yarnpkg.com/@types/secp256k1/-/secp256k1-4.0.3.tgz#1b8e55d8e00f08ee7220b4d59a6abe89c37a901c" @@ -1035,6 +1037,11 @@ dependencies: "@types/node" "*" +"@types/semver@^7.3.12": + version "7.3.12" + resolved "https://registry.yarnpkg.com/@types/semver/-/semver-7.3.12.tgz#920447fdd78d76b19de0438b7f60df3c4a80bf1c" + integrity sha512-WwA1MW0++RfXmCr12xeYOOC5baSC9mSb0ZqCquFzKhcoF4TvHu5MKOuXsncgZcpVFhB1pXd5hZmM0ryAoCp12A== + "@types/websocket@^1.0.5": version "1.0.5" resolved "https://registry.yarnpkg.com/@types/websocket/-/websocket-1.0.5.tgz#3fb80ed8e07f88e51961211cd3682a3a4a81569c" @@ -1042,90 +1049,92 @@ dependencies: "@types/node" "*" -"@typescript-eslint/eslint-plugin@^5.26.0": - version "5.27.1" - resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.27.1.tgz#fdf59c905354139046b41b3ed95d1609913d0758" - integrity sha512-6dM5NKT57ZduNnJfpY81Phe9nc9wolnMCnknb1im6brWi1RYv84nbMS3olJa27B6+irUVV1X/Wb+Am0FjJdGFw== +"@typescript-eslint/eslint-plugin@^5.40.1": + version "5.40.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.40.1.tgz#3203a6ff396b1194083faaa6e5110c401201d7d5" + integrity sha512-FsWboKkWdytGiXT5O1/R9j37YgcjO8MKHSUmWnIEjVaz0krHkplPnYi7mwdb+5+cs0toFNQb0HIrN7zONdIEWg== dependencies: - "@typescript-eslint/scope-manager" "5.27.1" - "@typescript-eslint/type-utils" "5.27.1" - "@typescript-eslint/utils" "5.27.1" + "@typescript-eslint/scope-manager" "5.40.1" + "@typescript-eslint/type-utils" "5.40.1" + "@typescript-eslint/utils" "5.40.1" debug "^4.3.4" - functional-red-black-tree "^1.0.1" ignore "^5.2.0" regexpp "^3.2.0" semver "^7.3.7" tsutils 
"^3.21.0" -"@typescript-eslint/parser@^5.26.0": - version "5.27.1" - resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-5.27.1.tgz#3a4dcaa67e45e0427b6ca7bb7165122c8b569639" - integrity sha512-7Va2ZOkHi5NP+AZwb5ReLgNF6nWLGTeUJfxdkVUAPPSaAdbWNnFZzLZ4EGGmmiCTg+AwlbE1KyUYTBglosSLHQ== +"@typescript-eslint/parser@^5.40.1": + version "5.40.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-5.40.1.tgz#e7f8295dd8154d0d37d661ddd8e2f0ecfdee28dd" + integrity sha512-IK6x55va5w4YvXd4b3VrXQPldV9vQTxi5ov+g4pMANsXPTXOcfjx08CRR1Dfrcc51syPtXHF5bgLlMHYFrvQtg== dependencies: - "@typescript-eslint/scope-manager" "5.27.1" - "@typescript-eslint/types" "5.27.1" - "@typescript-eslint/typescript-estree" "5.27.1" + "@typescript-eslint/scope-manager" "5.40.1" + "@typescript-eslint/types" "5.40.1" + "@typescript-eslint/typescript-estree" "5.40.1" debug "^4.3.4" -"@typescript-eslint/scope-manager@5.27.1": - version "5.27.1" - resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-5.27.1.tgz#4d1504392d01fe5f76f4a5825991ec78b7b7894d" - integrity sha512-fQEOSa/QroWE6fAEg+bJxtRZJTH8NTskggybogHt4H9Da8zd4cJji76gA5SBlR0MgtwF7rebxTbDKB49YUCpAg== +"@typescript-eslint/scope-manager@5.40.1": + version "5.40.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-5.40.1.tgz#a7a5197dfd234622a2421ea590ee0ccc02e18dfe" + integrity sha512-jkn4xsJiUQucI16OLCXrLRXDZ3afKhOIqXs4R3O+M00hdQLKR58WuyXPZZjhKLFCEP2g+TXdBRtLQ33UfAdRUg== dependencies: - "@typescript-eslint/types" "5.27.1" - "@typescript-eslint/visitor-keys" "5.27.1" + "@typescript-eslint/types" "5.40.1" + "@typescript-eslint/visitor-keys" "5.40.1" -"@typescript-eslint/type-utils@5.27.1": - version "5.27.1" - resolved "https://registry.yarnpkg.com/@typescript-eslint/type-utils/-/type-utils-5.27.1.tgz#369f695199f74c1876e395ebea202582eb1d4166" - integrity sha512-+UC1vVUWaDHRnC2cQrCJ4QtVjpjjCgjNFpg8b03nERmkHv9JV9X5M19D7UFMd+/G7T/sgFwX2pGmWK38rqyvXw== +"@typescript-eslint/type-utils@5.40.1": + version "5.40.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/type-utils/-/type-utils-5.40.1.tgz#091e4ce3bebbdb68f4980bae9dee2e4e1725f601" + integrity sha512-DLAs+AHQOe6n5LRraXiv27IYPhleF0ldEmx6yBqBgBLaNRKTkffhV1RPsjoJBhVup2zHxfaRtan8/YRBgYhU9Q== dependencies: - "@typescript-eslint/utils" "5.27.1" + "@typescript-eslint/typescript-estree" "5.40.1" + "@typescript-eslint/utils" "5.40.1" debug "^4.3.4" tsutils "^3.21.0" -"@typescript-eslint/types@5.27.1": - version "5.27.1" - resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-5.27.1.tgz#34e3e629501349d38be6ae97841298c03a6ffbf1" - integrity sha512-LgogNVkBhCTZU/m8XgEYIWICD6m4dmEDbKXESCbqOXfKZxRKeqpiJXQIErv66sdopRKZPo5l32ymNqibYEH/xg== +"@typescript-eslint/types@5.40.1": + version "5.40.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-5.40.1.tgz#de37f4f64de731ee454bb2085d71030aa832f749" + integrity sha512-Icg9kiuVJSwdzSQvtdGspOlWNjVDnF3qVIKXdJ103o36yRprdl3Ge5cABQx+csx960nuMF21v8qvO31v9t3OHw== -"@typescript-eslint/typescript-estree@5.27.1": - version "5.27.1" - resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-5.27.1.tgz#7621ee78607331821c16fffc21fc7a452d7bc808" - integrity sha512-DnZvvq3TAJ5ke+hk0LklvxwYsnXpRdqUY5gaVS0D4raKtbznPz71UJGnPTHEFo0GDxqLOLdMkkmVZjSpET1hFw== +"@typescript-eslint/typescript-estree@5.40.1": + version "5.40.1" + resolved 
"https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-5.40.1.tgz#9a7d25492f02c69882ce5e0cd1857b0c55645d72" + integrity sha512-5QTP/nW5+60jBcEPfXy/EZL01qrl9GZtbgDZtDPlfW5zj/zjNrdI2B5zMUHmOsfvOr2cWqwVdWjobCiHcedmQA== dependencies: - "@typescript-eslint/types" "5.27.1" - "@typescript-eslint/visitor-keys" "5.27.1" + "@typescript-eslint/types" "5.40.1" + "@typescript-eslint/visitor-keys" "5.40.1" debug "^4.3.4" globby "^11.1.0" is-glob "^4.0.3" semver "^7.3.7" tsutils "^3.21.0" -"@typescript-eslint/utils@5.27.1": - version "5.27.1" - resolved "https://registry.yarnpkg.com/@typescript-eslint/utils/-/utils-5.27.1.tgz#b4678b68a94bc3b85bf08f243812a6868ac5128f" - integrity sha512-mZ9WEn1ZLDaVrhRaYgzbkXBkTPghPFsup8zDbbsYTxC5OmqrFE7skkKS/sraVsLP3TcT3Ki5CSyEFBRkLH/H/w== +"@typescript-eslint/utils@5.40.1": + version "5.40.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/utils/-/utils-5.40.1.tgz#3204fb73a559d3b7bab7dc9d3c44487c2734a9ca" + integrity sha512-a2TAVScoX9fjryNrW6BZRnreDUszxqm9eQ9Esv8n5nXApMW0zeANUYlwh/DED04SC/ifuBvXgZpIK5xeJHQ3aw== dependencies: "@types/json-schema" "^7.0.9" - "@typescript-eslint/scope-manager" "5.27.1" - "@typescript-eslint/types" "5.27.1" - "@typescript-eslint/typescript-estree" "5.27.1" + "@types/semver" "^7.3.12" + "@typescript-eslint/scope-manager" "5.40.1" + "@typescript-eslint/types" "5.40.1" + "@typescript-eslint/typescript-estree" "5.40.1" eslint-scope "^5.1.1" eslint-utils "^3.0.0" + semver "^7.3.7" -"@typescript-eslint/visitor-keys@5.27.1": - version "5.27.1" - resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-5.27.1.tgz#05a62666f2a89769dac2e6baa48f74e8472983af" - integrity sha512-xYs6ffo01nhdJgPieyk7HAOpjhTsx7r/oB9LWEhwAXgwn33tkr+W8DI2ChboqhZlC4q3TC6geDYPoiX8ROqyOQ== +"@typescript-eslint/visitor-keys@5.40.1": + version "5.40.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-5.40.1.tgz#f3d2bf5af192f4432b84cec6fdcb387193518754" + integrity sha512-A2DGmeZ+FMja0geX5rww+DpvILpwo1OsiQs0M+joPWJYsiEFBLsH0y1oFymPNul6Z5okSmHpP4ivkc2N0Cgfkw== dependencies: - "@typescript-eslint/types" "5.27.1" + "@typescript-eslint/types" "5.40.1" eslint-visitor-keys "^3.3.0" -"@ungap/promise-all-settled@1.1.2": - version "1.1.2" - resolved "https://registry.yarnpkg.com/@ungap/promise-all-settled/-/promise-all-settled-1.1.2.tgz#aa58042711d6e3275dd37dc597e5d31e8c290a44" - integrity sha512-sL/cEvJWAnClXw0wHk85/2L0G6Sj8UB0Ctc1TEMbKSsmpRosqhwj9gWgFRZSrBr2f9tiXISwNhCPmlfqUqyb9Q== +abortcontroller-polyfill@^1.7.3: + version "1.7.5" + resolved "https://registry.yarnpkg.com/abortcontroller-polyfill/-/abortcontroller-polyfill-1.7.5.tgz#6738495f4e901fbb57b6c0611d0c75f76c485bed" + integrity sha512-JMJ5soJWP18htbbxJjG7bG6yuI6pRhgJ0scHHTfkUjf6wjP912xZWvM+A4sJK3gqd9E8fcPbDnOefbA9Th/FIQ== accepts@~1.3.8: version "1.3.8" @@ -1145,10 +1154,10 @@ acorn-walk@^8.1.1: resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.2.0.tgz#741210f2e2426454508853a2f44d0ab83b7f69c1" integrity sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA== -acorn@^8.4.1, acorn@^8.7.1: - version "8.7.1" - resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.7.1.tgz#0197122c843d1bf6d0a5e83220a788f278f63c30" - integrity sha512-Xx54uLJQZ19lKygFXOWsscKUbsBZW0CPykPhVQdhIeIwrbPmJzqeASDInc8nKBnp/JT6igTs82qPXz069H8I/A== +acorn@^8.4.1, acorn@^8.8.0: + version "8.8.0" + resolved 
"https://registry.yarnpkg.com/acorn/-/acorn-8.8.0.tgz#88c0187620435c7f6015803f5539dae05a9dbea8" + integrity sha512-QOxyigPVrpZ2GXT+PFyZTl6TtOFc5egxHIP9IlQ+RbupQuX4RkT/Bee4/kQuC02Xkzg84JcT7oLYtDIQxp+v7w== ajv@^6.10.0, ajv@^6.12.3, ajv@^6.12.4: version "6.12.6" @@ -1288,10 +1297,10 @@ bcrypt-pbkdf@^1.0.0: dependencies: tweetnacl "^0.14.3" -bignumber.js@^9.0.0, bignumber.js@^9.0.2: - version "9.0.2" - resolved "https://registry.yarnpkg.com/bignumber.js/-/bignumber.js-9.0.2.tgz#71c6c6bed38de64e24a65ebe16cfcf23ae693673" - integrity sha512-GAcQvbpsM0pUb0zw1EI0KhQEZ+lRwR5fYaAp3vPOYuP7aDvGy6cVN6XHLauvF8SOga2y0dcLcjt3iQDTSEliyw== +bignumber.js@^9.0.0: + version "9.1.0" + resolved "https://registry.yarnpkg.com/bignumber.js/-/bignumber.js-9.1.0.tgz#8d340146107fe3a6cb8d40699643c302e8773b62" + integrity sha512-4LwHK4nfDOraBCtst+wOWIHbu1vhvAPJK8g8nROd4iuc3PSEjWif/qwbkh8jwCJz6yDBvtU4KPynETgrfh7y3A== binary-extensions@^2.0.0: version "2.2.0" @@ -1323,10 +1332,10 @@ bn.js@^5.0.0, bn.js@^5.1.1, bn.js@^5.1.2, bn.js@^5.2.0, bn.js@^5.2.1: resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-5.2.1.tgz#0bc527a6a0d18d0aa8d5b0538ce4a77dccfa7b70" integrity sha512-eXRvHzWyYPBuB4NBy0cmYQjGitUrtqwbvlzP3G6VFnNRbsZQIxQ10PbKKHt8gZ/HW/D/747aDl+QkDqg3KQLMQ== -body-parser@1.20.0, body-parser@^1.16.0: - version "1.20.0" - resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.0.tgz#3de69bd89011c11573d7bfee6a64f11b6bd27cc5" - integrity sha512-DfJ+q6EPcGKZD1QWUjSpqp+Q7bDQTsQIF4zfUAtZ6qk+H/3/QRhg9CEp39ss+/T2vw0+HaidC0ecJj/DRLIaKg== +body-parser@1.20.1, body-parser@^1.16.0: + version "1.20.1" + resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.1.tgz#b1812a8912c195cd371a3ee5e66faa2338a5c668" + integrity sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw== dependencies: bytes "3.1.2" content-type "~1.0.4" @@ -1336,7 +1345,7 @@ body-parser@1.20.0, body-parser@^1.16.0: http-errors "2.0.0" iconv-lite "0.4.24" on-finished "2.4.1" - qs "6.10.3" + qs "6.11.0" raw-body "2.5.1" type-is "~1.6.18" unpipe "1.0.0" @@ -1427,16 +1436,15 @@ browserify-sign@^4.0.0: readable-stream "^3.6.0" safe-buffer "^5.2.0" -browserslist@^4.20.2: - version "4.20.4" - resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.20.4.tgz#98096c9042af689ee1e0271333dbc564b8ce4477" - integrity sha512-ok1d+1WpnU24XYN7oC3QWgTyMhY/avPJ/r9T00xxvUOIparA/gc+UPUMaod3i+G6s+nI2nUb9xZ5k794uIwShw== +browserslist@^4.21.3: + version "4.21.4" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.21.4.tgz#e7496bbc67b9e39dd0f98565feccdcb0d4ff6987" + integrity sha512-CBHJJdDmgjl3daYjN5Cp5kbTf1mUhZoS+beLklHIvkOWscs83YAhLlF3Wsh/lciQYAcbBJgTOD44VtG31ZM4Hw== dependencies: - caniuse-lite "^1.0.30001349" - electron-to-chromium "^1.4.147" - escalade "^3.1.1" - node-releases "^2.0.5" - picocolors "^1.0.0" + caniuse-lite "^1.0.30001400" + electron-to-chromium "^1.4.251" + node-releases "^2.0.6" + update-browserslist-db "^1.0.9" bs58@^4.0.0: version "4.0.1" @@ -1477,18 +1485,10 @@ buffer@^5.0.5, buffer@^5.5.0, buffer@^5.6.0: base64-js "^1.3.1" ieee754 "^1.1.13" -buffer@^6.0.1: - version "6.0.3" - resolved "https://registry.yarnpkg.com/buffer/-/buffer-6.0.3.tgz#2ace578459cc8fbe2a70aaa8f52ee63b6a74c6c6" - integrity sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA== - dependencies: - base64-js "^1.3.1" - ieee754 "^1.2.1" - bufferutil@^4.0.1: - version "4.0.6" - resolved 
"https://registry.yarnpkg.com/bufferutil/-/bufferutil-4.0.6.tgz#ebd6c67c7922a0e902f053e5d8be5ec850e48433" - integrity sha512-jduaYOYtnio4aIAyc6UbvPCVcgq7nYpVnucyxr6eCYg/Woad9Hf/oxxBRDnGGjPfjUm6j5O/uBWhIu4iLebFaw== + version "4.0.7" + resolved "https://registry.yarnpkg.com/bufferutil/-/bufferutil-4.0.7.tgz#60c0d19ba2c992dd8273d3f73772ffc894c153ad" + integrity sha512-kukuqc39WOHtdxtw4UScxF/WVnMFVSQVKhtx3AjZJzhd0RGZZldcrfSEbVsWWe6KNH253574cq5F+wpv0G9pJw== dependencies: node-gyp-build "^4.3.0" @@ -1497,18 +1497,28 @@ bytes@3.1.2: resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.2.tgz#8b0beeb98605adf1b128fa4386403c009e0221a5" integrity sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg== -cacheable-request@^6.0.0: +cacheable-lookup@^5.0.3: + version "5.0.4" + resolved "https://registry.yarnpkg.com/cacheable-lookup/-/cacheable-lookup-5.0.4.tgz#5a6b865b2c44357be3d5ebc2a467b032719a7005" + integrity sha512-2/kNscPhpcxrOigMZzbiWF7dz8ilhb/nIHU3EyZiXWXpeq/au8qJ8VhdftMkty3n7Gj6HIGalQG8oiBNB3AJgA== + +cacheable-lookup@^6.0.4: version "6.1.0" - resolved "https://registry.yarnpkg.com/cacheable-request/-/cacheable-request-6.1.0.tgz#20ffb8bd162ba4be11e9567d823db651052ca912" - integrity sha512-Oj3cAGPCqOZX7Rz64Uny2GYAZNliQSqfbePrgAQ1wKAihYmCUnraBtJtKcGR4xz7wF+LoJC+ssFZvv5BgF9Igg== + resolved "https://registry.yarnpkg.com/cacheable-lookup/-/cacheable-lookup-6.1.0.tgz#0330a543471c61faa4e9035db583aad753b36385" + integrity sha512-KJ/Dmo1lDDhmW2XDPMo+9oiy/CeqosPguPCrgcVzKyZrL6pM1gU2GmPY/xo6OQPTUaA/c0kwHuywB4E6nmT9ww== + +cacheable-request@^7.0.2: + version "7.0.2" + resolved "https://registry.yarnpkg.com/cacheable-request/-/cacheable-request-7.0.2.tgz#ea0d0b889364a25854757301ca12b2da77f91d27" + integrity sha512-pouW8/FmiPQbuGpkXQ9BAPv/Mo5xDGANgSNXzTzJ8DrKGuXOssM4wIQRjfanNRh3Yu5cfYPvcorqbhg2KIJtew== dependencies: clone-response "^1.0.2" get-stream "^5.1.0" http-cache-semantics "^4.0.0" - keyv "^3.0.0" + keyv "^4.0.0" lowercase-keys "^2.0.0" - normalize-url "^4.1.0" - responselike "^1.0.2" + normalize-url "^6.0.1" + responselike "^2.0.0" call-bind@^1.0.0, call-bind@^1.0.2: version "1.0.2" @@ -1528,10 +1538,10 @@ camelcase@^6.0.0: resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-6.3.0.tgz#5685b95eb209ac9c0c177467778c9c84df58ba9a" integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA== -caniuse-lite@^1.0.30001349: - version "1.0.30001352" - resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001352.tgz#cc6f5da3f983979ad1e2cdbae0505dccaa7c6a12" - integrity sha512-GUgH8w6YergqPQDGWhJGt8GDRnY0L/iJVQcU3eJ46GYf52R8tk0Wxp0PymuFVZboJYXGiCqwozAYZNRjVj6IcA== +caniuse-lite@^1.0.30001400: + version "1.0.30001422" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001422.tgz#f2d7c6202c49a8359e6e35add894d88ef93edba1" + integrity sha512-hSesn02u1QacQHhaxl/kNMZwqVG35Sz/8DgvmgedxSH8z9UUpcDYSPYgsj3x5dQNRcNp6BwpSfQfVzYUTm+fog== caseless@~0.12.0: version "0.12.0" @@ -1545,6 +1555,11 @@ chai-as-promised@^7.1.1: dependencies: check-error "^1.0.2" +chai-like@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/chai-like/-/chai-like-1.1.1.tgz#8c558a414c34514e814d497c772547ceb7958f64" + integrity sha512-VKa9z/SnhXhkT1zIjtPACFWSoWsqVoaz1Vg+ecrKo5DCKVlgL30F/pEyEvXPBOVwCgLZcWUleCM/C1okaKdTTA== + chai@^4.3.6: version "4.3.6" resolved "https://registry.yarnpkg.com/chai/-/chai-4.3.6.tgz#ffe4ba2d9fa9d6680cc0b370adae709ec9011e9c" @@ -1633,6 +1648,15 @@ cliui@^7.0.2: strip-ansi 
"^6.0.0" wrap-ansi "^7.0.0" +cliui@^8.0.1: + version "8.0.1" + resolved "https://registry.yarnpkg.com/cliui/-/cliui-8.0.1.tgz#0c04b075db02cbfe60dc8e6cf2f5486b1a3608aa" + integrity sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ== + dependencies: + string-width "^4.2.0" + strip-ansi "^6.0.1" + wrap-ansi "^7.0.0" + clone-deep@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/clone-deep/-/clone-deep-4.0.1.tgz#c19fd9bdbbf85942b4fd979c84dcf7d5f07c2387" @@ -1643,9 +1667,9 @@ clone-deep@^4.0.1: shallow-clone "^3.0.0" clone-response@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/clone-response/-/clone-response-1.0.2.tgz#d1dc973920314df67fbeb94223b4ee350239e96b" - integrity sha512-yjLXh88P599UOyPTFX0POsd7WxnbsVsGohcwzHOLspIhhpalPw1BcqED8NblyZLKcGrL8dTgMlcaZxV2jAD41Q== + version "1.0.3" + resolved "https://registry.yarnpkg.com/clone-response/-/clone-response-1.0.3.tgz#af2032aa47816399cf5f0a1d0db902f517abb8c3" + integrity sha512-ROoL94jJH2dUVML2Y/5PEDNaSHgeOdSDicUyS7izcF63G6sTc/FTjLub4b8Il9S8S0beOfYt0TaA5qvFK+w0wA== dependencies: mimic-response "^1.0.0" @@ -1727,11 +1751,9 @@ content-type@~1.0.4: integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA== convert-source-map@^1.7.0: - version "1.8.0" - resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.8.0.tgz#f3373c32d21b4d780dd8004514684fb791ca4369" - integrity sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA== - dependencies: - safe-buffer "~5.1.1" + version "1.9.0" + resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.9.0.tgz#7faae62353fb4213366d0ca98358d22e8368b05f" + integrity sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A== cookie-signature@1.0.6: version "1.0.6" @@ -1743,11 +1765,6 @@ cookie@0.5.0: resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.5.0.tgz#d1f5d71adec6558c58f389987c366aa47e994f8b" integrity sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw== -cookiejar@^2.1.1: - version "2.1.3" - resolved "https://registry.yarnpkg.com/cookiejar/-/cookiejar-2.1.3.tgz#fc7a6216e408e74414b90230050842dacda75acc" - integrity sha512-JxbCBUdrfr6AQjOXrxoTvAMJO4HBTUIlBzslcJPAz+/KT8yk53fXun51u+RenNYvad/+Vc2DIz5o9UxlCDymFQ== - core-util-is@1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" @@ -1802,6 +1819,13 @@ create-require@^1.1.0: resolved "https://registry.yarnpkg.com/create-require/-/create-require-1.1.1.tgz#c1d7e8f1e5f6cfc9ff65f9cd352d37348756c333" integrity sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ== +cross-fetch@^3.1.4: + version "3.1.5" + resolved "https://registry.yarnpkg.com/cross-fetch/-/cross-fetch-3.1.5.tgz#e1389f44d9e7ba767907f7af8454787952ab534f" + integrity sha512-lvb1SBsI0Z7GDwmuid+mU3kWVBwTVUbe7S0H52yaaAdQOXq2YktTCZdlAcNKFzE6QtRz0snpw9bNiPeOIkkQvw== + dependencies: + node-fetch "2.6.7" + cross-spawn@^7.0.2: version "7.0.3" resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" @@ -1843,6 +1867,11 @@ dashdash@^1.12.0: dependencies: assert-plus "^1.0.0" +data-uri-to-buffer@^4.0.0: + version "4.0.0" + resolved 
"https://registry.yarnpkg.com/data-uri-to-buffer/-/data-uri-to-buffer-4.0.0.tgz#b5db46aea50f6176428ac05b73be39a57701a64b" + integrity sha512-Vr3mLBA8qWmcuschSLAOogKgQ/Jwxulv3RNE4FXnYWRGujzrRWQI4m12fQqRkwX06C0KanhLr4hK+GydchZsaA== + debug@2.6.9, debug@^2.2.0: version "2.6.9" resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" @@ -1867,7 +1896,7 @@ decode-uri-component@^0.2.0: resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.0.tgz#eb3913333458775cb84cd1a1fae062106bb87545" integrity sha512-hjf+xovcEn31w/EUYdTXQh/8smFL/dzYjohQGEIgjyNavaJfBY2p5F527Bo1VPATxv0VYTUC2bOcXvqFwk78Og== -decompress-response@^3.2.0, decompress-response@^3.3.0: +decompress-response@^3.3.0: version "3.3.0" resolved "https://registry.yarnpkg.com/decompress-response/-/decompress-response-3.3.0.tgz#80a4dd323748384bfa248083622aedec982adff3" integrity sha512-BzRPQuY1ip+qDonAOz42gRm/pg9F768C+npV/4JOsxRC2sq+Rlk+Q4ZCAsOhnIaMrgarILY+RMUIvMmmX1qAEA== @@ -1893,10 +1922,10 @@ deep-is@^0.1.3: resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.4.tgz#a6f2dce612fadd2ef1f519b73551f17e85199831" integrity sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ== -defer-to-connect@^1.0.1: - version "1.1.3" - resolved "https://registry.yarnpkg.com/defer-to-connect/-/defer-to-connect-1.1.3.tgz#331ae050c08dcf789f8c83a7b81f0ed94f4ac591" - integrity sha512-0ISdNousHvZT2EiFlZeZAHBUvSxmKswVCEf8hW7KWgG4a8MVEu/3Vb6uWYozkjylyCxe0JBIiRB1jV45S70WVQ== +defer-to-connect@^2.0.0, defer-to-connect@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/defer-to-connect/-/defer-to-connect-2.0.1.tgz#8016bdb4143e4632b77a3449c6236277de520587" + integrity sha512-4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg== define-properties@^1.1.3, define-properties@^1.1.4: version "1.1.4" @@ -1967,11 +1996,6 @@ dom-walk@^0.1.0: resolved "https://registry.yarnpkg.com/dom-walk/-/dom-walk-0.1.2.tgz#0c548bef048f4d1f2a97249002236060daa3fd84" integrity sha512-6QvTW9mrGeIegrFXdtQi9pk7O/nSK6lSdXW2eqUspN5LWD7UTji2Fqw5V2YLjBpHEoU9Xl/eUWNpDeZvoyOv2w== -duplexer3@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/duplexer3/-/duplexer3-0.1.4.tgz#ee01dd1cac0ed3cbc7fdbea37dc0a8f1ce002ce2" - integrity sha512-CEj8FwwNA4cVH2uFCoHUrmojhYh1vmCdOaneKJXwkeY1i9jnlslVo9dx+hQ5Hl9GnH/Bwy/IjxAyOePyPKYnzA== - ecc-jsbn@~0.1.1: version "0.1.2" resolved "https://registry.yarnpkg.com/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz#3a83a904e54353287874c564b7549386849a98c9" @@ -1992,10 +2016,10 @@ ee-first@1.1.1: resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" integrity sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow== -electron-to-chromium@^1.4.147: - version "1.4.150" - resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.150.tgz#89f0e12505462d5df7e56c5b91aff7e1dfdd33ec" - integrity sha512-MP3oBer0X7ZeS9GJ0H6lmkn561UxiwOIY9TTkdxVY7lI9G6GVCKfgJaHaDcakwdKxBXA4T3ybeswH/WBIN/KTA== +electron-to-chromium@^1.4.251: + version "1.4.284" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.284.tgz#61046d1e4cab3a25238f6bf7413795270f125592" + integrity sha512-M8WEXFuKXMYMVr45fo8mq0wUrrJHheiKZf6BArTKk9ZBYCKJEOU5H8cdWgDT+qCVZf7Na4lVUaZsA+h6uA9+PA== elliptic@6.5.4, elliptic@^6.4.0, elliptic@^6.5.3, elliptic@^6.5.4: version "6.5.4" @@ -2028,30 +2052,31 @@ 
end-of-stream@^1.1.0: once "^1.4.0" es-abstract@^1.19.0, es-abstract@^1.19.5, es-abstract@^1.20.0: - version "1.20.1" - resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.20.1.tgz#027292cd6ef44bd12b1913b828116f54787d1814" - integrity sha512-WEm2oBhfoI2sImeM4OF2zE2V3BYdSF+KnSi9Sidz51fQHd7+JuF8Xgcj9/0o+OWeIeIS/MiuNnlruQrJf16GQA== + version "1.20.4" + resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.20.4.tgz#1d103f9f8d78d4cf0713edcd6d0ed1a46eed5861" + integrity sha512-0UtvRN79eMe2L+UNEF1BwRe364sj/DXhQ/k5FmivgoSdpM90b8Jc0mDzKMGo7QS0BVbOP/bTwBKNnDc9rNzaPA== dependencies: call-bind "^1.0.2" es-to-primitive "^1.2.1" function-bind "^1.1.1" function.prototype.name "^1.1.5" - get-intrinsic "^1.1.1" + get-intrinsic "^1.1.3" get-symbol-description "^1.0.0" has "^1.0.3" has-property-descriptors "^1.0.0" has-symbols "^1.0.3" internal-slot "^1.0.3" - is-callable "^1.2.4" + is-callable "^1.2.7" is-negative-zero "^2.0.2" is-regex "^1.1.4" is-shared-array-buffer "^1.0.2" is-string "^1.0.7" is-weakref "^1.0.2" - object-inspect "^1.12.0" + object-inspect "^1.12.2" object-keys "^1.1.1" - object.assign "^4.1.2" + object.assign "^4.1.4" regexp.prototype.flags "^1.4.3" + safe-regex-test "^1.0.0" string.prototype.trimend "^1.0.5" string.prototype.trimstart "^1.0.5" unbox-primitive "^1.0.2" @@ -2066,9 +2091,9 @@ es-to-primitive@^1.2.1: is-symbol "^1.0.2" es5-ext@^0.10.35, es5-ext@^0.10.50: - version "0.10.61" - resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.61.tgz#311de37949ef86b6b0dcea894d1ffedb909d3269" - integrity sha512-yFhIqQAzu2Ca2I4SE2Au3rxVfmohU9Y7wqGR+s7+H7krk26NXhIRAZDgqd6xqjCEFUomDEA3/Bo/7fKmIkW1kA== + version "0.10.62" + resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.62.tgz#5e6adc19a6da524bf3d1e02bbc8960e5eb49a9a5" + integrity sha512-BHLqn0klhEpnOKSrzn/Xsz2UIW8j+cGmo9JLzr8BiUapV8hPL9+FliFqjwr9ngW7jWdnxv6eO+/LqyhJVqgrjA== dependencies: es6-iterator "^2.0.3" es6-symbol "^3.1.3" @@ -2083,6 +2108,11 @@ es6-iterator@^2.0.3: es5-ext "^0.10.35" es6-symbol "^3.1.1" +es6-promise@^4.2.8: + version "4.2.8" + resolved "https://registry.yarnpkg.com/es6-promise/-/es6-promise-4.2.8.tgz#4eb21594c972bc40553d276e510539143db53e0a" + integrity sha512-HJDGx5daxeIvxdBxvG2cb9g4tEvwIk3i8+nhX0yGrYmZUzbkdg8QbDevheDB8gd0//uPj4c1EQua8Q+MViT0/w== + es6-symbol@^3.1.1, es6-symbol@^3.1.3: version "3.1.3" resolved "https://registry.yarnpkg.com/es6-symbol/-/es6-symbol-3.1.3.tgz#bad5d3c1bcdac28269f4cb331e431c78ac705d18" @@ -2111,6 +2141,14 @@ escape-string-regexp@^1.0.5: resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" integrity sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg== +eslint-plugin-mocha@^10.1.0: + version "10.1.0" + resolved "https://registry.yarnpkg.com/eslint-plugin-mocha/-/eslint-plugin-mocha-10.1.0.tgz#69325414f875be87fb2cb00b2ef33168d4eb7c8d" + integrity sha512-xLqqWUF17llsogVOC+8C6/jvQ+4IoOREbN7ZCHuOHuD6cT5cDD4h7f2LgsZuzMAiwswWE21tO7ExaknHVDrSkw== + dependencies: + eslint-utils "^3.0.0" + rambda "^7.1.0" + eslint-scope@^5.1.1: version "5.1.1" resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-5.1.1.tgz#e786e59a66cb92b3f6c1fb0d508aab174848f48c" @@ -2144,13 +2182,14 @@ eslint-visitor-keys@^3.3.0: resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-3.3.0.tgz#f6480fa6b1f30efe2d1968aa8ac745b862469826" integrity 
sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA== -eslint@^8.16.0: - version "8.17.0" - resolved "https://registry.yarnpkg.com/eslint/-/eslint-8.17.0.tgz#1cfc4b6b6912f77d24b874ca1506b0fe09328c21" - integrity sha512-gq0m0BTJfci60Fz4nczYxNAlED+sMcihltndR8t9t1evnU/azx53x3t2UHXC/uRjcbvRw/XctpaNygSTcQD+Iw== +eslint@^8.25.0: + version "8.25.0" + resolved "https://registry.yarnpkg.com/eslint/-/eslint-8.25.0.tgz#00eb962f50962165d0c4ee3327708315eaa8058b" + integrity sha512-DVlJOZ4Pn50zcKW5bYH7GQK/9MsoQG2d5eDH0ebEkE8PbgzTTmtt/VTH9GGJ4BfeZCpBLqFfvsjX35UacUL83A== dependencies: - "@eslint/eslintrc" "^1.3.0" - "@humanwhocodes/config-array" "^0.9.2" + "@eslint/eslintrc" "^1.3.3" + "@humanwhocodes/config-array" "^0.10.5" + "@humanwhocodes/module-importer" "^1.0.1" ajv "^6.10.0" chalk "^4.0.0" cross-spawn "^7.0.2" @@ -2160,18 +2199,21 @@ eslint@^8.16.0: eslint-scope "^7.1.1" eslint-utils "^3.0.0" eslint-visitor-keys "^3.3.0" - espree "^9.3.2" + espree "^9.4.0" esquery "^1.4.0" esutils "^2.0.2" fast-deep-equal "^3.1.3" file-entry-cache "^6.0.1" - functional-red-black-tree "^1.0.1" + find-up "^5.0.0" glob-parent "^6.0.1" globals "^13.15.0" + globby "^11.1.0" + grapheme-splitter "^1.0.4" ignore "^5.2.0" import-fresh "^3.0.0" imurmurhash "^0.1.4" is-glob "^4.0.0" + js-sdsl "^4.1.4" js-yaml "^4.1.0" json-stable-stringify-without-jsonify "^1.0.1" levn "^0.4.1" @@ -2183,14 +2225,13 @@ eslint@^8.16.0: strip-ansi "^6.0.1" strip-json-comments "^3.1.0" text-table "^0.2.0" - v8-compile-cache "^2.0.3" -espree@^9.3.2: - version "9.3.2" - resolved "https://registry.yarnpkg.com/espree/-/espree-9.3.2.tgz#f58f77bd334731182801ced3380a8cc859091596" - integrity sha512-D211tC7ZwouTIuY5x9XnS0E9sWNChB7IYKX/Xp5eQj3nFXhqmiUDB9q27y76oFl8jTg3pXcQx/bpxMfs3CIZbA== +espree@^9.4.0: + version "9.4.0" + resolved "https://registry.yarnpkg.com/espree/-/espree-9.4.0.tgz#cd4bc3d6e9336c433265fc0aa016fc1aaf182f8a" + integrity sha512-DQmnRpLj7f6TgN/NYb0MTzJXL+vJF9h3pHy4JhCIs3zwcgez8xmGg3sXHcEO97BrmO2OSvCwMdfdlyl+E9KjOw== dependencies: - acorn "^8.7.1" + acorn "^8.8.0" acorn-jsx "^5.3.2" eslint-visitor-keys "^3.3.0" @@ -2285,7 +2326,7 @@ ethereum-cryptography@^0.1.3: secp256k1 "^4.0.1" setimmediate "^1.0.5" -ethereumjs-util@^7.0.10, ethereumjs-util@^7.1.0, ethereumjs-util@^7.1.4, ethereumjs-util@^7.1.5: +ethereumjs-util@^7.0.10, ethereumjs-util@^7.1.0, ethereumjs-util@^7.1.5: version "7.1.5" resolved "https://registry.yarnpkg.com/ethereumjs-util/-/ethereumjs-util-7.1.5.tgz#9ecf04861e4fbbeed7465ece5f23317ad1129181" integrity sha512-SDl5kKrQAudFBUe5OJM9Ac6WmMyYmXX/6sTmLZ3ffG2eY6ZIGBes3pEDxNN6V72WyOw4CPD5RomKdsa8DAAwLg== @@ -2323,13 +2364,13 @@ evp_bytestokey@^1.0.0, evp_bytestokey@^1.0.3: safe-buffer "^5.1.1" express@^4.14.0: - version "4.18.1" - resolved "https://registry.yarnpkg.com/express/-/express-4.18.1.tgz#7797de8b9c72c857b9cd0e14a5eea80666267caf" - integrity sha512-zZBcOX9TfehHQhtupq57OF8lFZ3UZi08Y97dwFCkD8p9d/d2Y3M+ykKcwaMDEL+4qyUolgBDX6AblpR3fL212Q== + version "4.18.2" + resolved "https://registry.yarnpkg.com/express/-/express-4.18.2.tgz#3fabe08296e930c796c19e3c516979386ba9fd59" + integrity sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ== dependencies: accepts "~1.3.8" array-flatten "1.1.1" - body-parser "1.20.0" + body-parser "1.20.1" content-disposition "0.5.4" content-type "~1.0.4" cookie "0.5.0" @@ -2348,7 +2389,7 @@ express@^4.14.0: parseurl "~1.3.3" path-to-regexp "0.1.7" proxy-addr "~2.0.7" - qs "6.10.3" + qs "6.11.0" range-parser "~1.2.1" 
safe-buffer "5.2.1" send "0.18.0" @@ -2360,11 +2401,11 @@ express@^4.14.0: vary "~1.1.2" ext@^1.1.2: - version "1.6.0" - resolved "https://registry.yarnpkg.com/ext/-/ext-1.6.0.tgz#3871d50641e874cc172e2b53f919842d19db4c52" - integrity sha512-sdBImtzkq2HpkdRLtlLWDa6w4DX22ijZLKx8BMPUuKe1c5lbN6xwQDQCxSfxBQnHZ13ls/FH0MQZx/q/gr6FQg== + version "1.7.0" + resolved "https://registry.yarnpkg.com/ext/-/ext-1.7.0.tgz#0ea4383c0103d60e70be99e9a7f11027a33c4f5f" + integrity sha512-6hxeJYaL110a9b5TEJSj0gojyHQAmA2ch5Os+ySCiA1QGdS697XWY1pzsrSjqA9LDEEgdB/KypIlR59RcLuHYw== dependencies: - type "^2.5.0" + type "^2.7.2" extend@~3.0.2: version "3.0.2" @@ -2387,9 +2428,9 @@ fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== fast-glob@^3.2.9: - version "3.2.11" - resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.11.tgz#a1172ad95ceb8a16e20caa5c5e56480e5129c1d9" - integrity sha512-xrO3+1bxSo3ZVHAnqzyuewYT6aMFHRAd4Kcs92MAonjwQZLsK9d0SF1IyQ3k5PoirxTW0Oe/RqFgMQ6TcNE5Ew== + version "3.2.12" + resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.12.tgz#7f39ec99c2e6ab030337142da9e0c18f37afae80" + integrity sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w== dependencies: "@nodelib/fs.stat" "^2.0.2" "@nodelib/fs.walk" "^1.2.3" @@ -2414,6 +2455,14 @@ fastq@^1.6.0: dependencies: reusify "^1.0.4" +fetch-blob@^3.1.2, fetch-blob@^3.1.4: + version "3.2.0" + resolved "https://registry.yarnpkg.com/fetch-blob/-/fetch-blob-3.2.0.tgz#f09b8d4bbd45adc6f0c20b7e787e793e309dcce9" + integrity sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ== + dependencies: + node-domexception "^1.0.0" + web-streams-polyfill "^3.0.3" + file-entry-cache@^6.0.1: version "6.0.1" resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-6.0.1.tgz#211b2dd9659cb0394b073e7323ac3c933d522027" @@ -2459,7 +2508,7 @@ find-process@^1.4.7: commander "^5.1.0" debug "^4.1.1" -find-up@5.0.0: +find-up@5.0.0, find-up@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/find-up/-/find-up-5.0.0.tgz#4c92819ecb7083561e4f4a240a86be5198f536fc" integrity sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng== @@ -2488,14 +2537,14 @@ flat@^5.0.2: integrity sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ== flatted@^3.1.0: - version "3.2.5" - resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.2.5.tgz#76c8584f4fc843db64702a6bd04ab7a8bd666da3" - integrity sha512-WIWGi2L3DyTUvUrwRKgGi9TwxQMUEqPOPQBVi71R96jZXJdFskXEmf54BoZaS1kknGODoIGASGEzBUYdyMCBJg== + version "3.2.7" + resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.2.7.tgz#609f39207cb614b89d0765b477cb2d437fbf9787" + integrity sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ== follow-redirects@^1.12.1: - version "1.15.1" - resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.1.tgz#0ca6a452306c9b276e4d3127483e29575e207ad5" - integrity sha512-yLAMQs+k0b2m7cVxpS1VKJVvoz7SS9Td1zss3XRwXj+ZDH00RJgnuLx7E44wx02kQLrdM3aOOy+FpzS7+8OizA== + version "1.15.2" + resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.2.tgz#b460864144ba63f2681096f274c4e57026da2c13" + integrity sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA== for-each@^0.3.3: version "0.3.3" @@ 
-2509,6 +2558,11 @@ forever-agent@~0.6.1: resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91" integrity sha512-j0KLYPhm6zeac4lz3oJ3o65qvgQCcPubiyotZrXqEaG4hNagNYO8qdlUrX5vwqv9ohqeT/Z3j6+yW067yWWdUw== +form-data-encoder@1.7.1: + version "1.7.1" + resolved "https://registry.yarnpkg.com/form-data-encoder/-/form-data-encoder-1.7.1.tgz#ac80660e4f87ee0d3d3c3638b7da8278ddb8ec96" + integrity sha512-EFRDrsMm/kyqbTQocNvRXMLjc7Es2Vk+IQFx/YW7hkUH1eBl4J1fqiP34l74Yt0pFLCNpc06fkbVk00008mzjg== + form-data@^3.0.0: version "3.0.1" resolved "https://registry.yarnpkg.com/form-data/-/form-data-3.0.1.tgz#ebd53791b78356a99af9a300d4282c4d5eb9755f" @@ -2527,6 +2581,13 @@ form-data@~2.3.2: combined-stream "^1.0.6" mime-types "^2.1.12" +formdata-polyfill@^4.0.10: + version "4.0.10" + resolved "https://registry.yarnpkg.com/formdata-polyfill/-/formdata-polyfill-4.0.10.tgz#24807c31c9d402e002ab3d8c720144ceb8848423" + integrity sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g== + dependencies: + fetch-blob "^3.1.2" + forwarded@0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.2.0.tgz#2269936428aad4c15c7ebe9779a84bf0b2a81811" @@ -2578,11 +2639,6 @@ function.prototype.name@^1.1.5: es-abstract "^1.19.0" functions-have-names "^1.2.2" -functional-red-black-tree@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz#1b0ab3bd553b2a0d6399d29c0e3ea0b252078327" - integrity sha512-dsKNQNdj6xA3T+QlADDA7mOSlX0qiMINjn0cgr+eGHGsbSHzTabcIogz2+p/iqP1Xs6EP/sS2SbqH+brGTbq0g== - functions-have-names@^1.2.2: version "1.2.3" resolved "https://registry.yarnpkg.com/functions-have-names/-/functions-have-names-1.2.3.tgz#0404fe4ee2ba2f607f0e0ec3c80bae994133b834" @@ -2603,27 +2659,15 @@ get-func-name@^2.0.0: resolved "https://registry.yarnpkg.com/get-func-name/-/get-func-name-2.0.0.tgz#ead774abee72e20409433a066366023dd6887a41" integrity sha512-Hm0ixYtaSZ/V7C8FJrtZIuBBI+iSgL+1Aq82zSu8VQNB4S3Gk8e7Qs3VwBDJAhmRZcFqkl3tQu36g/Foh5I5ig== -get-intrinsic@^1.0.2, get-intrinsic@^1.1.0, get-intrinsic@^1.1.1: - version "1.1.2" - resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.1.2.tgz#336975123e05ad0b7ba41f152ee4aadbea6cf598" - integrity sha512-Jfm3OyCxHh9DJyc28qGk+JmfkpO41A4XkneDSujN9MDXrm4oDKdHvndhZ2dN94+ERNfkYJWDclW6k2L/ZGHjXA== +get-intrinsic@^1.0.2, get-intrinsic@^1.1.0, get-intrinsic@^1.1.1, get-intrinsic@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.1.3.tgz#063c84329ad93e83893c7f4f243ef63ffa351385" + integrity sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A== dependencies: function-bind "^1.1.1" has "^1.0.3" has-symbols "^1.0.3" -get-stream@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-3.0.0.tgz#8e943d1358dc37555054ecbe2edb05aa174ede14" - integrity sha512-GlhdIUuVakc8SJ6kK0zAFbiGzRFzNnY4jUuEbV9UROo4Y+0Ny4fjvcZFVTeDA4odpFyOQzaw6hXukJSq/f28sQ== - -get-stream@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5" - integrity sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w== - dependencies: - pump "^3.0.0" - get-stream@^5.1.0: version "5.2.0" resolved 
"https://registry.yarnpkg.com/get-stream/-/get-stream-5.2.0.tgz#4966a1795ee5ace65e706c4b7beb71257d6e22d3" @@ -2631,6 +2675,11 @@ get-stream@^5.1.0: dependencies: pump "^3.0.0" +get-stream@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7" + integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg== + get-symbol-description@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/get-symbol-description/-/get-symbol-description-1.0.0.tgz#7fdb81c900101fbd564dd5f1a30af5aadc1e58d6" @@ -2698,9 +2747,9 @@ globals@^11.1.0: integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== globals@^13.15.0: - version "13.15.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-13.15.0.tgz#38113218c907d2f7e98658af246cef8b77e90bac" - integrity sha512-bpzcOlgDhMG070Av0Vy5Owklpv1I6+j96GhUI7Rh7IzDCKLzboflLrrfqMu8NquDbiR4EOQk7XzJwqVJxicxog== + version "13.17.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-13.17.0.tgz#902eb1e680a41da93945adbdcb5a9f361ba69bd4" + integrity sha512-1C+6nQRb1GwGMKm2dH/E7enFAMxGTmGI7/dEdhy/DNelv85w9B72t3uc5frtMNXIbzrarJJ/lTCjcaZwbLJmyw== dependencies: type-fest "^0.20.2" @@ -2716,48 +2765,52 @@ globby@^11.1.0: merge2 "^1.4.1" slash "^3.0.0" -got@9.6.0: - version "9.6.0" - resolved "https://registry.yarnpkg.com/got/-/got-9.6.0.tgz#edf45e7d67f99545705de1f7bbeeeb121765ed85" - integrity sha512-R7eWptXuGYxwijs0eV+v3o6+XH1IqVK8dJOEecQfTmkncw9AV4dcw/Dhxi8MdlqPthxxpZyizMzyg8RTmEsG+Q== - dependencies: - "@sindresorhus/is" "^0.14.0" - "@szmarczak/http-timer" "^1.1.2" - cacheable-request "^6.0.0" - decompress-response "^3.3.0" - duplexer3 "^0.1.4" - get-stream "^4.1.0" - lowercase-keys "^1.0.1" - mimic-response "^1.0.1" - p-cancelable "^1.0.0" - to-readable-stream "^1.0.0" - url-parse-lax "^3.0.0" - -got@^7.1.0: - version "7.1.0" - resolved "https://registry.yarnpkg.com/got/-/got-7.1.0.tgz#05450fd84094e6bbea56f451a43a9c289166385a" - integrity sha512-Y5WMo7xKKq1muPsxD+KmrR8DH5auG7fBdDVueZwETwV6VytKyU9OX/ddpq2/1hp1vIPvVb4T81dKQz3BivkNLw== - dependencies: - decompress-response "^3.2.0" - duplexer3 "^0.1.4" - get-stream "^3.0.0" - is-plain-obj "^1.1.0" - is-retry-allowed "^1.0.0" - is-stream "^1.0.0" - isurl "^1.0.0-alpha5" - lowercase-keys "^1.0.0" - p-cancelable "^0.3.0" - p-timeout "^1.1.1" - safe-buffer "^5.0.1" - timed-out "^4.0.0" - url-parse-lax "^1.0.0" - url-to-options "^1.0.1" +got@12.1.0: + version "12.1.0" + resolved "https://registry.yarnpkg.com/got/-/got-12.1.0.tgz#099f3815305c682be4fd6b0ee0726d8e4c6b0af4" + integrity sha512-hBv2ty9QN2RdbJJMK3hesmSkFTjVIHyIDDbssCKnSmq62edGgImJWD10Eb1k77TiV1bxloxqcFAVK8+9pkhOig== + dependencies: + "@sindresorhus/is" "^4.6.0" + "@szmarczak/http-timer" "^5.0.1" + "@types/cacheable-request" "^6.0.2" + "@types/responselike" "^1.0.0" + cacheable-lookup "^6.0.4" + cacheable-request "^7.0.2" + decompress-response "^6.0.0" + form-data-encoder "1.7.1" + get-stream "^6.0.1" + http2-wrapper "^2.1.10" + lowercase-keys "^3.0.0" + p-cancelable "^3.0.0" + responselike "^2.0.0" + +got@^11.8.5: + version "11.8.5" + resolved "https://registry.yarnpkg.com/got/-/got-11.8.5.tgz#ce77d045136de56e8f024bebb82ea349bc730046" + integrity sha512-o0Je4NvQObAuZPHLFoRSkdG2lTgtcynqymzg2Vupdx6PorhaT5MCbIyXG6d4D94kk8ZG57QeosgdiqfJWhEhlQ== + dependencies: + "@sindresorhus/is" "^4.0.0" + "@szmarczak/http-timer" "^4.0.5" + "@types/cacheable-request" "^6.0.1" + "@types/responselike" 
"^1.0.0" + cacheable-lookup "^5.0.3" + cacheable-request "^7.0.2" + decompress-response "^6.0.0" + http2-wrapper "^1.0.0-beta.5.2" + lowercase-keys "^2.0.0" + p-cancelable "^2.0.0" + responselike "^2.0.0" graceful-fs@^4.1.2, graceful-fs@^4.1.6: version "4.2.10" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.10.tgz#147d3a006da4ca3ce14728c7aefc287c367d7a6c" integrity sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA== +grapheme-splitter@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/grapheme-splitter/-/grapheme-splitter-1.0.4.tgz#9cf3a665c6247479896834af35cf1dbb4400767e" + integrity sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ== + handlebars@^4.7.7: version "4.7.7" resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.7.7.tgz#9ce33416aad02dbd6c8fafa8240d5d98004945a1" @@ -2805,23 +2858,11 @@ has-property-descriptors@^1.0.0: dependencies: get-intrinsic "^1.1.1" -has-symbol-support-x@^1.4.1: - version "1.4.2" - resolved "https://registry.yarnpkg.com/has-symbol-support-x/-/has-symbol-support-x-1.4.2.tgz#1409f98bc00247da45da67cee0a36f282ff26455" - integrity sha512-3ToOva++HaW+eCpgqZrCfN51IPB+7bJNVT6CUATzueB5Heb8o6Nam0V3HG5dlDvZU1Gn5QLcbahiKw/XVk5JJw== - -has-symbols@^1.0.1, has-symbols@^1.0.2, has-symbols@^1.0.3: +has-symbols@^1.0.2, has-symbols@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.3.tgz#bb7b2c4349251dce87b125f7bdf874aa7c8b39f8" integrity sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A== -has-to-string-tag-x@^1.2.0: - version "1.4.1" - resolved "https://registry.yarnpkg.com/has-to-string-tag-x/-/has-to-string-tag-x-1.4.1.tgz#a045ab383d7b4b2012a00148ab0aa5f290044d4d" - integrity sha512-vdbKfmw+3LoOYVr+mtxHaX5a96+0f3DljYd8JOqvOLsf5mw2Otda2qCDT9qRqLAhrjyQ0h7ual5nOiASpsGNFw== - dependencies: - has-symbol-support-x "^1.4.1" - has-tostringtag@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/has-tostringtag/-/has-tostringtag-1.0.0.tgz#7e133818a7d394734f941e73c3d3f9291e658b25" @@ -2897,6 +2938,22 @@ http-signature@~1.2.0: jsprim "^1.2.2" sshpk "^1.7.0" +http2-wrapper@^1.0.0-beta.5.2: + version "1.0.3" + resolved "https://registry.yarnpkg.com/http2-wrapper/-/http2-wrapper-1.0.3.tgz#b8f55e0c1f25d4ebd08b3b0c2c079f9590800b3d" + integrity sha512-V+23sDMr12Wnz7iTcDeJr3O6AIxlnvT/bmaAAAP/Xda35C90p9599p0F1eHR/N1KILWSoWVAiOMFjBBXaXSMxg== + dependencies: + quick-lru "^5.1.1" + resolve-alpn "^1.0.0" + +http2-wrapper@^2.1.10: + version "2.1.11" + resolved "https://registry.yarnpkg.com/http2-wrapper/-/http2-wrapper-2.1.11.tgz#d7c980c7ffb85be3859b6a96c800b2951ae257ef" + integrity sha512-aNAk5JzLturWEUiuhAN73Jcbq96R7rTitAoXV54FYMatvihnpD2+6PUgU4ce3D/m5VDbw+F5CsyKSF176ptitQ== + dependencies: + quick-lru "^5.1.1" + resolve-alpn "^1.2.0" + iconv-lite@0.4.24: version "0.4.24" resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" @@ -2911,7 +2968,7 @@ idna-uts46-hx@^2.3.1: dependencies: punycode "2.1.0" -ieee754@^1.1.13, ieee754@^1.2.1: +ieee754@^1.1.13: version "1.2.1" resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352" integrity sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA== @@ -2956,11 +3013,6 @@ internal-slot@^1.0.3: has "^1.0.3" side-channel "^1.0.4" -ip-regex@^4.3.0: - version "4.3.0" - resolved 
"https://registry.yarnpkg.com/ip-regex/-/ip-regex-4.3.0.tgz#687275ab0f57fa76978ff8f4dddc8a23d5990db5" - integrity sha512-B9ZWJxHHOHUhUjCPrMpLD4xEq35bUTClHM1S6CBU5ixQnkZmwipwgc96vAd7AAGM9TGHvJR+Uss+/Ak6UphK+Q== - ipaddr.js@1.9.1: version "1.9.1" resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.1.tgz#bff38543eeb8984825079ff3a2a8e6cbd46781b3" @@ -2996,10 +3048,10 @@ is-boolean-object@^1.1.0: call-bind "^1.0.2" has-tostringtag "^1.0.0" -is-callable@^1.1.3, is-callable@^1.1.4, is-callable@^1.2.4: - version "1.2.4" - resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.2.4.tgz#47301d58dd0259407865547853df6d61fe471945" - integrity sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w== +is-callable@^1.1.3, is-callable@^1.1.4, is-callable@^1.2.7: + version "1.2.7" + resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.2.7.tgz#3bc2a85ea742d9e36205dcacdd72ca1fdc51b055" + integrity sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA== is-date-object@^1.0.1: version "1.0.5" @@ -3059,16 +3111,6 @@ is-number@^7.0.0: resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== -is-object@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/is-object/-/is-object-1.0.2.tgz#a56552e1c665c9e950b4a025461da87e72f86fcf" - integrity sha512-2rRIahhZr2UWb45fIOuvZGpFtz0TyOZLf32KxBbSoUCeZR495zCKlWUKKUByk3geS2eAs7ZAABt0Y/Rx0GiQGA== - -is-plain-obj@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-1.1.0.tgz#71a50c8429dfca773c92a390a4a03b39fcd51d3e" - integrity sha512-yvkRyxmFKEOQ4pNXCmJG5AEQNlXJS5LaONXo5/cLdTZdWvsZ1ioJEonLGAosKlMWE8lwUy/bJzMjcw8az73+Fg== - is-plain-obj@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-2.1.0.tgz#45e42e37fccf1f40da8e5f76ee21515840c09287" @@ -3089,11 +3131,6 @@ is-regex@^1.1.4: call-bind "^1.0.2" has-tostringtag "^1.0.0" -is-retry-allowed@^1.0.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/is-retry-allowed/-/is-retry-allowed-1.2.0.tgz#d778488bd0a4666a3be8a1482b9f2baafedea8b4" - integrity sha512-RUbUeKwvm3XG2VYamhJL1xFktgjvPzL0Hq8C+6yrWIswDy3BIXGqCxhxkc30N9jqK311gVU137K8Ei55/zVJRg== - is-shared-array-buffer@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz#8f259c573b60b6a32d4058a1a07430c0a7344c79" @@ -3101,11 +3138,6 @@ is-shared-array-buffer@^1.0.2: dependencies: call-bind "^1.0.2" -is-stream@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44" - integrity sha512-uQPm8kcs47jx38atAcWTVxyltQYoPT68y9aWYdV6yWXSyW8mzSat0TL6CiWdZeCdF3KrAvpVtnHbTv4RN+rqdQ== - is-string@^1.0.5, is-string@^1.0.7: version "1.0.7" resolved "https://registry.yarnpkg.com/is-string/-/is-string-1.0.7.tgz#0dd12bf2006f255bb58f695110eff7491eebc0fd" @@ -3163,13 +3195,10 @@ isstream@~0.1.2: resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a" integrity sha512-Yljz7ffyPbrLpLngrMtZ7NduUgVvi6wG9RJ9IUcyCd59YQ911PBJphODUcbOVbqYfxe1wuYf/LJ8PauMRwsM/g== -isurl@^1.0.0-alpha5: - version "1.0.0" - resolved "https://registry.yarnpkg.com/isurl/-/isurl-1.0.0.tgz#b27f4f49f3cdaa3ea44a0a5b7f3462e6edc39d67" - integrity 
sha512-1P/yWsxPlDtn7QeRD+ULKQPaIaN6yF368GZ2vDfv0AL0NwpStafjWCDDdn0k8wgFMWpVAqG7oJhxHnlud42i9w== - dependencies: - has-to-string-tag-x "^1.2.0" - is-object "^1.0.1" +js-sdsl@^4.1.4: + version "4.1.5" + resolved "https://registry.yarnpkg.com/js-sdsl/-/js-sdsl-4.1.5.tgz#1ff1645e6b4d1b028cd3f862db88c9d887f26e2a" + integrity sha512-08bOAKweV2NUC1wqTtf3qZlnpOX/R2DU9ikpjOHs0H+ibQv3zpncVQg6um4uYtRtrwIX8M4Nh3ytK4HGlYAq7Q== js-sha3@0.8.0, js-sha3@^0.8.0: version "0.8.0" @@ -3203,10 +3232,10 @@ jsesc@^2.5.1: resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== -json-buffer@3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.0.tgz#5b1f397afc75d677bde8bcfc0e47e1f9a3d9a898" - integrity sha512-CuUqjv0FUZIdXkHPI8MezCnFCdaTAacej1TZYulLoAg1h/PhwkdXFN4V/gzY4g+fMBCOV2xF+rp7t2XD2ns/NQ== +json-buffer@3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.1.tgz#9338802a30d3b6605fbe0613e094008ca8c05a13" + integrity sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ== json-schema-traverse@^0.4.1: version "0.4.1" @@ -3259,12 +3288,12 @@ keccak@^3.0.0: node-gyp-build "^4.2.0" readable-stream "^3.6.0" -keyv@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/keyv/-/keyv-3.1.0.tgz#ecc228486f69991e49e9476485a5be1e8fc5c4d9" - integrity sha512-9ykJ/46SN/9KPM/sichzQ7OvXyGDYKGTaDlKMGCAlg2UK8KRy4jb0d8sFc+0Tt0YYnThq8X2RZgCg74RPxgcVA== +keyv@*, keyv@^4.0.0: + version "4.5.0" + resolved "https://registry.yarnpkg.com/keyv/-/keyv-4.5.0.tgz#dbce9ade79610b6e641a9a65f2f6499ba06b9bc6" + integrity sha512-2YvuMsA+jnFGtBareKqgANOEKe1mk3HKiXu2fRmAfyxG0MJAywNhi5ttWA3PMjl4NmpyjZNbFifR2vNjW1znfA== dependencies: - json-buffer "3.0.0" + json-buffer "3.0.1" kind-of@^6.0.2: version "6.0.3" @@ -3319,16 +3348,16 @@ loupe@^2.3.1: dependencies: get-func-name "^2.0.0" -lowercase-keys@^1.0.0, lowercase-keys@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-1.0.1.tgz#6f9e30b47084d971a7c820ff15a6c5167b74c26f" - integrity sha512-G2Lj61tXDnVFFOi8VZds+SoQjtQC3dgokKdDG2mTm1tx4m50NUHBOZSBwQQHyy0V12A0JTG4icfZQH+xPyh8VA== - lowercase-keys@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-2.0.0.tgz#2603e78b7b4b0006cbca2fbcc8a3202558ac9479" integrity sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA== +lowercase-keys@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-3.0.0.tgz#c5e7d442e37ead247ae9db117a9d0a467c89d4f2" + integrity sha512-ozCC6gdQ+glXOQsveKD0YsDy8DSQFjDTz4zyzEHNV5+JP5D62LmfDZ6o1cycFx9ouG940M5dE8C8CTewdj2YWQ== + lru-cache@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" @@ -3416,7 +3445,7 @@ mime@1.6.0: resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== -mimic-response@^1.0.0, mimic-response@^1.0.1: +mimic-response@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/mimic-response/-/mimic-response-1.0.1.tgz#4923538878eef42063cb8a3e3b0798781487ab1b" integrity 
sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ== @@ -3458,9 +3487,9 @@ minimatch@^3.0.4, minimatch@^3.1.1, minimatch@^3.1.2: brace-expansion "^1.1.7" minimist@^1.2.5, minimist@^1.2.6: - version "1.2.6" - resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.6.tgz#8637a5b759ea0d6e98702cfb3a9283323c93af44" - integrity sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q== + version "1.2.7" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.7.tgz#daa1c4d91f507390437c6a8bc01078e7000c4d18" + integrity sha512-bzfL1YUZsP41gmu/qjrEk0Q6i2ix/cVeAhbCbqH9u3zYutS1cLg00qhrD0M2MVdCcx4Sc0UpP2eBWo9rotpq6g== minipass@^2.6.0, minipass@^2.9.0: version "2.9.0" @@ -3496,12 +3525,11 @@ mkdirp@^0.5.5: dependencies: minimist "^1.2.6" -mocha@^10.0.0: - version "10.0.0" - resolved "https://registry.yarnpkg.com/mocha/-/mocha-10.0.0.tgz#205447d8993ec755335c4b13deba3d3a13c4def9" - integrity sha512-0Wl+elVUD43Y0BqPZBzZt8Tnkw9CMUdNYnUsTfOM1vuhJVZL+kiesFYsqwBkEEuEixaiPe5ZQdqDgX2jddhmoA== +mocha@^10.1.0: + version "10.1.0" + resolved "https://registry.yarnpkg.com/mocha/-/mocha-10.1.0.tgz#dbf1114b7c3f9d0ca5de3133906aea3dfc89ef7a" + integrity sha512-vUF7IYxEoN7XhQpFLxQAEMtE4W91acW4B6En9l97MwE9stL1A9gusXfoHZCLVHDUJ/7V5+lbCM6yMqzo5vNymg== dependencies: - "@ungap/promise-all-settled" "1.1.2" ansi-colors "4.1.1" browser-stdout "1.3.1" chokidar "3.5.3" @@ -3619,10 +3647,10 @@ next-tick@^1.1.0: resolved "https://registry.yarnpkg.com/next-tick/-/next-tick-1.1.0.tgz#1836ee30ad56d67ef281b22bd199f709449b35eb" integrity sha512-CXdUiJembsNjuToQvxayPZF9Vqht7hewsvy2sOWafLvi2awflj9mOC6bHIg50orX8IJvWKY9wYQ/zB2kogPslQ== -nock@^13.2.6: - version "13.2.6" - resolved "https://registry.yarnpkg.com/nock/-/nock-13.2.6.tgz#35e419cd9d385ffa67e59523d9699e41b29e1a03" - integrity sha512-GbyeSwSEP0FYouzETZ0l/XNm5tNcDNcXJKw3LCAb+mx8bZSwg1wEEvdL0FAyg5TkBJYiWSCtw6ag4XfmBy60FA== +nock@^13.2.9: + version "13.2.9" + resolved "https://registry.yarnpkg.com/nock/-/nock-13.2.9.tgz#4faf6c28175d36044da4cfa68e33e5a15086ad4c" + integrity sha512-1+XfJNYF1cjGB+TKMWi29eZ0b82QOvQs2YoLNzbpWGqFMtRQHTa57osqdGj4FrFPgkO4D4AZinzUJR9VvW3QUA== dependencies: debug "^4.1.0" json-stringify-safe "^5.0.1" @@ -3634,32 +3662,46 @@ node-addon-api@^2.0.0: resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-2.0.2.tgz#432cfa82962ce494b132e9d72a15b29f71ff5d32" integrity sha512-Ntyt4AIXyaLIuMHF6IOoTakB3K+RWxwtsHNRxllEoA6vPwP9o4866g6YWDLUdnucilZhmkxiHwHr11gAENw+QA== -node-fetch@^2.6.7: +node-domexception@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/node-domexception/-/node-domexception-1.0.0.tgz#6888db46a1f71c0b76b3f7555016b63fe64766e5" + integrity sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ== + +node-fetch@2.6.7: version "2.6.7" resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad" integrity sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ== dependencies: whatwg-url "^5.0.0" +node-fetch@^3.2.10: + version "3.2.10" + resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-3.2.10.tgz#e8347f94b54ae18b57c9c049ef641cef398a85c8" + integrity sha512-MhuzNwdURnZ1Cp4XTazr69K0BTizsBroX7Zx3UgDSVcZYKF/6p0CBe4EUb/hLqmzVhl0UpYfgRljQ4yxE+iCxA== + dependencies: + data-uri-to-buffer "^4.0.0" + fetch-blob "^3.1.4" + formdata-polyfill "^4.0.10" + node-gyp-build@^4.2.0, node-gyp-build@^4.3.0: - version 
"4.4.0" - resolved "https://registry.yarnpkg.com/node-gyp-build/-/node-gyp-build-4.4.0.tgz#42e99687ce87ddeaf3a10b99dc06abc11021f3f4" - integrity sha512-amJnQCcgtRVw9SvoebO3BKGESClrfXGCUTX9hSn1OuGQTQBOZmVd0Z0OlecpuRksKvbsUqALE8jls/ErClAPuQ== + version "4.5.0" + resolved "https://registry.yarnpkg.com/node-gyp-build/-/node-gyp-build-4.5.0.tgz#7a64eefa0b21112f89f58379da128ac177f20e40" + integrity sha512-2iGbaQBV+ITgCz76ZEjmhUKAKVf7xfY1sRl4UiKQspfZMH2h06SyhNsnSVy50cwkFQDGLyif6m/6uFXHkOZ6rg== -node-releases@^2.0.5: - version "2.0.5" - resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.5.tgz#280ed5bc3eba0d96ce44897d8aee478bfb3d9666" - integrity sha512-U9h1NLROZTq9uE1SNffn6WuPDg8icmi3ns4rEl/oTfIle4iLjTliCzgTsbaIFMq/Xn078/lfY/BL0GWZ+psK4Q== +node-releases@^2.0.6: + version "2.0.6" + resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.6.tgz#8a7088c63a55e493845683ebf3c828d8c51c5503" + integrity sha512-PiVXnNuFm5+iYkLBNeq5211hvO38y63T0i2KKh2KnUs3RpzJ+JtODFjkD8yjLwnDkTYF1eKXheUwdssR+NRZdg== normalize-path@^3.0.0, normalize-path@~3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== -normalize-url@^4.1.0: - version "4.5.1" - resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-4.5.1.tgz#0dd90cf1288ee1d1313b87081c9a5932ee48518a" - integrity sha512-9UZCFRHQdNrfTpGg8+1INIg93B6zE0aXMVFkw1WFwvO4SlZywU6aLg5Of0Ap/PgcbSw4LNxvMWXMeugwMCX0AA== +normalize-url@^6.0.1: + version "6.1.0" + resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-6.1.0.tgz#40d0885b535deffe3f3147bec877d05fe4c5668a" + integrity sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A== number-to-bn@1.7.0: version "1.7.0" @@ -3679,7 +3721,7 @@ object-assign@^4, object-assign@^4.1.0, object-assign@^4.1.1: resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" integrity sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg== -object-inspect@^1.12.0, object-inspect@^1.9.0: +object-inspect@^1.12.2, object-inspect@^1.9.0: version "1.12.2" resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.12.2.tgz#c0641f26394532f28ab8d796ab954e43c009a8ea" integrity sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ== @@ -3689,14 +3731,14 @@ object-keys@^1.1.1: resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== -object.assign@^4.1.2: - version "4.1.2" - resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.2.tgz#0ed54a342eceb37b38ff76eb831a0e788cb63940" - integrity sha512-ixT2L5THXsApyiUPYKmW+2EHpXXe5Ii3M+f4e+aJFAHao5amFRW6J0OO6c/LU8Be47utCx2GL89hxGB6XSmKuQ== +object.assign@^4.1.4: + version "4.1.4" + resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.4.tgz#9673c7c7c351ab8c4d0b516f4343ebf4dfb7799f" + integrity sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ== dependencies: - call-bind "^1.0.0" - define-properties "^1.1.3" - has-symbols "^1.0.1" + call-bind "^1.0.2" + define-properties "^1.1.4" + has-symbols "^1.0.3" object-keys "^1.1.1" 
oboe@2.1.5: @@ -3737,20 +3779,15 @@ os-tmpdir@~1.0.2: resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" integrity sha512-D2FR03Vir7FIu45XBY20mTb+/ZSWB00sjU9jdQXt83gDrI4Ztz5Fs7/yy74g2N5SVQY4xY1qDr4rNddwYRVX0g== -p-cancelable@^0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-0.3.0.tgz#b9e123800bcebb7ac13a479be195b507b98d30fa" - integrity sha512-RVbZPLso8+jFeq1MfNvgXtCRED2raz/dKpacfTNxsx6pLEpEomM7gah6VeHSYV3+vo0OAi4MkArtQcWWXuQoyw== - -p-cancelable@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-1.1.0.tgz#d078d15a3af409220c886f1d9a0ca2e441ab26cc" - integrity sha512-s73XxOZ4zpt1edZYZzvhqFa6uvQc1vwUa0K0BdtIZgQMAJj9IbebH+JkgKZc9h+B05PKHLOTl4ajG1BmNrVZlw== +p-cancelable@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-2.1.1.tgz#aab7fbd416582fa32a3db49859c122487c5ed2cf" + integrity sha512-BZOr3nRQHOntUjTrH8+Lh54smKHoHyur8We1V8DSMVrl5A2malOOwuJRnKRDjSnkoeBh4at6BwEnb5I7Jl31wg== -p-finally@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae" - integrity sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow== +p-cancelable@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-3.0.0.tgz#63826694b54d61ca1c20ebcb6d3ecf5e14cd8050" + integrity sha512-mlVgR3PGuzlo0MmTdk4cXqXWlwQDLnONTAg6sm62XkMJEiRxN3GL3SffkYvqwonbkJBcrI7Uvv5Zh9yjvn2iUw== p-limit@^2.0.0: version "2.3.0" @@ -3780,13 +3817,6 @@ p-locate@^5.0.0: dependencies: p-limit "^3.0.2" -p-timeout@^1.1.1: - version "1.2.1" - resolved "https://registry.yarnpkg.com/p-timeout/-/p-timeout-1.2.1.tgz#5eb3b353b7fce99f101a1038880bb054ebbea386" - integrity sha512-gb0ryzr+K2qFqFv6qi3khoeqMZF/+ajxQipEF6NteZVnvz9tzdsfAVj3lYtn1gAXvH5lfLwfxEII799gt/mRIA== - dependencies: - p-finally "^1.0.0" - p-try@^2.0.0: version "2.2.0" resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" @@ -3908,16 +3938,6 @@ prelude-ls@^1.2.1: resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396" integrity sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g== -prepend-http@^1.0.1: - version "1.0.4" - resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-1.0.4.tgz#d4f4562b0ce3696e41ac52d0e002e57a635dc6dc" - integrity sha512-PhmXi5XmoyKw1Un4E+opM2KcsJInDvKyuOumcjjw3waw86ZNjHwVUOOWLc4bCzLdcKNaWBH9e99sbWzDQsVaYg== - -prepend-http@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-2.0.0.tgz#e92434bfa5ea8c19f41cdfd401d741a3c819d897" - integrity sha512-ravE6m9Atw9Z/jjttRUZ+clIXogdghyZAuWJ3qEzjT+jI/dL1ifAqhZeC5VHzQp1MSt1+jxKkFNemj/iO7tVUA== - process@^0.11.10: version "0.11.10" resolved "https://registry.yarnpkg.com/process/-/process-0.11.10.tgz#7332300e840161bda3e69a1d1d91a7d4bc16f182" @@ -3937,9 +3957,9 @@ proxy-addr@~2.0.7: ipaddr.js "1.9.1" psl@^1.1.28: - version "1.8.0" - resolved "https://registry.yarnpkg.com/psl/-/psl-1.8.0.tgz#9326f8bcfb013adcc005fdff056acce020e51c24" - integrity sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ== + version "1.9.0" + resolved "https://registry.yarnpkg.com/psl/-/psl-1.9.0.tgz#d0df2a137f00794565fcaf3b2c00cd09f8d5a5a7" + integrity 
sha512-E/ZsdU4HLs/68gYzgGTkMicWTLPdAftJLfJFlLUAAKZGkStNU72sZjT66SnMDVOfOWY/YAoiD7Jxa9iHvngcag== public-encrypt@^4.0.0: version "4.0.3" @@ -3971,10 +3991,10 @@ punycode@^2.1.0, punycode@^2.1.1: resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== -qs@6.10.3: - version "6.10.3" - resolved "https://registry.yarnpkg.com/qs/-/qs-6.10.3.tgz#d6cde1b2ffca87b5aa57889816c5f81535e22e8e" - integrity sha512-wr7M2E0OFRfIfJZjKGieI8lBKb7fRCH4Fv5KNPEs7gJ8jadvotdsS08PzOKR7opXhZ/Xkjtt3WF9g38drmyRqQ== +qs@6.11.0: + version "6.11.0" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.11.0.tgz#fd0d963446f7a65e1367e01abd85429453f0c37a" + integrity sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q== dependencies: side-channel "^1.0.4" @@ -3997,6 +4017,16 @@ queue-microtask@^1.2.2: resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== +quick-lru@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/quick-lru/-/quick-lru-5.1.1.tgz#366493e6b3e42a3a6885e2e99d18f80fb7a8c932" + integrity sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA== + +rambda@^7.1.0: + version "7.3.0" + resolved "https://registry.yarnpkg.com/rambda/-/rambda-7.3.0.tgz#90e440ead53030a216093865d8d97997a80868ca" + integrity sha512-RFVofZYaG2TaVcxjnM0ejdVWf/59rFq1f57OGnjP3GT/bthzFw0GVr5rkP9PKbVlEuF/Y7bOVPLfiiYfxq/EWQ== + randombytes@^2.0.0, randombytes@^2.0.1, randombytes@^2.0.5, randombytes@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/randombytes/-/randombytes-2.1.0.tgz#df6f84372f0270dc65cdf6291349ab7a473d4f2a" @@ -4044,9 +4074,9 @@ readdirp@~3.6.0: picomatch "^2.2.1" regenerator-runtime@^0.13.4: - version "0.13.9" - resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz#8925742a98ffd90814988d7566ad30ca3b263b52" - integrity sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA== + version "0.13.10" + resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.10.tgz#ed07b19616bcbec5da6274ebc75ae95634bfc2ee" + integrity sha512-KepLsg4dU12hryUO7bp/axHAKvwGOCV0sGloQtpagJ12ai+ojVDqkeGSiRX1zlq+kjIMZ1t7gpze+26QqtdGqw== regexp.prototype.flags@^1.4.3: version "1.4.3" @@ -4093,17 +4123,22 @@ require-directory@^2.1.1: resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" integrity sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q== +resolve-alpn@^1.0.0, resolve-alpn@^1.2.0: + version "1.2.1" + resolved "https://registry.yarnpkg.com/resolve-alpn/-/resolve-alpn-1.2.1.tgz#b7adbdac3546aaaec20b45e7d8265927072726f9" + integrity sha512-0a1F4l73/ZFZOakJnQ3FvkJ2+gSTQWz/r2KE5OdDY0TxPm5h4GkqkWWfM47T7HsbnOtcJVEF4epCVy6u7Q3K+g== + resolve-from@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g== -responselike@^1.0.2: - version "1.0.2" - resolved 
"https://registry.yarnpkg.com/responselike/-/responselike-1.0.2.tgz#918720ef3b631c5642be068f15ade5a46f4ba1e7" - integrity sha512-/Fpe5guzJk1gPqdJLJR5u7eG/gNY4nImjbRDaVWVMRhne55TCmj2i9Q+54PBRfatRC8v/rIiv9BN0pMd9OV5EQ== +responselike@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/responselike/-/responselike-2.0.1.tgz#9a0bc8fdc252f3fb1cca68b016591059ba1422bc" + integrity sha512-4gl03wn3hj1HP3yzgdI7d3lCkF95F21Pz4BPGvKHinyQzALR5CapwC8yIi0Rh58DEMQ/SguC03wFj2k0M/mHhw== dependencies: - lowercase-keys "^1.0.0" + lowercase-keys "^2.0.0" reusify@^1.0.4: version "1.0.4" @@ -4139,10 +4174,10 @@ run-parallel@^1.1.9: dependencies: queue-microtask "^1.2.2" -rxjs@^7.5.5: - version "7.5.5" - resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-7.5.5.tgz#2ebad89af0f560f460ad5cc4213219e1f7dd4e9f" - integrity sha512-sy+H0pQofO95VDmFLzyaw9xNJU4KTRSwQIGM6+iG3SypAtCiLDzpeG8sJrNCWn2Up9km+KhkvTdbkrdy+yzZdw== +rxjs@^7.5.7: + version "7.5.7" + resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-7.5.7.tgz#2ec0d57fdc89ece220d2e702730ae8f1e49def39" + integrity sha512-z9MzKh/UcOqB3i20H6rtrlaE/CgjLOvheWK/9ILrbhROGTweAi1BaFsTT9FbwZi5Trr1qNRs+MXkhmR06awzQA== dependencies: tslib "^2.1.0" @@ -4151,11 +4186,20 @@ safe-buffer@5.2.1, safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.1, s resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== -safe-buffer@~5.1.0, safe-buffer@~5.1.1: +safe-buffer@~5.1.0: version "5.1.2" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== +safe-regex-test@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/safe-regex-test/-/safe-regex-test-1.0.0.tgz#793b874d524eb3640d1873aad03596db2d4f2295" + integrity sha512-JBUUzyOgEwXQY1NuPtvcj/qcBDbDmEvWufhlnXZIm75DEHp+afM1r1ujJpJsV/gSM4t59tpDyPi1sd6ZaPFfsA== + dependencies: + call-bind "^1.0.2" + get-intrinsic "^1.1.3" + is-regex "^1.1.4" + "safer-buffer@>= 2.1.2 < 3", safer-buffer@^2.0.2, safer-buffer@^2.1.0, safer-buffer@~2.1.0: version "2.1.2" resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" @@ -4186,9 +4230,9 @@ semver@^6.3.0: integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== semver@^7.3.7: - version "7.3.7" - resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.7.tgz#12c5b649afdbf9049707796e22a4028814ce523f" - integrity sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g== + version "7.3.8" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.8.tgz#07a78feafb3f7b32347d725e33de7e2a2df67798" + integrity sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A== dependencies: lru-cache "^6.0.0" @@ -4290,12 +4334,12 @@ simple-concat@^1.0.0: resolved "https://registry.yarnpkg.com/simple-concat/-/simple-concat-1.0.1.tgz#f46976082ba35c2263f1c8ab5edfe26c41c9552f" integrity sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q== -simple-get@^2.7.0, simple-get@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/simple-get/-/simple-get-4.0.1.tgz#4a39db549287c979d352112fa03fd99fd6bc3543" - integrity 
sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA== +simple-get@^2.7.0: + version "2.8.2" + resolved "https://registry.yarnpkg.com/simple-get/-/simple-get-2.8.2.tgz#5708fb0919d440657326cd5fe7d2599d07705019" + integrity sha512-Ijd/rV5o+mSBBs4F/x9oDPtTx9Zb6X9brmnXvMW4J7IR15ngi9q5xxqWBKU744jTZiaXtxaPL7uHG6vtN8kUkw== dependencies: - decompress-response "^6.0.0" + decompress-response "^3.3.0" once "^1.3.1" simple-concat "^1.0.0" @@ -4304,10 +4348,10 @@ slash@^3.0.0: resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q== -solc@0.8.14-fixed: - version "0.8.14-fixed" - resolved "https://registry.yarnpkg.com/solc/-/solc-0.8.14-fixed.tgz#a730a1e8259ac06313f6b7287df046ebe1dddc13" - integrity sha512-jFYa2fKbk95olckuDbhs9kbtaUhLRllM7aC++mLinJBUcdHbaHVM8LxHaJpOIDdnHBV9TpIP4XBybVugqMDyhA== +solc@0.8.17: + version "0.8.17" + resolved "https://registry.yarnpkg.com/solc/-/solc-0.8.17.tgz#c748fec6a64bf029ec406aa9b37e75938d1115ae" + integrity sha512-Dtidk2XtTTmkB3IKdyeg6wLYopJnBVxdoykN8oP8VY3PQjN16BScYoUJTXFm2OP7P0hXNAqWiJNmmfuELtLf8g== dependencies: command-exists "^1.2.8" commander "^8.1.0" @@ -4430,15 +4474,15 @@ supports-color@^7.1.0: has-flag "^4.0.0" swarm-js@^0.1.40: - version "0.1.40" - resolved "https://registry.yarnpkg.com/swarm-js/-/swarm-js-0.1.40.tgz#b1bc7b6dcc76061f6c772203e004c11997e06b99" - integrity sha512-yqiOCEoA4/IShXkY3WKwP5PvZhmoOOD8clsKA7EEcRILMkTEYHCQ21HDCAcVpmIxZq4LyZvWeRJ6quIyHk1caA== + version "0.1.42" + resolved "https://registry.yarnpkg.com/swarm-js/-/swarm-js-0.1.42.tgz#497995c62df6696f6e22372f457120e43e727979" + integrity sha512-BV7c/dVlA3R6ya1lMlSSNPLYrntt0LUq4YMgy3iwpCIc6rZnS5W2wUoctarZ5pXlpKtxDDf9hNziEkcfrxdhqQ== dependencies: bluebird "^3.5.0" buffer "^5.0.5" eth-lib "^0.1.26" fs-extra "^4.0.2" - got "^7.1.0" + got "^11.8.5" mime-types "^2.1.16" mkdirp-promise "^5.0.1" mock-fs "^4.1.0" @@ -4462,12 +4506,12 @@ tar@^4.0.2: text-table@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" - integrity sha1-f17oI66AUgfACvLfSoTsP8+lcLQ= + integrity sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw== -timed-out@^4.0.0, timed-out@^4.0.1: +timed-out@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/timed-out/-/timed-out-4.0.1.tgz#f32eacac5a175bea25d7fab565ab3ed8741ef56f" - integrity sha1-8y6srFoXW+ol1/q1Zas+2HQe9W8= + integrity sha512-G7r3AhovYtr5YKOWQkta8RKAPb+J9IsO4uVmzjl8AZwfhs8UcUwTiD6gcJYSgOtzyjvQKrKYn41syHbUWMkafA== tmp@0.0.33: version "0.0.33" @@ -4479,12 +4523,7 @@ tmp@0.0.33: to-fast-properties@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" - integrity sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4= - -to-readable-stream@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/to-readable-stream/-/to-readable-stream-1.0.0.tgz#ce0aa0c2f3df6adf852efb404a783e77c0475771" - integrity sha512-Iq25XBt6zD5npPhlLVXGFN3/gyR2/qODcKNNyTMd4vbm39HUaOiAM4PMq0eMVC/Tkxz+Zjdsc55g9yyz+Yq00Q== + integrity sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog== to-regex-range@^5.0.1: version "5.0.1" @@ -4509,12 +4548,12 @@ tough-cookie@~2.5.0: tr46@~0.0.3: version "0.0.3" resolved 
"https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a" - integrity sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o= + integrity sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw== -ts-node@^10.8.0: - version "10.8.1" - resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-10.8.1.tgz#ea2bd3459011b52699d7e88daa55a45a1af4f066" - integrity sha512-Wwsnao4DQoJsN034wePSg5nZiw4YKXf56mPIAeD6wVmiv+RytNSWqc2f3fKvcUoV+Yn2+yocD71VOfQHbmVX4g== +ts-node@^10.9.1: + version "10.9.1" + resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-10.9.1.tgz#e73de9102958af9e1f0b168a6ff320e25adcff4b" + integrity sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw== dependencies: "@cspotcode/source-map-support" "^0.8.0" "@tsconfig/node10" "^1.0.7" @@ -4550,7 +4589,7 @@ tsutils@^3.21.0: tunnel-agent@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd" - integrity sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0= + integrity sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w== dependencies: safe-buffer "^5.0.1" @@ -4562,7 +4601,7 @@ tweetnacl@1.x.x, tweetnacl@^1.0.3: tweetnacl@^0.14.3, tweetnacl@~0.14.0: version "0.14.5" resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64" - integrity sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q= + integrity sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA== type-check@^0.4.0, type-check@~0.4.0: version "0.4.0" @@ -4594,10 +4633,10 @@ type@^1.0.1: resolved "https://registry.yarnpkg.com/type/-/type-1.2.0.tgz#848dd7698dafa3e54a6c479e759c4bc3f18847a0" integrity sha512-+5nt5AAniqsCnu2cEQQdpzCAh33kVx8n0VoFidKpB1dVVLAN/F+bgVOqOJqOnEnrhp222clB5p3vUlD+1QAnfg== -type@^2.5.0: - version "2.6.0" - resolved "https://registry.yarnpkg.com/type/-/type-2.6.0.tgz#3ca6099af5981d36ca86b78442973694278a219f" - integrity sha512-eiDBDOmkih5pMbo9OqsqPRGMljLodLcwd5XD5JbtNB0o89xZAwynY9EdCDsJU7LtcVCClu9DvM7/0Ep1hYX3EQ== +type@^2.7.2: + version "2.7.2" + resolved "https://registry.yarnpkg.com/type/-/type-2.7.2.tgz#2376a15a3a28b1efa0f5350dcf72d24df6ef98d0" + integrity sha512-dzlvlNlt6AXU7EBSfpAscydQ7gXB+pPGsPnfJnZpiNJBDj7IaJzQlBZYGdEi4R9HmPdBv2XmWJ6YUtoTa7lmCw== typedarray-to-buffer@^3.1.5: version "3.1.5" @@ -4606,15 +4645,15 @@ typedarray-to-buffer@^3.1.5: dependencies: is-typedarray "^1.0.0" -typescript@^4.7.2: - version "4.7.3" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.7.3.tgz#8364b502d5257b540f9de4c40be84c98e23a129d" - integrity sha512-WOkT3XYvrpXx4vMMqlD+8R8R37fZkjyLGlxavMc4iB8lrl8L0DeTcHbYgw/v0N/z9wAFsgBhcsF0ruoySS22mA== +typescript@^4.8.4: + version "4.8.4" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.8.4.tgz#c464abca159669597be5f96b8943500b238e60e6" + integrity sha512-QCh+85mCy+h0IGff8r5XWzOVSbBO+KfeYrMQh7NJ58QujwcE22u+NUSmUxqF+un70P9GXKxa2HCNiTTMJknyjQ== uglify-js@^3.1.4: - version "3.16.0" - resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.16.0.tgz#b778ba0831ca102c1d8ecbdec2d2bdfcc7353190" - integrity sha512-FEikl6bR30n0T3amyBh3LoiBdqHRy/f4H80+My34HOesOKyHfOsxAPAxOoqC0JUnC1amnO0IwkYC3sko51caSw== + version "3.17.3" + resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.17.3.tgz#f0feedf019c4510f164099e8d7e72ff2d7304377" + integrity 
sha512-JmMFDME3iufZnBpyKL+uS78LRiC+mK55zWfM5f/pWBJfpOttXAqYfdDGRukYhJuyRinvPVAtUhvy7rlDybNtFg== ultron@~1.1.0: version "1.1.1" @@ -4639,7 +4678,15 @@ universalify@^0.1.0: unpipe@1.0.0, unpipe@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" - integrity sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw= + integrity sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ== + +update-browserslist-db@^1.0.9: + version "1.0.10" + resolved "https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.0.10.tgz#0f54b876545726f17d00cd9a2561e6dade943ff3" + integrity sha512-OztqDenkfFkbSG+tRxBeAnCVPckDBcvibKd35yDONx6OU8N7sqgwc7rCbkJ/WcYtVRZ4ba68d6byhC21GFh7sQ== + dependencies: + escalade "^3.1.1" + picocolors "^1.0.0" uri-js@^4.2.2: version "4.4.1" @@ -4648,34 +4695,15 @@ uri-js@^4.2.2: dependencies: punycode "^2.1.0" -url-parse-lax@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/url-parse-lax/-/url-parse-lax-1.0.0.tgz#7af8f303645e9bd79a272e7a14ac68bc0609da73" - integrity sha1-evjzA2Rem9eaJy56FKxovAYJ2nM= - dependencies: - prepend-http "^1.0.1" - -url-parse-lax@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/url-parse-lax/-/url-parse-lax-3.0.0.tgz#16b5cafc07dbe3676c1b1999177823d6503acb0c" - integrity sha1-FrXK/Afb42dsGxmZF3gj1lA6yww= - dependencies: - prepend-http "^2.0.0" - url-set-query@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/url-set-query/-/url-set-query-1.0.0.tgz#016e8cfd7c20ee05cafe7795e892bd0702faa339" - integrity sha1-AW6M/Xwg7gXK/neV6JK9BwL6ozk= - -url-to-options@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/url-to-options/-/url-to-options-1.0.1.tgz#1505a03a289a48cbd7a434efbaeec5055f5633a9" - integrity sha1-FQWgOiiaSMvXpDTvuu7FBV9WM6k= + integrity sha512-3AChu4NiXquPfeckE5R5cGdiHCMWJx1dwCWOmWIL4KHAziJNOFIYJlpGFeKDvwLPHovZRCxK3cYlwzqI9Vp+Gg== utf-8-validate@^5.0.2: - version "5.0.9" - resolved "https://registry.yarnpkg.com/utf-8-validate/-/utf-8-validate-5.0.9.tgz#ba16a822fbeedff1a58918f2a6a6b36387493ea3" - integrity sha512-Yek7dAy0v3Kl0orwMlvi7TPtiCNrdfHNd7Gcc/pLq4BLXqfAmd0J7OWMizUQnTTJsyjKn02mU7anqwfmUP4J8Q== + version "5.0.10" + resolved "https://registry.yarnpkg.com/utf-8-validate/-/utf-8-validate-5.0.10.tgz#d7d10ea39318171ca982718b6b96a8d2442571a2" + integrity sha512-Z6czzLq4u8fPOyx7TU6X3dvUZVvoJmxSQ+IcrlmagKhilxlhZgxPK6C5Jqbkw1IDUmFTM+cz9QDnnLTwDz/2gQ== dependencies: node-gyp-build "^4.3.0" @@ -4687,24 +4715,23 @@ utf8@3.0.0: util-deprecate@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" - integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= + integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw== util@^0.12.0: - version "0.12.4" - resolved "https://registry.yarnpkg.com/util/-/util-0.12.4.tgz#66121a31420df8f01ca0c464be15dfa1d1850253" - integrity sha512-bxZ9qtSlGUWSOy9Qa9Xgk11kSslpuZwaxCg4sNIDj6FLucDab2JxnHwyNTCpHMtK1MjoQiWQ6DiUMZYbSrO+Sw== + version "0.12.5" + resolved "https://registry.yarnpkg.com/util/-/util-0.12.5.tgz#5f17a6059b73db61a875668781a1c2b136bd6fbc" + integrity sha512-kZf/K6hEIrWHI6XqOFUiiMa+79wE/D8Q+NCNAWclkyg3b4d2k7s0QGepNjiABc+aR3N1PAyHL7p6UcLY6LmrnA== dependencies: inherits "^2.0.3" is-arguments "^1.0.4" is-generator-function "^1.0.7" is-typed-array "^1.1.3" - safe-buffer "^5.1.2" which-typed-array "^1.1.2" utils-merge@1.0.1: version 
"1.0.1" resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" - integrity sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM= + integrity sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA== uuid@3.3.2: version "3.3.2" @@ -4721,11 +4748,6 @@ v8-compile-cache-lib@^3.0.1: resolved "https://registry.yarnpkg.com/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz#6336e8d71965cb3d35a1bbb7868445a7c05264bf" integrity sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg== -v8-compile-cache@^2.0.3: - version "2.3.0" - resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz#2de19618c66dc247dcfb6f99338035d8245a2cee" - integrity sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA== - varint@^5.0.0: version "5.0.2" resolved "https://registry.yarnpkg.com/varint/-/varint-5.0.2.tgz#5b47f8a947eb668b848e034dcfa87d0ff8a7f7a4" @@ -4734,96 +4756,101 @@ varint@^5.0.0: vary@^1, vary@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc" - integrity sha1-IpnwLG3tMNSllhsLn3RSShj2NPw= + integrity sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg== verror@1.10.0: version "1.10.0" resolved "https://registry.yarnpkg.com/verror/-/verror-1.10.0.tgz#3a105ca17053af55d6e270c1f8288682e18da400" - integrity sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA= + integrity sha512-ZZKSmDAEFOijERBLkmYfJ+vmk3w+7hOLYDNkRCuRuMJGEmqYNCNLyBBFwWKVMhfwaEF3WOd0Zlw86U/WC/+nYw== dependencies: assert-plus "^1.0.0" core-util-is "1.0.2" extsprintf "^1.2.0" -web3-bzz@1.7.3: - version "1.7.3" - resolved "https://registry.yarnpkg.com/web3-bzz/-/web3-bzz-1.7.3.tgz#6860a584f748838af5e3932b6798e024ab8ae951" - integrity sha512-y2i2IW0MfSqFc1JBhBSQ59Ts9xE30hhxSmLS13jLKWzie24/An5dnoGarp2rFAy20tevJu1zJVPYrEl14jiL5w== +web-streams-polyfill@^3.0.3: + version "3.2.1" + resolved "https://registry.yarnpkg.com/web-streams-polyfill/-/web-streams-polyfill-3.2.1.tgz#71c2718c52b45fd49dbeee88634b3a60ceab42a6" + integrity sha512-e0MO3wdXWKrLbL0DgGnUV7WHVuw9OUvL4hjgnPkIeEvESk74gAITi5G606JtZPp39cd8HA9VQzCIvA49LpPN5Q== + +web3-bzz@1.8.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/web3-bzz/-/web3-bzz-1.8.0.tgz#2023676d7c17ea36512bf76eb310755a02a3d464" + integrity sha512-caDtdKeLi7+2Vb+y+cq2yyhkNjnxkFzVW0j1DtemarBg3dycG1iEl75CVQMLNO6Wkg+HH9tZtRnUyFIe5LIUeQ== dependencies: "@types/node" "^12.12.6" - got "9.6.0" + got "12.1.0" swarm-js "^0.1.40" -web3-core-helpers@1.7.3: - version "1.7.3" - resolved "https://registry.yarnpkg.com/web3-core-helpers/-/web3-core-helpers-1.7.3.tgz#9a8d7830737d0e9c48694b244f4ce0f769ba67b9" - integrity sha512-qS2t6UKLhRV/6C7OFHtMeoHphkcA+CKUr2vfpxy4hubs3+Nj28K9pgiqFuvZiXmtEEwIAE2A28GBOC3RdcSuFg== +web3-core-helpers@1.8.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/web3-core-helpers/-/web3-core-helpers-1.8.0.tgz#5dcfdda1a4ea277041d912003198f1334ca29d7c" + integrity sha512-nMAVwZB3rEp/khHI2BvFy0e/xCryf501p5NGjswmJtEM+Zrd3Biaw52JrB1qAZZIzCA8cmLKaOgdfamoDOpWdw== dependencies: - web3-eth-iban "1.7.3" - web3-utils "1.7.3" + web3-eth-iban "1.8.0" + web3-utils "1.8.0" -web3-core-method@1.7.3: - version "1.7.3" - resolved "https://registry.yarnpkg.com/web3-core-method/-/web3-core-method-1.7.3.tgz#eb2a4f140448445c939518c0fa6216b3d265c5e9" - integrity 
sha512-SeF8YL/NVFbj/ddwLhJeS0io8y7wXaPYA2AVT0h2C2ESYkpvOtQmyw2Bc3aXxBmBErKcbOJjE2ABOKdUmLSmMA== +web3-core-method@1.8.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/web3-core-method/-/web3-core-method-1.8.0.tgz#9c2da8896808917d1679c319f19e2174ba17086c" + integrity sha512-c94RAzo3gpXwf2rf8rL8C77jOzNWF4mXUoUfZYYsiY35cJFd46jQDPI00CB5+ZbICTiA5mlVzMj4e7jAsTqiLA== dependencies: - "@ethersproject/transactions" "^5.0.0-beta.135" - web3-core-helpers "1.7.3" - web3-core-promievent "1.7.3" - web3-core-subscriptions "1.7.3" - web3-utils "1.7.3" + "@ethersproject/transactions" "^5.6.2" + web3-core-helpers "1.8.0" + web3-core-promievent "1.8.0" + web3-core-subscriptions "1.8.0" + web3-utils "1.8.0" -web3-core-promievent@1.7.3: - version "1.7.3" - resolved "https://registry.yarnpkg.com/web3-core-promievent/-/web3-core-promievent-1.7.3.tgz#2d0eeef694569b61355054c721578f67df925b80" - integrity sha512-+mcfNJLP8h2JqcL/UdMGdRVfTdm+bsoLzAFtLpazE4u9kU7yJUgMMAqnK59fKD3Zpke3DjaUJKwz1TyiGM5wig== +web3-core-promievent@1.8.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/web3-core-promievent/-/web3-core-promievent-1.8.0.tgz#979765fd4d37ab0f158f0ee54037b279b737bd53" + integrity sha512-FGLyjAuOaAQ+ZhV6iuw9tg/9WvIkSZXKHQ4mdTyQ8MxVraOtFivOCbuLLsGgapfHYX+RPxsc1j1YzQjKoupagQ== dependencies: eventemitter3 "4.0.4" -web3-core-requestmanager@1.7.3: - version "1.7.3" - resolved "https://registry.yarnpkg.com/web3-core-requestmanager/-/web3-core-requestmanager-1.7.3.tgz#226f79d16e546c9157d00908de215e984cae84e9" - integrity sha512-bC+jeOjPbagZi2IuL1J5d44f3zfPcgX+GWYUpE9vicNkPUxFBWRG+olhMo7L+BIcD57cTmukDlnz+1xBULAjFg== +web3-core-requestmanager@1.8.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/web3-core-requestmanager/-/web3-core-requestmanager-1.8.0.tgz#06189df80cf52d24a195a7ef655031afe8192df3" + integrity sha512-2AoYCs3Owl5foWcf4uKPONyqFygSl9T54L8b581U16nsUirjhoTUGK/PBhMDVcLCmW4QQmcY5A8oPFpkQc1TTg== dependencies: util "^0.12.0" - web3-core-helpers "1.7.3" - web3-providers-http "1.7.3" - web3-providers-ipc "1.7.3" - web3-providers-ws "1.7.3" + web3-core-helpers "1.8.0" + web3-providers-http "1.8.0" + web3-providers-ipc "1.8.0" + web3-providers-ws "1.8.0" -web3-core-subscriptions@1.7.3: - version "1.7.3" - resolved "https://registry.yarnpkg.com/web3-core-subscriptions/-/web3-core-subscriptions-1.7.3.tgz#ca456dfe2c219a0696c5cf34c13b03c3599ec5d5" - integrity sha512-/i1ZCLW3SDxEs5mu7HW8KL4Vq7x4/fDXY+yf/vPoDljlpvcLEOnI8y9r7om+0kYwvuTlM6DUHHafvW0221TyRQ== +web3-core-subscriptions@1.8.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/web3-core-subscriptions/-/web3-core-subscriptions-1.8.0.tgz#ff66ae4467c8cb4716367248bcefb1845c0f8b83" + integrity sha512-7lHVRzDdg0+Gcog55lG6Q3D8JV+jN+4Ly6F8cSn9xFUAwOkdbgdWsjknQG7t7CDWy21DQkvdiY2BJF8S68AqOA== dependencies: eventemitter3 "4.0.4" - web3-core-helpers "1.7.3" + web3-core-helpers "1.8.0" -web3-core@1.7.3: - version "1.7.3" - resolved "https://registry.yarnpkg.com/web3-core/-/web3-core-1.7.3.tgz#2ef25c4cc023997f43af9f31a03b571729ff3cda" - integrity sha512-4RNxueGyevD1XSjdHE57vz/YWRHybpcd3wfQS33fgMyHZBVLFDNwhn+4dX4BeofVlK/9/cmPAokLfBUStZMLdw== +web3-core@1.8.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/web3-core/-/web3-core-1.8.0.tgz#90afce527ac1b1dff8cbed2acbc0336530b8aacf" + integrity sha512-9sCA+Z02ci6zoY2bAquFiDjujRwmSKHiSGi4B8IstML8okSytnzXk1izHYSynE7ahIkguhjWAuXFvX76F5rAbA== dependencies: - "@types/bn.js" "^4.11.5" + "@types/bn.js" "^5.1.0" "@types/node" "^12.12.6" bignumber.js "^9.0.0" - web3-core-helpers "1.7.3" - 
web3-core-method "1.7.3" - web3-core-requestmanager "1.7.3" - web3-utils "1.7.3" + web3-core-helpers "1.8.0" + web3-core-method "1.8.0" + web3-core-requestmanager "1.8.0" + web3-utils "1.8.0" -web3-eth-abi@1.7.3: - version "1.7.3" - resolved "https://registry.yarnpkg.com/web3-eth-abi/-/web3-eth-abi-1.7.3.tgz#2a1123c7252c37100eecd0b1fb2fb2c51366071f" - integrity sha512-ZlD8DrJro0ocnbZViZpAoMX44x5aYAb73u2tMq557rMmpiluZNnhcCYF/NnVMy6UIkn7SF/qEA45GXA1ne6Tnw== +web3-eth-abi@1.8.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/web3-eth-abi/-/web3-eth-abi-1.8.0.tgz#47fdff00bfdfa72064c9c612ff6369986598196d" + integrity sha512-xPeMb2hS9YLQK/Q5YZpkcmzoRGM+/R8bogSrYHhNC3hjZSSU0YRH+1ZKK0f9YF4qDZaPMI8tKWIMSCDIpjG6fg== dependencies: - "@ethersproject/abi" "5.0.7" - web3-utils "1.7.3" + "@ethersproject/abi" "^5.6.3" + web3-utils "1.8.0" -web3-eth-accounts@1.7.3: - version "1.7.3" - resolved "https://registry.yarnpkg.com/web3-eth-accounts/-/web3-eth-accounts-1.7.3.tgz#cd1789000f13ed3c438e96b3e80ee7be8d3f1a9b" - integrity sha512-aDaWjW1oJeh0LeSGRVyEBiTe/UD2/cMY4dD6pQYa8dOhwgMtNQjxIQ7kacBBXe7ZKhjbIFZDhvXN4mjXZ82R2Q== +web3-eth-accounts@1.8.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/web3-eth-accounts/-/web3-eth-accounts-1.8.0.tgz#960d947ee87a49d6c706dc6312334fbfbd6ff812" + integrity sha512-HQ/MDSv4bexwJLvnqsM6xpGE7c2NVOqyhzOZFyMUKXbIwIq85T3TaLnM9pCN7XqMpDcfxqiZ3q43JqQVkzHdmw== dependencies: "@ethereumjs/common" "^2.5.0" "@ethereumjs/tx" "^3.3.2" @@ -4832,127 +4859,129 @@ web3-eth-accounts@1.7.3: ethereumjs-util "^7.0.10" scrypt-js "^3.0.1" uuid "3.3.2" - web3-core "1.7.3" - web3-core-helpers "1.7.3" - web3-core-method "1.7.3" - web3-utils "1.7.3" - -web3-eth-contract@1.7.3: - version "1.7.3" - resolved "https://registry.yarnpkg.com/web3-eth-contract/-/web3-eth-contract-1.7.3.tgz#c4efc118ed7adafbc1270b633f33e696a39c7fc7" - integrity sha512-7mjkLxCNMWlQrlfM/MmNnlKRHwFk5XrZcbndoMt3KejcqDP6dPHi2PZLutEcw07n/Sk8OMpSamyF3QiGfmyRxw== - dependencies: - "@types/bn.js" "^4.11.5" - web3-core "1.7.3" - web3-core-helpers "1.7.3" - web3-core-method "1.7.3" - web3-core-promievent "1.7.3" - web3-core-subscriptions "1.7.3" - web3-eth-abi "1.7.3" - web3-utils "1.7.3" - -web3-eth-ens@1.7.3: - version "1.7.3" - resolved "https://registry.yarnpkg.com/web3-eth-ens/-/web3-eth-ens-1.7.3.tgz#ebc56a4dc7007f4f899259bbae1237d3095e2f3f" - integrity sha512-q7+hFGHIc0mBI3LwgRVcLCQmp6GItsWgUtEZ5bjwdjOnJdbjYddm7PO9RDcTDQ6LIr7hqYaY4WTRnDHZ6BEt5Q== + web3-core "1.8.0" + web3-core-helpers "1.8.0" + web3-core-method "1.8.0" + web3-utils "1.8.0" + +web3-eth-contract@1.8.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/web3-eth-contract/-/web3-eth-contract-1.8.0.tgz#58f4ce0bde74e5ce87663502e409a92abad7b2c5" + integrity sha512-6xeXhW2YoCrz2Ayf2Vm4srWiMOB6LawkvxWJDnUWJ8SMATg4Pgu42C/j8rz/enXbYWt2IKuj0kk8+QszxQbK+Q== + dependencies: + "@types/bn.js" "^5.1.0" + web3-core "1.8.0" + web3-core-helpers "1.8.0" + web3-core-method "1.8.0" + web3-core-promievent "1.8.0" + web3-core-subscriptions "1.8.0" + web3-eth-abi "1.8.0" + web3-utils "1.8.0" + +web3-eth-ens@1.8.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/web3-eth-ens/-/web3-eth-ens-1.8.0.tgz#f1937371eac54b087ebe2e871780c2710d39998d" + integrity sha512-/eFbQEwvsMOEiOhw9/iuRXCsPkqAmHHWuFOrThQkozRgcnSTRnvxkkRC/b6koiT5/HaKeUs4yQDg+/ixsIxZxA== dependencies: content-hash "^2.5.2" eth-ens-namehash "2.0.8" - web3-core "1.7.3" - web3-core-helpers "1.7.3" - web3-core-promievent "1.7.3" - web3-eth-abi "1.7.3" - web3-eth-contract "1.7.3" - web3-utils 
"1.7.3" - -web3-eth-iban@1.7.3: - version "1.7.3" - resolved "https://registry.yarnpkg.com/web3-eth-iban/-/web3-eth-iban-1.7.3.tgz#47433a73380322bba04e17b91fccd4a0e63a390a" - integrity sha512-1GPVWgajwhh7g53mmYDD1YxcftQniIixMiRfOqlnA1w0mFGrTbCoPeVaSQ3XtSf+rYehNJIZAUeDBnONVjXXmg== + web3-core "1.8.0" + web3-core-helpers "1.8.0" + web3-core-promievent "1.8.0" + web3-eth-abi "1.8.0" + web3-eth-contract "1.8.0" + web3-utils "1.8.0" + +web3-eth-iban@1.8.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/web3-eth-iban/-/web3-eth-iban-1.8.0.tgz#3af8a0c95b5f7b0b81ab0bcd2075c1e5dda31520" + integrity sha512-4RbvUxcMpo/e5811sE3a6inJ2H4+FFqUVmlRYs0RaXaxiHweahSRBNcpO0UWgmlePTolj0rXqPT2oEr0DuC8kg== dependencies: - bn.js "^4.11.9" - web3-utils "1.7.3" + bn.js "^5.2.1" + web3-utils "1.8.0" -web3-eth-personal@1.7.3: - version "1.7.3" - resolved "https://registry.yarnpkg.com/web3-eth-personal/-/web3-eth-personal-1.7.3.tgz#ca2464dca356d4335aa8141cf75a6947f10f45a6" - integrity sha512-iTLz2OYzEsJj2qGE4iXC1Gw+KZN924fTAl0ESBFs2VmRhvVaM7GFqZz/wx7/XESl3GVxGxlRje3gNK0oGIoYYQ== +web3-eth-personal@1.8.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/web3-eth-personal/-/web3-eth-personal-1.8.0.tgz#433c35e2e042844402a12d543c4126ea1494b478" + integrity sha512-L7FT4nR3HmsfZyIAhFpEctKkYGOjRC2h6iFKs9gnFCHZga8yLcYcGaYOBIoYtaKom99MuGBoosayWt/Twh7F5A== dependencies: "@types/node" "^12.12.6" - web3-core "1.7.3" - web3-core-helpers "1.7.3" - web3-core-method "1.7.3" - web3-net "1.7.3" - web3-utils "1.7.3" - -web3-eth@1.7.3: - version "1.7.3" - resolved "https://registry.yarnpkg.com/web3-eth/-/web3-eth-1.7.3.tgz#9e92785ea18d682548b6044551abe7f2918fc0b5" - integrity sha512-BCIRMPwaMlTCbswXyGT6jj9chCh9RirbDFkPtvqozfQ73HGW7kP78TXXf9+Xdo1GjutQfxi/fQ9yPdxtDJEpDA== - dependencies: - web3-core "1.7.3" - web3-core-helpers "1.7.3" - web3-core-method "1.7.3" - web3-core-subscriptions "1.7.3" - web3-eth-abi "1.7.3" - web3-eth-accounts "1.7.3" - web3-eth-contract "1.7.3" - web3-eth-ens "1.7.3" - web3-eth-iban "1.7.3" - web3-eth-personal "1.7.3" - web3-net "1.7.3" - web3-utils "1.7.3" - -web3-net@1.7.3: - version "1.7.3" - resolved "https://registry.yarnpkg.com/web3-net/-/web3-net-1.7.3.tgz#54e35bcc829fdc40cf5001a3870b885d95069810" - integrity sha512-zAByK0Qrr71k9XW0Adtn+EOuhS9bt77vhBO6epAeQ2/VKl8rCGLAwrl3GbeEl7kWa8s/su72cjI5OetG7cYR0g== - dependencies: - web3-core "1.7.3" - web3-core-method "1.7.3" - web3-utils "1.7.3" - -web3-providers-http@1.7.3: - version "1.7.3" - resolved "https://registry.yarnpkg.com/web3-providers-http/-/web3-providers-http-1.7.3.tgz#8ea5e39f6ceee0b5bc4e45403fae75cad8ff4cf7" - integrity sha512-TQJfMsDQ5Uq9zGMYlu7azx1L7EvxW+Llks3MaWn3cazzr5tnrDbGh6V17x6LN4t8tFDHWx0rYKr3mDPqyTjOZw== - dependencies: - web3-core-helpers "1.7.3" - xhr2-cookies "1.1.0" - -web3-providers-ipc@1.7.3: - version "1.7.3" - resolved "https://registry.yarnpkg.com/web3-providers-ipc/-/web3-providers-ipc-1.7.3.tgz#a34872103a8d37a03795fa2f9b259e869287dcaa" - integrity sha512-Z4EGdLKzz6I1Bw+VcSyqVN4EJiT2uAro48Am1eRvxUi4vktGoZtge1ixiyfrRIVb6nPe7KnTFl30eQBtMqS0zA== + web3-core "1.8.0" + web3-core-helpers "1.8.0" + web3-core-method "1.8.0" + web3-net "1.8.0" + web3-utils "1.8.0" + +web3-eth@1.8.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/web3-eth/-/web3-eth-1.8.0.tgz#006974a5d5e30644d05814111f9e162a72e4a09c" + integrity sha512-hist52os3OT4TQFB/GxPSMxTh3995sz6LPvQpPvj7ktSbpg9RNSFaSsPlCT63wUAHA3PZb1FemkAIeQM5t72Lw== + dependencies: + web3-core "1.8.0" + web3-core-helpers "1.8.0" + web3-core-method "1.8.0" + 
web3-core-subscriptions "1.8.0" + web3-eth-abi "1.8.0" + web3-eth-accounts "1.8.0" + web3-eth-contract "1.8.0" + web3-eth-ens "1.8.0" + web3-eth-iban "1.8.0" + web3-eth-personal "1.8.0" + web3-net "1.8.0" + web3-utils "1.8.0" + +web3-net@1.8.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/web3-net/-/web3-net-1.8.0.tgz#9acff92d7c647d801bc68df0ff4416f104dbe789" + integrity sha512-kX6EAacK7QrOe7DOh0t5yHS5q2kxZmTCxPVwSz9io9xBeE4n4UhmzGJ/VfhP2eM3OPKYeypcR3LEO6zZ8xn2vw== + dependencies: + web3-core "1.8.0" + web3-core-method "1.8.0" + web3-utils "1.8.0" + +web3-providers-http@1.8.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/web3-providers-http/-/web3-providers-http-1.8.0.tgz#3fd1e569ead2095343fac17d53160a3bae674c23" + integrity sha512-/MqxwRzExohBWW97mqlCSW/+NHydGRyoEDUS1bAIF2YjfKFwyRtHgrEzOojzkC9JvB+8LofMvbXk9CcltpZapw== + dependencies: + abortcontroller-polyfill "^1.7.3" + cross-fetch "^3.1.4" + es6-promise "^4.2.8" + web3-core-helpers "1.8.0" + +web3-providers-ipc@1.8.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/web3-providers-ipc/-/web3-providers-ipc-1.8.0.tgz#d339a24c4d764e459e425d3ac868a551ac33e3ea" + integrity sha512-tAXHtVXNUOgehaBU8pzAlB3qhjn/PRpjdzEjzHNFqtRRTwzSEKOJxFeEhaUA4FzHnTlbnrs8ujHWUitcp1elfg== dependencies: oboe "2.1.5" - web3-core-helpers "1.7.3" + web3-core-helpers "1.8.0" -web3-providers-ws@1.7.3: - version "1.7.3" - resolved "https://registry.yarnpkg.com/web3-providers-ws/-/web3-providers-ws-1.7.3.tgz#87564facc47387c9004a043a6686e4881ed6acfe" - integrity sha512-PpykGbkkkKtxPgv7U4ny4UhnkqSZDfLgBEvFTXuXLAngbX/qdgfYkhIuz3MiGplfL7Yh93SQw3xDjImXmn2Rgw== +web3-providers-ws@1.8.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/web3-providers-ws/-/web3-providers-ws-1.8.0.tgz#a0a73e0606981ea32bed40d215000a64753899de" + integrity sha512-bcZtSifsqyJxwkfQYamfdIRp4nhj9eJd7cxHg1uUkfLJK125WP96wyJL1xbPt7qt0MpfnTFn8/UuIqIB6nFENg== dependencies: eventemitter3 "4.0.4" - web3-core-helpers "1.7.3" + web3-core-helpers "1.8.0" websocket "^1.0.32" -web3-shh@1.7.3: - version "1.7.3" - resolved "https://registry.yarnpkg.com/web3-shh/-/web3-shh-1.7.3.tgz#84e10adf628556798244b58f73cda1447bb7075e" - integrity sha512-bQTSKkyG7GkuULdZInJ0osHjnmkHij9tAySibpev1XjYdjLiQnd0J9YGF4HjvxoG3glNROpuCyTaRLrsLwaZuw== +web3-shh@1.8.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/web3-shh/-/web3-shh-1.8.0.tgz#b4abbf4f59d097ce2f74360e61e2e5c0bd6507c7" + integrity sha512-DNRgSa9Jf9xYFUGKSMylrf+zt3MPjhI2qF+UWX07o0y3+uf8zalDGiJOWvIS4upAsdPiKKVJ7co+Neof47OMmg== dependencies: - web3-core "1.7.3" - web3-core-method "1.7.3" - web3-core-subscriptions "1.7.3" - web3-net "1.7.3" + web3-core "1.8.0" + web3-core-method "1.8.0" + web3-core-subscriptions "1.8.0" + web3-net "1.8.0" -web3-utils@1.7.3: - version "1.7.3" - resolved "https://registry.yarnpkg.com/web3-utils/-/web3-utils-1.7.3.tgz#b214d05f124530d8694ad364509ac454d05f207c" - integrity sha512-g6nQgvb/bUpVUIxJE+ezVN+rYwYmlFyMvMIRSuqpi1dk6ApDD00YNArrk7sPcZnjvxOJ76813Xs2vIN2rgh4lg== +web3-utils@1.8.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/web3-utils/-/web3-utils-1.8.0.tgz#0a506f8c6af9a2ad6ba79689892662769534fc03" + integrity sha512-7nUIl7UWpLVka2f09CMbKOSEvorvHnaugIabU4mj7zfMvm0tSByLcEu3eyV9qgS11qxxLuOkzBIwCstTflhmpQ== dependencies: - bn.js "^4.11.9" + bn.js "^5.2.1" ethereum-bloom-filters "^1.0.6" ethereumjs-util "^7.1.0" ethjs-unit "0.1.6" @@ -4960,23 +4989,23 @@ web3-utils@1.7.3: randombytes "^2.1.0" utf8 "3.0.0" -web3@^1.7.3: - version "1.7.3" - resolved 
"https://registry.yarnpkg.com/web3/-/web3-1.7.3.tgz#30fe786338b2cc775881cb28c056ee5da4be65b8" - integrity sha512-UgBvQnKIXncGYzsiGacaiHtm0xzQ/JtGqcSO/ddzQHYxnNuwI72j1Pb4gskztLYihizV9qPNQYHMSCiBlStI9A== +web3@^1.8.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/web3/-/web3-1.8.0.tgz#3ca5f0b32de6a1f626407740411219035b5fde64" + integrity sha512-sldr9stK/SALSJTgI/8qpnDuBJNMGjVR84hJ+AcdQ+MLBGLMGsCDNubCoyO6qgk1/Y9SQ7ignegOI/7BPLoiDA== dependencies: - web3-bzz "1.7.3" - web3-core "1.7.3" - web3-eth "1.7.3" - web3-eth-personal "1.7.3" - web3-net "1.7.3" - web3-shh "1.7.3" - web3-utils "1.7.3" + web3-bzz "1.8.0" + web3-core "1.8.0" + web3-eth "1.8.0" + web3-eth-personal "1.8.0" + web3-net "1.8.0" + web3-shh "1.8.0" + web3-utils "1.8.0" webidl-conversions@^3.0.0: version "3.0.1" resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871" - integrity sha1-JFNCdeKnvGvnvIZhHMFq4KVlSHE= + integrity sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ== websocket@^1.0.32, websocket@^1.0.34: version "1.0.34" @@ -4993,7 +5022,7 @@ websocket@^1.0.32, websocket@^1.0.34: whatwg-url@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-5.0.0.tgz#966454e8765462e37644d3626f6742ce8b70965d" - integrity sha1-lmRU6HZUYuN2RNNib2dCzotwll0= + integrity sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw== dependencies: tr46 "~0.0.3" webidl-conversions "^3.0.0" @@ -5036,7 +5065,7 @@ word-wrap@^1.2.3: wordwrap@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb" - integrity sha1-J1hIEIkUVqQXHI0CJkQa3pDLyus= + integrity sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q== workerpool@6.2.1: version "6.2.1" @@ -5055,7 +5084,7 @@ wrap-ansi@^7.0.0: wrappy@1: version "1.0.2" resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" - integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8= + integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ== ws@^3.0.0: version "3.3.3" @@ -5066,6 +5095,11 @@ ws@^3.0.0: safe-buffer "~5.1.0" ultron "~1.1.0" +ws@^8.8.1: + version "8.9.0" + resolved "https://registry.yarnpkg.com/ws/-/ws-8.9.0.tgz#2a994bb67144be1b53fe2d23c53c028adeb7f45e" + integrity sha512-Ja7nszREasGaYUYCI2k4lCKIRTt+y7XuqVoHR44YpI49TtryyqbqvDMn5eqfW7e6HzTukDRIsXqzVHScqRcafg== + xhr-request-promise@^0.1.2: version "0.1.3" resolved "https://registry.yarnpkg.com/xhr-request-promise/-/xhr-request-promise-0.1.3.tgz#2d5f4b16d8c6c893be97f1a62b0ed4cf3ca5f96c" @@ -5086,13 +5120,6 @@ xhr-request@^1.0.1, xhr-request@^1.1.0: url-set-query "^1.0.0" xhr "^2.0.4" -xhr2-cookies@1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/xhr2-cookies/-/xhr2-cookies-1.1.0.tgz#7d77449d0999197f155cb73b23df72505ed89d48" - integrity sha1-fXdEnQmZGX8VXLc7I99yUF7YnUg= - dependencies: - cookiejar "^2.1.1" - xhr@^2.0.4, xhr@^2.3.3: version "2.6.0" resolved "https://registry.yarnpkg.com/xhr/-/xhr-2.6.0.tgz#b69d4395e792b4173d6b7df077f0fc5e4e2b249d" @@ -5116,7 +5143,7 @@ y18n@^5.0.5: yaeti@^0.0.6: version "0.0.6" resolved "https://registry.yarnpkg.com/yaeti/-/yaeti-0.0.6.tgz#f26f484d72684cf42bedfb76970aa1608fbf9577" - integrity sha1-8m9ITXJoTPQr7ft2lwqhYI+/lXc= + integrity 
sha512-MvQa//+KcZCUkBTIC9blM+CU9J2GzuTytsOUwf2lidtvkx/6gnEp1QvJv34t9vdjhFmha/mUiNDbN0D0mJWdug== yallist@^3.0.0, yallist@^3.1.1: version "3.1.1" @@ -5139,9 +5166,9 @@ yargs-parser@^20.2.2: integrity sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w== yargs-parser@^21.0.0: - version "21.0.1" - resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-21.0.1.tgz#0267f286c877a4f0f728fceb6f8a3e4cb95c6e35" - integrity sha512-9BK1jFpLzJROCI5TzwZL/TU4gqjK5xiHV/RfWLOahrjAko/e4DJkRDZQXfvqAsiZzzYhgAzbgz6lg48jcm4GLg== + version "21.1.1" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-21.1.1.tgz#9096bceebf990d21bb31fa9516e0ede294a77d35" + integrity sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw== yargs-unparser@2.0.0: version "2.0.0" @@ -5166,12 +5193,12 @@ yargs@16.2.0: y18n "^5.0.5" yargs-parser "^20.2.2" -yargs@^17.5.1: - version "17.5.1" - resolved "https://registry.yarnpkg.com/yargs/-/yargs-17.5.1.tgz#e109900cab6fcb7fd44b1d8249166feb0b36e58e" - integrity sha512-t6YAJcxDkNX7NFYiVtKvWUz8l+PaKTLiL63mJYWR2GnHq2gjEWISzsLp9wg3aY36dY1j+gfIEL3pIF+XlJJfbA== +yargs@^17.6.0: + version "17.6.0" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-17.6.0.tgz#e134900fc1f218bc230192bdec06a0a5f973e46c" + integrity sha512-8H/wTDqlSwoSnScvV2N/JHfLWOKuh5MVla9hqLjK3nsfyy6Y4kDSYSvkU5YCUEPOSnRXfIyx3Sq+B/IWudTo4g== dependencies: - cliui "^7.0.2" + cliui "^8.0.1" escalade "^3.1.1" get-caller-file "^2.0.5" require-directory "^2.1.1"