diff --git a/README.md b/README.md
index 1c278e2..f886074 100644
--- a/README.md
+++ b/README.md
@@ -2,7 +2,7 @@
 
 ![workflow](https://user-images.githubusercontent.com/31323835/172648333-efd666c0-d8c3-48d8-b290-117c590c684c.png)
 
-RGB proxy server is intended to facilitate the relay of consignment data
+RGB proxy server is intended to facilitate the relay of client-side data
 between RGB wallets, enabling a better user experience for wallet users.
 
 The API it implements adheres to the
@@ -10,21 +10,20 @@ The API it implements adheres to the
 
 The proxy server is designed to handle the following workflow:
 
-- The payer of an RGB transfer posts to the proxy server the transfer
-  consignment file using as identifier of the file the blinded UTXO provided by
-  the payee in the invoice.
-- The payee asks the proxy server for the consignment file associated to the
-  blinded UTXO previously provided in the invoice.
-- If there is a file associated to such blinded UTXO, the server returns the
-  file to the payee.
-- The payee validates the content of the consignment file.
-- The payee posts to the server and ACK message if she is satisfied with the
-  content of the proposed consignment file, otherwise she can post a NACK
-  message to inform the payer that the RGB transfer should be considered as
-  failed.
+- The payer of an RGB transfer posts the transfer
+  consignment file to the server, typically using the blinded UTXO (provided
+  by the payee in the invoice) as identifier for the file.
+- The payee asks the server for the consignment file associated with the
+  identifier (e.g. the blinded UTXO).
+- If there is a file associated with the provided identifier, the server
+  returns the file to the payee.
+- The payee validates the retrieved consignment file.
+- If the consignment is valid, the payee posts an ACK to the server, otherwise
+  a NACK is posted to inform the payer that the RGB transfer should be
+  considered failed.
 - The payer asks the server for the ACK/NACK status associated with the
-  consignment file previously posted. If the consignment as been ACKed by the
-  payee, the payer will proceed in broadcasting the Bitcoin transaction
+  previously posted consignment file. If the consignment has been ACKed by the
+  payee, the payer will proceed with broadcasting the Bitcoin transaction
   containing the commitment to the RGB consignment.
 
 The RGB proxy server does not need to be trusted by the users as it is only
@@ -49,30 +48,32 @@ npm run build
 npm run start
 ```
 
-## How to use it
+## Example usage
 
-The payee generates a blinded UTXO and sends it to the payer (not covered
-here). Let's assume the blinded UTXO is `blindTest`.
+The payee generates an RGB invoice and sends it to the payer (not covered
+here). Let's assume the invoice contains the blinded UTXO `blindTest`.
 
-The payer sends the consignment file for the blinded UTXO to the proxy server:
+The payer prepares the transfer, then sends the consignment file and the
+related txid to the proxy server, using the blinded UTXO from the invoice as
+identifier:
 
 ```
 # let's create a fake consignment file and send it
 $ echo "consignment binary data" > consignment.rgb
 $ curl -X POST -H 'Content-Type: multipart/form-data' \
-  -F 'jsonrpc=2.0' -F 'id="3"' -F 'method=consignment.post' \
-  -F 'params[blinded_utxo]=blindTest' -F 'file=@consignment.rgb' \
+  -F 'jsonrpc=2.0' -F 'id="1"' -F 'method=consignment.post' \
+  -F 'params[recipient_id]=blindTest' -F 'params[txid]=527f2b2ebb81c873f128848d7226ecdb7cb4a4025222c54bfec7c358d51b9207' -F 'file=@consignment.rgb' \
   localhost:3000/json-rpc
-{"jsonrpc":"2.0","id":"3","result":true}
+{"jsonrpc":"2.0","id":"1","result":true}
 ```
 
 The payee requests the consignment for the blinded UTXO:
 
 ```
 $ curl -X POST -H 'Content-Type: application/json' \
-  -d '{"jsonrpc": "2.0", "id": "7", "method": "consignment.get", "params": {"blinded_utxo": "blindTest"} }' \
+  -d '{"jsonrpc": "2.0", "id": "2", "method": "consignment.get", "params": {"recipient_id": "blindTest"} }' \
   localhost:3000/json-rpc
-{"jsonrpc":"2.0","id":"7","result":"Y29uc2lnbm1lbnQgYmluYXJ5IGRhdGEK"}
+{"jsonrpc":"2.0","id":"2","result": {"consignment": "Y29uc2lnbm1lbnQgYmluYXJ5IGRhdGEK", "txid": "527f2b2ebb81c873f128848d7226ecdb7cb4a4025222c54bfec7c358d51b9207"}}
 ```
 
 The file is returned as a base64-encoded string:
@@ -81,38 +82,38 @@ $ echo 'Y29uc2lnbm1lbnQgYmluYXJ5IGRhdGEK' | base64 -d
 consignment binary data
 ```
 
-If ok with the consignment (validation passes), the payee calls:
+If the consignment is valid, the payee ACKs it:
 
 ```
 $ curl -X POST -H 'Content-Type: application/json' \
-  -d '{"jsonrpc": "2.0", "id": "9", "method": "ack.post", "params": {"blinded_utxo": "blindTest", "ack": true} }' \
+  -d '{"jsonrpc": "2.0", "id": "3", "method": "ack.post", "params": {"recipient_id": "blindTest", "ack": true} }' \
   localhost:3000/json-rpc
-{"jsonrpc":"2.0","id":"9","result":true}
+{"jsonrpc":"2.0","id":"3","result":true}
 ```
 
-If not ok with the consignment, the payee calls instead:
+If the consignment is invalid, the payee NACKs it:
 
 ```
 $ curl -X POST -H 'Content-Type: application/json' \
-  -d '{"jsonrpc": "2.0", "id": "8", "method": "ack.post", "params": {"blinded_utxo": "blindTest", "ack": false} }' \
+  -d '{"jsonrpc": "2.0", "id": "4", "method": "ack.post", "params": {"recipient_id": "blindTest", "ack": false} }' \
   localhost:3000/json-rpc
-{"jsonrpc":"2.0","id":"8","result":true}
+{"jsonrpc":"2.0","id":"4","result":true}
 ```
 
-The payer recieves the `ack` value (`null` if payee has not called `ack.post`
+The payer requests the `ack` value (`null` if payee has not called `ack.post`
 yet):
 
 ```
 $ curl -X POST -H 'Content-Type: application/json' \
-  -d '{"jsonrpc": "2.0", "id": "1", "method": "ack.get", "params": {"blinded_utxo": "blindTest"} }' \
+  -d '{"jsonrpc": "2.0", "id": "5", "method": "ack.get", "params": {"recipient_id": "blindTest"} }' \
   localhost:3000/json-rpc
-{"jsonrpc":"2.0","id":"1","result":true}
+{"jsonrpc":"2.0","id":"5","result":true}
 ```
 
 In case of approval the transaction can be broadcast, otherwise the two
 parties need to abort the transfer process and start from scratch.
 
-The consignment or media file for any given blinded UTXO and the related
+The consignment or media file for any given recipient ID and the related
 approval cannot be changed once submitted.
 
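For wallet developers, the curl examples above map directly onto a small JSON-RPC client. The sketch below is illustrative only and not part of this patch: it assumes Node 18+ globals (`fetch`, `FormData`, `Blob`) and a proxy at `localhost:3000`, with hypothetical helper names; the endpoint, method names and `params[...]` field names are the ones used in the README examples.

```
// Client-side sketch of the new consignment.post / consignment.get calls.
// Assumes Node 18+ globals (fetch, FormData, Blob); helper names are illustrative.
const PROXY_URL = "http://localhost:3000/json-rpc";

// Payer: upload the consignment and related txid, keyed by the recipient ID
// taken from the invoice.
async function postConsignment(
  recipientId: string,
  txid: string,
  consignment: Uint8Array
): Promise<boolean> {
  const form = new FormData();
  form.append("jsonrpc", "2.0");
  form.append("id", "1");
  form.append("method", "consignment.post");
  form.append("params[recipient_id]", recipientId);
  form.append("params[txid]", txid);
  form.append("file", new Blob([consignment]), "consignment.rgb");
  const res = await fetch(PROXY_URL, { method: "POST", body: form });
  const body = (await res.json()) as { result?: boolean };
  return body.result === true;
}

// Payee: retrieve the base64-encoded consignment plus the related txid/vout.
async function getConsignment(
  recipientId: string
): Promise<{ consignment: string; txid: string; vout?: number }> {
  const res = await fetch(PROXY_URL, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      jsonrpc: "2.0",
      id: "2",
      method: "consignment.get",
      params: { recipient_id: recipientId },
    }),
  });
  const body = (await res.json()) as {
    result: { consignment: string; txid: string; vout?: number };
  };
  return body.result;
}
```

A payer would call `postConsignment` right after preparing the transfer and later poll `ack.get` (not shown) before broadcasting; the payee pairs `getConsignment` with an `ack.post` call once validation completes.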
diff --git a/package.json b/package.json index 46841ed..2dbc326 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "rgb-proxy-server", - "version": "0.1.0", + "version": "0.2.0", "description": "", "main": "index.js", "scripts": { diff --git a/src/app.ts b/src/app.ts index 818d777..46bad3b 100644 --- a/src/app.ts +++ b/src/app.ts @@ -2,20 +2,13 @@ import cors from "cors"; import express from "express"; import httpContext from "express-http-context"; import morgan from "morgan"; -import { homedir } from "os"; -import path from "path"; -import { logger, oldAPILogger } from "./logger"; -import { setDir } from "./util"; -import { APP_DIR } from "./vars"; +import { logger } from "./logger"; let reqId = 0; // Create Express server const app = express(); -// Create app directory if it doesn't exist -setDir(path.join(homedir(), APP_DIR)); - // Allow request from any origin app.use(cors()); @@ -53,19 +46,6 @@ app.use( }) ); -app.use( - morgan(":status", { - skip: (req, _res) => { - return req.originalUrl.startsWith("/json-rpc"); - }, - stream: { - write: (message: string) => { - oldAPILogger.notice(message.trim(), { isFromMorgan: true }); - }, - }, - }) -); - // Express configuration app.set("port", process.env.PORT || 3000); app.use(express.json()); diff --git a/src/controllers/api.ts b/src/controllers/api.ts index c5fd90f..6ddbcea 100644 --- a/src/controllers/api.ts +++ b/src/controllers/api.ts @@ -17,33 +17,26 @@ import { CannotChangeUploadedFile, InvalidAck, InvalidAttachmentID, - InvalidBlindedUTXO, + InvalidRecipientID, + InvalidTxid, + InvalidVout, MissingAck, MissingAttachmentID, - MissingBlindedUTXO, MissingFile, + MissingRecipientID, + MissingTxid, NotFoundConsignment, NotFoundMedia, } from "../errors"; -import { logger, oldAPILogger } from "../logger"; +import { logger } from "../logger"; import { genHashFromFile, setDir } from "../util"; -import { APP_DIR } from "../vars"; +import { DEFAULT_APP_DIR_NAME } from "../vars"; import { APP_VERSION } from "../version"; -const PROTOCOL_VERSION = "0.1"; +const PROTOCOL_VERSION = "0.2"; const DATABASE_FILE = "app.db"; -const appDir = path.join(homedir(), APP_DIR); -const tempDir = path.join(appDir, "tmp"); -const consignmentDir = path.join(appDir, "consignments"); -const mediaDir = path.join(appDir, "media"); - -// We make sure the directories exist -setDir(tempDir); -setDir(consignmentDir); -setDir(mediaDir); - const storage = multer.diskStorage({ destination: function (_req, _file, cb) { cb(null, tempDir); @@ -52,16 +45,30 @@ const storage = multer.diskStorage({ const upload = multer({ storage }); +let appDir: string; +let ds: Datastore<{ _id: string }>; +let tempDir: string; +let consignmentDir: string; +let mediaDir: string; + interface ServerInfo { version: string; protocol_version: string; uptime: number; } +interface ConsignmentGetRes { + consignment: string; + txid: string; + vout?: number; +} + interface Consignment { _id?: string; filename: string; - blindedutxo: string; + recipient_id: string; + txid: string; + vout?: number; ack?: boolean; nack?: boolean; // to be removed when removing old APIs responded?: boolean; // to be removed when removing old APIs @@ -72,14 +79,6 @@ interface Media { attachment_id: string; } -const ds = Datastore.create(path.join(homedir(), APP_DIR, DATABASE_FILE)); - -const middleware = (req: Request, _res: Response, next: () => void) => { - oldAPILogger.notice("", { req }); - - next(); -}; - function isBoolean(data: unknown): data is boolean { return Boolean(data) === data; } @@ -88,6 
+87,10 @@ function isDictionary(data: unknown): data is Record { return typeof data === "object" && !Array.isArray(data) && data !== null; } +function isNumber(data: unknown): data is string { + return Number.isInteger(Number(data)) && data !== null; +} + function isString(data: unknown): data is string { return typeof data === "string"; } @@ -147,26 +150,50 @@ function getAttachmentIDParam( return attachmentID as string; } -function getBlindedUTXOParam( +function getRecipientIDParam( jsonRpcParams: Partial | undefined ) { - const blindedUTXOKey = "blinded_utxo"; - if (!isDictionary(jsonRpcParams) || !(blindedUTXOKey in jsonRpcParams)) { - throw new MissingBlindedUTXO(jsonRpcParams); + const recipientIDKey = "recipient_id"; + if (!isDictionary(jsonRpcParams) || !(recipientIDKey in jsonRpcParams)) { + throw new MissingRecipientID(jsonRpcParams); } - const blindedUTXO = jsonRpcParams[blindedUTXOKey]; - if (!blindedUTXO || !isString(blindedUTXO)) { - throw new InvalidBlindedUTXO(jsonRpcParams); + const recipientID = jsonRpcParams[recipientIDKey]; + if (!recipientID || !isString(recipientID)) { + throw new InvalidRecipientID(jsonRpcParams); } - return blindedUTXO as string; + return recipientID as string; +} + +function getTxidParam(jsonRpcParams: Partial | undefined) { + const txidKey = "txid"; + if (!isDictionary(jsonRpcParams) || !(txidKey in jsonRpcParams)) { + throw new MissingTxid(jsonRpcParams); + } + const txid = jsonRpcParams[txidKey]; + if (!txid || !isString(txid)) { + throw new InvalidTxid(jsonRpcParams); + } + return txid as string; +} + +function getVoutParam(jsonRpcParams: Partial | undefined) { + const voutKey = "vout"; + if (isDictionary(jsonRpcParams) && voutKey in jsonRpcParams) { + const vout = jsonRpcParams[voutKey]; + if (!isNumber(vout)) { + throw new InvalidVout(jsonRpcParams); + } + return vout as unknown as number; + } + return undefined; } async function getConsignment( jsonRpcParams: Partial | undefined ) { - const blindedUTXO = getBlindedUTXOParam(jsonRpcParams); + const recipientID = getRecipientIDParam(jsonRpcParams); const consignment: Consignment | null = await ds.findOne({ - blindedutxo: blindedUTXO, + recipient_id: recipientID, }); if (!consignment) { throw new NotFoundConsignment(jsonRpcParams); @@ -198,12 +225,16 @@ jsonRpcServer.addMethod( jsonRpcServer.addMethod( "consignment.get", - async (jsonRpcParams, _serverParams): Promise => { + async (jsonRpcParams, _serverParams): Promise => { const consignment = await getConsignment(jsonRpcParams); const fileBuffer = fs.readFileSync( path.join(consignmentDir, consignment.filename) ); - return fileBuffer.toString("base64"); + return { + consignment: fileBuffer.toString("base64"), + txid: consignment.txid, + vout: consignment.vout, + }; } ); @@ -212,14 +243,16 @@ jsonRpcServer.addMethod( async (jsonRpcParams, serverParams): Promise => { const file = serverParams?.file; try { - const blindedUTXO = getBlindedUTXOParam(jsonRpcParams); + const recipientID = getRecipientIDParam(jsonRpcParams); + const txid = getTxidParam(jsonRpcParams); + const vout = getVoutParam(jsonRpcParams); if (!file) { throw new MissingFile(jsonRpcParams); } const uploadedFile = path.join(tempDir, file.filename); const fileHash = genHashFromFile(uploadedFile); const prevFile: Consignment | null = await ds.findOne({ - blindedutxo: blindedUTXO, + recipient_id: recipientID, }); if (prevFile) { if (prevFile.filename === fileHash) { @@ -232,7 +265,9 @@ jsonRpcServer.addMethod( fs.renameSync(uploadedFile, path.join(consignmentDir, fileHash)); const 
consignment: Consignment = { filename: fileHash, - blindedutxo: blindedUTXO, + recipient_id: recipientID, + txid: txid, + vout: vout, }; await ds.insert(consignment); return true; @@ -325,7 +360,7 @@ jsonRpcServer.addMethod( } } await ds.update( - { blindedutxo: consignment.blindedutxo }, + { recipient_id: consignment.recipient_id }, { $set: { ack: ack } }, { multi: false } ); @@ -334,6 +369,18 @@ jsonRpcServer.addMethod( ); export const loadApiEndpoints = (app: Application): void => { + // setup app directories + appDir = process.env.APP_DIR || path.join(homedir(), DEFAULT_APP_DIR_NAME); + setDir(appDir); + ds = Datastore.create(path.join(appDir, DATABASE_FILE)); + tempDir = path.join(appDir, "tmp"); + consignmentDir = path.join(appDir, "consignments"); + mediaDir = path.join(appDir, "media"); + setDir(tempDir); + setDir(consignmentDir); + setDir(mediaDir); + + // setup app route app.post( "/json-rpc", upload.single("file"), @@ -401,296 +448,4 @@ export const loadApiEndpoints = (app: Application): void => { }); } ); - - app.get( - "/consignment/:blindedutxo", - middleware, - async (req: Request, res: Response) => { - try { - if (!!req.params.blindedutxo) { - const c: Consignment | null = await ds.findOne({ - blindedutxo: req.params.blindedutxo, - }); - if (!c) { - return res.status(404).send({ - success: false, - error: "No consignment found!", - }); - } - const file_buffer = fs.readFileSync( - path.join(consignmentDir, c.filename) - ); - - return res.status(200).send({ - success: true, - consignment: file_buffer.toString("base64"), - }); - } - - res.status(400).send({ success: false, error: "blindedutxo missing!" }); - } catch (error) { - res.status(500).send({ success: false }); - } - } - ); - - app.post( - "/consignment", - upload.single("consignment"), - async (req: Request, res: Response) => { - try { - if (!req.body.blindedutxo) { - return res - .status(400) - .send({ success: false, error: "blindedutxo missing!" }); - } - httpContext.set("blindedutxo", req.body.blindedutxo); - oldAPILogger.notice("", { req: req }); - if (!req.file) { - return res - .status(400) - .send({ success: false, error: "Consignment file is missing!" }); - } - const fileHash = genHashFromFile(path.join(tempDir, req.file.filename)); - const prevConsignment: Consignment | null = await ds.findOne({ - blindedutxo: req.body.blindedutxo, - }); - if (prevConsignment) { - if (prevConsignment.filename == fileHash) { - return res.status(200).send({ success: true }); - } else { - return res - .status(403) - .send({ success: false, error: "Cannot change uploaded file!" 
}); - } - } - // We move the file with the hash as name - fs.renameSync( - path.join(tempDir, req.file.filename), - path.join(consignmentDir, fileHash) - ); - const consignment: Consignment = { - filename: fileHash, - blindedutxo: req.body.blindedutxo, - }; - await ds.insert(consignment); - if (fs.existsSync(path.join(tempDir, req.file.filename))) { - // We delete the file from the uploads directory - fs.unlinkSync(path.join(tempDir, req.file.filename)); - } - - return res.status(200).send({ success: true }); - } catch (error) { - res.status(500).send({ success: false }); - } - } - ); - - app.get( - "/media/:attachment_id", - middleware, - async (req: Request, res: Response) => { - try { - if (!!req.params.attachment_id) { - const media: Media | null = await ds.findOne({ - attachment_id: req.params.attachment_id, - }); - if (!media) { - return res.status(404).send({ - success: false, - error: "No media found!", - }); - } - const file_buffer = fs.readFileSync( - path.join(mediaDir, media.filename) - ); - - return res.status(200).send({ - success: true, - media: file_buffer.toString("base64"), - }); - } - - res - .status(400) - .send({ success: false, error: "attachment_id missing!" }); - } catch (error) { - res.status(500).send({ success: false }); - } - } - ); - - app.post( - "/media", - upload.single("media"), - async (req: Request, res: Response) => { - try { - if (!req.body.attachment_id) { - return res - .status(400) - .send({ success: false, error: "attachment_id missing!" }); - } - httpContext.set("attachment_id", req.body.attachment_id); - oldAPILogger.notice("", { req: req }); - if (!req.file) { - return res - .status(400) - .send({ success: false, error: "Media file is missing!" }); - } - const fileHash = genHashFromFile(path.join(tempDir, req.file.filename)); - const prevMedia: Media | null = await ds.findOne({ - attachment_id: req.body.attachment_id, - }); - if (prevMedia) { - if (prevMedia.filename == fileHash) { - return res.status(200).send({ success: true }); - } else { - return res - .status(403) - .send({ success: false, error: "Cannot change uploaded file!" }); - } - } - // We move the file with the hash as name - fs.renameSync( - path.join(tempDir, req.file.filename), - path.join(mediaDir, fileHash) - ); - const media: Media = { - filename: fileHash, - attachment_id: req.body.attachment_id, - }; - await ds.insert(media); - if (fs.existsSync(path.join(tempDir, req.file.filename))) { - // We delete the file from the uploads directory - fs.unlinkSync(path.join(tempDir, req.file.filename)); - } - - return res.status(200).send({ success: true }); - } catch (error) { - res.status(500).send({ success: false }); - } - } - ); - - app.post("/ack", async (req: Request, res: Response) => { - try { - if (!req.body.blindedutxo) { - return res - .status(400) - .send({ success: false, error: "blindedutxo missing!" }); - } - httpContext.set("blindedutxo", req.body.blindedutxo); - oldAPILogger.notice("", { req: req }); - const c: Consignment | null = await ds.findOne({ - blindedutxo: req.body.blindedutxo, - }); - - if (!c) { - return res - .status(404) - .send({ success: false, error: "No consignment found!" }); - } - if (!!c.responded) { - return res - .status(403) - .send({ success: false, error: "Already responded!" 
}); - } - await ds.update( - { blindedutxo: req.body.blindedutxo }, - { - $set: { - ack: true, - nack: false, - responded: true, - }, - }, - { multi: false } - ); - - return res.status(200).send({ success: true }); - } catch (error) { - oldAPILogger.error(error); - res.status(500).send({ success: false }); - } - }); - - app.post("/nack", async (req: Request, res: Response) => { - try { - if (!req.body.blindedutxo) { - return res - .status(400) - .send({ success: false, error: "blindedutxo missing!" }); - } - httpContext.set("blindedutxo", req.body.blindedutxo); - oldAPILogger.notice("", { req: req }); - let c: Consignment | null = await ds.findOne({ - blindedutxo: req.body.blindedutxo, - }); - if (!c) { - return res.status(404).send({ success: false }); - } - if (!!c.responded) { - return res - .status(403) - .send({ success: false, error: "Already responded!" }); - } - await ds.update( - { blindedutxo: req.body.blindedutxo }, - { - $set: { - nack: true, - ack: false, - responded: true, - }, - }, - { multi: false } - ); - c = await ds.findOne({ blindedutxo: req.body.blindedutxo }); - - return res.status(200).send({ success: true }); - } catch (error) { - res.status(500).send({ success: false }); - } - }); - - app.get( - "/ack/:blindedutxo", - middleware, - async (req: Request, res: Response) => { - try { - if (!req.params.blindedutxo) { - return res - .status(400) - .send({ success: false, error: "blindedutxo missing!" }); - } - const c: Consignment | null = await ds.findOne({ - blindedutxo: req.params.blindedutxo, - }); - - if (!c) { - return res - .status(404) - .send({ success: false, error: "No consignment found!" }); - } - const ack = !!c.ack; - const nack = !!c.nack; - - return res.status(200).send({ - success: true, - ack, - nack, - }); - } catch (error) { - oldAPILogger.error(error); - res.status(500).send({ success: false }); - } - } - ); - - app.get("/getinfo", middleware, async (_req: Request, res: Response) => { - return res.status(200).send({ - version: APP_VERSION, - uptime: Math.trunc(process.uptime()), - }); - }); }; diff --git a/src/errors.ts b/src/errors.ts index 67161e0..db34a89 100644 --- a/src/errors.ts +++ b/src/errors.ts @@ -24,9 +24,21 @@ export class InvalidAttachmentID extends JSONRPCErrorException { } } -export class InvalidBlindedUTXO extends JSONRPCErrorException { +export class InvalidRecipientID extends JSONRPCErrorException { constructor(data?: object) { - super("Invalid blinded UTXO", -202, data); + super("Invalid recipient ID", -202, data); + } +} + +export class InvalidTxid extends JSONRPCErrorException { + constructor(data?: object) { + super("Invalid TXID", -203, data); + } +} + +export class InvalidVout extends JSONRPCErrorException { + constructor(data?: object) { + super("Invalid vout", -204, data); } } @@ -42,9 +54,9 @@ export class MissingAttachmentID extends JSONRPCErrorException { } } -export class MissingBlindedUTXO extends JSONRPCErrorException { +export class MissingRecipientID extends JSONRPCErrorException { constructor(data?: object) { - super("Missing blinded UTXO", -302, data); + super("Missing recipient ID", -302, data); } } @@ -54,6 +66,12 @@ export class MissingFile extends JSONRPCErrorException { } } +export class MissingTxid extends JSONRPCErrorException { + constructor(data?: object) { + super("Missing TXID", -303, data); + } +} + export class NotFoundConsignment extends JSONRPCErrorException { constructor(data?: object) { super("Consignment file not found", -400, data); diff --git a/src/logger.ts b/src/logger.ts index 
f7aca78..9f28ed4 100644 --- a/src/logger.ts +++ b/src/logger.ts @@ -56,42 +56,3 @@ export const logger = winston.createLogger({ ], exitOnError: false, }); - -export const oldAPILogger = winston.createLogger({ - format: winston.format.combine( - winston.format.timestamp({ - format: "YYYY-MM-DDTHH:mm:ss.SSSZ", - }), - winston.format.colorize(), - winston.format.printf((info) => { - if (info.isFromMorgan) { - return `[${info.timestamp}] ${info.level}: [OLD] <- [${ - httpContext.get("reqId") || "-" - }] ${info.message}`; - } else { - if (info.req) { - const ip = info.req.headers["x-forwarded-for"] - ? info.req.headers["x-forwarded-for"] - : info.req.ip; - return `[${info.timestamp}] ${info.level}: [OLD] -> [${ - httpContext.get("reqId") || "-" - }] ${ip || ""} ${info.req.method} ${info.req.originalUrl} ${ - info.req.headers["user-agent"] || "" - } ${httpContext.get("blindedutxo") || ""}`; - } else { - return `[${info.timestamp}] ${info.level}: [OLD] -- [${ - httpContext.get("reqId") || "-" - }] ${info.message}`; - } - } - }) - ), - levels: winston.config.syslog.levels, - level, - transports: [ - new winston.transports.Console({ - handleExceptions: true, - }), - ], - exitOnError: false, -}); diff --git a/src/vars.ts b/src/vars.ts index 3ba547c..a874ec7 100644 --- a/src/vars.ts +++ b/src/vars.ts @@ -1 +1 @@ -export const APP_DIR = process.env.APP_DIR || ".rgb-proxy-server"; +export const DEFAULT_APP_DIR_NAME = ".rgb-proxy-server"; diff --git a/test/api.spec.ts b/test/api.spec.ts index 79d6143..254cc4c 100644 --- a/test/api.spec.ts +++ b/test/api.spec.ts @@ -1,26 +1,222 @@ +import fs from "fs"; +import path from "path"; import request from "supertest"; import app from "../src/app"; import { loadApiEndpoints } from "../src/controllers/api"; +const jsonrpcVersion = "2.0"; +const okStatus = 200; +const contentTypeForm = "multipart/form-data"; + +// temporary test directory +const tempDir = fs.mkdtempSync(path.join(__dirname, "temp-")); +process.env.APP_DIR = path.join(tempDir, "app-dir"); + loadApiEndpoints(app); +afterAll(() => { + // cleanup the temporary directory + return fs.promises.rm(tempDir, { recursive: true, force: true }); +}); + describe("POST /json-rpc", () => { + it("ack.post should succeed on 1st try then return false", async () => { + const consignmentPath = path.join(tempDir, "ack.post"); + fs.writeFileSync(consignmentPath, "consignment ack binary data"); + let reqID = "1"; + const recipientID = "ackTest.post"; + const txid = "aTxid"; + let res = await request(app) + .post("/json-rpc") + .set("Content-type", contentTypeForm) + .field("jsonrpc", jsonrpcVersion) + .field("id", reqID) + .field("method", "consignment.post") + .field("params[recipient_id]", recipientID) + .field("params[txid]", txid) + .attach("file", fs.createReadStream(consignmentPath)) + .expect(okStatus); + expect(res.body.result).toStrictEqual(true); + + reqID = "2"; + const method = "ack.post"; + let req = { + jsonrpc: jsonrpcVersion, + id: reqID, + method: method, + params: { + recipient_id: recipientID, + ack: true, + }, + }; + res = await request(app).post("/json-rpc").send(req).expect(okStatus); + expect(res.body.jsonrpc).toStrictEqual(jsonrpcVersion); + expect(res.body.id).toStrictEqual(reqID); + expect(res.body.result).toStrictEqual(true); + + reqID = "3"; + req = { + jsonrpc: jsonrpcVersion, + id: reqID, + method: method, + params: { + recipient_id: recipientID, + ack: true, + }, + }; + res = await request(app).post("/json-rpc").send(req).expect(okStatus); + expect(res.body.id).toStrictEqual(reqID); + 
expect(res.body.result).toStrictEqual(false); + }); + + it("consignment.get should succeed", async () => { + const consignmentPath = path.join(tempDir, "consignment.get"); + fs.writeFileSync(consignmentPath, "consignment get binary data"); + const consignment = fs.readFileSync(consignmentPath); + const consignmentBase64 = consignment.toString("base64"); + let reqID = "1"; + const recipientID = "blindTest.get"; + const txid = "aTxid"; + let res = await request(app) + .post("/json-rpc") + .set("Content-type", contentTypeForm) + .field("jsonrpc", jsonrpcVersion) + .field("id", reqID) + .field("method", "consignment.post") + .field("params[recipient_id]", recipientID) + .field("params[txid]", txid) + .attach("file", fs.createReadStream(consignmentPath)) + .expect(okStatus); + expect(res.body.result).toStrictEqual(true); + + reqID = "2"; + const req = { + jsonrpc: jsonrpcVersion, + id: reqID, + method: "consignment.get", + params: { + recipient_id: recipientID, + }, + }; + res = await request(app).post("/json-rpc").send(req).expect(okStatus); + expect(res.body.jsonrpc).toStrictEqual(jsonrpcVersion); + expect(res.body.id).toStrictEqual(reqID); + expect(res.body.result.consignment).toStrictEqual(consignmentBase64); + expect(res.body.result.txid).toStrictEqual(txid); + }); + + it("consignment.post should succeed on 1st try then return false", async () => { + const consignmentPath = path.join(tempDir, "consignment.post"); + fs.writeFileSync(consignmentPath, "consignment post binary data"); + let reqID = "1"; + const method = "consignment.post"; + const recipientID = "blindTest.post"; + const txid = "aTxid"; + let res = await request(app) + .post("/json-rpc") + .set("Content-type", contentTypeForm) + .field("jsonrpc", jsonrpcVersion) + .field("id", reqID) + .field("method", method) + .field("params[recipient_id]", recipientID) + .field("params[txid]", txid) + .attach("file", fs.createReadStream(consignmentPath)) + .expect(okStatus); + expect(res.body.jsonrpc).toStrictEqual(jsonrpcVersion); + expect(res.body.id).toStrictEqual(reqID); + expect(res.body.result).toStrictEqual(true); + + reqID = "2"; + res = await request(app) + .post("/json-rpc") + .set("Content-type", contentTypeForm) + .field("jsonrpc", jsonrpcVersion) + .field("id", reqID) + .field("method", method) + .field("params[recipient_id]", recipientID) + .field("params[txid]", txid) + .attach("file", fs.createReadStream(consignmentPath)) + .expect(okStatus); + expect(res.body.id).toStrictEqual(reqID); + expect(res.body.result).toStrictEqual(false); + }); + + it("media.get should succeed", async () => { + const mediaPath = path.join(tempDir, "media.post"); + fs.writeFileSync(mediaPath, "media get binary data"); + const media = fs.readFileSync(mediaPath); + const mediaBase64 = media.toString("base64"); + let reqID = "1"; + const attachmentID = "mediaTest.get"; + let res = await request(app) + .post("/json-rpc") + .set("Content-type", contentTypeForm) + .field("jsonrpc", jsonrpcVersion) + .field("id", reqID) + .field("method", "media.post") + .field("params[attachment_id]", attachmentID) + .attach("file", fs.createReadStream(mediaPath)) + .expect(okStatus); + expect(res.body.result).toStrictEqual(true); + + reqID = "2"; + const req = { + jsonrpc: jsonrpcVersion, + id: reqID, + method: "media.get", + params: { + attachment_id: attachmentID, + }, + }; + res = await request(app).post("/json-rpc").send(req).expect(okStatus); + expect(res.body.jsonrpc).toStrictEqual(jsonrpcVersion); + expect(res.body.id).toStrictEqual(reqID); + 
expect(res.body.result).toStrictEqual(mediaBase64); + }); + + it("media.post should succeed on 1st try then return false", async () => { + const mediaPath = path.join(tempDir, "media.post"); + fs.writeFileSync(mediaPath, "media post binary data"); + let reqID = "1"; + const method = "media.post"; + const attachmentID = "mediaTest.post"; + let res = await request(app) + .post("/json-rpc") + .set("Content-type", contentTypeForm) + .field("jsonrpc", jsonrpcVersion) + .field("id", reqID) + .field("method", method) + .field("params[attachment_id]", attachmentID) + .attach("file", fs.createReadStream(mediaPath)) + .expect(okStatus); + expect(res.body.jsonrpc).toStrictEqual(jsonrpcVersion); + expect(res.body.id).toStrictEqual(reqID); + expect(res.body.result).toStrictEqual(true); + reqID = "2"; + res = await request(app) + .post("/json-rpc") + .set("Content-type", contentTypeForm) + .field("jsonrpc", jsonrpcVersion) + .field("id", reqID) + .field("method", method) + .field("params[attachment_id]", attachmentID) + .attach("file", fs.createReadStream(mediaPath)) + .expect(okStatus); + expect(res.body.id).toStrictEqual(reqID); + expect(res.body.result).toStrictEqual(false); + }); + it("server.info should succeed", async () => { - const jsonrpcVersion = "2.0"; const reqID = 1; const req = { jsonrpc: jsonrpcVersion, id: reqID, method: "server.info", }; - const res = await request(app) - .post("/json-rpc") - .set("Content-type", "application/json") - .send(req) - .expect(200); + const res = await request(app).post("/json-rpc").send(req).expect(okStatus); expect(res.body.jsonrpc).toStrictEqual(jsonrpcVersion); expect(res.body.id).toStrictEqual(reqID); - expect(res.body.result.protocol_version).toStrictEqual("0.1"); + expect(res.body.result.protocol_version).toStrictEqual("0.2"); }); });
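The new optional `vout` parameter is not covered by the tests above. A possible round-trip check is sketched here, not as part of this patch: it reuses the helpers defined at the top of `test/api.spec.ts`, the `params[vout]` multipart field name is assumed by analogy with `params[recipient_id]` and `params[txid]`, and `vout` is compared after `Number()` coercion because multipart fields arrive as strings.

```
// Hypothetical extra test for test/api.spec.ts: vout round-trip.
// Assumes tempDir, jsonrpcVersion, okStatus and contentTypeForm from the spec;
// "params[vout]" follows the existing multipart field naming convention.
it("consignment.get should return the optional vout when provided", async () => {
  const consignmentPath = path.join(tempDir, "consignment.vout");
  fs.writeFileSync(consignmentPath, "consignment vout binary data");
  const recipientID = "blindTest.vout";
  let res = await request(app)
    .post("/json-rpc")
    .set("Content-type", contentTypeForm)
    .field("jsonrpc", jsonrpcVersion)
    .field("id", "1")
    .field("method", "consignment.post")
    .field("params[recipient_id]", recipientID)
    .field("params[txid]", "aTxid")
    .field("params[vout]", "0")
    .attach("file", fs.createReadStream(consignmentPath))
    .expect(okStatus);
  expect(res.body.result).toStrictEqual(true);

  res = await request(app)
    .post("/json-rpc")
    .send({
      jsonrpc: jsonrpcVersion,
      id: "2",
      method: "consignment.get",
      params: { recipient_id: recipientID },
    })
    .expect(okStatus);
  expect(res.body.result.txid).toStrictEqual("aTxid");
  // multipart form fields are strings, so compare vout after coercion
  expect(Number(res.body.result.vout)).toStrictEqual(0);
});
```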