diff --git a/cspell.json b/cspell.json index 21cde3c30..79d46751e 100644 --- a/cspell.json +++ b/cspell.json @@ -73,6 +73,10 @@ "lowlights", "unsanitize", "toastify", - "Toastify" + "Toastify", + "POSTAMBLE", + "INTC", + "INTJ", + "INTE" ] } diff --git a/src/cases/types.ts b/src/cases/types.ts index 3a0f731a8..cb0f225f3 100644 --- a/src/cases/types.ts +++ b/src/cases/types.ts @@ -1,7 +1,7 @@ import { buildTypeCodec } from '../utilities'; import * as t from 'io-ts'; -interface CaseHashBrand { +export interface CaseHashBrand { readonly __CaseHash: unique symbol; } diff --git a/src/components/messageBus.ts b/src/components/messageBus.ts index d85c0a8a7..50ebba12e 100644 --- a/src/components/messageBus.ts +++ b/src/components/messageBus.ts @@ -46,6 +46,8 @@ export const enum MessageKind { mainWebviewViewVisibilityChange = 38, executionQueueChange = 39, + + loadHomeDirectoryData = 40, } export type Command = @@ -166,6 +168,9 @@ export type Message = | Readonly<{ kind: MessageKind.executionQueueChange; queuedCodemodHashes: ReadonlyArray; + }> + | Readonly<{ + kind: MessageKind.loadHomeDirectoryData; }>; type EmitterMap = { diff --git a/src/components/webview/MainProvider.ts b/src/components/webview/MainProvider.ts index 888c68a6c..f7bdeb109 100644 --- a/src/components/webview/MainProvider.ts +++ b/src/components/webview/MainProvider.ts @@ -216,6 +216,15 @@ export class MainViewProvider implements WebviewViewProvider { return; } + if ( + prevProps.activeTabId !== nextProps.activeTabId && + nextProps.activeTabId === 'codemodRuns' + ) { + this.__messageBus.publish({ + kind: MessageKind.loadHomeDirectoryData, + }); + } + prevProps = nextProps; this.__postMessage({ diff --git a/src/data/readHomeDirectoryCases.ts b/src/data/readHomeDirectoryCases.ts new file mode 100644 index 000000000..3ba7b6f51 --- /dev/null +++ b/src/data/readHomeDirectoryCases.ts @@ -0,0 +1,252 @@ +import { createReadStream } from 'fs'; +import { homedir } from 'os'; +import { join } from 'path'; +import { FileType, Uri, workspace } from 'vscode'; +import { readSurfaceAgnosticCase } from './readSurfaceAgnosticCase'; +import { Case, caseHashCodec } from '../cases/types'; +import { Job, JobKind, jobHashCodec } from '../jobs/types'; +import { parseSurfaceAgnosticCase } from './schemata/surfaceAgnosticCaseSchema'; +import { + JOB_KIND, + parseSurfaceAgnosticJob, +} from './schemata/surfaceAgnosticJobSchema'; +import { CodemodEntry } from '../codemods/types'; +import EventEmitter from 'events'; +import { MessageBus, MessageKind } from '../components/messageBus'; +import { Store } from '.'; + +interface HomeDirectoryEventEmitter extends EventEmitter { + emit(event: 'start'): boolean; + emit(event: 'end'): boolean; + emit(event: 'job', kase: Case, jobs: ReadonlyArray): boolean; + + once(event: 'start', listener: () => void): this; + once(event: 'end', listener: () => void): this; + on( + event: 'job', + listener: (kase: Case, jobs: ReadonlyArray) => void, + ): this; +} + +const readHomeDirectoryCase = async ( + homeDirectoryEventEmitter: HomeDirectoryEventEmitter, + rootUri: Uri, + codemodEntities: Record, + caseDataPath: string, +) => { + const readStream = createReadStream(caseDataPath); + + await new Promise((resolve, reject) => { + let timedOut = false; + + const timeout = setTimeout(() => { + timedOut = true; + reject( + `Opening the read stream for ${caseDataPath} timed out after 1s.`, + ); + }, 1000); + + readStream.once('open', () => { + if (timedOut) { + return; + } + + clearTimeout(timeout); + resolve(); + }); + }); + + let 
kase: Case | null = null; + + const fileEventEmitter = readSurfaceAgnosticCase(readStream); + + fileEventEmitter.once('case', (data: unknown) => { + const surfaceAgnosticCase = parseSurfaceAgnosticCase(data); + + if ( + !surfaceAgnosticCase.absoluteTargetPath.startsWith(rootUri.fsPath) + ) { + console.info( + 'The current case does not belong to the opened workspace', + ); + fileEventEmitter.emit('close'); + return; + } + + if (!caseHashCodec.is(surfaceAgnosticCase.caseHashDigest)) { + console.error('Could not validate the case hash digest'); + fileEventEmitter.emit('close'); + return; + } + + const codemodName = + codemodEntities[surfaceAgnosticCase.codemodHashDigest]?.name ?? + surfaceAgnosticCase.codemodHashDigest; + + kase = { + hash: surfaceAgnosticCase.caseHashDigest, + codemodName: `${codemodName} (CLI)`, + createdAt: Number(surfaceAgnosticCase.createdAt), + path: surfaceAgnosticCase.absoluteTargetPath, + }; + + homeDirectoryEventEmitter.emit('job', kase, []); + }); + + const jobHandler = (data: unknown) => { + const surfaceAgnosticJob = parseSurfaceAgnosticJob(data); + + if (!kase) { + console.error('You need to have a case to create a job'); + fileEventEmitter.emit('close'); + return; + } + + if (!jobHashCodec.is(surfaceAgnosticJob.jobHashDigest)) { + console.error('Could not validate the job hash digest'); + fileEventEmitter.emit('close'); + return; + } + + if (surfaceAgnosticJob.kind === JOB_KIND.REWRITE_FILE) { + const job: Job = { + hash: surfaceAgnosticJob.jobHashDigest, + originalNewContent: null, + codemodName: kase.codemodName, + createdAt: kase.createdAt, + caseHashDigest: kase.hash, + // variant + kind: JobKind.rewriteFile, + oldUri: Uri.file(surfaceAgnosticJob.oldUri), + newContentUri: Uri.file(surfaceAgnosticJob.newUri), + newUri: null, + }; + + homeDirectoryEventEmitter.emit('job', kase, [job]); + } + + // TODO implement more job kinds + }; + + fileEventEmitter.on('job', jobHandler); + + const TIMEOUT = 120_000; + + return new Promise((resolve, reject) => { + let timedOut = false; + + const timeout = setTimeout(() => { + timedOut = true; + + fileEventEmitter.off('job', jobHandler); + fileEventEmitter.emit('close'); + + reject(new Error(`Reading the case timed out after ${TIMEOUT}ms`)); + }, TIMEOUT); + + fileEventEmitter.once('error', (error) => { + if (timedOut) { + return; + } + + fileEventEmitter.off('job', jobHandler); + + clearTimeout(timeout); + reject(error); + }); + + fileEventEmitter.once('end', () => { + if (timedOut) { + return; + } + + fileEventEmitter.off('job', jobHandler); + + clearTimeout(timeout); + + if (kase === null) { + reject(new Error('Could not extract the case')); + return; + } + + resolve(); + }); + }); +}; + +export const readHomeDirectoryCases = async ( + rootUri: Uri, + codemodEntities: Record, +): Promise => { + if (rootUri === null) { + return null; + } + + const eventEmitter: HomeDirectoryEventEmitter = new EventEmitter(); + + eventEmitter.once('start', async () => { + const casesDirectoryPath = join(homedir(), '.intuita', 'cases'); + + const casesDirectoryUri = Uri.file(casesDirectoryPath); + + try { + const entries = await workspace.fs.readDirectory(casesDirectoryUri); + + const caseDataPaths = entries + .filter(([, fileType]) => fileType === FileType.Directory) + .map(([name]) => join(casesDirectoryPath, name, 'case.data')); + + await Promise.allSettled( + caseDataPaths.map((path) => + readHomeDirectoryCase( + eventEmitter, + rootUri, + codemodEntities, + path, + ), + ), + ); + } catch (error) { + console.error(error); + } + + 
eventEmitter.emit('end'); + }); + + return eventEmitter; +}; + +export class HomeDirectoryService { + public constructor( + private readonly __messageBus: MessageBus, + private readonly __store: Store, + private readonly __rootUri: Uri | null, + ) { + __messageBus.subscribe(MessageKind.loadHomeDirectoryData, async () => { + if (!this.__rootUri) { + return; + } + + const eventEmitter = await readHomeDirectoryCases( + this.__rootUri, + this.__store.getState().codemod.entities, + ); + + const jobHandler = (kase: Case, jobs: ReadonlyArray) => { + this.__messageBus.publish({ + kind: MessageKind.upsertCase, + kase, + jobs, + }); + }; + + eventEmitter?.once('end', () => { + eventEmitter.off('job', jobHandler); + }); + + eventEmitter?.on('job', jobHandler); + + eventEmitter?.emit('start'); + }); + } +} diff --git a/src/data/readSurfaceAgnosticCase.ts b/src/data/readSurfaceAgnosticCase.ts new file mode 100644 index 000000000..a4a4110a5 --- /dev/null +++ b/src/data/readSurfaceAgnosticCase.ts @@ -0,0 +1,472 @@ +import EventEmitter from 'node:events'; +import type { ReadStream } from 'node:fs'; +import { + parseJobKind, + type SurfaceAgnosticJob, +} from './schemata/surfaceAgnosticJobSchema'; +import { SurfaceAgnosticCase } from './schemata/surfaceAgnosticCaseSchema'; +import { createHash, Hash } from 'node:crypto'; +import { parseArgumentRecordSchema } from './schemata/argumentRecordSchema'; + +type OuterData = Readonly<{ + byteLength: number; + hashDigest: Buffer; + innerData: Buffer; +}>; + +type OuterCase = OuterData & { kind: 'case' }; + +type OuterJob = OuterData & { kind: 'job' }; + +const buildCase = (outerCase: OuterCase): SurfaceAgnosticCase => { + const innerDataHashDigest = createHash('ripemd160') + .update(outerCase.innerData) + .digest(); + + if (Buffer.compare(innerDataHashDigest, outerCase.hashDigest) !== 0) { + throw new Error( + "The inner case's hash digest does not match the calculated hash digest", + ); + } + + const caseHashDigest = outerCase.innerData + .subarray(0, 20) + .toString('base64url'); + const codemodHashDigest = outerCase.innerData + .subarray(20, 40) + .toString('base64url'); + + const createdAt = outerCase.innerData.subarray(40, 48).readBigInt64BE(); + + const pathByteLength = outerCase.innerData.subarray(48, 50).readUint16BE(); + + const recordByteLengthStart = 50 + pathByteLength; + + const absoluteTargetPath = outerCase.innerData + .subarray(50, recordByteLengthStart) + .toString(); + + const recordByteLength = outerCase.innerData + .subarray(recordByteLengthStart, recordByteLengthStart + 2) + .readUint16BE(); + + const record = outerCase.innerData + .subarray( + recordByteLengthStart + 2, + recordByteLengthStart + 2 + recordByteLength, + ) + .toString(); + + const argumentRecord = parseArgumentRecordSchema(JSON.parse(record)); + + return { + caseHashDigest, + codemodHashDigest, + createdAt, + absoluteTargetPath, + argumentRecord, + }; +}; + +const buildJob = (outerJob: OuterJob): SurfaceAgnosticJob => { + const innerDataHashDigest = createHash('ripemd160') + .update(outerJob.innerData) + .digest(); + + if (Buffer.compare(innerDataHashDigest, outerJob.hashDigest) !== 0) { + throw new Error( + "The inner job's hash digest does not match the calculated hash digest", + ); + } + + const jobHashDigest = outerJob.innerData + .subarray(0, 20) + .toString('base64url'); + + const kind = parseJobKind(outerJob.innerData.subarray(20).readUInt8()); + const oldUriByteLength = outerJob.innerData.subarray(21, 23).readUint16BE(); + + const newUriByteLengthStart = 23 + 
oldUriByteLength; + + const oldUri = outerJob.innerData + .subarray(23, newUriByteLengthStart) + .toString(); + + const newUriByteLength = outerJob.innerData + .subarray(newUriByteLengthStart, newUriByteLengthStart + 2) + .readUint16BE(); + + const newUri = outerJob.innerData + .subarray( + newUriByteLengthStart + 2, + newUriByteLengthStart + 2 + newUriByteLength, + ) + .toString(); + + return { + jobHashDigest, + kind, + oldUri, + newUri, + }; +}; + +const enum POSITION { + BEFORE_OUTER_PREAMBLE = 0, + BEFORE_VERSION = 1, + BEFORE_INNER_CASE_BYTE_LENGTH = 2, + BEFORE_INNER_CASE_HASH_DIGEST = 3, + BEFORE_INNER_CASE = 4, + BEFORE_OUTER_JOB_OR_POSTAMBLE = 5, + BEFORE_INNER_JOB_BYTE_LENGTH = 6, + BEFORE_INNER_JOB_HASH_DIGEST = 7, + BEFORE_INNER_JOB = 8, + BEFORE_POSTAMBLE_HASH_DIGEST = 9, +} + +type State = Readonly<{ + position: POSITION; + outerCase: OuterCase | null; + outerJob: OuterJob | null; + hashOfHashDigests: Hash; +}>; + +type StateRecipe = + | Readonly<{ + event: 'error'; + error: Error; + }> + | (Readonly<{ + event: 'case'; + surfaceAgnosticCase: SurfaceAgnosticCase; + }> & + State) + | (Readonly<{ + event: 'job'; + surfaceAgnosticJob: SurfaceAgnosticJob; + }> & + State) + | Readonly<{ + event: 'end'; + }> + | State; + +const read = (readStream: ReadStream, state: State): StateRecipe | null => { + if (state.position === POSITION.BEFORE_OUTER_PREAMBLE) { + const buffer = readStream.read(4); + + if (!Buffer.isBuffer(buffer)) { + return null; + } + + if (Buffer.compare(buffer, Buffer.from('INTC')) !== 0) { + return { + event: 'error', + error: new Error( + 'You tried to read a file that is not Intuita Case', + ), + }; + } + + return { + ...state, + position: POSITION.BEFORE_VERSION, + }; + } + + if (state.position === POSITION.BEFORE_VERSION) { + const buffer = readStream.read(4); + + if (!Buffer.isBuffer(buffer)) { + return null; + } + + if (Buffer.compare(buffer, new Uint8Array([1, 0, 0, 0])) !== 0) { + return { + event: 'error', + error: new Error(), + }; + } + + return { + ...state, + position: POSITION.BEFORE_INNER_CASE_BYTE_LENGTH, + }; + } + + if (state.position === POSITION.BEFORE_INNER_CASE_BYTE_LENGTH) { + const buffer = readStream.read(2); + + if (!Buffer.isBuffer(buffer)) { + return null; + } + + return { + ...state, + outerCase: { + kind: 'case', + byteLength: buffer.readUint16BE(), + hashDigest: Buffer.from([]), + innerData: Buffer.from([]), + }, + position: POSITION.BEFORE_INNER_CASE_HASH_DIGEST, + }; + } + + if ( + state.position === POSITION.BEFORE_INNER_CASE_HASH_DIGEST && + state.outerCase !== null + ) { + const hashDigest = readStream.read(20); + + if (!Buffer.isBuffer(hashDigest)) { + return null; + } + + const hashOfHashDigests = state.hashOfHashDigests.update(hashDigest); + + return { + ...state, + outerCase: { + ...state.outerCase, + hashDigest, + }, + hashOfHashDigests, + position: POSITION.BEFORE_INNER_CASE, + }; + } + + if ( + state.position === POSITION.BEFORE_INNER_CASE && + state.outerCase !== null + ) { + const innerCase = readStream.read(state.outerCase.byteLength); + + if (!Buffer.isBuffer(innerCase)) { + return null; + } + + try { + const surfaceAgnosticCase = buildCase({ + ...state.outerCase, + innerData: innerCase, + }); + + return { + ...state, + outerCase: null, + position: POSITION.BEFORE_OUTER_JOB_OR_POSTAMBLE, + event: 'case', + surfaceAgnosticCase, + }; + } catch (error) { + return { + event: 'error', + error: + error instanceof Error + ? 
error + : new Error('Unknown case creation error'), + }; + } + } + + if (state.position === POSITION.BEFORE_OUTER_JOB_OR_POSTAMBLE) { + const buffer = readStream.read(4); + + if (!Buffer.isBuffer(buffer)) { + return null; + } + + if (Buffer.compare(buffer, Buffer.from('INTJ')) === 0) { + return { + ...state, + position: POSITION.BEFORE_INNER_JOB_BYTE_LENGTH, + }; + } + + if (Buffer.compare(buffer, Buffer.from('INTE')) === 0) { + return { + ...state, + position: POSITION.BEFORE_POSTAMBLE_HASH_DIGEST, + }; + } + + return { + event: 'error', + error: new Error( + 'Could not recognize neither INTJ or INTE headers', + ), + }; + } + + if (state.position === POSITION.BEFORE_INNER_JOB_BYTE_LENGTH) { + const buffer = readStream.read(2); + + if (!Buffer.isBuffer(buffer)) { + return null; + } + + return { + ...state, + outerJob: { + kind: 'job', + byteLength: buffer.readUint16BE(), + hashDigest: Buffer.from([]), + innerData: Buffer.from([]), + }, + position: POSITION.BEFORE_INNER_JOB_HASH_DIGEST, + }; + } + + if ( + state.position === POSITION.BEFORE_INNER_JOB_HASH_DIGEST && + state.outerJob !== null + ) { + const hashDigest = readStream.read(20); + + if (!Buffer.isBuffer(hashDigest)) { + return null; + } + + const hashOfHashDigests = state.hashOfHashDigests.update(hashDigest); + + return { + ...state, + outerJob: { + ...state.outerJob, + hashDigest, + }, + hashOfHashDigests, + position: POSITION.BEFORE_INNER_JOB, + }; + } + + if ( + state.position === POSITION.BEFORE_INNER_JOB && + state.outerJob !== null + ) { + const innerJob = readStream.read(state.outerJob.byteLength); + + if (!Buffer.isBuffer(innerJob)) { + return null; + } + + try { + const surfaceAgnosticJob = buildJob({ + ...state.outerJob, + innerData: innerJob, + }); + + return { + ...state, + position: POSITION.BEFORE_OUTER_JOB_OR_POSTAMBLE, + outerJob: null, + event: 'job', + surfaceAgnosticJob, + }; + } catch (error) { + return { + event: 'error', + error: + error instanceof Error + ? 
error + : new Error('Unknown job creation error'), + }; + } + } + + if (state.position === POSITION.BEFORE_POSTAMBLE_HASH_DIGEST) { + const hashDigest = readStream.read(20); + + if (!Buffer.isBuffer(hashDigest)) { + return null; + } + + if ( + Buffer.compare(hashDigest, state.hashOfHashDigests.digest()) !== 0 + ) { + return { + event: 'error', + error: new Error( + 'The read hash of hash digests does not match the calculated one', + ), + }; + } + + return { + event: 'end', + }; + } + + return null; +}; + +export const readSurfaceAgnosticCase = (readStream: ReadStream) => { + const eventEmitter = new EventEmitter(); + + let reading = true; + + let state: State = { + position: POSITION.BEFORE_OUTER_PREAMBLE, + outerCase: null, + outerJob: null, + hashOfHashDigests: createHash('ripemd160'), + }; + + const readableCallback = () => { + try { + while ( + readStream.readableLength !== 0 && + readStream.readable && + reading + ) { + const stateRecipe = read(readStream, state); + + if (stateRecipe === null) { + break; + } + + if ('event' in stateRecipe && stateRecipe.event === 'error') { + readStream.close(); + eventEmitter.emit('error', stateRecipe.error); + return; + } + + if ('event' in stateRecipe && stateRecipe.event === 'end') { + readStream.close(); + eventEmitter.emit('end'); + return; + } + + state = { + position: stateRecipe.position, + outerCase: stateRecipe.outerCase, + outerJob: stateRecipe.outerJob, + hashOfHashDigests: stateRecipe.hashOfHashDigests, + }; + + if ('event' in stateRecipe && stateRecipe.event === 'case') { + eventEmitter.emit('case', stateRecipe.surfaceAgnosticCase); + } + + if ('event' in stateRecipe && stateRecipe.event === 'job') { + eventEmitter.emit('job', stateRecipe.surfaceAgnosticJob); + } + } + + readStream.once('readable', readableCallback); + } catch (error) { + eventEmitter.emit('error', error); + } + }; + + readStream.once('readable', readableCallback); + + eventEmitter.once('close', () => { + reading = false; + + eventEmitter.emit('end'); + }); + + return eventEmitter; +}; diff --git a/src/data/schemata/argumentRecordSchema.ts b/src/data/schemata/argumentRecordSchema.ts new file mode 100644 index 000000000..01ba14612 --- /dev/null +++ b/src/data/schemata/argumentRecordSchema.ts @@ -0,0 +1,10 @@ +import * as S from '@effect/schema/Schema'; + +export const argumentRecordSchema = S.record( + S.string, + S.union(S.string, S.number, S.boolean), +); + +export const parseArgumentRecordSchema = S.parseSync(argumentRecordSchema); + +export type ArgumentRecord = S.To; diff --git a/src/data/schemata/surfaceAgnosticCaseSchema.ts b/src/data/schemata/surfaceAgnosticCaseSchema.ts new file mode 100644 index 000000000..d6af6b423 --- /dev/null +++ b/src/data/schemata/surfaceAgnosticCaseSchema.ts @@ -0,0 +1,14 @@ +import * as S from '@effect/schema/Schema'; +import { argumentRecordSchema } from './argumentRecordSchema'; + +const surfaceAgnosticCaseSchema = S.struct({ + caseHashDigest: S.string, + codemodHashDigest: S.string, + createdAt: S.bigint, + absoluteTargetPath: S.string, + argumentRecord: argumentRecordSchema, +}); + +export const parseSurfaceAgnosticCase = S.parseSync(surfaceAgnosticCaseSchema); + +export type SurfaceAgnosticCase = S.To; diff --git a/src/data/schemata/surfaceAgnosticJobSchema.ts b/src/data/schemata/surfaceAgnosticJobSchema.ts new file mode 100644 index 000000000..421809967 --- /dev/null +++ b/src/data/schemata/surfaceAgnosticJobSchema.ts @@ -0,0 +1,32 @@ +import * as S from '@effect/schema/Schema'; + +export enum JOB_KIND { + REWRITE_FILE = 1, + 
CREATE_FILE = 2, + DELETE_FILE = 3, + MOVE_FILE = 4, + MOVE_AND_REWRITE_FILE = 5, + COPY_FILE = 6, +} + +const jobKindSchema = S.union( + S.literal(JOB_KIND.REWRITE_FILE), + S.literal(JOB_KIND.CREATE_FILE), + S.literal(JOB_KIND.DELETE_FILE), + S.literal(JOB_KIND.MOVE_FILE), + S.literal(JOB_KIND.MOVE_AND_REWRITE_FILE), + S.literal(JOB_KIND.COPY_FILE), +); + +export const parseJobKind = S.parseSync(jobKindSchema); + +const surfaceAgnosticJobSchema = S.struct({ + jobHashDigest: S.string, + kind: jobKindSchema, + oldUri: S.string, + newUri: S.string, +}); + +export const parseSurfaceAgnosticJob = S.parseSync(surfaceAgnosticJobSchema); + +export type SurfaceAgnosticJob = S.To; diff --git a/src/extension.ts b/src/extension.ts index b0c23b263..05d2d56a9 100644 --- a/src/extension.ts +++ b/src/extension.ts @@ -45,6 +45,7 @@ import { } from './data/codemodConfigSchema'; import { parsePrivateCodemodsEnvelope } from './data/privateCodemodsEnvelopeSchema'; import { GlobalStateTokenStorage, UserService } from './components/userService'; +import { HomeDirectoryService } from './data/readHomeDirectoryCases'; export const enum SEARCH_PARAMS_KEYS { ENGINE = 'engine', @@ -1259,4 +1260,10 @@ export async function activate(context: vscode.ExtensionContext) { messageBus.publish({ kind: MessageKind.bootstrapEngine, }); + + new HomeDirectoryService(messageBus, store, rootUri); + + messageBus.publish({ + kind: MessageKind.loadHomeDirectoryData, + }); }
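
Note on the on-disk format parsed by src/data/readSurfaceAgnosticCase.ts: the reader expects an 'INTC' preamble, a 4-byte version (1, 0, 0, 0), one length-prefixed and RIPEMD-160-hashed case section, zero or more 'INTJ' job sections framed the same way, and an 'INTE' postamble carrying a RIPEMD-160 hash over the concatenated section hash digests. The sketch below is a minimal, illustrative writer for that layout, derived only from the reader introduced in this diff; the helper names (uint16, encodeCase, frame, writeCaseFile) are hypothetical and are not part of this change.

import { createHash } from 'node:crypto';
import { writeFile } from 'node:fs/promises';

const uint16 = (value: number): Buffer => {
	const buffer = Buffer.alloc(2);
	buffer.writeUint16BE(value);
	return buffer;
};

// Inner case layout read by buildCase:
// caseHashDigest(20) + codemodHashDigest(20) + createdAt(8, big-endian) +
// pathByteLength(2) + path + recordByteLength(2) + JSON-encoded argument record.
const encodeCase = (
	caseHashDigest: Buffer, // 20 bytes; re-encoded as base64url by the reader
	codemodHashDigest: Buffer, // 20 bytes
	createdAt: bigint,
	absoluteTargetPath: string,
	argumentRecord: Record<string, string | number | boolean>,
): Buffer => {
	const createdAtBuffer = Buffer.alloc(8);
	createdAtBuffer.writeBigInt64BE(createdAt);

	const path = Buffer.from(absoluteTargetPath);
	const record = Buffer.from(JSON.stringify(argumentRecord));

	return Buffer.concat([
		caseHashDigest,
		codemodHashDigest,
		createdAtBuffer,
		uint16(path.byteLength),
		path,
		uint16(record.byteLength),
		record,
	]);
};

// Every section is framed as byteLength(2) + ripemd160(innerData)(20) + innerData,
// mirroring the BEFORE_INNER_*_BYTE_LENGTH / _HASH_DIGEST states of the reader.
const frame = (innerData: Buffer) => {
	const hashDigest = createHash('ripemd160').update(innerData).digest();

	return {
		section: Buffer.concat([
			uint16(innerData.byteLength),
			hashDigest,
			innerData,
		]),
		hashDigest,
	};
};

const writeCaseFile = async (
	filePath: string,
	innerCase: Buffer,
	innerJobs: ReadonlyArray<Buffer>,
): Promise<void> => {
	const hashOfHashDigests = createHash('ripemd160');

	const framedCase = frame(innerCase);
	hashOfHashDigests.update(framedCase.hashDigest);

	const framedJobs = innerJobs.map((innerJob) => {
		const framedJob = frame(innerJob);
		hashOfHashDigests.update(framedJob.hashDigest);

		return Buffer.concat([Buffer.from('INTJ'), framedJob.section]);
	});

	await writeFile(
		filePath,
		Buffer.concat([
			Buffer.from('INTC'), // preamble
			Buffer.from([1, 0, 0, 0]), // version expected by the reader
			framedCase.section,
			...framedJobs,
			Buffer.from('INTE'), // postamble marker
			hashOfHashDigests.digest(), // hash over all section hash digests, in order
		]),
	);
};

A file produced this way should be consumable via readSurfaceAgnosticCase(createReadStream(path)), which emits 'case', 'job', 'error' and 'end' events; encodeCase builds the inner case buffer passed as the second argument, and an encodeJob counterpart would mirror buildJob's offsets (jobHashDigest(20) + kind(1) + oldUri length/bytes + newUri length/bytes). Note that the 2-byte big-endian length fields cap each inner section at 65,535 bytes.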