Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add collection management scripts for MOH TOSP #955

Open
wants to merge 5 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
46 changes: 46 additions & 0 deletions apps/studio/prisma/scripts/FileLogger.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
import fs from "fs"
import path from "path"

/**
 * Minimal file-backed logger for one-off maintenance scripts.
 *
 * Writes are synchronous so that (a) log lines appear in call order and
 * (b) nothing is lost when the script exits immediately after logging.
 * The original async `fs.appendFile` gave neither guarantee: appends could
 * complete out of order, and a process exiting right after `log()` could
 * drop the final lines.
 */
export class FileLogger {
  private logFilePath: string

  constructor(logFilePath: string) {
    this.logFilePath = logFilePath

    // Ensure the directory for the log file exists before the first write.
    const logDir = path.dirname(logFilePath)
    if (!fs.existsSync(logDir)) {
      fs.mkdirSync(logDir, { recursive: true })
    }
  }

  // Format: "[ISO-8601 timestamp] [LEVEL] message\n"
  private formatLog(level: string, message: string): string {
    const timestamp = new Date().toISOString()
    return `[${timestamp}] [${level.toUpperCase()}] ${message}\n`
  }

  private writeLog(logMessage: string): void {
    try {
      // Synchronous append: preserves ordering and survives immediate exit.
      fs.appendFileSync(this.logFilePath, logMessage)
    } catch (err) {
      // Keep the original contract: logging must never throw.
      console.error("Failed to write log:", err)
    }
  }

  log(level: string, message: string): void {
    this.writeLog(this.formatLog(level, message))
  }

  info(message: string): void {
    this.log("info", message)
  }

  error(message: string): void {
    this.log("error", message)
  }

  debug(message: string): void {
    this.log("debug", message)
  }
}
82 changes: 82 additions & 0 deletions apps/studio/prisma/scripts/moh-tosp/backupCollectionById.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,82 @@
import fs from "fs/promises" // Use the promise-based version of fs for async/await
import path from "path"

import { db } from "~/server/modules/database"
import { FileLogger } from "../FileLogger"

// Log file is created relative to the current working directory;
// update the logger path if required.
const logger = new FileLogger("./backupCollectionById.log")

/**
 * Backup a collection and its relevant resources to JSON files.
 *
 * Looks up the collection `Resource` row, then writes the published blob of
 * each direct child to `<backupDir>/<permalink>.json`.
 *
 * @param {string} resourceId - ID of the collection resource to back up.
 * @param {string} backupDir - Directory to save the backup files (created if missing).
 * @throws {Error} If the collection is missing, a child has no published
 *   version, or any read/write fails. Errors are logged, then rethrown so the
 *   caller (and the process exit code) can observe the failure.
 */
export async function backupCollection(
  resourceId: string,
  backupDir: string,
): Promise<void> {
  try {
    // Ensure the backup directory exists
    await fs.mkdir(backupDir, { recursive: true })

    // Fetch the collection resource
    const collection = await db
      .selectFrom("Resource")
      .selectAll()
      .where("id", "=", resourceId)
      .executeTakeFirst()

    if (!collection) {
      throw new Error(`Collection with ID ${resourceId} not found.`)
    }

    // Fetch all child resources
    const children = await db
      .selectFrom("Resource")
      .selectAll()
      .where("parentId", "=", resourceId)
      .execute()

    // Write each child's published version to the backup directory as JSON
    for (const child of children) {
      // Guard explicitly: a draft-only child has no published version, and
      // querying with a null ID would surface as a misleading "not found".
      if (!child.publishedVersionId) {
        throw new Error(
          `Child with ID ${child.id} has no published version; nothing to back up.`,
        )
      }

      // Fetch the blob backing the published version
      const blob = await db
        .selectFrom("Blob")
        .select("content")
        .innerJoin("Version", "Blob.id", "Version.blobId")
        .where("Version.id", "=", child.publishedVersionId)
        .executeTakeFirst()

      if (!blob) {
        throw new Error(
          `Published version of child with ID ${child.id} not found.`,
        )
      }

      logger.info(`Writing backup for child with ID ${child.id}`)

      // Serialize the blob content (JSONB column) — one file per child,
      // named after the child's permalink.
      const blobJsonPath = path.join(backupDir, `${child.permalink}.json`)
      await fs.writeFile(blobJsonPath, JSON.stringify(blob.content, null, 2))
    }

    logger.info(`Backup completed successfully in directory: ${backupDir}`)
  } catch (error) {
    // Log with context, then rethrow: the original swallowed the error,
    // which made the script "succeed" on a failed backup and left the
    // caller's .catch() unreachable.
    logger.error(
      `Error backing up collection: ${
        error instanceof Error ? error.message : String(error)
      }`,
    )
    throw error
  }
}

// Run the backup
// NOTE: TODO: Put in the collection ID to backup
const collectionId = "0"
const backupDirectory = "/Users/XYZ/<your-path>"

try {
  await backupCollection(collectionId, backupDirectory)
} catch (err) {
  if (err instanceof Error) {
    logger.error(`Unhandled error: ${err.message}`)
  }
}
155 changes: 155 additions & 0 deletions apps/studio/prisma/scripts/moh-tosp/createCollectionFromLocal.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,155 @@
import fs from "fs/promises"
import path from "path"

import { db, jsonb } from "~/server/modules/database"
import { FileLogger } from "../FileLogger"

// Log file is created relative to the current working directory;
// update the logger path if required.
const logger = new FileLogger("./createCollectionFromLocal.log")

export const createCollectionFromLocal = async (
contentDir: string,
siteId: number,
indexPageName: string, // should be placed outside the folder
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

not sure if i understand why this should be placed outside the folder?

indexPageTitle: string, // title of the index page
collectionName: string,
nameOfNewCollectionToCreate: string,
) => {
logger.info(`Reading from ${contentDir}`)
const jsonFilePath = path.join(contentDir, indexPageName)
const folderPath = path.join(contentDir, collectionName)

try {
await db.transaction().execute(async (tx) => {
// Step 1: Create a new collection with title "cost-financing-new"
const collection = await tx
.insertInto("Resource")
.values({
title: nameOfNewCollectionToCreate,
permalink: nameOfNewCollectionToCreate,
siteId: siteId,
type: "Collection",
state: "Draft",
createdAt: new Date(),
updatedAt: new Date(),
})
.returning("id")
.executeTakeFirstOrThrow()

const collectionId = collection.id
logger.info(`Collection created with ID: ${collectionId}`)

// Step 2: Insert "cost-financing.json" as an IndexPage with permalink "_index"
const jsonFileContent = await fs.readFile(jsonFilePath, "utf-8")
const indexPageBlob = await tx
.insertInto("Blob")
.values({
content: jsonb(JSON.parse(jsonFileContent)),
})
.returning("id")
.executeTakeFirstOrThrow()

const indexPage = await tx
.insertInto("Resource")
.values({
title: nameOfNewCollectionToCreate,
permalink: "_index",
siteId: siteId,
type: "IndexPage",
parentId: collectionId,
draftBlobId: indexPageBlob.id,
state: "Draft",
createdAt: new Date(),
updatedAt: new Date(),
})
.returning("id")
.executeTakeFirstOrThrow()

const indexPageId = indexPage.id

logger.info(`Index page created with ID: ${indexPageId}`)

// Step 3: Insert files from "cost-financing/" into the DB as Blobs
const folderFiles = await fs.readdir(folderPath)
logger.info(`Reading from folderPath: ${folderPath}`)
logger.info(`Folder files: ${JSON.stringify(folderFiles)}`)
for (const file of folderFiles) {
const filePath = path.join(folderPath, file)
logger.info(`Reading file path: ${filePath}`)

logger.info(`Filename: ${file}`)
//Sometimes might have hidden internal files like .DSStore
if (!file.endsWith(".json")) {
continue
}
const fileContent = await fs.readFile(filePath, "utf-8")

// eslint-disable-next-line @typescript-eslint/no-explicit-any
let parsedFileContent: any
try {
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
parsedFileContent = JSON.parse(fileContent)
} catch (error) {
if (error instanceof Error) {
logger.error(`Error parsing JSON file: ${file}`)
}
}

const blob = await tx
.insertInto("Blob")
.values({
content: parsedFileContent as PrismaJson.BlobJsonContent,
createdAt: new Date(),
updatedAt: new Date(),
})
.returning("id")
.executeTakeFirstOrThrow()

const resource = await tx
.insertInto("Resource")
.values({
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-member-access
title: parsedFileContent.page.title,
permalink: file.replace(/\.json$/, ""), // remove the .json at the back on permalinks
siteId: siteId, // Replace with appropriate site ID
type: "CollectionPage",
parentId: collectionId,
state: "Draft",
draftBlobId: blob.id,
createdAt: new Date(),
updatedAt: new Date(),
})
.returning("id")
.executeTakeFirstOrThrow()

const resourceId = resource.id

logger.info(
`Blob created for file ${file} with resource ID: ${resourceId}`,
)
}
})

logger.info("All operations completed successfully.")
} catch (error) {
if (error instanceof Error) {
logger.error(`Error during transaction: ${error.message}`)
}
}
}

// NOTE: TODO: Update the content directory and siteId here before usage!
const scriptConfig = {
  contentDir: "/Users/XYZ/<your-path>",
  indexPagePath: "cost-financing.json",
  indexPageTitle: "Cost financing",
  collectionName: "cost-financing",
  nameOfNewCollectionToCreate: "cost-financing-new", // will also be the permalink
  siteId: 0,
} as const

await createCollectionFromLocal(
  scriptConfig.contentDir,
  scriptConfig.siteId,
  scriptConfig.indexPagePath,
  scriptConfig.indexPageTitle,
  scriptConfig.collectionName,
  scriptConfig.nameOfNewCollectionToCreate,
)
107 changes: 107 additions & 0 deletions apps/studio/prisma/scripts/moh-tosp/deleteCollectionById.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,107 @@
import { db } from "~/server/modules/database"
import { FileLogger } from "../FileLogger"

// Log file is created relative to the current working directory;
// update the logger path if required.
const logger = new FileLogger("./deleteCollectionById.log")

/**
 * Delete a collection and all of its direct child resources, including each
 * resource's draft blob and published version (plus backing blob), inside a
 * single transaction.
 *
 * @param collectionId ID of the collection resource to delete
 * @param siteId site the collection must belong to (safety scope on all queries)
 */
export const deleteCollectionById = async (
  collectionId: string,
  siteId: number,
) => {
  try {
    await db.transaction().execute(async (tx) => {
      // Shared helper: remove a published Version row and its backing Blob.
      // (The original duplicated this logic for children and the collection.)
      const purgePublishedVersion = async (publishedVersionId: string) => {
        const publishedVersion = await tx
          .selectFrom("Version")
          .select(["blobId"])
          .where("id", "=", publishedVersionId)
          .executeTakeFirst()

        const blobIdToDelete = publishedVersion?.blobId

        // Delete the Version first so the Blob is no longer referenced.
        await tx
          .deleteFrom("Version")
          .where("id", "=", publishedVersionId)
          .execute()

        if (blobIdToDelete) {
          await tx.deleteFrom("Blob").where("id", "=", blobIdToDelete).execute()
        }
      }

      // Step 1: Find all child resources of the collection
      const childResources = await tx
        .selectFrom("Resource")
        .select(["id", "state", "draftBlobId", "publishedVersionId"])
        .where("parentId", "=", collectionId)
        .where("siteId", "=", siteId)
        .execute()

      // Step 2: Delete each child along with its draft blob and published data
      for (const resource of childResources) {
        if (resource.publishedVersionId) {
          await purgePublishedVersion(resource.publishedVersionId)
        }

        if (resource.draftBlobId) {
          await tx
            .deleteFrom("Blob")
            .where("id", "=", resource.draftBlobId)
            .execute()
        }

        // Delete the resource itself
        await tx.deleteFrom("Resource").where("id", "=", resource.id).execute()

        logger.info(`Resource with ID ${resource.id} deleted successfully.`)
      }

      // Step 3: Delete the collection itself.
      // BUGFIX: scoped by siteId for the same safety guarantee the child
      // query has — previously a collection from any site could be deleted.
      const collection = await tx
        .selectFrom("Resource")
        .select(["draftBlobId", "publishedVersionId"])
        .where("id", "=", collectionId)
        .where("siteId", "=", siteId)
        .executeTakeFirst()

      if (!collection) {
        throw new Error(`Collection with ID ${collectionId} not found.`)
      }

      if (collection.publishedVersionId) {
        await purgePublishedVersion(collection.publishedVersionId)
      }

      // BUGFIX: the original never deleted the collection's own draft blob,
      // leaking a Blob row on every run.
      if (collection.draftBlobId) {
        await tx
          .deleteFrom("Blob")
          .where("id", "=", collection.draftBlobId)
          .execute()
      }

      // Delete the collection resource itself
      await tx.deleteFrom("Resource").where("id", "=", collectionId).execute()

      logger.info(
        `Collection with ID ${collectionId} and all related data deleted successfully.`,
      )
    })
  } catch (error) {
    // Log non-Error throws too, instead of dropping them silently.
    logger.error(
      `Error deleting collection: ${
        error instanceof Error ? error.message : String(error)
      }`,
    )
  }
}

// NOTE(review): placeholder values — set the real collection ID and site ID
// before running this script.
const targetCollectionId = "0"
const targetSiteId = 0
await deleteCollectionById(targetCollectionId, targetSiteId)
Loading
Loading