Skip to content

Commit

Permalink
Merge pull request responsible-ai-collaborative#2441 from clari182/fix/delete-duplicated-subscriptions
Browse files Browse the repository at this point in the history

Remove duplicated subscriptions - migration
  • Loading branch information
kepae authored Dec 18, 2023
2 parents 3ff641f + 1fcf7c1 commit fbe541c
Show file tree
Hide file tree
Showing 2 changed files with 262 additions and 0 deletions.
Original file line number Diff line number Diff line change
@@ -0,0 +1,149 @@
const { ObjectID } = require('bson');

const {
up,
} = require('../../../../migrations/2023.11.29T14.56.25.delete-duplicated-subscriptions');

// Unit test for the delete-duplicated-subscriptions migration. All database
// access is stubbed with cy.stub(), so only the migration's own logic is under
// test: which aggregations it runs and which _ids it hands to deleteMany.
describe('Migration Script - Remove Duplicated Subscriptions', () => {
  it('Should remove duplicated subscriptions correctly', () => {
    // Mocked data for all three cases
    // Each item mimics one $group result of the migration's aggregation:
    // `_id` carries the grouping key, `uniqueIds` the duplicated document ids.
    // Keys match the first entry of the `$in` type list the migration queries
    // with, so one canned response can serve each aggregate call.
    const testSubscriptions = {
      incident: [
        {
          _id: {
            type: 'incident',
            userId: '642188372947d07020c1319d',
            incident_id: 600,
          },
          uniqueIds: [
            new ObjectID('5f9f6b9b5f9c4c0001a3b3a3'),
            new ObjectID('5f9f6b9b5f9c4c0001a3b3a4'),
          ],
        },
        {
          // 'submission-promoted' groups come back from the same aggregate
          // call as 'incident' (they share one $in list in the migration).
          _id: {
            type: 'submission-promoted',
            userId: '642188372947d07020c1319d',
            incident_id: 600,
          },
          uniqueIds: [
            new ObjectID('5f9f6b9b5f9c4c0001a3b3a0'),
            new ObjectID('5f9f6b9b5f9c4c0001a3b3a1'),
            new ObjectID('5f9f6b9b5f9c4c0001a3b3a2'),
          ],
        },
      ],
      entity: [
        {
          _id: {
            type: 'entity',
            userId: '642188372947d07020c1319d',
            entity_id: 'trans-women',
          },
          uniqueIds: [
            new ObjectID('5f9f6b9b5f9c4c0001a3b3a5'),
            new ObjectID('5f9f6b9b5f9c4c0001a3b3a6'),
          ],
        },
      ],
      'new-incidents': [
        {
          _id: {
            type: 'new-incidents',
            userId: '642188372947d07020c1319d',
          },
          uniqueIds: [
            new ObjectID('5f9f6b9b5f9c4c0001a3b3a7'),
            new ObjectID('5f9f6b9b5f9c4c0001a3b3a8'),
          ],
        },
      ],
    };

    // Mocked collection with stubbed methods
    const subscriptionsCollection = {
      // Select the canned response using the first type in the pipeline's
      // `$match.type.$in` list, mirroring how the migration builds its queries.
      aggregate: cy.stub().callsFake((query) => {
        const type = query[0].$match.type.$in[0];

        return {
          toArray: cy.stub().resolves(testSubscriptions[type]),
        };
      }),
      // Records every delete query (inspected below via getCall) and resolves
      // a fake driver result so the migration can proceed.
      deleteMany: cy.stub().callsFake((query) => {
        console.log('deleteMany called with:', query._id.$in[0].toString()); // Log the query
        return Promise.resolve({ deletedCount: 1 });
      }),
    };

    // Mocked context with database client, shaped like the umzug context the
    // migration destructures ({ context: { client } }).
    const context = {
      client: {
        db: cy.stub().returns({
          collection: cy.stub().withArgs('subscriptions').returns(subscriptionsCollection),
        }),
      },
    };

    // Execute the migration script
    cy.wrap(up({ context })).then(() => {
      // Assertions for each case
      // deleteMany is expected to be invoked once per duplicated group, in the
      // order the migration processes them: incident, submission-promoted,
      // entity, new-incidents.
      const args = subscriptionsCollection.deleteMany.getCall(0).args[0];

      const argsSubmissionPromoted = subscriptionsCollection.deleteMany.getCall(1).args[0];

      const argsEntity = subscriptionsCollection.deleteMany.getCall(2).args[0];

      const argsNewIncidents = subscriptionsCollection.deleteMany.getCall(3).args[0];

      // ObjectIDs are stringified on both sides so deep-equality compares
      // values rather than object identity.
      let modifiedObjectIncident = {
        _id: {
          $in: args._id.$in.map((id) => id.toString()),
        },
      };

      let modifiedObjectSubmissionPromoted = {
        _id: {
          $in: argsSubmissionPromoted._id.$in.map((id) => id.toString()),
        },
      };

      let modifiedObjectEntity = {
        _id: {
          $in: argsEntity._id.$in.map((id) => id.toString()),
        },
      };

      let modifiedObjectNewIncidents = {
        _id: {
          $in: argsNewIncidents._id.$in.map((id) => id.toString()),
        },
      };

      // The migration keeps the LAST id of each group (Array.prototype.pop),
      // so every id except the last should have been passed to deleteMany —
      // hence slice(0, length - 1) on each fixture's uniqueIds.
      expect(modifiedObjectIncident).to.be.deep.equal({
        _id: {
          $in: testSubscriptions['incident'][0].uniqueIds.slice(0, 1).map((id) => id.toString()),
        },
      });

      expect(modifiedObjectSubmissionPromoted).to.be.deep.equal({
        _id: {
          $in: testSubscriptions['incident'][1].uniqueIds.slice(0, 2).map((id) => id.toString()),
        },
      });

      expect(modifiedObjectEntity).to.be.deep.equal({
        _id: {
          $in: testSubscriptions['entity'][0].uniqueIds.slice(0, 1).map((id) => id.toString()),
        },
      });

      expect(modifiedObjectNewIncidents).to.be.deep.equal({
        _id: {
          $in: testSubscriptions['new-incidents'][0].uniqueIds
            .slice(0, 1)
            .map((id) => id.toString()),
        },
      });
    });
  });
});
Original file line number Diff line number Diff line change
@@ -0,0 +1,113 @@
const { ObjectID } = require('bson');

const config = require('../config');

/** @type {import('umzug').MigrationFn<any>} */
exports.up = async ({ context: { client } }) => {
const db = client.db(config.realm.production_db.db_custom_data);

const subscriptions = db.collection('subscriptions');

// Remove duplicates of type "incident" and "submission-promoted", based on type, incident_id and userId

const result = await subscriptions
.aggregate([
{
$match: {
type: { $in: ['incident', 'submission-promoted'] },
},
},
{
$group: {
_id: { incident_id: '$incident_id', type: '$type', userId: '$userId' },
uniqueIds: { $addToSet: '$_id' },
count: { $sum: 1 },
},
},
{
$match: {
count: { $gt: 1 },
},
},
])
.toArray();

await removeDuplicates(subscriptions, result);

// Remove duplicates of type "entity", based on type, entityId and userId
const resultEntities = await subscriptions
.aggregate([
{
$match: {
type: { $in: ['entity'] },
},
},
{
$group: {
_id: { entityId: '$entityId', type: '$type', userId: '$userId' },
uniqueIds: { $addToSet: '$_id' },
count: { $sum: 1 },
},
},
{
$match: {
count: { $gt: 1 },
},
},
])
.toArray();

await removeDuplicates(subscriptions, resultEntities);

// Remove duplicates of type "new-incidents" based on type, incident_id and userId

const resultNewIncidents = await subscriptions
.aggregate([
{
$match: {
type: { $in: ['new-incidents'] },
},
},
{
$group: {
_id: { incident_id: '$incident_id', type: '$type', userId: '$userId' },
uniqueIds: { $addToSet: '$_id' },
count: { $sum: 1 },
},
},
{
$match: {
count: { $gt: 1 },
},
},
])
.toArray();

await removeDuplicates(subscriptions, resultNewIncidents);
};

/**
 * Delete all but one document from every duplicated subscription group.
 *
 * @param {object} subscriptions - The `subscriptions` Mongo collection.
 * @param {Array} results - Aggregation groups; each has `_id` (grouping
 *   fields), `uniqueIds` (duplicated document ids) and `count`.
 */
async function removeDuplicates(subscriptions, results) {
  for (const doc of results) {
    const candidateIds = doc.uniqueIds.map((id) => new ObjectID(id));

    // Groups missing a type or user cannot be attributed; leave them untouched.
    if (!(doc._id.type && doc._id.userId)) {
      continue;
    }

    // Describe the group being pruned: incident-scoped, entity-scoped, or
    // neither (new-incidents).
    const scope = doc._id.incident_id
      ? ' and for incident ' + doc._id.incident_id
      : doc._id.entityId
      ? ' and for entity ' + doc._id.entityId
      : '';

    console.log(
      `Removing ${doc.count - 1} duplicated subscriptions of type ${doc._id.type} for user ${doc._id.userId} ${scope}`
    );

    candidateIds.pop(); // Keep exactly one document of the group.

    const deleteResult = await subscriptions.deleteMany({
      _id: { $in: candidateIds },
    });

    console.log('Delete Result: ', deleteResult);
  }
}

0 comments on commit fbe541c

Please sign in to comment.