Commit

Merge branch 'devel' into CB-5713-accessing-an-existing-database-connection-via-a-link-does-not-work
  • Loading branch information
dariamarutkina authored Oct 14, 2024
2 parents a0321ec + 439c806 commit f666258
Showing 18 changed files with 263 additions and 64 deletions.
2 changes: 1 addition & 1 deletion server/pom.xml
@@ -33,10 +33,10 @@
<id>full-build</id>
<activation><property><name>!plain-api-server</name></property></activation>
<modules>
<module>test</module>
<module>drivers</module>
<!-- Products -->
<module>product</module>
<module>test</module>
</modules>
</profile>
</profiles>
@@ -59,7 +59,7 @@ export function useOffsetPagination<TResource extends CachedMapResource<any, any
const resource = useService(ctor);
const pageInfo = resource.offsetPagination.getPageInfo(createPageKey(0, 0, targetKey));
const offset = Math.max(
(pageInfo ? getNextPageOffset(pageInfo) : CACHED_RESOURCE_DEFAULT_PAGE_OFFSET) - pageSize,
(pageInfo ? getNextPageOffset(pageInfo) : CACHED_RESOURCE_DEFAULT_PAGE_OFFSET) - (pageInfo?.end ?? pageSize),
CACHED_RESOURCE_DEFAULT_PAGE_OFFSET,
);

@@ -86,7 +86,7 @@ export function useOffsetPagination<TResource extends CachedMapResource<any, any
},
loadMore() {
if (this.hasNextPage) {
this._key = createPageKey(this._key.options.offset + this._key.options.limit, this._key.options.limit, this._target);
this._key = createPageKey(this.offset + this._key.options.limit, this._key.options.limit, this._target);
}
},
refresh() {
@@ -102,7 +102,7 @@ export function useOffsetPagination<TResource extends CachedMapResource<any, any
loadMore: action.bound,
refresh: action.bound,
},
{ resource },
{ resource, offset },
);

if (!resource.isIntersect(targetKey, pagination._target)) {
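A minimal worked sketch of the new offset arithmetic in this hunk (not the hook itself; pageInfo, the constants, and the concrete numbers are assumed stand-ins for illustration):

// Assume the resource cache already covers rows [0, 250) and reports end = 250,
// so getNextPageOffset(pageInfo) would return 250 (see the getNextPageOffset change below).
const CACHED_RESOURCE_DEFAULT_PAGE_OFFSET = 0;
const pageSize = 100;
const pageInfo: { end?: number } | undefined = { end: 250 };
const nextPageOffset = pageInfo ? 250 : CACHED_RESOURCE_DEFAULT_PAGE_OFFSET;

// Old: offset = max(250 - pageSize, 0) = 150.
// New: offset = max(250 - (pageInfo?.end ?? pageSize), 0) = 0.
const offset = Math.max(nextPageOffset - (pageInfo?.end ?? pageSize), CACHED_RESOURCE_DEFAULT_PAGE_OFFSET);

// loadMore() then advances from this computed offset instead of the key's own offset:
// nextKeyOffset = offset + key.options.limit.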
4 changes: 2 additions & 2 deletions webapp/packages/core-cli/configs/jest.config.js
@@ -11,9 +11,9 @@ const path = require('path');
/** @type {import('ts-jest').JestConfigWithTsJest} */
module.exports = {
testEnvironment: require.resolve('../tests/test.environment.js'),
rootDir: path.resolve('./'),
rootDir: path.resolve('.'),
moduleFileExtensions: ['js', 'jsx', 'json'],
testMatch: ['<rootDir>/packages/*/dist/?(*.)+(spec|test).js?(x)', '<rootDir>/dist/?(*.)+(spec|test).js?(x)'],
testMatch: ['<rootDir>/packages/*/dist/**/?(*.)+(spec|test).js?(x)', '<rootDir>/dist/**/?(*.)+(spec|test).js?(x)'],
transformIgnorePatterns: [
'\\.pnp\\.[^\\/]+$',
'node_modules/(?!@ngrx|(?!deck.gl)|ng-dynamic)',
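The testMatch change only widens the glob. A sketch of which paths each pattern matches, assuming Jest's micromatch-style globbing where ** also matches zero directories (package and file names are just examples):

// '<rootDir>/packages/*/dist/?(*.)+(spec|test).js?(x)'
//   matches     packages/core-resource/dist/Foo.test.js
//   does NOT match packages/core-resource/dist/Resource/Foo.test.js
//
// '<rootDir>/packages/*/dist/**/?(*.)+(spec|test).js?(x)'
//   matches both of the paths above, so tests compiled into nested dist folders are picked up.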
@@ -14,9 +14,9 @@ import { TestService } from './TestService.js';

test('App Initialization', async () => {
const app = new App([manifest]);
const serviceProvider = app.getServiceProvider();

await (app as any).registerServices();
const serviceProvider = app.getServiceProvider();

const service = serviceProvider.getService(TestService);
const bootstrap = serviceProvider.getService(TestBootstrap);
@@ -110,8 +110,8 @@ export class DBObjectResource extends CachedMapResource<string, DBObject> {

this.offsetPagination.setPage(
isPageListKey
? CachedResourceOffsetPageListKey(offset, limit).setParent(parentKey || CachedResourceOffsetPageTargetKey(nodeId))
: CachedResourceOffsetPageKey(offset, limit).setParent(parentKey || CachedResourceOffsetPageTargetKey(nodeId)),
? CachedResourceOffsetPageListKey(offset, keys.length).setParent(parentKey || CachedResourceOffsetPageTargetKey(nodeId))
: CachedResourceOffsetPageKey(offset, keys.length).setParent(parentKey || CachedResourceOffsetPageTargetKey(nodeId)),
keys,
keys.length === limit,
);
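A short sketch (not DBObjectResource itself; the data values are hypothetical) of why the page is now recorded with keys.length instead of the requested limit:

// The backend may return fewer objects than requested; the cached page should only
// cover what actually arrived, and a short page signals that the listing is exhausted.
const limit = 100;
const keys = ['table_a', 'table_b', 'table_c']; // hypothetical last page of a listing

const recordedPageSize = keys.length;        // 3 — the range the cached page really covers
const hasNextPage = keys.length === limit;   // false — as in the setPage call above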
@@ -0,0 +1,167 @@
/*
* CloudBeaver - Cloud Database Manager
* Copyright (C) 2020-2024 DBeaver Corp and others
*
* Licensed under the Apache License, Version 2.0.
* you may not use this file except in compliance with the License.
*/
import { describe, expect, test } from '@jest/globals';

import { expandOffsetPageRange, getNextPageOffset, type ICachedResourceOffsetPage } from './CachedResourceOffsetPageKeys.js';
import type { IResourceOffsetPage } from './OffsetPagination/IResourceOffsetPage.js';
import { ResourceOffsetPage } from './OffsetPagination/ResourceOffsetPage.js';

describe('CachedResourceOffsetPageKeys', () => {
describe('expandOffsetPageRange', () => {
test('should add first page', () => {
const randomPage = getRandomPage(0, 100, false);
const pages: IResourceOffsetPage[] = [];
expandOffsetPageRange(pages, { offset: randomPage.from, limit: randomPage.to - randomPage.from }, randomPage.items, false, true);

expect(pages).toStrictEqual([randomPage]);
});

test('should add sequential pages', () => {
const pages: IResourceOffsetPage[] = [];
const initialPages: IResourceOffsetPage[] = [];

for (let i = 0; i < 10; i++) {
const randomPage = getRandomPage(i * 100, 100, false);
initialPages.push(randomPage);
expandOffsetPageRange(pages, { offset: randomPage.from, limit: randomPage.to - randomPage.from }, randomPage.items, false, true);
}

expect(pages).toStrictEqual(initialPages);
});

test('should add sequential pages with gaps', () => {
const pages: IResourceOffsetPage[] = [];
const initialPages: IResourceOffsetPage[] = [];

for (let i = 0; i < 5; i++) {
const randomPage = getRandomPage(i * 100, 100, false);
initialPages.push(randomPage);
expandOffsetPageRange(pages, { offset: randomPage.from, limit: randomPage.to - randomPage.from }, randomPage.items, false, true);
}

const randomPage = getRandomPage(6 * 100, 100, false);
initialPages.push(randomPage);
expandOffsetPageRange(pages, { offset: randomPage.from, limit: randomPage.to - randomPage.from }, randomPage.items, false, true);

expect(pages).toStrictEqual(initialPages);
});

test('should add page in a gap', () => {
const pages: IResourceOffsetPage[] = [];
const initialPages: IResourceOffsetPage[] = [];

for (let i = 0; i < 5; i++) {
const randomPage = getRandomPage(i * 100, 100, false);
initialPages.push(randomPage);
expandOffsetPageRange(pages, { offset: randomPage.from, limit: randomPage.to - randomPage.from }, randomPage.items, false, true);
}

const randomPage = getRandomPage(6 * 100, 100, false);
const gapIndex = initialPages.push(randomPage);
expandOffsetPageRange(pages, { offset: randomPage.from, limit: randomPage.to - randomPage.from }, randomPage.items, false, true);

const gapPage = getRandomPage(5 * 100, 100, false);
initialPages.splice(gapIndex - 1, 0, gapPage);
expandOffsetPageRange(pages, { offset: gapPage.from, limit: gapPage.to - gapPage.from }, gapPage.items, false, true);

expect(pages).toStrictEqual(initialPages);
});

test('should shrink pages', () => {
const pages: IResourceOffsetPage[] = [];
const initialPages: IResourceOffsetPage[] = [];

for (let i = 0; i < 10; i++) {
const randomPage = getRandomPage(i * 100, 100, false);
initialPages.push(randomPage);
expandOffsetPageRange(pages, { offset: randomPage.from, limit: randomPage.to - randomPage.from }, randomPage.items, false, true);
}

const randomPage = getRandomPage(50, 100, false);
initialPages[0]?.setSize(0, 50);
initialPages[1]?.setSize(150, 200);
initialPages.splice(1, 0, randomPage);
expandOffsetPageRange(pages, { offset: randomPage.from, limit: randomPage.to - randomPage.from }, randomPage.items, false, true);

expect(pages).toStrictEqual(initialPages);
});

test('should remove pages after end', () => {
const pages: IResourceOffsetPage[] = [];
const initialPages: IResourceOffsetPage[] = [];

for (let i = 0; i < 10; i++) {
const randomPage = getRandomPage(i * 100, 100, false);
initialPages.push(randomPage);
expandOffsetPageRange(pages, { offset: randomPage.from, limit: randomPage.to - randomPage.from }, randomPage.items, false, true);
}

const randomPage = getRandomPage(300, 100, false);
initialPages.splice(4);
expandOffsetPageRange(pages, { offset: randomPage.from, limit: randomPage.to - randomPage.from }, randomPage.items, false, false);

expect(pages).toStrictEqual(initialPages);
});
});

describe('getNextPageOffset', () => {
test('should return next page offset', () => {
const randomPage = getRandomPage(0, 100, false);
const pageInfo: ICachedResourceOffsetPage = {
pages: [randomPage],
};
expect(getNextPageOffset(pageInfo)).toBe(100);
});
test('should return next page offset with multiple pages', () => {
const pages = [];
for (let i = 0; i < 10; i++) {
pages.push(getRandomPage(i * 100, 100, false));
}
const pageInfo: ICachedResourceOffsetPage = {
pages,
};
expect(getNextPageOffset(pageInfo)).toBe(1000);
});
test('should return next page offset with multiple pages with gaps', () => {
const pages = [];
for (let i = 0; i < 10; i++) {
pages.push(getRandomPage(i * 100, 100, false));
}
pages.push(getRandomPage(11 * 100, 100, false));
const pageInfo: ICachedResourceOffsetPage = {
pages,
};
expect(getNextPageOffset(pageInfo)).toBe(1000);
});
test('should return next page offset with end', () => {
const pages = [];
for (let i = 0; i < 10; i++) {
pages.push(getRandomPage(i * 100, 100, false));
}
pages.push(getRandomPage(10 * 100, 20, false));
const pageInfo: ICachedResourceOffsetPage = {
end: 1020,
pages,
};
expect(getNextPageOffset(pageInfo)).toBe(1020);
});
});
});

function getRandomPage(offset: number, limit: number, outdate: boolean): IResourceOffsetPage {
const page = new ResourceOffsetPage();

page.setSize(offset, offset + limit);
page.setOutdated(outdate);
page.update(
0,
new Array(limit).fill(null).map((_, i) => i),
);

return page;
}
@@ -60,7 +60,7 @@ export function getNextPageOffset(info: ICachedResourceOffsetPage): number {
lastPage = page;
}

return lastPage?.to ?? CACHED_RESOURCE_DEFAULT_PAGE_OFFSET;
return Math.min(info.end ?? Number.MAX_SAFE_INTEGER, lastPage?.to ?? CACHED_RESOURCE_DEFAULT_PAGE_OFFSET);
}

export function isOffsetPageOutdated(pages: IResourceOffsetPage[], info: IOffsetPageInfo): boolean {
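A worked illustration of the Math.min clamp introduced above (a sketch with assumed numbers, not the resource code):

// Pages in the cache cover [0, 100) and [100, 200), but the resource reported end = 150,
// e.g. because the backend returned fewer rows than the last request asked for.
const lastPageTo = 200;                      // lastPage?.to after the loop
const end: number | undefined = 150;         // info.end

// Old: next offset = 200, which would request rows past the known end of the data.
// New: next offset = min(150, 200) = 150.
const nextOffset = Math.min(end ?? Number.MAX_SAFE_INTEGER, lastPageTo);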
@@ -98,7 +98,7 @@ export function isOffsetPageInRange({ pages, end }: ICachedResourceOffsetPage, i
return true;
}
}
return false;
return end !== undefined && end <= infoTo;
}
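Read literally, the changed return means: after the page scan finds nothing, the request is still reported as in range when end is defined and lies at or before the requested upper bound. A tiny sketch with assumed numbers:

// The resource reported that the data ends at offset 1020.
const end: number | undefined = 1020;

// A request whose upper bound reaches past that point: infoTo = 1100, and 1020 <= 1100,
// so nothing beyond the known end is left to load — report the range as in range.
const infoTo = 1100;
const inRange = end !== undefined && end <= infoTo; // true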

export function expandOffsetPageRange(
@@ -108,41 +108,64 @@ export function expandOffsetPageRange(
outdated: boolean,
hasNextPage: boolean,
): void {
const from = info.offset;
const to = info.offset + info.limit;
const initialFrom = info.offset;
const initialTo = info.offset + info.limit;

let pageInserted = false;
for (const page of pages) {
if (page.to <= from) {
continue;
}
const newPage = new ResourceOffsetPage().setSize(initialFrom, initialTo).update(info.offset, items).setOutdated(outdated);

if (!hasNextPage) {
if (page.from >= to) {
pages.splice(pages.indexOf(page));
break;
const mergedPages: IResourceOffsetPage[] = [];
let i = 0;

// Add all pages before the newPage
while (i < pages.length && pages[i]!.to <= newPage.from) {
mergedPages.push(pages[i]!);
i++;
}

// Adjust overlapping existing pages
while (i < pages.length && pages[i]!.from < newPage.to) {
const current = pages[i]!;
// If existing page starts before newPage
if (current.from < newPage.from) {
// Adjust the existing page to end at newPage.from
current.setSize(current.from, newPage.from);
if (current.to - current.from > 0) {
mergedPages.push(current);
}
}

if (page.from <= from && !pageInserted) {
if (page.from < from) {
page.setSize(page.from, from);
pages.splice(pages.indexOf(page) + 1, 0, new ResourceOffsetPage().setSize(from, to).update(from, items).setOutdated(outdated));
} else {
page.setSize(from, to).update(from, items).setOutdated(outdated);
// If existing page ends after newPage
if (current.to > newPage.to) {
// Adjust the existing page to start at newPage.to
current.setSize(newPage.to, current.to);
// Since we need to remove pages after newPage when hasNextPage is false,
// we only include this adjusted page if hasNextPage is true
if (hasNextPage && current.to - current.from > 0) {
mergedPages.push(current);
}
pageInserted = true;
continue;
}
i++;
}

if (page.isInRange(from, to)) {
pages.splice(pages.indexOf(page), 1);
// Add the newPage
mergedPages.push(newPage);

// Add the remaining pages after newPage if hasNextPage is true
if (hasNextPage) {
while (i < pages.length) {
mergedPages.push(pages[i]!);
i++;
}
} else {
// Since hasNextPage is false, we remove all pages after newPage
// No action needed here as we simply don't add them
}

const lastPage = pages[pages.length - 1];
// Remove zero-length ranges
const filteredPages = mergedPages.filter(range => range.to - range.from > 0);

if (!lastPage || lastPage.to <= from) {
pages.push(new ResourceOffsetPage().setSize(from, to).update(from, items).setOutdated(outdated));
}
// Sort the filtered pages
const sortedPages = filteredPages.sort((a, b) => a.from - b.from);

// Replace pages with the merged and sorted pages
pages.splice(0, pages.length, ...sortedPages);
}
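A condensed usage sketch of the rewritten merge, mirroring the 'should shrink pages' case from the new test file above (the import paths are the ones that test file uses):

import { expandOffsetPageRange } from './CachedResourceOffsetPageKeys.js';
import type { IResourceOffsetPage } from './OffsetPagination/IResourceOffsetPage.js';

const pages: IResourceOffsetPage[] = [];
const items = (n: number) => new Array(n).fill(null).map((_, i) => i);

// Two sequential pages: [0, 100) and [100, 200).
expandOffsetPageRange(pages, { offset: 0, limit: 100 }, items(100), false, true);
expandOffsetPageRange(pages, { offset: 100, limit: 100 }, items(100), false, true);

// Merging an overlapping page [50, 150) shrinks both neighbours:
// the result is [0, 50), [50, 150), [150, 200), sorted with zero-length ranges dropped.
expandOffsetPageRange(pages, { offset: 50, limit: 100 }, items(100), false, true);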
@@ -15,6 +15,7 @@ export interface IResourceOffsetPage {
get(from: number, to: number): any[];

isOutdated(): boolean;
isHasCommonSegment(range: IResourceOffsetPage): boolean;
isHasCommonSegment(from: number, to: number): boolean;
isInRange(from: number, to: number): boolean;

@@ -37,8 +37,14 @@ export class ResourceOffsetPage implements IResourceOffsetPage {
return this.outdated;
}

isHasCommonSegment(from: number, to: number): boolean {
return !(to < this.from || this.to <= from);
isHasCommonSegment(segment: IResourceOffsetPage): boolean;
isHasCommonSegment(from: number, to: number): boolean;
isHasCommonSegment(from: number | IResourceOffsetPage, to?: number): boolean {
if (to === undefined) {
to = (from as IResourceOffsetPage).to;
from = (from as IResourceOffsetPage).from;
}
return !(to < this.from || this.to <= (from as number));
}

isInRange(from: number, to: number): boolean {
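A small usage sketch of the new isHasCommonSegment overload (the page bounds here are assumed values):

const page = new ResourceOffsetPage().setSize(0, 100);
const other = new ResourceOffsetPage().setSize(50, 150);

page.isHasCommonSegment(50, 150); // numeric form, unchanged
page.isHasCommonSegment(other);   // new form: from/to are taken from the passed page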

0 comments on commit f666258
