Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

support huge amount of datacubes #1004

Open
wants to merge 2 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
50 changes: 34 additions & 16 deletions src/client/utils/ajax/ajax.ts
Original file line number Diff line number Diff line change
Expand Up @@ -18,8 +18,9 @@
import axios from "axios";
import { Dataset, DatasetJS, Environment, Executor, Expression } from "plywood";
import { ClientAppSettings } from "../../../common/models/app-settings/app-settings";
import { SerializedDataCube } from "../../../common/models/data-cube/data-cube";
import { isEnabled, Oauth } from "../../../common/models/oauth/oauth";
import { ClientSources, SerializedSources } from "../../../common/models/sources/sources";
import { ClientSources } from "../../../common/models/sources/sources";
import { deserialize } from "../../deserializers/sources";
import { getToken, mapOauthError } from "../../oauth/oauth";

Expand Down Expand Up @@ -54,12 +55,12 @@ export class Ajax {

const headers = Ajax.headers(oauth);
return axios({ method, url, data, timeout, validateStatus, headers })
.then(res => {
return res.data;
})
.catch(error => {
throw mapOauthError(oauth, error);
});
.then(res => {
return res.data;
})
.catch(error => {
throw mapOauthError(oauth, error);
});
}

static queryUrlExecutorFactory(dataCubeName: string, { oauth, clientTimeout: timeout }: ClientAppSettings): Executor {
Expand All @@ -69,17 +70,34 @@ export class Ajax {
const timezone = env ? env.timezone : null;
const data = { dataCube: dataCubeName, expression: ex.toJS(), timezone };
return Ajax.query<{ result: DatasetJS }>({ method, url, timeout, data }, oauth)
.then(res => Dataset.fromJS(res.result));
.then(res => Dataset.fromJS(res.result));
};
}

static sources(appSettings: ClientAppSettings): Promise<ClientSources> {
const headers = Ajax.headers(appSettings.oauth);
return axios.get<SerializedSources>("sources", { headers })
.then(resp => resp.data)
.catch(error => {
throw mapOauthError(appSettings.oauth, error);
})
.then(sourcesJS => deserialize(sourcesJS, appSettings));
/**
 * Loads the complete client sources from the server: the cluster list and
 * every page of data cubes (fetched in parallel), then deserializes them
 * into a ClientSources instance.
 *
 * @param appSettings client settings supplying oauth config for headers
 *   and for mapping transport errors.
 * @throws whatever mapOauthError produces for the underlying request error.
 */
static async sources(appSettings: ClientAppSettings): Promise<ClientSources> {
  const { oauth } = appSettings;
  try {
    const headers = Ajax.headers(oauth);
    // Clusters and data cubes are independent requests — run them concurrently.
    const clustersPromise = this.fetchClusters(headers);
    const dataCubesPromise = this.fetchDataCubes(headers);
    const [clusters, dataCubes] = await Promise.all([clustersPromise, dataCubesPromise]);
    return deserialize({ clusters, dataCubes }, appSettings);
  } catch (error) {
    // Translate auth-related failures (e.g. expired token) into oauth errors.
    throw mapOauthError(oauth, error);
  }
}

private static async fetchDataCubes(headers: {}) {
let dataCubesResult: SerializedDataCube[] = [];
let isDone = false;
let page = 0;
while (!isDone) {
const { dataCubes, isDone: isDoneResult } = (await axios.get(`sources/dataCubes?page=${page}`, { headers })).data;
dataCubesResult = [...dataCubesResult, ...dataCubes];
isDone = isDoneResult;
page += 1;
}
return dataCubesResult;
}

/** Fetches the serialized cluster list from the server (`sources/clusters`). */
private static async fetchClusters(headers: {}) {
  const response = await axios.get("sources/clusters", { headers });
  return response.data;
}
}
19 changes: 4 additions & 15 deletions src/common/models/sources/sources.ts
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,6 @@ import {
Cluster,
ClusterJS,
fromConfig as clusterFromConfig,
serialize as serializeCluster,
SerializedCluster
} from "../cluster/cluster";
import { findCluster } from "../cluster/find-cluster";
Expand Down Expand Up @@ -96,20 +95,10 @@ export function fromConfig(config: SourcesJS, logger: Logger): Sources {
};
}

export function serialize({
clusters: serverClusters,
dataCubes: serverDataCubes
}: Sources): SerializedSources {
const clusters = serverClusters.map(serializeCluster);

const dataCubes = serverDataCubes
.filter(dc => isQueryable(dc))
.map(serializeDataCube);

return {
clusters,
dataCubes
};
export function serializeDataCubes(dataCubes: DataCube[]) {
return dataCubes
.filter(isQueryable)
.map(serializeDataCube);
}

export function getDataCubesForCluster(sources: Sources, clusterName: string): DataCube[] {
Expand Down
63 changes: 42 additions & 21 deletions src/server/routes/sources/sources.ts
Original file line number Diff line number Diff line change
Expand Up @@ -16,33 +16,54 @@

import { Request, Response, Router } from "express";
import { errorToMessage } from "../../../common/logger/logger";
import { serialize } from "../../../common/models/sources/sources";
import { serialize as serializeCluster } from "../../../common/models/cluster/cluster";
import { serializeDataCubes } from "../../../common/models/sources/sources";
import { checkAccess } from "../../utils/datacube-guard/datacube-guard";
import { SettingsManager } from "../../utils/settings-manager/settings-manager";

export function sourcesRouter(settings: Pick<SettingsManager, "getSources" | "logger">) {

const logger = settings.logger.setLoggerId("Sources");
const logger = settings.logger.setLoggerId("Sources");

const router = Router();
const router = Router();
const MAX_DATA_CUBES_IN_REQUEST = 1000;
router.get("/clusters", async (req: Request, res: Response) => {
try {
const { clusters } = await settings.getSources();
res.json(clusters.map(serializeCluster));
} catch (error) {
logger.error(errorToMessage(error));

router.get("/", async (req: Request, res: Response) => {
res.status(500).send({
error: "Can't fetch settings",
message: error.message
});
}
});
router.get("/dataCubes", async (req: Request, res: Response) => {
try {
const { dataCubes } = await settings.getSources();
const relevantDataCubes = serializeDataCubes(dataCubes.filter(dataCube => checkAccess(dataCube, req.headers)));
if (relevantDataCubes.length < MAX_DATA_CUBES_IN_REQUEST) {
res.json({ dataCubes: relevantDataCubes, isDone: true });
} else {
const currentPageNumber = (req.query["page"] && parseInt(req.query["page"] as string, 10)) || 0;
const dataSourcesStart = currentPageNumber * MAX_DATA_CUBES_IN_REQUEST;
const dataSourcesEnd = dataSourcesStart + MAX_DATA_CUBES_IN_REQUEST;
const isDone = dataSourcesEnd >= dataCubes.length;
res.json({
dataCubes: relevantDataCubes.slice(dataSourcesStart, dataSourcesEnd),
isDone
});
}
} catch (error) {
logger.error(errorToMessage(error));
res.status(500).send({
error: "Can't fetch settings",
message: error.message
});
}
});

try {
const { clusters, dataCubes } = await settings.getSources();
res.json(serialize({
clusters,
dataCubes: dataCubes.filter( dataCube => checkAccess(dataCube, req.headers) )
}));
} catch (error) {
logger.error(errorToMessage(error));

res.status(500).send({
error: "Can't fetch settings",
message: error.message
});
}
});

return router;
return router;
}