Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
62 changes: 0 additions & 62 deletions static/app/actionCreators/projects.spec.tsx

This file was deleted.

80 changes: 1 addition & 79 deletions static/app/actionCreators/projects.tsx
Original file line number Diff line number Diff line change
@@ -1,16 +1,12 @@
import {queryOptions, skipToken, useQueryClient} from '@tanstack/react-query';
import type {Query} from 'history';
import chunk from 'lodash/chunk';
import debounce from 'lodash/debounce';

import {
addErrorMessage,
addLoadingMessage,
addSuccessMessage,
} from 'sentry/actionCreators/indicator';
import type {Client} from 'sentry/api';
import {t, tct} from 'sentry/locale';
import {ProjectsStatsStore} from 'sentry/stores/projectsStatsStore';
import {tct} from 'sentry/locale';
import {ProjectsStore} from 'sentry/stores/projectsStore';
import type {Team} from 'sentry/types/organization';
import type {Project} from 'sentry/types/project';
Expand All @@ -22,12 +18,9 @@ type UpdateParams = {
orgId: string;
projectId: string;
data?: Record<string, any>;
query?: Query;
};

export function update(api: Client, params: UpdateParams) {
ProjectsStatsStore.onUpdate(params.projectId, params.data as Partial<Project>);

const endpoint = `/projects/${params.orgId}/${params.projectId}/`;
return api
.requestPromise(endpoint, {
Expand All @@ -40,82 +33,11 @@ export function update(api: Client, params: UpdateParams) {
return data;
},
(err: Error) => {
ProjectsStatsStore.onUpdateError(err, params.projectId);
throw err;
}
);
}

// Queue of project ids still needing stats; populated by loadStatsForProject
// and drained (cleared) each time the debounced fetch below fires.
export const _projectStatsToFetch = new Set<string>();

// Max projects to query in a single request — larger batches risk a server
// timeout, so the debounced loader chunks its queue by this size.
const MAX_PROJECTS_TO_FETCH = 10;

/**
 * Request 24h stats for a batch of projects from the org projects endpoint.
 *
 * Builds an `id:<project>` search term per project and merges in any
 * caller-supplied query params (which may override `statsPeriod`/`query`).
 * Returns the promise from `api.requestPromise`.
 */
const _queryForStats = (
  api: Client,
  projects: string[],
  orgId: string,
  additionalQuery: Query | undefined
) => {
  // One `id:<project>` term per requested project, space-separated.
  const query: Query = {
    statsPeriod: '24h',
    query: projects.map(projectId => `id:${projectId}`).join(' '),
    ...additionalQuery,
  };

  return api.requestPromise(`/organizations/${orgId}/projects/`, {query});
};

/**
 * Debounced (50ms) batch loader for project stats.
 *
 * Drops ids the stats store already has, splits the remainder into
 * MAX_PROJECTS_TO_FETCH-sized batches to avoid server timeouts, and pushes
 * all results into ProjectsStatsStore. The shared `_projectStatsToFetch`
 * queue is cleared whether or not anything was fetched; fetch failures
 * surface as an error indicator.
 */
export const _debouncedLoadStats = debounce(
  (api: Client, projectSet: Set<string>, params: UpdateParams) => {
    // Ids the store already has stats for — no need to refetch these.
    const alreadyLoaded = new Set(
      Object.values(ProjectsStatsStore.getAll()).map(({id}) => id)
    );
    const toFetch = Array.from(projectSet).filter(
      projectId => !alreadyLoaded.has(projectId)
    );

    if (toFetch.length === 0) {
      _projectStatsToFetch.clear();
      return;
    }

    // Batch the ids so no single request is large enough to time out.
    const requests = chunk(toFetch, MAX_PROJECTS_TO_FETCH).map(batch =>
      _queryForStats(api, batch, params.orgId, params.query)
    );

    Promise.all(requests)
      .then(results => {
        // Each request resolves to a list of projects; merge them flat.
        ProjectsStatsStore.onStatsLoadSuccess(results.flat());
      })
      .catch(() => {
        addErrorMessage(t('Unable to fetch all project stats'));
      });

    // Reset the shared queue now that this flush has been kicked off.
    _projectStatsToFetch.clear();
  },
  50
);

/**
 * Queue `project` for a stats fetch and trigger the debounced batch loader.
 *
 * Ids accumulate in the shared `_projectStatsToFetch` set across rapid
 * calls; `_debouncedLoadStats` drains the set once it fires.
 */
export function loadStatsForProject(api: Client, project: string, params: UpdateParams) {
  _projectStatsToFetch.add(project);
  _debouncedLoadStats(api, _projectStatsToFetch, params);
}

export function transferProject(
api: Client,
orgId: string,
Expand Down
116 changes: 0 additions & 116 deletions static/app/stores/projectsStatsStore.tsx

This file was deleted.

6 changes: 4 additions & 2 deletions static/app/types/project.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,8 @@ export type AvatarProject = {
platform?: PlatformKey;
};

export type ProjectStats = TimeseriesValue[];

/**
* Matches the response from `ProjectSummarySerializer` used by
* `GET /organizations/{org}/projects/`.
Expand Down Expand Up @@ -73,8 +75,8 @@ interface ProjectSummary extends AvatarProject {
hasHealthData: boolean;
previousCrashFreeRate: number | null;
};
stats?: TimeseriesValue[];
transactionStats?: TimeseriesValue[];
stats?: ProjectStats;
transactionStats?: ProjectStats;
}

/**
Expand Down
20 changes: 10 additions & 10 deletions static/app/utils/api/useAggregatedQueryKeys.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ import {uniq} from 'sentry/utils/array/uniq';

const BUFFER_WAIT_MS = 20;

interface Props<AggregatableQueryKey, Data> {
interface Props<AggregatableQueryKey, Data, ResponseData = Data> {
/**
* The queryKey reducer
*
Expand All @@ -21,14 +21,14 @@ interface Props<AggregatableQueryKey, Data> {
*/
getQueryOptions: (
ids: readonly AggregatableQueryKey[]
) => UseQueryOptions<ApiResponse<Data>, Error, Data, ApiQueryKey>;
) => UseQueryOptions<ApiResponse<ResponseData>, Error, ResponseData, ApiQueryKey>;

/**
* Data reducer, to integrate new requests with the previous state
*/
responseReducer: (
prevState: undefined | Data,
result: ApiResponse<Data>,
result: ApiResponse<ResponseData>,
aggregates: readonly AggregatableQueryKey[]
) => undefined | Data;

Expand Down Expand Up @@ -82,13 +82,13 @@ function isQueryKeyInList(queryList: unknown[]) {
* - You will implement `responseReducer(prev: Data, result: ApiResponse<Data>)` which
* combines `defaultData` with the data that was fetched with the queryKey.
*/
export function useAggregatedQueryKeys<AggregatableQueryKey, Data>({
export function useAggregatedQueryKeys<AggregatableQueryKey, Data, ResponseData = Data>({
cacheKey,
getQueryOptions,
onError,
responseReducer,
bufferLimit = 50,
}: Props<AggregatableQueryKey, Data>) {
}: Props<AggregatableQueryKey, Data, ResponseData>) {
const queryClient = useQueryClient();
const cache = queryClient.getQueryCache();

Expand All @@ -105,14 +105,14 @@ export function useAggregatedQueryKeys<AggregatableQueryKey, Data>({
const prevQueryKeys = useRef<AggregatableQueryKey[]>([]);

const readCache = useCallback(
() =>
(aggregates = prevQueryKeys.current) =>
queryClient
.getQueriesData<ApiResponse<Data>>({
.getQueriesData<ApiResponse<ResponseData>>({
predicate: ({queryKey}) => isApiQueryKeyForUrl(queryKey),
})
.flatMap(([, val]) => (defined(val) ? [val] : []))
.reduce<Data | undefined>(
(prevValue, val) => responseReducer(prevValue, val, prevQueryKeys.current),
(prevValue, val) => responseReducer(prevValue, val, aggregates),
undefined
Comment thread
scttcper marked this conversation as resolved.
),
[isApiQueryKeyForUrl, queryClient, responseReducer]
Expand Down Expand Up @@ -208,7 +208,7 @@ export function useAggregatedQueryKeys<AggregatableQueryKey, Data>({
.forEach(queryKey => queryClient.setQueryData(queryKey, true));

if (newQueryKeys.length) {
setData(readCache());
setData(readCache(queryKeys));
// Grab anything in the queue, including the newQueryKeys
const existingQueuedQueries = cache.findAll({
queryKey: ['aggregate', cacheKey, url, 'queued'],
Expand Down Expand Up @@ -242,5 +242,5 @@ export function useAggregatedQueryKeys<AggregatableQueryKey, Data>({
});
}, [cache, isApiQueryKeyForUrl, readCache]);

return useMemo(() => ({buffer, data}), [buffer, data]);
return useMemo(() => ({buffer, data, read: readCache}), [buffer, data, readCache]);
}
Loading
Loading