diff --git a/packages/analytics/README.md b/packages/analytics/README.md index c51979b37..6dfd78ee7 100644 --- a/packages/analytics/README.md +++ b/packages/analytics/README.md @@ -66,9 +66,9 @@ transformIgnorePatterns: [ You just need to import and use what you need. All imports should be done from the root of the package like in the following example: ```js -import { FromParameterTypes } from '@farfetch/blackout-analytics'; +import { FromParameterType } from '@farfetch/blackout-analytics'; -console.log(FromParameterTypes.BAG); +console.log(FromParameterType.BAG); ``` ## Contributing diff --git a/packages/client/src/__tests__/__snapshots__/index.test.ts.snap b/packages/client/src/__tests__/__snapshots__/index.test.ts.snap index 7978583c7..28f980617 100644 --- a/packages/client/src/__tests__/__snapshots__/index.test.ts.snap +++ b/packages/client/src/__tests__/__snapshots__/index.test.ts.snap @@ -809,6 +809,18 @@ Object { "Merchant": 3, "Other": 4, }, + "SeoFileType": Object { + "Categories": "Categories", + "CustomContentTypes": "CustomContentTypes", + "Homepage": "Homepage", + "None": "None", + "Other": "Other", + "Pages": "Pages", + "Posts": "Posts", + "Products": "Products", + "Sets": "Sets", + "Sitemap": "Sitemap", + }, "SeoPageType": Object { "0": "None", "1": "Default", @@ -1073,6 +1085,7 @@ Object { "getReturnPickupRescheduleRequest": [Function], "getReturnPickupRescheduleRequests": [Function], "getReturnWorkflow": [Function], + "getSEOFiles": [Function], "getSEOMetadata": [Function], "getSearchContents": [Function], "getSearchDidYouMean": [Function], diff --git a/packages/client/src/contents/__fixtures__/seoFiles.fixtures.ts b/packages/client/src/contents/__fixtures__/seoFiles.fixtures.ts new file mode 100644 index 000000000..27758d0b3 --- /dev/null +++ b/packages/client/src/contents/__fixtures__/seoFiles.fixtures.ts @@ -0,0 +1,19 @@ +import { rest, type RestHandler } from 'msw'; +import type { SEOFiles } from '../types/index.js'; + +const path = '/api/content/v1/seoFiles'; + +const fixtures = { + get: { + success: (response: SEOFiles): RestHandler => + rest.get(path, (_req, res, ctx) => + res(ctx.status(200), ctx.json(response)), + ), + failure: (): RestHandler => + rest.get(path, (_req, res, ctx) => + res(ctx.status(404), ctx.json({ message: 'stub error' })), + ), + }, +}; + +export default fixtures; diff --git a/packages/client/src/contents/__tests__/__snapshots__/getSEOFiles.test.ts.snap b/packages/client/src/contents/__tests__/__snapshots__/getSEOFiles.test.ts.snap new file mode 100644 index 000000000..f2759d782 --- /dev/null +++ b/packages/client/src/contents/__tests__/__snapshots__/getSEOFiles.test.ts.snap @@ -0,0 +1,11 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`SEO Files client getSEOFiles() should handle a client request error 1`] = ` +Object { + "code": "-1", + "message": "stub error", + "name": "AxiosError", + "status": 404, + "transportLayerErrorCode": "ERR_BAD_REQUEST", +} +`; diff --git a/packages/client/src/contents/__tests__/getSEOFiles.test.ts b/packages/client/src/contents/__tests__/getSEOFiles.test.ts new file mode 100644 index 000000000..db004efac --- /dev/null +++ b/packages/client/src/contents/__tests__/getSEOFiles.test.ts @@ -0,0 +1,42 @@ +import { getSEOFiles } from '../index.js'; +import { + seoFilesData, + seoFilesQuery, +} from 'tests/__fixtures__/contents/seoFiles.fixtures.mjs'; +import client from '../../helpers/client/index.js'; +import fixtures from '../__fixtures__/seoFiles.fixtures.js'; +import mswServer from 
'../../../tests/mswServer.js';
+
+describe('SEO Files client', () => {
+  const expectedConfig = undefined;
+
+  beforeEach(() => {
+    jest.clearAllMocks();
+  });
+
+  describe('getSEOFiles()', () => {
+    const spy = jest.spyOn(client, 'get');
+
+    it('should handle a client request successfully', async () => {
+      mswServer.use(fixtures.get.success(seoFilesData));
+
+      await expect(getSEOFiles(seoFilesQuery)).resolves.toEqual(seoFilesData);
+
+      expect(spy).toHaveBeenCalledWith(
+        '/content/v1/seofiles?hostId=1234&name=siteSEOFiles&page=1&pageSize=60',
+        expectedConfig,
+      );
+    });
+
+    it('should handle a client request error', async () => {
+      mswServer.use(fixtures.get.failure());
+
+      await expect(getSEOFiles(seoFilesQuery)).rejects.toMatchSnapshot();
+
+      expect(spy).toHaveBeenCalledWith(
+        '/content/v1/seofiles?hostId=1234&name=siteSEOFiles&page=1&pageSize=60',
+        expectedConfig,
+      );
+    });
+  });
+});
diff --git a/packages/client/src/contents/getSEOFiles.ts b/packages/client/src/contents/getSEOFiles.ts
new file mode 100644
index 000000000..fa53ee685
--- /dev/null
+++ b/packages/client/src/contents/getSEOFiles.ts
@@ -0,0 +1,18 @@
+import { adaptError } from '../helpers/client/formatError.js';
+import client from '../helpers/client/index.js';
+import join from 'proper-url-join';
+import type { Config } from '../types/index.js';
+import type { GetSEOFilesQuery, SEOFiles } from './types/seoFiles.types.js';
+
+const getSEOFiles = (
+  query: GetSEOFilesQuery,
+  config?: Config,
+): Promise<SEOFiles> =>
+  client
+    .get(join('/content/v1/seofiles', { query }), config)
+    .then(response => response.data)
+    .catch(error => {
+      throw adaptError(error);
+    });
+
+export default getSEOFiles;
diff --git a/packages/client/src/contents/index.ts b/packages/client/src/contents/index.ts
index 1cb6509dd..03f4e70ad 100644
--- a/packages/client/src/contents/index.ts
+++ b/packages/client/src/contents/index.ts
@@ -7,5 +7,6 @@ export { default as getContentPage } from './getContentPage.js';
 export { default as getContentTypes } from './getContentTypes.js';
 export { default as getSearchContents } from './getSearchContents.js';
 export { default as getSEOMetadata } from './getSEOMetadata.js';
+export { default as getSEOFiles } from './getSEOFiles.js';
 
 export * from './types/index.js';
diff --git a/packages/client/src/contents/types/index.ts b/packages/client/src/contents/types/index.ts
index 70037ab78..d58d758db 100644
--- a/packages/client/src/contents/types/index.ts
+++ b/packages/client/src/contents/types/index.ts
@@ -7,3 +7,4 @@ export * from './contents.types.js';
 export * from './contentPage.types.js';
 export * from './contentTypes.types.js';
 export * from './seoMetadata.types.js';
+export * from './seoFiles.types.js';
diff --git a/packages/client/src/contents/types/seoFiles.types.ts b/packages/client/src/contents/types/seoFiles.types.ts
new file mode 100644
index 000000000..f1ff6c338
--- /dev/null
+++ b/packages/client/src/contents/types/seoFiles.types.ts
@@ -0,0 +1,42 @@
+import type { Config, PagedResponse } from '../../types/index.js';
+
+export type GetSEOFiles = (
+  query: GetSEOFilesQuery,
+  config?: Config,
+) => Promise<SEOFiles>;
+
+export enum SeoFileType {
+  None = 'None',
+  Sitemap = 'Sitemap',
+  Homepage = 'Homepage',
+  Pages = 'Pages',
+  Posts = 'Posts',
+  Products = 'Products',
+  Sets = 'Sets',
+  Other = 'Other',
+  CustomContentTypes = 'CustomContentTypes',
+  Categories = 'Categories',
+}
+
+export type GetSEOFilesQuery = {
+  // The name (also known as "slug" in the legacy CMS)
+  name: string;
+  // The hostId
+  hostId: number;
+  // Number of the page to get, starting at 1. The default is 1.
+  page?: number;
+  // Size of each page, as a number between 1 and 180. The default is 60.
+  pageSize?: number;
+};
+
+export type SEOFiles = PagedResponse<SEOFile>;
+
+export type SEOFile = {
+  name: string;
+  path?: string;
+  uploadDate: string;
+  hostId: number;
+  subfolderStructure?: string;
+  type: SeoFileType;
+  content?: string;
+};
diff --git a/packages/react/src/__tests__/__snapshots__/index.test.ts.snap b/packages/react/src/__tests__/__snapshots__/index.test.ts.snap
index df360e8e4..28accb812 100644
--- a/packages/react/src/__tests__/__snapshots__/index.test.ts.snap
+++ b/packages/react/src/__tests__/__snapshots__/index.test.ts.snap
@@ -335,6 +335,7 @@ Object {
   "useSearchDidYouMean": [Function],
   "useSearchIntents": [Function],
   "useSearchSuggestions": [Function],
+  "useSeoFiles": [Function],
   "useSeoMetadata": [Function],
   "useSubscriptionPackages": [Function],
   "useTopCategories": [Function],
diff --git a/packages/react/src/contents/hooks/__tests__/__fixtures__/useSeoFiles.fixtures.ts b/packages/react/src/contents/hooks/__tests__/__fixtures__/useSeoFiles.fixtures.ts
new file mode 100644
index 000000000..04e67e174
--- /dev/null
+++ b/packages/react/src/contents/hooks/__tests__/__fixtures__/useSeoFiles.fixtures.ts
@@ -0,0 +1,83 @@
+import { type BlackoutError, SeoFileType } from '@farfetch/blackout-client';
+import { generateSEOFilesHash } from '@farfetch/blackout-redux';
+import { mockInitialState } from './useSeoMetadata.fixtures.js';
+import { seoFilesQuery } from 'tests/__fixtures__/contents/seoFiles.fixtures.mjs';
+
+const contentSEOFilesHash = generateSEOFilesHash(seoFilesQuery);
+
+export const mockSEOFilesState = {
+  contents: {
+    ...mockInitialState.contents,
+    seoFiles: {
+      error: {},
+      isLoading: {},
+      result: {
+        [contentSEOFilesHash]: {
+          number: 1,
+          totalItems: 1,
+          totalPages: 1,
+          entries: [
+            {
+              name: 'string',
+              path: 'string',
+              uploadDate: '2023-05-23T17:47:02.770Z',
+              hostId: 0,
+              subfolderStructure: 'string',
+              type: SeoFileType.None,
+              content: 'string',
+            },
+          ],
+        },
+      },
+    },
+  },
+};
+
+export const mockSEOFilesLoadingState = {
+  contents: {
+    ...mockInitialState.contents,
+    seoFiles: {
+      isLoading: { [contentSEOFilesHash]: true },
+      error: {},
+      data: undefined,
+      isFetched: false,
+    },
+  },
+};
+
+export const mockSEOFilesErrorState = {
+  contents: {
+    ...mockInitialState.contents,
+    seoFiles: {
+      isLoading: {},
+      error: { [contentSEOFilesHash]: new Error('Error') as BlackoutError },
+      data: undefined,
+      isFetched: true,
+    },
+  },
+};
+
+export const result = {
+  error: undefined,
+  isLoading: false,
+  isFetched: true,
+  data: {
+    entries: [
+      {
+        name: 'string',
+        path: 'string',
+        uploadDate: '2023-05-23T17:47:02.770Z',
+        hostId: 0,
+        subfolderStructure: 'string',
+        type: SeoFileType.None,
+        content: 'string',
+      },
+    ],
+    number: 1,
+    totalItems: 1,
+    totalPages: 1,
+  },
+  actions: {
+    fetch: expect.any(Function),
+  },
+};
diff --git a/packages/react/src/contents/hooks/__tests__/__fixtures__/useSeoMetadata.fixtures.ts b/packages/react/src/contents/hooks/__tests__/__fixtures__/useSeoMetadata.fixtures.ts
index d9cf04d9e..e8b4d32f9 100644
--- a/packages/react/src/contents/hooks/__tests__/__fixtures__/useSeoMetadata.fixtures.ts
+++ b/packages/react/src/contents/hooks/__tests__/__fixtures__/useSeoMetadata.fixtures.ts
@@ -6,7 +6,12 @@ export const mockInitialState = {
   metadata: {
     error: {},
     isLoading: {},
-    result: null,
+    result: undefined,
+  },
+  seoFiles: {
+    error: {},
+    isLoading: {},
+    result: 
undefined,
   },
   searchResults: {},
   contentTypes: {
diff --git a/packages/react/src/contents/hooks/__tests__/useSeoFiles.test.tsx b/packages/react/src/contents/hooks/__tests__/useSeoFiles.test.tsx
new file mode 100644
index 000000000..9ab03a008
--- /dev/null
+++ b/packages/react/src/contents/hooks/__tests__/useSeoFiles.test.tsx
@@ -0,0 +1,138 @@
+import { cleanup, renderHook } from '@testing-library/react';
+import { fetchSEOFiles } from '@farfetch/blackout-redux';
+import { mockInitialState } from './__fixtures__/useSeoMetadata.fixtures.js';
+import {
+  mockSEOFilesErrorState,
+  mockSEOFilesLoadingState,
+  mockSEOFilesState,
+  result,
+} from './__fixtures__/useSeoFiles.fixtures.js';
+import { seoFilesQuery as query } from 'tests/__fixtures__/contents/seoFiles.fixtures.mjs';
+import { useSeoFiles } from '../index.js';
+import { withStore } from '../../../../tests/helpers/index.js';
+
+jest.mock('@farfetch/blackout-redux', () => {
+  const original = jest.requireActual('@farfetch/blackout-redux');
+
+  return {
+    ...original,
+    fetchSEOFiles: jest.fn(() => ({ type: 'foo-bar' })),
+  };
+});
+
+describe('useSeoFiles', () => {
+  const options = { enableAutoFetch: false };
+
+  beforeEach(() => {
+    jest.clearAllMocks();
+  });
+
+  afterEach(() => cleanup());
+
+  it('should return correctly with initial state', () => {
+    const {
+      result: { current },
+    } = renderHook(() => useSeoFiles(query, options), {
+      wrapper: withStore(mockInitialState),
+    });
+
+    const emptyResult = {
+      error: undefined,
+      isLoading: false,
+      isFetched: false,
+      data: undefined,
+      actions: {
+        fetch: expect.any(Function),
+      },
+    };
+
+    expect(current).toEqual(emptyResult);
+  });
+
+  it('should return loading state', () => {
+    const {
+      result: { current },
+    } = renderHook(() => useSeoFiles(query, options), {
+      wrapper: withStore(mockSEOFilesLoadingState),
+    });
+
+    const { isLoading, error, isFetched } = current;
+
+    expect(isLoading).toBe(true);
+    expect(error).toBeUndefined();
+    expect(isFetched).toBeFalsy();
+  });
+
+  it('should return data', () => {
+    const {
+      result: { current },
+    } = renderHook(() => useSeoFiles(query, options), {
+      wrapper: withStore(mockSEOFilesState),
+    });
+
+    expect(current).toEqual(result);
+  });
+
+  it('should return error state', () => {
+    const {
+      result: { current },
+    } = renderHook(() => useSeoFiles(query, options), {
+      wrapper: withStore(mockSEOFilesErrorState),
+    });
+
+    const { error, isFetched } = current;
+
+    expect(error).toBeTruthy();
+    expect(isFetched).toBeTruthy();
+  });
+
+  describe('options', () => {
+    it('should fetch data if the `enableAutoFetch` option is not passed', () => {
+      renderHook(() => useSeoFiles(query), {
+        wrapper: withStore(mockInitialState),
+      });
+
+      expect(fetchSEOFiles).toHaveBeenCalledWith({
+        hostId: 1234,
+        name: 'siteSEOFiles',
+        page: 1,
+        pageSize: 60,
+      });
+    });
+
+    it('should not fetch data if `enableAutoFetch` option is false', () => {
+      const options = { enableAutoFetch: false };
+
+      renderHook(() => useSeoFiles(query, options), {
+        wrapper: withStore(mockInitialState),
+      });
+
+      expect(fetchSEOFiles).not.toHaveBeenCalled();
+    });
+  });
+
+  describe('actions', () => {
+    it('should call `fetch` action', () => {
+      const options = { enableAutoFetch: false };
+
+      const {
+        result: {
+          current: {
+            actions: { fetch },
+          },
+        },
+      } = renderHook(() => useSeoFiles(query, options), {
+        wrapper: withStore(mockInitialState),
+      });
+
+      fetch();
+
+      expect(fetchSEOFiles).toHaveBeenCalledWith({
+        hostId: 1234,
+        name: 'siteSEOFiles',
+        page: 1,
+        pageSize: 60,
+      });
+    });
+  });
+});
diff --git a/packages/react/src/contents/hooks/index.ts b/packages/react/src/contents/hooks/index.ts
index 798781729..d4ddf11ca 100644
--- a/packages/react/src/contents/hooks/index.ts
+++ b/packages/react/src/contents/hooks/index.ts
@@ -6,3 +6,4 @@ export { default as useCommercePages } from './useCommercePages.js';
 export { default as useContentPage } from './useContentPage.js';
 export { default as useContents } from './useContents.js';
 export { default as useSeoMetadata } from './useSeoMetadata.js';
+export { default as useSeoFiles } from './useSeoFiles.js';
diff --git a/packages/react/src/contents/hooks/types/useSeoFiles.types.ts b/packages/react/src/contents/hooks/types/useSeoFiles.types.ts
new file mode 100644
index 000000000..a706bd2eb
--- /dev/null
+++ b/packages/react/src/contents/hooks/types/useSeoFiles.types.ts
@@ -0,0 +1,3 @@
+export type UseSeoFilesOptions = {
+  enableAutoFetch?: boolean;
+};
diff --git a/packages/react/src/contents/hooks/useSeoFiles.ts b/packages/react/src/contents/hooks/useSeoFiles.ts
new file mode 100644
index 000000000..114421a64
--- /dev/null
+++ b/packages/react/src/contents/hooks/useSeoFiles.ts
@@ -0,0 +1,69 @@
+import {
+  areSEOFilesFetched,
+  areSEOFilesLoading,
+  fetchSEOFiles,
+  getSEOFilesError,
+  getSEOFilesResult,
+  type StoreState,
+} from '@farfetch/blackout-redux';
+import { useCallback, useEffect } from 'react';
+import { useSelector } from 'react-redux';
+import useAction from '../../helpers/useAction.js';
+import type { GetSEOFilesQuery } from '@farfetch/blackout-client';
+import type { UseSeoFilesOptions } from './types/useSeoFiles.types.js';
+
+/**
+ * Hook to return SEO Files data.
+ *
+ * @param query - Query object with the SEO Files request parameters.
+ * @param options - Options object that accepts `enableAutoFetch`, which defaults to `true`.
+ *
+ * @returns - Actions and SEO Files state for the given query.
+ */ +const useSeoFiles = ( + query: GetSEOFilesQuery, + options: UseSeoFilesOptions = {}, +) => { + const { enableAutoFetch = true } = options; + + const error = useSelector((state: StoreState) => + getSEOFilesError(state, query), + ); + + const isLoading = useSelector((state: StoreState) => + areSEOFilesLoading(state, query), + ); + + const isFetched = useSelector((state: StoreState) => + areSEOFilesFetched(state, query), + ); + + const data = useSelector((state: StoreState) => + getSEOFilesResult(state, query), + ); + + const fetchSEOFilesAction = useAction(fetchSEOFiles); + + const fetch = useCallback( + () => fetchSEOFilesAction(query), + [fetchSEOFilesAction, query], + ); + + useEffect(() => { + if (enableAutoFetch && !isLoading && !isFetched) { + fetch(); + } + }, [fetch, enableAutoFetch, isLoading, isFetched, query]); + + return { + error, + isLoading, + isFetched, + data, + actions: { + fetch, + }, + }; +}; + +export default useSeoFiles; diff --git a/packages/redux/src/__tests__/__snapshots__/index.test.ts.snap b/packages/redux/src/__tests__/__snapshots__/index.test.ts.snap index 1872e8afd..7264eefce 100644 --- a/packages/redux/src/__tests__/__snapshots__/index.test.ts.snap +++ b/packages/redux/src/__tests__/__snapshots__/index.test.ts.snap @@ -112,6 +112,8 @@ Object { "areRecentlyViewedProductsFetched": [Function], "areRecentlyViewedProductsLoading": [Function], "areRecommendedProductsLoading": [Function], + "areSEOFilesFetched": [Function], + "areSEOFilesLoading": [Function], "areSearchIntentsFetched": [Function], "areSearchIntentsLoading": [Function], "areSearchSuggestionsFetched": [Function], @@ -413,6 +415,9 @@ Object { "FETCH_CONTENT_TYPES_FAILURE": "@farfetch/blackout-redux/FETCH_CONTENT_TYPES_FAILURE", "FETCH_CONTENT_TYPES_REQUEST": "@farfetch/blackout-redux/FETCH_CONTENT_TYPES_REQUEST", "FETCH_CONTENT_TYPES_SUCCESS": "@farfetch/blackout-redux/FETCH_CONTENT_TYPES_SUCCESS", + "FETCH_SEO_FILES_FAILURE": "@farfetch/blackout-redux/FETCH_SEO_FILES_FAILURE", + "FETCH_SEO_FILES_REQUEST": "@farfetch/blackout-redux/FETCH_SEO_FILES_REQUEST", + "FETCH_SEO_FILES_SUCCESS": "@farfetch/blackout-redux/FETCH_SEO_FILES_SUCCESS", "FETCH_SEO_METADATA_FAILURE": "@farfetch/blackout-redux/FETCH_SEO_METADATA_FAILURE", "FETCH_SEO_METADATA_REQUEST": "@farfetch/blackout-redux/FETCH_SEO_METADATA_REQUEST", "FETCH_SEO_METADATA_SUCCESS": "@farfetch/blackout-redux/FETCH_SEO_METADATA_SUCCESS", @@ -740,6 +745,7 @@ Object { "fetchReturnFactory": [Function], "fetchReturnPickupCapability": [Function], "fetchReturnPickupCapabilityFactory": [Function], + "fetchSEOFiles": [Function], "fetchSEOMetadata": [Function], "fetchSearchDidYouMean": [Function], "fetchSearchDidYouMeanFactory": [Function], @@ -810,6 +816,7 @@ Object { "generateBrandsHash": [Function], "generateContentHash": [Function], "generateProductListingHash": [Function], + "generateSEOFilesHash": [Function], "generateSEOPathname": [Function], "generateSearchDidYouMeanHash": [Function], "generateSearchIntentsHash": [Function], @@ -1069,6 +1076,8 @@ Object { "getReturnPickupCapabilityError": [Function], "getReturns": [Function], "getRootCategory": [Function], + "getSEOFilesError": [Function], + "getSEOFilesResult": [Function], "getSEOMetadataError": [Function], "getSEOMetadataResult": [Function], "getSearchDidYouMeanError": [Function], diff --git a/packages/redux/src/contents/__tests__/reducer.test.ts b/packages/redux/src/contents/__tests__/reducer.test.ts index 3cce3ff58..619341444 100644 --- a/packages/redux/src/contents/__tests__/reducer.test.ts +++ 
b/packages/redux/src/contents/__tests__/reducer.test.ts @@ -94,7 +94,7 @@ describe('contents redux reducer', () => { expect(state).toEqual({ error: undefined, isLoading: false, - result: null, + result: undefined, }); }); @@ -133,7 +133,7 @@ describe('contents redux reducer', () => { expect(state).toEqual({ error: {}, isLoading: {}, - result: null, + result: undefined, }); }); @@ -171,6 +171,52 @@ describe('contents redux reducer', () => { }); }); + describe('seoFiles reducer', () => { + it('should return the initial state', () => { + const state = reducer(INITIAL_STATE_CONTENT, mockAction).seoFiles; + + expect(state).toEqual(initialState.seoFiles); + expect(state).toEqual({ + error: {}, + isLoading: {}, + result: undefined, + }); + }); + + it('should handle FETCH_SEO_FILES_REQUEST action type', () => { + expect( + reducer(undefined, { + type: actionTypes.FETCH_SEO_FILES_REQUEST, + payload: { foo: 'bar', hash: 'files' }, + }).seoFiles.isLoading, + ).toEqual({ files: true }); + }); + + it('should handle FETCH_SEO_FILES_SUCCESS action type', () => { + expect( + reducer(undefined, { + type: actionTypes.FETCH_SEO_FILES_SUCCESS, + payload: { result: { foo: 'bar' }, hash: 'files' }, + }).seoFiles.isLoading, + ).toEqual({ files: false }); + }); + + it('should handle FETCH_SEO_FILES_FAILURE action type', () => { + expect( + reducer(undefined, { + type: actionTypes.FETCH_SEO_FILES_FAILURE, + payload: { result: { foo: 'bar' }, hash: 'files' }, + }).seoFiles.isLoading, + ).toEqual({ files: false }); + expect( + reducer(undefined, { + type: actionTypes.FETCH_SEO_FILES_FAILURE, + payload: { error: '', hash: 'files' }, + }).seoFiles.error, + ).toEqual({ files: '' }); + }); + }); + describe('getContentResult() selector', () => { it('should return the `searchResults` property from a given state', () => { const state = { diff --git a/packages/redux/src/contents/__tests__/selectors.test.ts b/packages/redux/src/contents/__tests__/selectors.test.ts index e6e95fa9b..573f5278e 100644 --- a/packages/redux/src/contents/__tests__/selectors.test.ts +++ b/packages/redux/src/contents/__tests__/selectors.test.ts @@ -6,7 +6,9 @@ import { contentPublicationId, contentQuery, contentTypesResult, + hash, pathname, + seoFilesResponse, seoQuery, seoResponse, } from 'tests/__fixtures__/contents/index.mjs'; @@ -42,6 +44,15 @@ describe('contents redux selectors', () => { }, result: { ...seoResponse }, }, + seoFiles: { + error: { + [hash]: toBlackoutError(new Error('Error - SEO Files not loaded.')), + }, + isLoading: { + [hash]: false, + }, + result: { ...seoFilesResponse }, + }, }, entities: { ...contentNormalizedPayload.entities, diff --git a/packages/redux/src/contents/__tests__/serverInitialState.test.ts b/packages/redux/src/contents/__tests__/serverInitialState.test.ts index e8567faac..0833010f1 100644 --- a/packages/redux/src/contents/__tests__/serverInitialState.test.ts +++ b/packages/redux/src/contents/__tests__/serverInitialState.test.ts @@ -24,12 +24,17 @@ describe('contents serverInitialState()', () => { contentTypes: { error: undefined, isLoading: false, - result: null, + result: undefined, }, metadata: { error: {}, isLoading: {}, - result: null, + result: undefined, + }, + seoFiles: { + error: {}, + isLoading: {}, + result: undefined, }, }, }); diff --git a/packages/redux/src/contents/__tests__/utils.test.ts b/packages/redux/src/contents/__tests__/utils.test.ts index 845693b99..0277e591e 100644 --- a/packages/redux/src/contents/__tests__/utils.test.ts +++ b/packages/redux/src/contents/__tests__/utils.test.ts @@ -1,6 
+1,7 @@
 import {
   applyCommercePagesRankingStrategy,
   generateContentHash,
+  generateSEOFilesHash,
   generateSEOPathname,
   getBestRankedCommercePageUsingDefaultStrategy,
   getBestRankedCommercePageUsingMergeStrategy,
@@ -12,6 +13,7 @@ import {
   mergeStrategyResult,
   mergeStrategyResultOneEntry,
   mockCommercePages,
+  seoFilesQuery,
 } from 'tests/__fixtures__/contents/index.mjs';
 import { SeoPageType } from '@farfetch/blackout-client';
 
@@ -162,3 +164,13 @@ describe('generateSEOPathname', () => {
     expect(result).toBe(expectedResult);
   });
 });
+
+describe('generateSEOFilesHash', () => {
+  it('should construct the correct hash with a query object', () => {
+    const expectedResult = 'siteSEOFiles!1234';
+
+    const result = generateSEOFilesHash(seoFilesQuery);
+
+    expect(result).toBe(expectedResult);
+  });
+});
diff --git a/packages/redux/src/contents/actionTypes.ts b/packages/redux/src/contents/actionTypes.ts
index 5efc68420..f3c4d4080 100644
--- a/packages/redux/src/contents/actionTypes.ts
+++ b/packages/redux/src/contents/actionTypes.ts
@@ -74,6 +74,22 @@ export const FETCH_SEO_METADATA_REQUEST =
 export const FETCH_SEO_METADATA_SUCCESS =
   '@farfetch/blackout-redux/FETCH_SEO_METADATA_SUCCESS';
 
+/**
+ * Action type dispatched when the fetch SEO Files request fails.
+ */
+export const FETCH_SEO_FILES_FAILURE =
+  '@farfetch/blackout-redux/FETCH_SEO_FILES_FAILURE';
+/**
+ * Action type dispatched when the fetch SEO Files request starts.
+ */
+export const FETCH_SEO_FILES_REQUEST =
+  '@farfetch/blackout-redux/FETCH_SEO_FILES_REQUEST';
+/**
+ * Action type dispatched when the fetch SEO Files request succeeds.
+ */
+export const FETCH_SEO_FILES_SUCCESS =
+  '@farfetch/blackout-redux/FETCH_SEO_FILES_SUCCESS';
+
 /**
  * Action type dispatched when reset contents.
  */
diff --git a/packages/redux/src/contents/actions/__tests__/__snapshots__/fetchSEOFiles.test.ts.snap b/packages/redux/src/contents/actions/__tests__/__snapshots__/fetchSEOFiles.test.ts.snap
new file mode 100644
index 000000000..1235c1a78
--- /dev/null
+++ b/packages/redux/src/contents/actions/__tests__/__snapshots__/fetchSEOFiles.test.ts.snap
@@ -0,0 +1,26 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`fetchSEOFiles action creator should create the correct actions for when the fetch SEO Files procedure is successful: Get SEO Files payload 1`] = `
+Object {
+  "payload": Object {
+    "hash": "siteSEOFiles!1234",
+    "result": Object {
+      "entries": Array [
+        Object {
+          "content": "string",
+          "hostId": 1234,
+          "name": "siteSEOFiles.txt",
+          "path": "siteSEOFiles.txt",
+          "subfolderStructure": "/en-US",
+          "type": "Pages",
+          "uploadDate": "2023-05-11T15:56:05.712Z",
+        },
+      ],
+      "number": 0,
+      "totalItems": 1,
+      "totalPages": 1,
+    },
+  },
+  "type": "@farfetch/blackout-redux/FETCH_SEO_FILES_SUCCESS",
+}
+`;
diff --git a/packages/redux/src/contents/actions/__tests__/fetchCommercePages.test.ts b/packages/redux/src/contents/actions/__tests__/fetchCommercePages.test.ts
index b912166a8..10f877642 100644
--- a/packages/redux/src/contents/actions/__tests__/fetchCommercePages.test.ts
+++ b/packages/redux/src/contents/actions/__tests__/fetchCommercePages.test.ts
@@ -14,6 +14,7 @@ import { mockStore } from '../../../../tests/index.js';
 jest.mock('../../utils', () => ({
   generateContentHash: () => 'commerce_pages!woman',
   generateSEOPathname: jest.fn(),
+  generateSEOFilesHash: jest.fn(),
   applyCommercePagesRankingStrategy: jest.fn(
     () => mockCommercePages.entries[0],
   ),
diff --git a/packages/redux/src/contents/actions/__tests__/fetchContentPage.test.ts b/packages/redux/src/contents/actions/__tests__/fetchContentPage.test.ts
index 0e475d484..dd63c17fe 100644
--- a/packages/redux/src/contents/actions/__tests__/fetchContentPage.test.ts
+++ b/packages/redux/src/contents/actions/__tests__/fetchContentPage.test.ts
@@ -16,6 +16,7 @@ import { mockStore } from '../../../../tests/index.js';
 jest.mock('../../utils', () => ({
   generateContentHash: () => 'content_pages!woman/gucci',
   generateSEOPathname: jest.fn(),
+  generateSEOFilesHash: jest.fn(),
 }));
 
 jest.mock('@farfetch/blackout-client/contents', () => ({
diff --git a/packages/redux/src/contents/actions/__tests__/fetchSEOFiles.test.ts b/packages/redux/src/contents/actions/__tests__/fetchSEOFiles.test.ts
new file mode 100644
index 000000000..1fe7d84b5
--- /dev/null
+++ b/packages/redux/src/contents/actions/__tests__/fetchSEOFiles.test.ts
@@ -0,0 +1,77 @@
+import * as actionTypes from '../../actionTypes.js';
+import { fetchSEOFiles } from '../index.js';
+import { find } from 'lodash-es';
+import { getSEOFiles } from '@farfetch/blackout-client';
+import {
+  hash,
+  seoFilesData,
+  seoFilesQuery,
+} from 'tests/__fixtures__/contents/seoFiles.fixtures.mjs';
+import { INITIAL_STATE_CONTENT } from '../../reducer.js';
+import { mockStore } from '../../../../tests/index.js';
+
+jest.mock('@farfetch/blackout-client', () => ({
+  ...jest.requireActual('@farfetch/blackout-client'),
+  getSEOFiles: jest.fn(),
+}));
+
+const contentsSEOFilesMockStore = (state = {}) =>
+  mockStore({ contents: INITIAL_STATE_CONTENT }, state);
+
+const expectedConfig = undefined;
+let store: ReturnType<typeof contentsSEOFilesMockStore>;
+
+describe('fetchSEOFiles action creator', () => {
+  beforeEach(() => {
+    jest.clearAllMocks();
+    store = contentsSEOFilesMockStore();
+  });
+
+  it('should create the correct actions for when the fetch SEO Files procedure fails', async () => {
+    const expectedError = new Error('Get SEO Files error');
+
+    (getSEOFiles as jest.Mock).mockRejectedValueOnce(expectedError);
+
+    await expect(
+      async () => await fetchSEOFiles(seoFilesQuery)(store.dispatch),
+    ).rejects.toThrow(expectedError);
+
+    expect(getSEOFiles).toHaveBeenCalledTimes(1);
+    expect(getSEOFiles).toHaveBeenCalledWith(seoFilesQuery, expectedConfig);
+    expect(store.getActions()).toEqual([
+      {
+        payload: { hash },
+        type: actionTypes.FETCH_SEO_FILES_REQUEST,
+      },
+      {
+        payload: { error: expectedError, hash },
+        type: actionTypes.FETCH_SEO_FILES_FAILURE,
+      },
+    ]);
+  });
+
+  it('should create the correct actions for when the fetch SEO Files procedure is successful', async () => {
+    (getSEOFiles as jest.Mock).mockResolvedValueOnce(seoFilesData);
+
+    await fetchSEOFiles(seoFilesQuery)(store.dispatch).then(clientResult => {
+      expect(clientResult).toBe(seoFilesData);
+    });
+
+    const actionResults = store.getActions();
+
+    expect(getSEOFiles).toHaveBeenCalledWith(seoFilesQuery, expectedConfig);
+    expect(actionResults).toEqual([
+      {
+        payload: { hash },
+        type: actionTypes.FETCH_SEO_FILES_REQUEST,
+      },
+      {
+        payload: { result: seoFilesData, hash },
+        type: actionTypes.FETCH_SEO_FILES_SUCCESS,
+      },
+    ]);
+    expect(
+      find(actionResults, { type: actionTypes.FETCH_SEO_FILES_SUCCESS }),
+    ).toMatchSnapshot('Get SEO Files payload');
+  });
+});
diff --git a/packages/redux/src/contents/actions/factories/fetchSEOFilesFactory.ts b/packages/redux/src/contents/actions/factories/fetchSEOFilesFactory.ts
new file mode 100644
index 000000000..bf192d313
--- /dev/null
+++ b/packages/redux/src/contents/actions/factories/fetchSEOFilesFactory.ts
@@ -0,0 +1,58 @@
+import * as actionTypes from 
'../../actionTypes.js';
+
+import {
+  type Config,
+  type GetSEOFiles,
+  type GetSEOFilesQuery,
+  type SEOFiles,
+  toBlackoutError,
+} from '@farfetch/blackout-client';
+import { generateSEOFilesHash } from '../../utils.js';
+import type { Dispatch } from 'redux';
+import type { FetchSEOFilesAction } from '../../types/index.js';
+
+/**
+ * Fetch SEO files with a specific query object.
+ *
+ * @param getSEOFiles - Get SEO files client.
+ *
+ * @returns Thunk factory.
+ */
+const fetchSEOFilesFactory =
+  (getSEOFiles: GetSEOFiles) =>
+  (query: GetSEOFilesQuery, config?: Config) =>
+  async (dispatch: Dispatch<FetchSEOFilesAction>): Promise<SEOFiles> => {
+    let hash = '';
+
+    try {
+      hash = generateSEOFilesHash(query);
+
+      dispatch({
+        payload: { hash },
+        type: actionTypes.FETCH_SEO_FILES_REQUEST,
+      });
+
+      const result = await getSEOFiles(query, config);
+
+      dispatch({
+        payload: { hash, result },
+        type: actionTypes.FETCH_SEO_FILES_SUCCESS,
+      });
+
+      return result;
+    } catch (error) {
+      const errorAsBlackoutError = toBlackoutError(error);
+
+      dispatch({
+        payload: {
+          error: errorAsBlackoutError,
+          hash,
+        },
+        type: actionTypes.FETCH_SEO_FILES_FAILURE,
+      });
+
+      throw errorAsBlackoutError;
+    }
+  };
+
+export default fetchSEOFilesFactory;
diff --git a/packages/redux/src/contents/actions/factories/index.ts b/packages/redux/src/contents/actions/factories/index.ts
index 5c53643c2..ea06f1f31 100644
--- a/packages/redux/src/contents/actions/factories/index.ts
+++ b/packages/redux/src/contents/actions/factories/index.ts
@@ -6,3 +6,4 @@ export { default as fetchContentsFactory } from './fetchContentsFactory.js';
 export { default as fetchContentPageFactory } from './fetchContentPageFactory.js';
 export { default as fetchContentTypesFactory } from './fetchContentTypesFactory.js';
 export { default as fetchSEOMetadataFactory } from './fetchSEOMetadataFactory.js';
+export { default as fetchSEOFilesFactory } from './fetchSEOFilesFactory.js';
diff --git a/packages/redux/src/contents/actions/fetchSEOFiles.ts b/packages/redux/src/contents/actions/fetchSEOFiles.ts
new file mode 100644
index 000000000..b4d58cc4c
--- /dev/null
+++ b/packages/redux/src/contents/actions/fetchSEOFiles.ts
@@ -0,0 +1,11 @@
+import { fetchSEOFilesFactory } from './factories/index.js';
+import { getSEOFiles } from '@farfetch/blackout-client';
+
+/**
+ * Fetch SEO files with a specific query object.
+ *
+ * @param getSEOFiles - Get SEO files client.
+ *
+ * @returns Thunk factory.
+ */ +export default fetchSEOFilesFactory(getSEOFiles); diff --git a/packages/redux/src/contents/actions/index.ts b/packages/redux/src/contents/actions/index.ts index 5ba7279b0..a7b62f459 100644 --- a/packages/redux/src/contents/actions/index.ts +++ b/packages/redux/src/contents/actions/index.ts @@ -6,4 +6,5 @@ export { default as fetchContents } from './fetchContents.js'; export { default as fetchContentPage } from './fetchContentPage.js'; export { default as fetchContentTypes } from './fetchContentTypes.js'; export { default as fetchSEOMetadata } from './fetchSEOMetadata.js'; +export { default as fetchSEOFiles } from './fetchSEOFiles.js'; export { default as resetContents } from './resetContents.js'; diff --git a/packages/redux/src/contents/index.ts b/packages/redux/src/contents/index.ts index cec5de0e0..77fa6f9ef 100644 --- a/packages/redux/src/contents/index.ts +++ b/packages/redux/src/contents/index.ts @@ -12,4 +12,5 @@ export { generateContentHash, generateSEOPathname, applyCommercePagesRankingStrategy, + generateSEOFilesHash, } from './utils.js'; diff --git a/packages/redux/src/contents/reducer.ts b/packages/redux/src/contents/reducer.ts index 6c49848fa..47e20dd3a 100644 --- a/packages/redux/src/contents/reducer.ts +++ b/packages/redux/src/contents/reducer.ts @@ -7,12 +7,17 @@ export const INITIAL_STATE_CONTENT: ContentsState = { contentTypes: { error: undefined, isLoading: false, - result: null, + result: undefined, }, metadata: { error: {}, isLoading: {}, - result: null, + result: undefined, + }, + seoFiles: { + error: {}, + isLoading: {}, + result: undefined, }, }; @@ -127,6 +132,52 @@ const metadata = ( } }; +const seoFiles = ( + state = INITIAL_STATE_CONTENT.seoFiles, + action: AnyAction, +): ContentsState['seoFiles'] => { + switch (action.type) { + case actionTypes.FETCH_SEO_FILES_REQUEST: + return { + ...state, + isLoading: { + ...state.isLoading, + [action.payload.hash]: true, + }, + error: { + ...state.error, + [action.payload.hash]: null, + }, + }; + case actionTypes.FETCH_SEO_FILES_SUCCESS: + return { + ...state, + result: { + ...state.result, + [action.payload.hash]: action.payload.result, + }, + isLoading: { + ...state.isLoading, + [action.payload.hash]: false, + }, + }; + case actionTypes.FETCH_SEO_FILES_FAILURE: + return { + ...state, + isLoading: { + ...state.isLoading, + [action.payload.hash]: false, + }, + error: { + ...state.error, + [action.payload.hash]: action.payload.error, + }, + }; + default: + return state; + } +}; + export const getContentResult = ( state: ContentsState, ): ContentsState['searchResults'] => state.searchResults; @@ -136,6 +187,8 @@ export const getContentTypes = ( export const getSEOmetadata = ( state: ContentsState, ): ContentsState['metadata'] => state.metadata; +export const getSEOFiles = (state: ContentsState): ContentsState['seoFiles'] => + state.seoFiles; export const getContentTypesError = ( state: ContentsState, ): ContentsState['contentTypes']['error'] => state.contentTypes.error; @@ -144,6 +197,7 @@ const reducers = combineReducers({ searchResults, contentTypes, metadata, + seoFiles, }); /** diff --git a/packages/redux/src/contents/selectors.ts b/packages/redux/src/contents/selectors.ts index 383aa81d5..07fc12cdb 100644 --- a/packages/redux/src/contents/selectors.ts +++ b/packages/redux/src/contents/selectors.ts @@ -2,14 +2,20 @@ * Contents selectors. 
 */
 import { type ContentEntity, getEntityById } from '../entities/index.js';
-import { generateContentHash, generateSEOPathname } from './utils.js';
+import {
+  generateContentHash,
+  generateSEOFilesHash,
+  generateSEOPathname,
+} from './utils.js';
 import {
   getContentResult,
   getContentTypes as getContentTypesFromReducer,
+  getSEOFiles,
   getSEOmetadata,
 } from './reducer.js';
 import type { ContentsState, Hash } from './types/index.js';
 import type {
+  GetSEOFilesQuery,
   GetSEOMetadataQuery,
   QueryCommercePages,
   QuerySearchContents,
@@ -21,7 +27,7 @@ import type { StoreState } from '../types/index.js';
  *
  * @example
  * ```
- * import { getContentsByHash } from '@bw/redux/contents';
+ * import { getContentsByHash } from '@farfetch/blackout-redux';
  *
  * const mapStateToProps = (state, { hash }) => ({
  *   result: getContentsByHash(state, hash)
@@ -96,7 +102,7 @@ export const isContentLoading = (
  *
  * @example
  * ```
- * import { getContentByQuery } from '@bw/redux/contents';
+ * import { getContentByQuery } from '@farfetch/blackout-redux';
  *
  * const mapStateToProps = (state, { query }) => ({
  *   contentEntry: getContentByQuery(state, query)
@@ -322,3 +328,117 @@ export const getSEOMetadataResult = (
  */
 export const getContent = (state: StoreState, hash: string) =>
   getEntityById(state, 'contents', hash) as ContentEntity | undefined;
+
+/**
+ * Returns the error thrown by the getSEOFiles request.
+ *
+ * @example
+ * ```
+ * import { getSEOFilesError } from '@farfetch/blackout-redux';
+ *
+ * const mapStateToProps = (state, { query }) => ({
+ *   seoFilesError: getSEOFilesError(state, query)
+ * });
+ *
+ * ```
+ *
+ * @param state - Application state.
+ * @param query - Query applied to search the SEO Files.
+ *
+ * @returns - SEO Files error.
+ */
+export const getSEOFilesError = (
+  state: StoreState,
+  query: GetSEOFilesQuery,
+) => {
+  const hash = generateSEOFilesHash(query);
+  const error = getSEOFiles(state.contents as ContentsState).error;
+
+  return error && error[hash];
+};
+
+/**
+ * Returns the loading status of the getSEOFiles request.
+ *
+ * @example
+ * ```
+ * import { areSEOFilesLoading } from '@farfetch/blackout-redux';
+ *
+ * const mapStateToProps = (state, { query }) => ({
+ *   areSEOFilesLoading: areSEOFilesLoading(state, query)
+ * });
+ *
+ * ```
+ *
+ * @param state - Application state.
+ * @param query - Query applied to search the SEO Files.
+ *
+ * @returns - Whether the SEO Files are loading or not.
+ */
+export const areSEOFilesLoading = (
+  state: StoreState,
+  query: GetSEOFilesQuery,
+) => {
+  const hash = generateSEOFilesHash(query);
+
+  return !!getSEOFiles(state.contents as ContentsState).isLoading[hash];
+};
+
+/**
+ * Returns the fetched status of the getSEOFiles request.
+ *
+ * @example
+ * ```
+ * import { areSEOFilesFetched } from '@farfetch/blackout-redux';
+ *
+ * const mapStateToProps = (state, { query }) => ({
+ *   areSEOFilesFetched: areSEOFilesFetched(state, query)
+ * });
+ *
+ * ```
+ *
+ * @param state - Application state.
+ * @param query - Query applied to search the SEO Files.
+ *
+ * @returns - Whether the SEO Files have been fetched or not.
+ */
+export const areSEOFilesFetched = (
+  state: StoreState,
+  query: GetSEOFilesQuery,
+) => {
+  const hash = generateSEOFilesHash(query);
+
+  return (
+    (!!getSEOFiles(state.contents as ContentsState).result?.[hash] ||
+      !!getSEOFilesError(state, query)) &&
+    !areSEOFilesLoading(state, query)
+  );
+};
+
+/**
+ * Returns the SEO Files for the given query.
+ * + * @example + * ``` + * import { getSEOFilesResult } from '@farfetch/blackout-redux'; + * + * const mapStateToProps = (state, { query }) => ({ + * seo: getSEOFilesResult(state, query) + * }); + * + * ``` + * + * @param state - Application state. + * @param query - Query applied to search for the SEO Files. + * + * @returns - All SEO Files for that page. + */ +export const getSEOFilesResult = ( + state: StoreState, + query: GetSEOFilesQuery, +) => { + const hash = generateSEOFilesHash(query); + const result = getSEOFiles(state.contents as ContentsState).result; + + return result && result[hash]; +}; diff --git a/packages/redux/src/contents/types/actions.types.ts b/packages/redux/src/contents/types/actions.types.ts index 2f17e5696..bbf374127 100644 --- a/packages/redux/src/contents/types/actions.types.ts +++ b/packages/redux/src/contents/types/actions.types.ts @@ -4,6 +4,7 @@ import type { BlackoutError, Contents, ContentType, + SEOFiles, SEOMetadata, } from '@farfetch/blackout-client'; import type { ContentEntity } from '../../entities/index.js'; @@ -120,6 +121,35 @@ export interface FetchSEOMetadataFailureAction extends Action { type: typeof actionTypes.FETCH_SEO_METADATA_FAILURE; } +/** + * Fetch SEO Files Action + */ +export type FetchSEOFilesAction = + | FetchSEOFilesRequestAction + | FetchSEOFilesSuccessAction + | FetchSEOFilesFailureAction; + +export interface FetchSEOFilesRequestAction extends Action { + payload: { hash: Hash }; + type: typeof actionTypes.FETCH_SEO_FILES_REQUEST; +} + +export interface FetchSEOFilesSuccessAction extends Action { + payload: { + hash: Hash; + result: SEOFiles; + }; + type: typeof actionTypes.FETCH_SEO_FILES_SUCCESS; +} + +export interface FetchSEOFilesFailureAction extends Action { + payload: { + error: BlackoutError; + hash: Hash; + }; + type: typeof actionTypes.FETCH_SEO_FILES_FAILURE; +} + /** * Fetch Content Types Action */ diff --git a/packages/redux/src/contents/types/reducers.types.ts b/packages/redux/src/contents/types/reducers.types.ts index d67b69649..1ba1945fe 100644 --- a/packages/redux/src/contents/types/reducers.types.ts +++ b/packages/redux/src/contents/types/reducers.types.ts @@ -1,6 +1,7 @@ import type { BlackoutError, ContentType, + SEOFiles, SEOMetadata, } from '@farfetch/blackout-client'; import type { CombinedState } from 'redux'; @@ -29,11 +30,18 @@ export type ContentTypesState = { export type MetadataReducer = { error: Record | undefined; isLoading: Record; - result: Record | null; + result?: Record | null; +}; + +export type SEOFilesReducer = { + error: Record | undefined; + isLoading: Record; + result?: Record | null; }; export type ContentsState = CombinedState<{ searchResults: Record; contentTypes: ContentTypesState; metadata: MetadataReducer; + seoFiles: SEOFilesReducer; }>; diff --git a/packages/redux/src/contents/utils.ts b/packages/redux/src/contents/utils.ts index 2208c0012..02112db47 100644 --- a/packages/redux/src/contents/utils.ts +++ b/packages/redux/src/contents/utils.ts @@ -12,6 +12,7 @@ import type { CommercePagesContent, ComponentType, ContentMetadata, + GetSEOFilesQuery, QueryCommercePages, QuerySearchContents, } from '@farfetch/blackout-client'; @@ -328,3 +329,21 @@ export const generateSEOPathname = ( return `${query.path}`; }; + +/** + * Build a hash with query object received to identify the SEO Files. 
+ *
+ * @example
+ * ```
+ * const hash = generateSEOFilesHash({ name: 'siteSEOFiles', hostId: 1234 });
+ * // Result: 'siteSEOFiles!1234'
+ * ```
+ *
+ * @param query - Object with query parameters applied to search for SEO Files.
+ *
+ * @returns - Hash built to identify the SEO Files for a specific name and hostId.
+ */
+export const generateSEOFilesHash = (query: GetSEOFilesQuery) => {
+  return `${query.name}!${query.hostId}`;
+};
diff --git a/tests/__fixtures__/contents/commercePages.fixtures.mts b/tests/__fixtures__/contents/commercePages.fixtures.mts
index af1a37578..74188dff7 100644
--- a/tests/__fixtures__/contents/commercePages.fixtures.mts
+++ b/tests/__fixtures__/contents/commercePages.fixtures.mts
@@ -216,6 +216,11 @@ export const mockCommercePagesInitialState = {
       isLoading: {},
       result: {},
     },
+    seoFiles: {
+      error: {},
+      isLoading: {},
+      result: {},
+    },
   },
 };
diff --git a/tests/__fixtures__/contents/contentPage.fixtures.mts b/tests/__fixtures__/contents/contentPage.fixtures.mts
index 1c7e1a0c9..4cd59eb7f 100644
--- a/tests/__fixtures__/contents/contentPage.fixtures.mts
+++ b/tests/__fixtures__/contents/contentPage.fixtures.mts
@@ -93,6 +93,11 @@ export const mockContentPageInitialState = {
       isLoading: {},
       result: {},
     },
+    seoFiles: {
+      error: {},
+      isLoading: {},
+      result: {},
+    },
   },
 };
diff --git a/tests/__fixtures__/contents/contents.fixtures.mts b/tests/__fixtures__/contents/contents.fixtures.mts
index e2fa9c41c..0518966a4 100644
--- a/tests/__fixtures__/contents/contents.fixtures.mts
+++ b/tests/__fixtures__/contents/contents.fixtures.mts
@@ -357,6 +357,11 @@ export const mockContentsInitialState = {
       isLoading: {},
       result: {},
     },
+    seoFiles: {
+      error: {},
+      isLoading: {},
+      result: {},
+    },
   },
 };
diff --git a/tests/__fixtures__/contents/index.mts b/tests/__fixtures__/contents/index.mts
index 41950083d..e55550f11 100644
--- a/tests/__fixtures__/contents/index.mts
+++ b/tests/__fixtures__/contents/index.mts
@@ -4,3 +4,4 @@ export * from './components.fixtures.mjs';
 export * from './contentTypes.fixtures.mjs';
 export * from './contents.fixtures.mjs';
 export * from './seo.fixtures.mjs';
+export * from './seoFiles.fixtures.mjs';
diff --git a/tests/__fixtures__/contents/seoFiles.fixtures.mts b/tests/__fixtures__/contents/seoFiles.fixtures.mts
new file mode 100644
index 000000000..3baf26f31
--- /dev/null
+++ b/tests/__fixtures__/contents/seoFiles.fixtures.mts
@@ -0,0 +1,32 @@
+import { generateSEOFilesHash } from '@farfetch/blackout-redux';
+import { SeoFileType } from '@farfetch/blackout-client';
+
+export const seoFilesQuery = {
+  name: 'siteSEOFiles',
+  hostId: 1234,
+  page: 1,
+  pageSize: 60,
+};
+
+export const hash = generateSEOFilesHash(seoFilesQuery);
+
+export const seoFileEntry = {
+  name: 'siteSEOFiles.txt',
+  path: 'siteSEOFiles.txt',
+  uploadDate: '2023-05-11T15:56:05.712Z',
+  hostId: 1234,
+  subfolderStructure: '/en-US',
+  type: SeoFileType.Pages,
+  content: 'string',
+};
+
+export const seoFilesData = {
+  number: 0,
+  totalPages: 1,
+  totalItems: 1,
+  entries: [seoFileEntry],
+};
+
+export const seoFilesResponse = {
+  [hash]: seoFilesData,
+};
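To make the new surface area easier to review, here is a minimal sketch of how the `useSeoFiles` hook introduced in this change might be consumed from an application. It assumes the hook is imported from the package root of `@farfetch/blackout-react`, as the `index.test.ts` snapshot suggests; the component name, query values, and rendered markup are illustrative only and are not part of the change.

```tsx
import { useSeoFiles } from '@farfetch/blackout-react';

// Illustrative component; the query values mirror the fixtures used by the tests above.
const SeoFilesList = () => {
  // `enableAutoFetch` defaults to `true`, so the hook dispatches `fetchSEOFiles` on mount.
  const { data, isLoading, isFetched, error, actions } = useSeoFiles({
    name: 'siteSEOFiles',
    hostId: 1234,
    page: 1,
    pageSize: 60,
  });

  if (isLoading) {
    return <span>Loading SEO files...</span>;
  }

  if (error) {
    // `actions.fetch()` re-dispatches the fetchSEOFiles thunk for the same query.
    return <button onClick={() => actions.fetch()}>Retry</button>;
  }

  if (!isFetched || !data) {
    return null;
  }

  // `data` is the paged response keyed by the generated hash's query, with `entries` of SEOFile.
  return (
    <ul>
      {data.entries.map(file => (
        <li key={`${file.name}-${file.uploadDate}`}>
          {file.name} ({file.type})
        </li>
      ))}
    </ul>
  );
};

export default SeoFilesList;
```

Passing `{ enableAutoFetch: false }` as a second argument and calling `actions.fetch()` manually mirrors the behaviour exercised in `useSeoFiles.test.tsx`.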