Sharing redux reducers and actions between client and server

Ron Derksen
Published in wehkamp-techblog
May 17, 2017

One of the challenges of using server-side rendering (SSR) with React/Redux is sharing as much of the code as possible. At Wehkamp, we use a microservices architecture. Behind our firewalls, these services are reachable on separate domains, but from the internet they are only accessible through our main domain on a /service/ path. This allows the services to quickly communicate internally and provide secure access for external requests (e.g., from the customer’s browser).
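
To make this concrete: the basket service used in the examples below ends up with two different base URLs, depending on where the call originates. The host names here are made up for illustration:

// Illustrative only: the real host names live in configuration, not in code.
const internalBasketUrl = 'https://basket.services.internal/basket';       // direct, behind the firewall
const externalBasketUrl = 'https://www.example.com/service/basket/basket'; // via the main domain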

Asynchronous requests in Redux are usually done in the action creators: the request is fired and, on response (or error), an action is dispatched. That typically looks like this:

// app/actions/basket-actions.js
// createAction (e.g. from redux-actions) turns an action type into an action creator;
// nskeymirror mirrors the object keys into action type constants namespaced with 'basket'.
const actions = nskeymirror({
  request: null,
  response: null,
  error: null,
}, 'basket');

const getBasketRequest = createAction(actions.request);
const getBasketResponse = createAction(actions.response);
const getBasketError = createAction(actions.error);

function getBasket() {
  const url = '/service/basket/basket';
  return async (dispatch) => {
    dispatch(getBasketRequest());
    try {
      const response = await fetch(url, { method: 'GET' });
      const data = await response.json();
      return dispatch(getBasketResponse({ data }));
    } catch (err) {
      return dispatch(getBasketError({ err }));
    }
  };
}

Of course, if the URL and the fetch options are always the same, this is perfectly fine. However, if you have many async actions, you end up with a lot of boilerplate. So how can we extract the fetching, so that the correct implementation can be chosen at runtime?

This is why we created the jsonFetcher:

// utils/create-json-fetcher.js
import { createFetch, createStack, base, header, init } from 'http-client';
import logger from '../logger/logger';

function createJsonFetcher({ baseUrl = '/service/' }, opts = null) {
  const commonStack = createStack(
    init('credentials', 'include'),
    base(baseUrl)
  );
  const headerStack = opts ?
    createStack(
      header('Cookie', opts.cookies)
    ) : null;

  return createFetch(
    opts ? createStack(commonStack, headerStack) : commonStack,
    (fetch, input, options = { method: 'GET' }) => {
      if (!options.method) {
        options.method = 'GET';
      }
      return fetch(input, options)
        .then((response) => {
          if (response.ok) {
            logger.debug(`Response received for ${input} (${options.method})`);
            if (response.status === 204) {
              // No content to parse
              return Promise.resolve(true);
            }
            return response.json();
          }
          // Non-2xx response: try to extract an error message from the body
          return response.text().then((text) => {
            let message = text;
            try {
              const jsonErr = JSON.parse(text);
              message = jsonErr.message || text;
            } catch (e) {
              // Body wasn't JSON, keep the raw text
            }
            throw new Error(`${response.status}: ${message}`);
          });
        })
        .catch((err) => {
          const defaultErrorMessage = `Error calling ${input} (${options.method}): ${err.message}`;
          logger.error(`${defaultErrorMessage}\n${err.stack}`);
          throw new Error(options.errorMessage || defaultErrorMessage);
        });
    }
  );
}

export default createJsonFetcher;
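
The result behaves like fetch, but it resolves with parsed JSON (or true for a 204) and rejects with a readable error. A small usage sketch, with a made-up response field:

// Sketch only: the path and the 'lines' field are illustrative.
const basketFetcher = createJsonFetcher({ baseUrl: '/service/basket/' });

basketFetcher('basket')                        // GET /service/basket/basket
  .then(basket => console.log(basket.lines))   // already parsed JSON
  .catch(err => console.error(err.message));   // errorMessage or the default message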

On the server, we need to grab some data from the request to create appropriate headers, so there we have a small wrapper around the createJsonFetcher method:

// utils/create-server-json-fetcher.js
import createJsonFetcher from './create-json-fetcher';

// extractHeaders and settings are project helpers (not shown here): extractHeaders copies
// the data we need from the incoming request, settings knows the internal service domain.
function createServerJsonFetcher({ serviceName }, req) {
  const options = extractHeaders(req);
  return createJsonFetcher({
    baseUrl: `https://${serviceName}.${settings.getServiceDomain()}`
  }, options);
}

export default createServerJsonFetcher;

This allows us to create a fetcher for each service, like so:

const basketFetcher = createJsonFetcher({
  baseUrl: '/service/basket/'
});

We also do some other stuff here, like getting data from the request headers and starting a metric to see how long the fetch takes, but I’ve left that out for simplicity’s sake.

Then we create separate fetchers for the client,

// client/fetchers.js
import createJsonFetcher from '../utils/create-json-fetcher';

export const basketFetcher = createJsonFetcher({
  baseUrl: '/service/basket/'
});

and the server:

// server/fetchers.js
import createServerJsonFetcher from '../utils/create-server-json-fetcher';

// req is the incoming request for the current render; this snippet is simplified
// (see the sketch below for how it fits into the request handling).
export const basketFetcher = createServerJsonFetcher({
  serviceName: 'basket'
}, req);
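
Because the server fetcher needs the incoming request, it is effectively created per request. A hedged sketch of how that could be wired up in an Express-style handler (the handler and the renderApp helper are made up for illustration):

// Sketch only: assumes an Express server; renderApp is a placeholder for the render flow.
import express from 'express';
import createServerJsonFetcher from './utils/create-server-json-fetcher';

const app = express();

app.get('*', (req, res) => {
  // Create the fetchers for this specific request, so its headers travel along to the services
  const basketFetcher = createServerJsonFetcher({ serviceName: 'basket' }, req);

  // ...build the store, dispatch the initial actions and render (see further below)
  renderApp({ basketFetcher, req, res });
});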

Then we create a file with functions for all service endpoints:

// service/basket-service.js
const getBasket = jsonFetcher => () => jsonFetcher(
  'basket',
  {
    headers: {
      'Accept-Charset': 'utf-8'
    },
    errorMessage: 'Unable to retrieve your basket at this time.'
  }
);

export default { getBasket };
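
The same pattern covers every other endpoint of the service. As an illustration only (this endpoint is made up, not part of the article), a write operation could look like this:

// Hypothetical second endpoint in service/basket-service.js; the path is made up.
const addToBasket = jsonFetcher => item => jsonFetcher(
  'basket/items',
  {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(item),
    errorMessage: 'Unable to add this item to your basket at this time.'
  }
);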

This leads to the following action setup:

// actions/basket-actions.js
import basketService from '../service/basket-service';

const actions = nskeymirror({
  request: null,
  response: null,
  error: null,
}, 'basket');

const getBasketRequest = createAction(actions.request);
const getBasketResponse = createAction(actions.response);
const getBasketError = createAction(actions.error);

function getBasket({ basketFetcher }) {
  const fetchBasketData = basketService.getBasket(basketFetcher);
  return async (dispatch) => {
    dispatch(getBasketRequest());
    try {
      // The fetcher already parses the JSON response for us
      const data = await fetchBasketData();
      return dispatch(getBasketResponse({ data }));
    } catch (err) {
      return dispatch(getBasketError({ err }));
    }
  };
}

export default getBasket;
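
The reducer that handles these actions never touches fetch, so it is identical on the client and the server, which is what the title is about. The article doesn't show it, but a minimal sketch could look like this, assuming redux-actions' handleActions, that the actions object is exported from basket-actions, and a simple { loading, data, error } state shape:

// reducers/basket-reducer.js (hedged sketch, not taken from the article)
import { handleActions } from 'redux-actions';
import { actions } from '../actions/basket-actions'; // assumes the action types are exported

const initialState = { loading: false, data: null, error: null };

const basketReducer = handleActions({
  [actions.request]: state => ({ ...state, loading: true, error: null }),
  [actions.response]: (state, { payload }) => ({ ...state, loading: false, data: payload.data }),
  [actions.error]: (state, { payload }) => ({ ...state, loading: false, error: payload.err }),
}, initialState);

export default basketReducer;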

To glue this all to Redux, we dispatch the thunk to start things off. On the client:

import getBasket from '../actions/basket-actions';
import { basketFetcher } from '../client/fetchers';

store.dispatch(getBasket({ basketFetcher }));
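
For this to work, the store needs the thunk middleware, since getBasket returns a function instead of a plain action. A minimal store setup could look like this (a sketch; the real reducer composition and middleware list will be larger):

// Sketch of the store setup this relies on.
import { createStore, combineReducers, applyMiddleware } from 'redux';
import thunk from 'redux-thunk';
import basketReducer from './reducers/basket-reducer'; // the reducer sketch above

const store = createStore(
  combineReducers({ basket: basketReducer }),
  applyMiddleware(thunk) // allows dispatching async action creators like getBasket
);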

We then use the server variant during the initial data retrieval on the server:

import getBasket from '../actions/basket-actions';
import { basketFetcher } from '../server/fetchers';

function getInitialData() {
  // Return the promise so the server can wait for the data before rendering
  return store.dispatch(getBasket({ basketFetcher }));
}
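
Once that promise resolves, the store holds the basket data and the markup can be rendered from the filled store. A hedged sketch of that last step (the App component and sendPage helper are placeholders, not code from the article):

// Sketch of the server render step.
import React from 'react';
import { renderToString } from 'react-dom/server';
import { Provider } from 'react-redux';

getInitialData().then(() => {
  const html = renderToString(
    <Provider store={store}>
      <App />
    </Provider>
  );
  sendPage(res, html, store.getState()); // embed the state so the client store can rehydrate
});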

And then Redux takes over and applies the results to your state.

So why is this useful? It allows you to separate the actual fetching from your actions, so the actions stay independent of the environment (Node.js or browser). If we didn't need header data from the request, we could even use the same fetcher on both sides. As long as the APIs return data in a consistent way, this approach lets you focus on the Redux flow instead of worrying about how fetch does its work.
