mirror of https://github.com/muerwre/vault-frontend.git
synced 2025-04-25 12:56:41 +07:00

flow: refactored sagas

This commit is contained in:
parent 90ac1fe611
commit 31b03f9eae

5 changed files with 228 additions and 214 deletions
@@ -1,175 +1,188 @@
-import { takeLatest, call, put, select, takeLeading, delay, race, take } from 'redux-saga/effects';
+import { call, delay, put, race, select, take, takeLatest, takeLeading } from 'redux-saga/effects';
 import { REHYDRATE } from 'redux-persist';
 import { FLOW_ACTIONS } from './constants';
 import { getNodeDiff } from '../node/api';
 import {
-  flowSetNodes,
-  flowSetCellView,
-  flowSetHeroes,
-  flowSetRecent,
-  flowSetUpdated,
-  flowSetFlow,
   flowChangeSearch,
+  flowSetCellView,
+  flowSetFlow,
+  flowSetHeroes,
+  flowSetNodes,
+  flowSetRecent,
   flowSetSearch,
+  flowSetUpdated,
 } from './actions';
-import { IResultWithStatus, INode, Unwrap } from '../types';
-import { selectFlowNodes, selectFlow } from './selectors';
-import { wrap } from '../auth/sagas';
-import { postCellView, getSearchResults } from './api';
-import { IFlowState } from './reducer';
+import { Unwrap } from '../types';
+import { selectFlow, selectFlowNodes } from './selectors';
+import { getSearchResults, postCellView } from './api';
 import { uniq } from 'ramda';

+const loader = document.getElementById('main_loader');
+
 function hideLoader() {
-  document.getElementById('main_loader').style.display = 'none';
+  if (!loader) {
+    return;
+  }
+
+  loader.style.display = 'none';
 }

 function* onGetFlow() {
-  const {
-    flow: { _persist },
-  } = yield select();
-
-  if (!_persist.rehydrated) return;
-
-  const stored: IFlowState['nodes'] = yield select(selectFlowNodes);
-
-  if (stored.length) {
-    hideLoader();
-  }
-
-  yield put(flowSetFlow({ is_loading: true }));
-
-  const {
-    data: { before = [], after = [], heroes = [], recent = [], updated = [], valid = null },
-  }: IResultWithStatus<{
-    before: IFlowState['nodes'];
-    after: IFlowState['nodes'];
-    heroes: IFlowState['heroes'];
-    recent: IFlowState['recent'];
-    updated: IFlowState['updated'];
-    valid: INode['id'][];
-  }> = yield call(wrap, getNodeDiff, {
-    start: new Date().toISOString(),
-    end: new Date().toISOString(),
-    with_heroes: true,
-    with_updated: true,
-    with_recent: true,
-    with_valid: false,
-  });
-
-  const result = uniq([...(before || []), ...(after || [])]);
-
-  yield put(flowSetFlow({ is_loading: false, nodes: result }));
-
-  if (heroes.length) yield put(flowSetHeroes(heroes));
-  if (recent.length) yield put(flowSetRecent(recent));
-  if (updated.length) yield put(flowSetUpdated(updated));
-
-  if (!stored.length) hideLoader();
+  try {
+    const {
+      flow: { _persist },
+    } = yield select();
+
+    if (!_persist.rehydrated) return;
+
+    const stored: ReturnType<typeof selectFlowNodes> = yield select(selectFlowNodes);
+
+    if (stored.length) {
+      hideLoader();
+    }
+
+    yield put(flowSetFlow({ is_loading: true }));
+
+    const {
+      before = [],
+      after = [],
+      heroes = [],
+      recent = [],
+      updated = [],
+    }: Unwrap<typeof getNodeDiff> = yield call(getNodeDiff, {
+      start: new Date().toISOString(),
+      end: new Date().toISOString(),
+      with_heroes: true,
+      with_updated: true,
+      with_recent: true,
+      with_valid: false,
+    });
+
+    const result = uniq([...(before || []), ...(after || [])]);
+
+    yield put(flowSetFlow({ is_loading: false, nodes: result }));
+
+    if (heroes.length) yield put(flowSetHeroes(heroes));
+    if (recent.length) yield put(flowSetRecent(recent));
+    if (updated.length) yield put(flowSetUpdated(updated));
+
+    if (!stored.length) hideLoader();
+  } catch (error) {
+    console.log(error);
+  }
 }

 function* onSetCellView({ id, flow }: ReturnType<typeof flowSetCellView>) {
-  const nodes = yield select(selectFlowNodes);
-  yield put(flowSetNodes(nodes.map(node => (node.id === id ? { ...node, flow } : node))));
-
-  const { data, error } = yield call(wrap, postCellView, { id, flow });
-
-  // TODO: error handling
+  try {
+    const nodes: ReturnType<typeof selectFlowNodes> = yield select(selectFlowNodes);
+    yield put(flowSetNodes(nodes.map(node => (node.id === id ? { ...node, flow } : node))));
+    yield call(postCellView, { id, flow });
+  } catch (error) {
+    console.log(error);
+  }
 }

 function* getMore() {
-  yield put(flowSetFlow({ is_loading: true }));
-  const nodes: IFlowState['nodes'] = yield select(selectFlowNodes);
-
-  const start = nodes && nodes[0] && nodes[0].created_at;
-  const end = nodes && nodes[nodes.length - 1] && nodes[nodes.length - 1].created_at;
-
-  const { error, data } = yield call(wrap, getNodeDiff, {
-    start,
-    end,
-    with_heroes: false,
-    with_updated: true,
-    with_recent: true,
-    with_valid: true,
-  });
-
-  if (error || !data) return;
-
-  const result = uniq([
-    ...(data.before || []),
-    ...(data.valid ? nodes.filter(node => data.valid.includes(node.id)) : nodes),
-    ...(data.after || []),
-  ]);
-
-  yield put(
-    flowSetFlow({
-      is_loading: false,
-      nodes: result,
-      ...(data.recent ? { recent: data.recent } : {}),
-      ...(data.updated ? { updated: data.updated } : {}),
-    })
-  );
-
-  yield delay(1000);
+  try {
+    yield put(flowSetFlow({ is_loading: true }));
+    const nodes: ReturnType<typeof selectFlowNodes> = yield select(selectFlowNodes);
+
+    const start = nodes && nodes[0] && nodes[0].created_at;
+    const end = nodes && nodes[nodes.length - 1] && nodes[nodes.length - 1].created_at;
+
+    const data: Unwrap<typeof getNodeDiff> = yield call(getNodeDiff, {
+      start,
+      end,
+      with_heroes: false,
+      with_updated: true,
+      with_recent: true,
+      with_valid: true,
+    });
+
+    const result = uniq([
+      ...(data.before || []),
+      ...(data.valid ? nodes.filter(node => data.valid.includes(node.id)) : nodes),
+      ...(data.after || []),
+    ]);
+
+    yield put(
+      flowSetFlow({
+        is_loading: false,
+        nodes: result,
+        ...(data.recent ? { recent: data.recent } : {}),
+        ...(data.updated ? { updated: data.updated } : {}),
+      })
+    );
+
+    yield delay(1000);
+  } catch (error) {}
 }

 function* changeSearch({ search }: ReturnType<typeof flowChangeSearch>) {
-  yield put(
-    flowSetSearch({
-      ...search,
-      is_loading: !!search.text,
-    })
-  );
-
-  if (!search.text) return;
-
-  yield delay(500);
-
-  const { data, error }: Unwrap<typeof getSearchResults> = yield call(wrap, getSearchResults, {
-    ...search,
-  });
-
-  if (error) {
-    yield put(flowSetSearch({ is_loading: false, results: [], total: 0 }));
-  }
-
-  yield put(
-    flowSetSearch({
-      is_loading: false,
-      results: data.nodes,
-      total: data.total,
-    })
-  );
+  try {
+    yield put(
+      flowSetSearch({
+        ...search,
+        is_loading: !!search.text,
+      })
+    );
+
+    if (!search.text) return;
+
+    yield delay(500);
+
+    const data: Unwrap<typeof getSearchResults> = yield call(getSearchResults, {
+      text: search.text,
+    });
+
+    yield put(
+      flowSetSearch({
+        results: data.nodes,
+        total: data.total,
+      })
+    );
+  } catch (error) {
+    yield put(flowSetSearch({ results: [], total: 0 }));
+  } finally {
+    yield put(flowSetSearch({ is_loading: false }));
+  }
 }

 function* loadMoreSearch() {
-  yield put(
-    flowSetSearch({
-      is_loading_more: true,
-    })
-  );
-
-  const { search }: ReturnType<typeof selectFlow> = yield select(selectFlow);
-
-  const { result, delay }: { result: Unwrap<typeof getSearchResults>; delay: any } = yield race({
-    result: call(wrap, getSearchResults, {
-      ...search,
-      skip: search.results.length,
-    }),
-    delay: take(FLOW_ACTIONS.CHANGE_SEARCH),
-  });
-
-  if (delay || result.error) {
-    return put(flowSetSearch({ is_loading_more: false }));
-  }
-
-  yield put(
-    flowSetSearch({
-      results: [...search.results, ...result.data.nodes],
-      total: result.data.total,
-      is_loading_more: false,
-    })
-  );
+  try {
+    yield put(
+      flowSetSearch({
+        is_loading_more: true,
+      })
+    );
+
+    const { search }: ReturnType<typeof selectFlow> = yield select(selectFlow);
+
+    const { result, delay }: { result: Unwrap<typeof getSearchResults>; delay: any } = yield race({
+      result: call(getSearchResults, {
+        ...search,
+        skip: search.results.length,
+      }),
+      delay: take(FLOW_ACTIONS.CHANGE_SEARCH),
+    });
+
+    if (delay) {
+      return;
+    }
+
+    yield put(
+      flowSetSearch({
+        results: [...search.results, ...result.nodes],
+        total: result.total,
+      })
+    );
+  } catch (error) {
+    yield put(
+      flowSetSearch({
+        is_loading_more: false,
+      })
+    );
+  }
 }

 export default function* nodeSaga() {
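The substance of the hunk is a typing and error-handling cleanup: the hand-written IResultWithStatus<{ ... }> annotations and the wrap helper from '../auth/sagas' are dropped in favor of Unwrap<typeof getNodeDiff> / Unwrap<typeof getSearchResults> plus a try/catch in each saga. The definition of Unwrap lives in '../types' and is not part of this hunk; a minimal sketch of how such a helper is commonly written (an assumption, not the project's actual code) is:

// Hypothetical sketch only: the real Unwrap is defined in ../types and is not shown in this diff.
// It extracts the resolved value type of a promise-returning API function, so a saga result
// annotation such as Unwrap<typeof getNodeDiff> stays in sync with the API signature.
type Unwrap<T extends (...args: any[]) => Promise<any>> = T extends (...args: any[]) => Promise<infer R>
  ? R
  : never;

// Usage matching the refactored sagas: if getNodeDiff returns Promise<{ before: INode[]; after: INode[] }>,
// then Unwrap<typeof getNodeDiff> is { before: INode[]; after: INode[] }.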