'use strict';

exports.__esModule = true;
exports.TASK_CANCEL = exports.CHANNEL_END = exports.NOT_ITERATOR_ERROR = undefined;

var _extends = Object.assign || function (target) {
  for (var i = 1; i < arguments.length; i++) {
    var source = arguments[i];
    for (var key in source) {
      if (Object.prototype.hasOwnProperty.call(source, key)) {
        target[key] = source[key];
      }
    }
  }
  return target;
};

var _typeof = typeof Symbol === "function" && typeof Symbol.iterator === "symbol"
  ? function (obj) { return typeof obj; }
  : function (obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; };

exports.default = proc;

var _utils = /*#__PURE__*/require('./utils');
var _scheduler = /*#__PURE__*/require('./scheduler');
var _io = /*#__PURE__*/require('./io');
var _channel = /*#__PURE__*/require('./channel');
var _buffers = /*#__PURE__*/require('./buffers');

function _defineEnumerableProperties(obj, descs) {
  for (var key in descs) {
    var desc = descs[key];
    desc.configurable = desc.enumerable = true;
    if ("value" in desc) desc.writable = true;
    Object.defineProperty(obj, key, desc);
  }
  return obj;
}

var NOT_ITERATOR_ERROR = exports.NOT_ITERATOR_ERROR = 'proc first argument (Saga function result) must be an iterator';

var CHANNEL_END = exports.CHANNEL_END = {
  toString: function toString() {
    return '@@redux-saga/CHANNEL_END';
  }
};
var TASK_CANCEL = exports.TASK_CANCEL = {
  toString: function toString() {
    return '@@redux-saga/TASK_CANCEL';
  }
};

var matchers = {
  wildcard: function wildcard() {
    return _utils.kTrue;
  },
  default: function _default(pattern) {
    return (typeof pattern === 'undefined' ? 'undefined' : _typeof(pattern)) === 'symbol'
      ? function (input) { return input.type === pattern; }
      : function (input) { return input.type === String(pattern); };
  },
  array: function array(patterns) {
    return function (input) {
      return patterns.some(function (p) {
        return matcher(p)(input);
      });
    };
  },
  predicate: function predicate(_predicate) {
    return function (input) {
      return _predicate(input);
    };
  }
};

function matcher(pattern) {
  // prettier-ignore
  return (pattern === '*' ? matchers.wildcard
    : _utils.is.array(pattern) ? matchers.array
    : _utils.is.stringableFunc(pattern) ? matchers.default
    : _utils.is.func(pattern) ? matchers.predicate
    : matchers.default)(pattern);
}
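/**
  Illustrative sketch (not part of the library): how the matcher forms above
  classify incoming actions. The action types and payloads below are hypothetical.

    matcher('*')({ type: 'ANY' })                                 // true  (wildcard)
    matcher('INCREMENT')({ type: 'INCREMENT' })                   // true  (string pattern)
    matcher(['INCREMENT', 'DECREMENT'])({ type: 'DECREMENT' })    // true  (array of patterns)
    matcher(function (a) { return a.payload > 0; })({ type: 'ADD', payload: 1 })  // true (predicate)
**/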
/**
  Used to track a parent task and its forks.
  In the new fork model, forked tasks are attached by default to their parent.
  We model this using the concept of Parent task && main Task:
  the main task is the main flow of the current Generator, and the parent task is the
  aggregation of the main task + all its forked tasks.
  Thus the whole model represents an execution tree with multiple branches
  (vs the linear execution tree in sequential (non parallel) programming).

  A parent task has the following semantics:
  - It completes if all its forks either complete or are cancelled
  - If it's cancelled, all forks are cancelled as well
  - It aborts if any uncaught error bubbles up from forks
  - If it completes, the return value is the one returned by the main task
**/
function forkQueue(name, mainTask, cb) {
  var tasks = [],
      result = void 0,
      completed = false;
  addTask(mainTask);

  function abort(err) {
    cancelAll();
    cb(err, true);
  }

  function addTask(task) {
    tasks.push(task);
    task.cont = function (res, isErr) {
      if (completed) {
        return;
      }

      (0, _utils.remove)(tasks, task);
      task.cont = _utils.noop;
      if (isErr) {
        abort(res);
      } else {
        if (task === mainTask) {
          result = res;
        }
        if (!tasks.length) {
          completed = true;
          cb(result);
        }
      }
    };
    // task.cont.cancel = task.cancel
  }

  function cancelAll() {
    if (completed) {
      return;
    }
    completed = true;
    tasks.forEach(function (t) {
      t.cont = _utils.noop;
      t.cancel();
    });
    tasks = [];
  }

  return {
    addTask: addTask,
    cancelAll: cancelAll,
    abort: abort,
    getTasks: function getTasks() {
      return tasks;
    },
    taskNames: function taskNames() {
      return tasks.map(function (t) {
        return t.name;
      });
    }
  };
}
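/**
  Illustrative sketch (not part of the library): the fork model described above,
  as seen from user land. The sagas below are hypothetical.

    function* parentSaga() {
      yield fork(watcherA)   // attached forks: the parent task only completes
      yield fork(watcherB)   // once watcherA and watcherB have completed
      return 42              // the parent task's result comes from this main flow
    }

  Cancelling the parent task cancels watcherA and watcherB as well, and an
  uncaught error in either fork aborts the whole parent task.
**/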
function createTaskIterator(_ref) {
  var context = _ref.context,
      fn = _ref.fn,
      args = _ref.args;

  if (_utils.is.iterator(fn)) {
    return fn;
  }

  // catch synchronous failures; see #152 and #441
  var result = void 0,
      error = void 0;
  try {
    result = fn.apply(context, args);
  } catch (err) {
    error = err;
  }

  // i.e. a generator function returns an iterator
  if (_utils.is.iterator(result)) {
    return result;
  }

  // do not bubble up synchronous failures for detached forks;
  // instead create a failed task. See #152 and #441
  return error
    ? (0, _utils.makeIterator)(function () {
        throw error;
      })
    : (0, _utils.makeIterator)(function () {
        var pc = void 0;
        var eff = { done: false, value: result };
        var ret = function ret(value) {
          return { done: true, value: value };
        };
        return function (arg) {
          if (!pc) {
            pc = true;
            return eff;
          } else {
            return ret(arg);
          }
        };
      }());
}

var wrapHelper = function wrapHelper(helper) {
  return { fn: helper };
};

function proc(iterator) {
  var subscribe = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : function () {
    return _utils.noop;
  };
  var dispatch = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : _utils.noop;
  var getState = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : _utils.noop;
  var parentContext = arguments.length > 4 && arguments[4] !== undefined ? arguments[4] : {};
  var options = arguments.length > 5 && arguments[5] !== undefined ? arguments[5] : {};
  var parentEffectId = arguments.length > 6 && arguments[6] !== undefined ? arguments[6] : 0;
  var name = arguments.length > 7 && arguments[7] !== undefined ? arguments[7] : 'anonymous';
  var cont = arguments[8];

  (0, _utils.check)(iterator, _utils.is.iterator, NOT_ITERATOR_ERROR);

  var effectsString = '[...effects]';
  var runParallelEffect = (0, _utils.deprecate)(runAllEffect, (0, _utils.updateIncentive)(effectsString, 'all(' + effectsString + ')'));

  var sagaMonitor = options.sagaMonitor,
      logger = options.logger,
      onError = options.onError;

  var log = logger || _utils.log;
  var logError = function logError(err) {
    var message = err.sagaStack;
    if (!message && err.stack) {
      message = err.stack.split('\n')[0].indexOf(err.message) !== -1 ? err.stack : 'Error: ' + err.message + '\n' + err.stack;
    }
    log('error', 'uncaught at ' + name, message || err.message || err);
  };

  var stdChannel = (0, _channel.stdChannel)(subscribe);
  var taskContext = Object.create(parentContext);

  /**
    Tracks the current effect cancellation.
    Each time the generator progresses, calling runEffect will set a new value on it.
    It allows propagating cancellation to child effects.
  **/
  next.cancel = _utils.noop;

  /**
    Creates a new task descriptor for this generator.
    We'll also create a main task to track the main flow (besides other forked tasks).
  **/
  var task = newTask(parentEffectId, name, iterator, cont);
  var mainTask = { name: name, cancel: cancelMain, isRunning: true };

  var taskQueue = forkQueue(name, mainTask, end);

  /**
    Cancellation of the main task. We'll simply resume the Generator with TASK_CANCEL.
  **/
  function cancelMain() {
    if (mainTask.isRunning && !mainTask.isCancelled) {
      mainTask.isCancelled = true;
      next(TASK_CANCEL);
    }
  }

  /**
    This may be called by a parent generator to trigger/propagate cancellation:
    cancel all pending tasks (including the main task), then end the current task.

    Cancellation propagates down to the whole execution tree held by this Parent task.
    It's also propagated to all joiners of this task and their execution tree/joiners.

    Cancellation is a noop for terminated/Cancelled tasks.
  **/
  function cancel() {
    /**
      We need to check both Running and Cancelled status:
      tasks can be Cancelled but still Running.
    **/
    if (iterator._isRunning && !iterator._isCancelled) {
      iterator._isCancelled = true;
      taskQueue.cancelAll();
      /**
        Ending with a TASK_CANCEL result will propagate the Cancellation to all joiners.
      **/
      end(TASK_CANCEL);
    }
  }

  /**
    Attaches cancellation logic to this task's continuation;
    this will permit cancellation to propagate down the call chain.
  **/
  cont && (cont.cancel = cancel);

  // tracks the running status
  iterator._isRunning = true;

  // kicks off the generator
  next();

  // then return the task descriptor to the caller
  return task;
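  /**
    Illustrative sketch (not part of the library): what the cancellation semantics above
    look like from user land. The sagas and helpers below are hypothetical.

      function* worker() {
        try {
          yield call(longRunningJob)
        } finally {
          if (yield cancelled()) {
            // entered via iterator.return(TASK_CANCEL) when this task is cancelled
            yield call(cleanup)
          }
        }
      }

      function* watcher() {
        var task = yield fork(worker)
        yield take('STOP')
        yield cancel(task)   // resumes worker with TASK_CANCEL and cancels its attached forks
      }
  **/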
  /**
    This is the generator driver.
    It's a recursive async/continuation function which calls itself
    until the generator terminates or throws.
  **/
  function next(arg, isErr) {
    // Preventive measure. If we end up here, then there is really something wrong
    if (!mainTask.isRunning) {
      throw new Error('Trying to resume an already finished generator');
    }

    try {
      var result = void 0;
      if (isErr) {
        result = iterator.throw(arg);
      } else if (arg === TASK_CANCEL) {
        /**
          Getting TASK_CANCEL automatically cancels the main task.
          We can get this value here:
          - by cancelling the parent task manually
          - by joining a Cancelled task
        **/
        mainTask.isCancelled = true;
        /**
          Cancels the current effect; this will propagate the cancellation down to any called tasks.
        **/
        next.cancel();
        /**
          If this Generator has a `return` method then invoke it.
          This will jump to the finally block.
        **/
        result = _utils.is.func(iterator.return) ? iterator.return(TASK_CANCEL) : { done: true, value: TASK_CANCEL };
      } else if (arg === CHANNEL_END) {
        // We get CHANNEL_END by taking from a channel that ended using `take` (and not `takem` used to trap End of channels)
        result = _utils.is.func(iterator.return) ? iterator.return() : { done: true };
      } else {
        result = iterator.next(arg);
      }

      if (!result.done) {
        runEffect(result.value, parentEffectId, '', next);
      } else {
        /**
          This Generator has ended; terminate the main task and notify the fork queue.
        **/
        mainTask.isMainRunning = false;
        mainTask.cont && mainTask.cont(result.value);
      }
    } catch (error) {
      if (mainTask.isCancelled) {
        logError(error);
      }
      mainTask.isMainRunning = false;
      mainTask.cont(error, true);
    }
  }

  function end(result, isErr) {
    iterator._isRunning = false;
    stdChannel.close();
    if (!isErr) {
      iterator._result = result;
      iterator._deferredEnd && iterator._deferredEnd.resolve(result);
    } else {
      if (result instanceof Error) {
        Object.defineProperty(result, 'sagaStack', {
          value: 'at ' + name + ' \n ' + (result.sagaStack || result.stack),
          configurable: true
        });
      }
      if (!task.cont) {
        if (result instanceof Error && onError) {
          onError(result);
        } else {
          logError(result);
        }
      }
      iterator._error = result;
      iterator._isAborted = true;
      iterator._deferredEnd && iterator._deferredEnd.reject(result);
    }
    task.cont && task.cont(result, isErr);
    task.joiners.forEach(function (j) {
      return j.cb(result, isErr);
    });
    task.joiners = null;
  }
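  /**
    Illustrative sketch (not part of the library): the continuation protocol driven by
    next() above. Each yielded effect is handed to runEffect together with `next` as its
    completion callback, so resolving the effect resumes the saga and rejecting it throws
    into the saga. The saga and fetchUser helper below are hypothetical.

      function* saga() {
        var user = yield call(fetchUser, 42)   // runEffect(effect, ..., next); on success the
                                               // runner calls next(user) and execution resumes
                                               // here with `user`; on failure next(err, true)
                                               // throws into the saga
        yield put({ type: 'USER_LOADED', user: user })
      }
  **/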
  function runEffect(effect, parentEffectId) {
    var label = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : '';
    var cb = arguments[3];

    var effectId = (0, _utils.uid)();
    sagaMonitor && sagaMonitor.effectTriggered({ effectId: effectId, parentEffectId: parentEffectId, label: label, effect: effect });

    /**
      Completion callback and cancel callback are mutually exclusive:
      we can't cancel an already completed effect,
      and we can't complete an already cancelled effect.
    **/
    var effectSettled = void 0;

    // Completion callback passed to the appropriate effect runner
    function currCb(res, isErr) {
      if (effectSettled) {
        return;
      }

      effectSettled = true;
      cb.cancel = _utils.noop; // defensive measure
      if (sagaMonitor) {
        isErr ? sagaMonitor.effectRejected(effectId, res) : sagaMonitor.effectResolved(effectId, res);
      }
      cb(res, isErr);
    }
    // tracks down the current cancel
    currCb.cancel = _utils.noop;

    // set up cancellation logic on the parent cb
    cb.cancel = function () {
      // prevents cancelling an already completed effect
      if (effectSettled) {
        return;
      }

      effectSettled = true;
      /**
        Propagates cancel downward.
        Catch uncaught cancellation errors; since we can no longer call the completion
        callback, log errors raised during cancellation into the console.
      **/
      try {
        currCb.cancel();
      } catch (err) {
        logError(err);
      }
      currCb.cancel = _utils.noop; // defensive measure

      sagaMonitor && sagaMonitor.effectCancelled(effectId);
    };

    /**
      Each effect runner must attach its own logic of cancellation to the provided callback;
      it allows this generator to propagate cancellation downward.

      ATTENTION! effect runners must set up the cancel logic by setting cb.cancel = [cancelMethod],
      and the setup must occur before calling the callback.

      This is a sort of inversion of control: called async functions are responsible
      for completing the flow by calling the provided continuation, while caller functions
      are responsible for aborting the current flow by calling the attached cancel function.

      Library users can attach their own cancellation logic to promises
      by defining a promise[CANCEL] method in their returned promises.

      ATTENTION! calling cancel must have no effect on an already completed or cancelled effect.
    **/
    var data = void 0;
    // prettier-ignore
    return (
      // Non declarative effect
        _utils.is.promise(effect)                   ? resolvePromise(effect, currCb)
      : _utils.is.helper(effect)                    ? runForkEffect(wrapHelper(effect), effectId, currCb)
      : _utils.is.iterator(effect)                  ? resolveIterator(effect, effectId, name, currCb)

      // declarative effects
      : _utils.is.array(effect)                     ? runParallelEffect(effect, effectId, currCb)
      : (data = _io.asEffect.take(effect))          ? runTakeEffect(data, currCb)
      : (data = _io.asEffect.put(effect))           ? runPutEffect(data, currCb)
      : (data = _io.asEffect.all(effect))           ? runAllEffect(data, effectId, currCb)
      : (data = _io.asEffect.race(effect))          ? runRaceEffect(data, effectId, currCb)
      : (data = _io.asEffect.call(effect))          ? runCallEffect(data, effectId, currCb)
      : (data = _io.asEffect.cps(effect))           ? runCPSEffect(data, currCb)
      : (data = _io.asEffect.fork(effect))          ? runForkEffect(data, effectId, currCb)
      : (data = _io.asEffect.join(effect))          ? runJoinEffect(data, currCb)
      : (data = _io.asEffect.cancel(effect))        ? runCancelEffect(data, currCb)
      : (data = _io.asEffect.select(effect))        ? runSelectEffect(data, currCb)
      : (data = _io.asEffect.actionChannel(effect)) ? runChannelEffect(data, currCb)
      : (data = _io.asEffect.flush(effect))         ? runFlushEffect(data, currCb)
      : (data = _io.asEffect.cancelled(effect))     ? runCancelledEffect(data, currCb)
      : (data = _io.asEffect.getContext(effect))    ? runGetContextEffect(data, currCb)
      : (data = _io.asEffect.setContext(effect))    ? runSetContextEffect(data, currCb)
      : /* anything else returned as is */            currCb(effect)
    );
  }

  function resolvePromise(promise, cb) {
    var cancelPromise = promise[_utils.CANCEL];
    if (_utils.is.func(cancelPromise)) {
      cb.cancel = cancelPromise;
    } else if (_utils.is.func(promise.abort)) {
      cb.cancel = function () {
        return promise.abort();
      };
      // TODO: add support for the fetch API, whenever they get around to
      // adding cancel support
    }
    promise.then(cb, function (error) {
      return cb(error, true);
    });
  }

  function resolveIterator(iterator, effectId, name, cb) {
    proc(iterator, subscribe, dispatch, getState, taskContext, options, effectId, name, cb);
  }

  function runTakeEffect(_ref2, cb) {
    var channel = _ref2.channel,
        pattern = _ref2.pattern,
        maybe = _ref2.maybe;

    channel = channel || stdChannel;
    var takeCb = function takeCb(inp) {
      return inp instanceof Error ? cb(inp, true) : (0, _channel.isEnd)(inp) && !maybe ? cb(CHANNEL_END) : cb(inp);
    };
    try {
      channel.take(takeCb, matcher(pattern));
    } catch (err) {
      return cb(err, true);
    }
    cb.cancel = takeCb.cancel;
  }

  function runPutEffect(_ref3, cb) {
    var channel = _ref3.channel,
        action = _ref3.action,
        resolve = _ref3.resolve;

    /**
      Schedule the put in case another saga is holding a lock.
      The put will be executed atomically, i.e. nested puts will execute after this put has terminated.
    **/
    (0, _scheduler.asap)(function () {
      var result = void 0;
      try {
        result = (channel ? channel.put : dispatch)(action);
      } catch (error) {
        // If we have a channel or `put.resolve` was used then bubble up the error.
        if (channel || resolve) return cb(error, true);
        logError(error);
      }

      if (resolve && _utils.is.promise(result)) {
        resolvePromise(result, cb);
      } else {
        return cb(result);
      }
    });
    // Put effects are non cancellable
  }

  function runCallEffect(_ref4, effectId, cb) {
    var context = _ref4.context,
        fn = _ref4.fn,
        args = _ref4.args;

    var result = void 0;
    // catch synchronous failures; see #152
    try {
      result = fn.apply(context, args);
    } catch (error) {
      return cb(error, true);
    }
    return _utils.is.promise(result) ? resolvePromise(result, cb) : _utils.is.iterator(result) ? resolveIterator(result, effectId, fn.name, cb) : cb(result);
  }
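  /**
    Illustrative sketch (not part of the library): attaching user-land cancellation logic
    to a promise returned from a `call`ed function, as described above. `CANCEL` here is
    the symbol exported by redux-saga (`import { CANCEL } from 'redux-saga'`); fetchUser is
    hypothetical and assumes a runtime with fetch and AbortController.

      function fetchUser(id) {
        var controller = new AbortController()
        var promise = fetch('/users/' + id, { signal: controller.signal })
        promise[CANCEL] = function () { controller.abort() }
        return promise
      }

    resolvePromise above registers promise[CANCEL] as the effect's cancel handler, so
    cancelling the waiting saga aborts the request.
  **/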
  function runCPSEffect(_ref5, cb) {
    var context = _ref5.context,
        fn = _ref5.fn,
        args = _ref5.args;

    // CPS (i.e. node style functions) can define their own cancellation logic
    // by setting a cancel field on the cb

    // catch synchronous failures; see #152
    try {
      var cpsCb = function cpsCb(err, res) {
        return _utils.is.undef(err) ? cb(res) : cb(err, true);
      };
      fn.apply(context, args.concat(cpsCb));
      if (cpsCb.cancel) {
        cb.cancel = function () {
          return cpsCb.cancel();
        };
      }
    } catch (error) {
      return cb(error, true);
    }
  }

  function runForkEffect(_ref6, effectId, cb) {
    var context = _ref6.context,
        fn = _ref6.fn,
        args = _ref6.args,
        detached = _ref6.detached;

    var taskIterator = createTaskIterator({ context: context, fn: fn, args: args });

    try {
      (0, _scheduler.suspend)();
      var _task = proc(taskIterator, subscribe, dispatch, getState, taskContext, options, effectId, fn.name, detached ? null : _utils.noop);

      if (detached) {
        cb(_task);
      } else {
        if (taskIterator._isRunning) {
          taskQueue.addTask(_task);
          cb(_task);
        } else if (taskIterator._error) {
          taskQueue.abort(taskIterator._error);
        } else {
          cb(_task);
        }
      }
    } finally {
      (0, _scheduler.flush)();
    }
    // Fork effects are non cancellable
  }

  function runJoinEffect(t, cb) {
    if (t.isRunning()) {
      var joiner = { task: task, cb: cb };
      cb.cancel = function () {
        return (0, _utils.remove)(t.joiners, joiner);
      };
      t.joiners.push(joiner);
    } else {
      t.isAborted() ? cb(t.error(), true) : cb(t.result());
    }
  }

  function runCancelEffect(taskToCancel, cb) {
    if (taskToCancel === _utils.SELF_CANCELLATION) {
      taskToCancel = task;
    }
    if (taskToCancel.isRunning()) {
      taskToCancel.cancel();
    }
    cb();
    // cancel effects are non cancellable
  }

  function runAllEffect(effects, effectId, cb) {
    var keys = Object.keys(effects);

    if (!keys.length) {
      return cb(_utils.is.array(effects) ? [] : {});
    }

    var completedCount = 0;
    var completed = void 0;
    var results = {};
    var childCbs = {};

    function checkEffectEnd() {
      if (completedCount === keys.length) {
        completed = true;
        cb(_utils.is.array(effects) ? _utils.array.from(_extends({}, results, { length: keys.length })) : results);
      }
    }

    keys.forEach(function (key) {
      var chCbAtKey = function chCbAtKey(res, isErr) {
        if (completed) {
          return;
        }
        if (isErr || (0, _channel.isEnd)(res) || res === CHANNEL_END || res === TASK_CANCEL) {
          cb.cancel();
          cb(res, isErr);
        } else {
          results[key] = res;
          completedCount++;
          checkEffectEnd();
        }
      };
      chCbAtKey.cancel = _utils.noop;
      childCbs[key] = chCbAtKey;
    });

    cb.cancel = function () {
      if (!completed) {
        completed = true;
        keys.forEach(function (key) {
          return childCbs[key].cancel();
        });
      }
    };

    keys.forEach(function (key) {
      return runEffect(effects[key], effectId, key, childCbs[key]);
    });
  }

  function runRaceEffect(effects, effectId, cb) {
    var completed = void 0;
    var keys = Object.keys(effects);
    var childCbs = {};

    keys.forEach(function (key) {
      var chCbAtKey = function chCbAtKey(res, isErr) {
        if (completed) {
          return;
        }

        if (isErr) {
          // Race Auto cancellation
          cb.cancel();
          cb(res, true);
        } else if (!(0, _channel.isEnd)(res) && res !== CHANNEL_END && res !== TASK_CANCEL) {
          var _response;

          cb.cancel();
          completed = true;
          var response = (_response = {}, _response[key] = res, _response);
          cb(_utils.is.array(effects) ? [].slice.call(_extends({}, response, { length: keys.length })) : response);
        }
      };
      chCbAtKey.cancel = _utils.noop;
      childCbs[key] = chCbAtKey;
    });

    cb.cancel = function () {
      // prevents unnecessary cancellation
      if (!completed) {
        completed = true;
        keys.forEach(function (key) {
          return childCbs[key].cancel();
        });
      }
    };
    keys.forEach(function (key) {
      if (completed) {
        return;
      }
      runEffect(effects[key], effectId, key, childCbs[key]);
    });
  }
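  /**
    Illustrative sketch (not part of the library): the semantics implemented by
    runAllEffect / runRaceEffect above. The helpers (fetchUser, fetchPosts, fetchData,
    delay) are hypothetical.

      // all: completes when every child effect completes; keyed results mirror the input shape
      var both = yield all({ user: call(fetchUser), posts: call(fetchPosts) })

      // race: completes with the first child to settle; the losers are cancelled
      // via cb.cancel() ("Race Auto cancellation" above)
      var result = yield race({ data: call(fetchData), timeout: call(delay, 1000) })
  **/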
  function runSelectEffect(_ref7, cb) {
    var selector = _ref7.selector,
        args = _ref7.args;

    try {
      var state = selector.apply(undefined, [getState()].concat(args));
      cb(state);
    } catch (error) {
      cb(error, true);
    }
  }

  function runChannelEffect(_ref8, cb) {
    var pattern = _ref8.pattern,
        buffer = _ref8.buffer;

    var match = matcher(pattern);
    match.pattern = pattern;
    cb((0, _channel.eventChannel)(subscribe, buffer || _buffers.buffers.fixed(), match));
  }

  function runCancelledEffect(data, cb) {
    cb(!!mainTask.isCancelled);
  }

  function runFlushEffect(channel, cb) {
    channel.flush(cb);
  }

  function runGetContextEffect(prop, cb) {
    cb(taskContext[prop]);
  }

  function runSetContextEffect(props, cb) {
    _utils.object.assign(taskContext, props);
    cb();
  }

  function newTask(id, name, iterator, cont) {
    var _done, _ref9, _mutatorMap;

    iterator._deferredEnd = null;
    return _ref9 = {}, _ref9[_utils.TASK] = true, _ref9.id = id, _ref9.name = name, _done = 'done', _mutatorMap = {}, _mutatorMap[_done] = _mutatorMap[_done] || {}, _mutatorMap[_done].get = function () {
      if (iterator._deferredEnd) {
        return iterator._deferredEnd.promise;
      } else {
        var def = (0, _utils.deferred)();
        iterator._deferredEnd = def;
        if (!iterator._isRunning) {
          iterator._error ? def.reject(iterator._error) : def.resolve(iterator._result);
        }
        return def.promise;
      }
    }, _ref9.cont = cont, _ref9.joiners = [], _ref9.cancel = cancel, _ref9.isRunning = function isRunning() {
      return iterator._isRunning;
    }, _ref9.isCancelled = function isCancelled() {
      return iterator._isCancelled;
    }, _ref9.isAborted = function isAborted() {
      return iterator._isAborted;
    }, _ref9.result = function result() {
      return iterator._result;
    }, _ref9.error = function error() {
      return iterator._error;
    }, _ref9.setContext = function setContext(props) {
      (0, _utils.check)(props, _utils.is.object, (0, _utils.createSetContextWarning)('task', props));
      _utils.object.assign(taskContext, props);
    }, _defineEnumerableProperties(_ref9, _mutatorMap), _ref9;
  }
}
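/**
  Illustrative sketch (not part of the library): the shape of the task descriptor built
  by newTask above, as observed from user land. All getters delegate to the underlying
  iterator's bookkeeping fields set by proc.

    task.id / task.name      // bookkeeping
    task.done                // lazily created promise, settled by end() via _deferredEnd
    task.isRunning()         // iterator._isRunning
    task.isCancelled()       // iterator._isCancelled
    task.isAborted()         // iterator._isAborted
    task.result()            // iterator._result
    task.error()             // iterator._error
    task.cancel()            // the cancel() defined earlier in proc
    task.setContext(props)   // merges props into this task's context
**/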