Caches exist to make things faster, at the expense of using more memory and possibly serving outdated results. Our live search is a great use case for a cache, so let's set up a mapBroadcasterCache operator which maps a string to the result of a broadcaster.
```js
export let mapBroadcasterCache = createBroadcaster => broadcaster => listener => {
  let cache = new Map()
  let cancel
  return broadcaster(value => {
    // abort the previous request before handling the new value
    if (cancel) {
      cancel()
    }
    // serve from the cache when we've already seen this value
    if (cache.has(value)) {
      listener(cache.get(value))
      return
    }
    let newBroadcaster = createBroadcaster(value)
    cancel = newBroadcaster(newValue => {
      // add to cache only if newValue isn't an error
      if (!(newValue instanceof Error)) {
        cache.set(value, newValue)
      }
      console.log(cache)
      listener(newValue)
    })
  })
}
```
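To see the caching in isolation, here's a minimal sketch. The fakeFetch and inputs broadcasters are hypothetical stand-ins for getUrl and the real input broadcaster: the second "dogs" value is answered straight from the cache, so fakeFetch only runs twice.

```js
// Hypothetical createBroadcaster: emits a result after a delay and
// returns a cancel function, mimicking getUrl.
let fakeFetch = value => listener => {
  let id = setTimeout(() => listener(`result for ${value}`), 100)
  return () => clearTimeout(id)
}

// Hypothetical input broadcaster that emits three search terms over time.
let inputs = listener => {
  let ids = ["dogs", "cats", "dogs"].map((value, i) =>
    setTimeout(() => listener(value), i * 500)
  )
  return () => ids.forEach(clearTimeout)
}

mapBroadcasterCache(fakeFetch)(inputs)(result => console.log(result))
// "result for dogs", "result for cats", then "result for dogs" again (from the cache)
```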
```js
export let ignoreError = broadcaster => listener => {
  return broadcaster(value => {
    if (value instanceof Error) {
      return
    }
    listener(value)
  })
}
```
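Dropping errors like this pairs nicely with the cache above: failed requests never reach the listener, and they never make it into the cache either. A quick sketch, where the withError broadcaster is hypothetical:

```js
// Hypothetical broadcaster that emits an Error followed by a normal value.
let withError = listener => {
  listener(new Error("network failed"))
  listener({ docs: [] })
  return () => {}
}

ignoreError(withError)(value => console.log(value))
// logs { docs: [] } only; the Error is swallowed
```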
Using these in our live search pipeline:
```js
let inputToBooks = pipe(
  filter(name => name.length > 3),
  waitFor(150),
  pipe(
    map(name => `https://openlibrary.org/search.json?q=${name}`),
    mapBroadcasterCache(getUrl),
    ignoreError,
    map(json => json.docs)
  )
)(inputValue)
```
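To consume the pipeline, pass inputToBooks a listener. This assumes pipe, filter, waitFor, map, getUrl, and inputValue are the operators and broadcaster defined earlier in the series; the listener below is just a hypothetical renderer for the Open Library docs, each of which carries fields like title.

```js
// Hypothetical listener: log the first few titles for each set of results.
inputToBooks(docs => {
  console.log(docs.slice(0, 5).map(doc => doc.title))
})
```

Typing the same query twice now skips the network entirely the second time, while errors from getUrl are quietly dropped instead of being cached or rendered.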