mirror of
https://github.com/beestat/app.git
synced 2025-05-24 02:14:03 -04:00
Added currently-unused requestor class
This was an attempt to fix another issue, but it didn't work. It should be useful later on, though, so I'm keeping it around.
This commit is contained in:
parent
38ad5e1a0f
commit
206af04d46
@ -145,6 +145,28 @@ beestat.api.prototype.set_callback = function(callback) {
|
||||
return this;
|
||||
};
|
||||
|
||||
/**
 * Retrieve the list of API calls queued on this instance.
 *
 * @return {array} The API calls.
 */
beestat.api.prototype.get_api_calls = function() {
  return this.api_calls_;
};
|
||||
|
||||
/**
 * Treat the current API call as a batch when sending, even if only a single
 * call has been added. Useful when API calls are generated dynamically,
 * since the response then always comes back in batch form.
 *
 * @return {beestat.api} This.
 */
beestat.api.prototype.force_batch = function() {
  this.force_batch_ = true;
  return this;
};
|
||||
|
||||
/**
|
||||
* Fires after an XHR request returns.
|
||||
*
|
||||
@ -243,7 +265,7 @@ beestat.api.prototype.load_ = function(response_text) {
|
||||
/**
 * Whether or not this is a batch API call. A call counts as a batch either
 * when more than one call has been added or when force_batch() was used.
 *
 * @return {boolean} Whether or not this is a batch API call.
 */
beestat.api.prototype.is_batch_ = function() {
  // The original left both the pre-change and post-change return statements
  // in place; the first one made the force_batch_ check unreachable.
  return this.force_batch_ === true || this.api_calls_.length > 1;
};
|
||||
|
||||
/**
|
||||
|
123
js/beestat/requestor.js
Normal file
123
js/beestat/requestor.js
Normal file
@ -0,0 +1,123 @@
|
||||
/**
 * If you want some data from the API in the cache this is the preferred way
 * to get it there. It will queue requests and if two things make the same API
 * call it will collapse them into a single API call.
 *
 * This is helpful for de-duplicating API calls if two cards need the same data.
 */
beestat.requestor = {};

// Pending API calls that have been requested but not yet sent.
beestat.requestor.requested_api_calls_ = [];

// Whether a batch API call is currently in flight.
beestat.requestor.sending_ = false;

// Handle of the pending window.setTimeout used to debounce sends.
beestat.requestor.timeout_ = undefined;
|
||||
|
||||
/**
 * Add the requested API calls to the pending stack, then schedule a send
 * after a short delay so that additional requests arriving in the meantime
 * are collapsed into the same batch.
 *
 * NOTE(review): the original comments said the delay is 100ms but the code
 * uses 3000ms — confirm which value is intended.
 *
 * @param {array} api_calls The API calls to request.
 */
beestat.requestor.request = function(api_calls) {
  // Cancel any previously scheduled send; it gets re-scheduled below.
  window.clearTimeout(beestat.requestor.timeout_);

  for (const api_call of api_calls) {
    beestat.requestor.requested_api_calls_.push(api_call);
  }

  /**
   * Only schedule a send when idle. If a batch is actively sending, the
   * response callback will schedule the next send once it completes.
   */
  if (beestat.requestor.sending_ === false) {
    beestat.requestor.timeout_ = window.setTimeout(beestat.requestor.send, 3000);
  }
};
|
||||
|
||||
/**
 * Send all of the pending API calls as a single batch.
 */
beestat.requestor.send = function() {
  beestat.requestor.sending_ = true;

  const api = new beestat.api();

  // Always respond in batch form so the callback can index by position.
  api.force_batch();

  for (const requested_api_call of beestat.requestor.requested_api_calls_) {
    api.add_call(
      requested_api_call.resource,
      requested_api_call.method,
      requested_api_call.arguments
    );
  }

  api.set_callback(function(response) {
    beestat.requestor.callback(response, api);
  });

  api.send();
};
|
||||
|
||||
/**
 * Handle the response from a batch API call: merge the returned data into
 * the cache and remove the now-sent API calls from the pending stack. If
 * more calls were requested while the batch was in flight, schedule them.
 *
 * @param {array} response The batch API response, indexed in the same order
 * as the API calls on the api instance.
 * @param {beestat.api} api The API instance that was sent.
 */
beestat.requestor.callback = function(response, api) {
  /**
   * Data from the API calls is first merged into a holding object so it can
   * be merged into the cache in a single call.
   */
  const data = {};

  // Remove sent API calls from the request stack.
  api.get_api_calls().forEach(function(sent_api_call, i) {
    if (data[sent_api_call.resource] === undefined) {
      data[sent_api_call.resource] = {};
    }

    Object.assign(data[sent_api_call.resource], response[i]);

    /**
     * Remove API calls from the requested_api_calls array that have now
     * been sent. Iterate backwards so splicing does not skip entries.
     *
     * NOTE(review): sent_api_call.arguments is compared against
     * JSON.stringify of the requested arguments — this assumes the api
     * class stores arguments as a JSON string; confirm against add_call.
     */
    let j = beestat.requestor.requested_api_calls_.length;
    while (j--) {
      if (
        sent_api_call.resource === beestat.requestor.requested_api_calls_[j].resource &&
        sent_api_call.method === beestat.requestor.requested_api_calls_[j].method &&
        sent_api_call.arguments === JSON.stringify(beestat.requestor.requested_api_calls_[j].arguments)
      ) {
        beestat.requestor.requested_api_calls_.splice(j, 1);
      }
    }
  });

  // Update the cache, one set per resource.
  for (const key in data) {
    beestat.cache.set(key, data[key]);
  }

  beestat.requestor.sending_ = false;

  /**
   * If there are any API calls left to send, queue them up now. These would
   * have been added between when the API call started and finished.
   */
  if (beestat.requestor.requested_api_calls_.length > 0) {
    beestat.requestor.timeout_ = window.setTimeout(beestat.requestor.send, 3000);
  }
};
|
||||
|
||||
/*
 * Example usage — duplicate requests for the same resource/method/arguments
 * are collapsed into a single API call:
 *
 * beestat.requestor.request([{'resource': 'thermostat','method': 'read_id','arguments': {'attributes': {'thermostat_id': 1}}}]);
 * beestat.requestor.request([{'resource': 'thermostat','method': 'read_id','arguments': {'attributes': {'thermostat_id': 1}}}]);
 * beestat.requestor.request([{'resource': 'sensor','method': 'read_id','arguments': {'attributes': {'sensor_id': 1}}}]);
 * beestat.requestor.request([{'resource': 'sensor','method': 'read_id','arguments': {'attributes': {'sensor_id': 2}}}]);
 */
|
Loading…
x
Reference in New Issue
Block a user