ui: Keep track of previous node checks and avoid adding them twice (#9661)
* Keep track of previous node checks and avoid adding them twice
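For context on what the change guards against: node-level health checks (those with an empty ServiceName) belong to the node rather than to any one service, and a service instance and its sidecar proxy usually sit on the same node, so the same node check turns up in both instances' check lists. Below is a minimal standalone sketch of the de-duplication idea, keyed on Node plus CheckID; it is illustrative plain JavaScript only, not the committed code (the real implementation is the merge-checks util added further down).

// Illustrative only: keep service/proxy checks, but only keep the first
// occurrence of any node check (ServiceName === ''), keyed by Node + CheckID.
function dedupeNodeChecks(lists) {
  const seen = new Set();
  return lists.flat().filter(check => {
    // service- and proxy-level checks are always kept
    if (check.ServiceName !== '') {
      return true;
    }
    const key = `${check.Node}/${check.CheckID}`;
    if (seen.has(key)) {
      // this node check was already added from an earlier list
      return false;
    }
    seen.add(key);
    return true;
  });
}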
This commit is contained in:
parent 793be59589
commit 4a7e789cac
@@ -1,8 +1,9 @@
 import Model, { attr, belongsTo } from '@ember-data/model';
 import { fragmentArray } from 'ember-data-model-fragments/attributes';
-import { computed, get, set } from '@ember/object';
+import { computed, get } from '@ember/object';
 import { or, filter, alias } from '@ember/object/computed';
 import { tracked } from '@glimmer/tracking';
+import mergeChecks from 'consul-ui/utils/merge-checks';
 
 export const PRIMARY_KEY = 'uid';
 export const SLUG_KEY = 'Node.Node,Service.ID';

@@ -52,29 +53,15 @@ export default class ServiceInstance extends Model {
   @filter('Checks.@each.Kind', (item, i, arr) => item.Kind === 'node') NodeChecks;
 
   // MeshChecks are a concatenation of Checks for the Instance and Checks for
-  // the ProxyInstance. Checks is an ember-data-model-fragment, so we can't just
-  // concat it, we have to loop through all the items in order to merge
-  @computed('Checks', 'ProxyInstance{Checks,ServiceProxy.Expose.Checks}')
+  // the ProxyInstance.
+  @computed('Checks.[]', 'ProxyInstance.{Checks.[],ServiceProxy.Expose.Checks}')
   get MeshChecks() {
-    return (get(this, 'Checks') || [])
-      .map(item => {
-        set(
-          item,
-          'Exposed',
-          get(this, 'ProxyInstance.ServiceProxy.Expose.Checks') && get(item, 'Exposable')
-        );
-        return item;
-      })
-      .concat(
-        (get(this, 'ProxyInstance.Checks') || []).map(item => {
-          set(
-            item,
-            'Exposed',
-            get(this, 'ProxyInstance.ServiceProxy.Expose.Checks') && get(item, 'Exposable')
-          );
-          return item;
-        })
-      );
+    // merge the instance and proxy checks together, avoiding duplicate node
+    // checks and additionally setting any checks to exposed if required
+    return mergeChecks(
+      [get(this, 'Checks'), get(this, 'ProxyInstance.Checks')],
+      get(this, 'ProxyInstance.ServiceProxy.Expose.Checks')
+    );
   }
 
   @computed('Service.Meta')
@@ -0,0 +1,62 @@
+import { get, set } from '@ember/object';
+import MultiMap from 'mnemonist/multi-map';
+
+/**
+ * Checks are ember-data-model-fragments, so we can't just concat them; we
+ * have to loop through all the items in order to merge.
+ * We also need to avoid repeating Node checks here: the service and the
+ * proxy are likely to be on the same node, so without something extra here
+ * the node check would end up in the list twice.
+ *
+ * @param {array} checks - Multiple lists of healthchecks to merge; each item in this array should be a further array of healthchecks
+ * @param {boolean} exposed - Whether the checks should be marked as exposed via the proxy or not
+ * @param {class} MMap - A MultiMap class. This is only exposed to allow for an easier interface but still allow an injectable MultiMap if we choose to do that during testing
+ * @returns {array} - The final array of all of the healthchecks, with any duplicate node checks removed, and also marked as exposed if required
+ */
+export default (checks = [], exposed = false, MMap = MultiMap) => {
+  const ids = new MMap();
+  const a = checks.shift();
+  const result = a
+    .map(item => {
+      // it's a Node check (ServiceName === ''), record this one so we
+      // don't end up with duplicates of it
+      if (item.ServiceName === '') {
+        ids.set(item.Node, item.CheckID);
+      }
+      return item;
+    })
+    // go through all remaining lists of checks, adding each check to the
+    // list if it's not a node check that has already been added
+    .concat(
+      checks.reduce((prev, items) => {
+        if (typeof items === 'undefined') {
+          return prev;
+        }
+        return prev.concat(
+          items.reduce((prev, item) => {
+            if (item.ServiceName === '') {
+              if ((ids.get(item.Node) || []).includes(item.CheckID)) {
+                return prev;
+              }
+              // if the node check hasn't been added yet, record this one
+              // so we don't end up with duplicates of it
+              ids.set(item.Node, item.CheckID);
+            }
+            prev.push(item);
+            return prev;
+          }, [])
+        );
+      }, [])
+    );
+  // if checks are exposed via the proxy, find the ones that are exposable
+  // (ones of a certain type) and set them as exposed
+  // TODO: consider moving this out of here so we aren't doing too much in one util
+  if (exposed) {
+    result
+      .filter(item => get(item, 'Exposable'))
+      .forEach(item => {
+        set(item, 'Exposed', exposed);
+      });
+  }
+  return result;
+};
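A quick usage sketch of the util above, with invented plain-object data (the field names match the tests below; the real callers pass ember-data model fragments from the service and proxy instances). It shows the shared node check appearing only once in the result, and Exposable checks being marked Exposed when the second argument is true:

import mergeChecks from 'consul-ui/utils/merge-checks';

// Hypothetical data: the service and its proxy share one node check.
const serviceChecks = [
  { ServiceName: 'web', CheckID: 'web-http', Node: 'node-1', Exposable: true },
  { ServiceName: '', CheckID: 'serf-health', Node: 'node-1', Exposable: false },
];
const proxyChecks = [
  { ServiceName: 'web-proxy', CheckID: 'web-proxy-tcp', Node: 'node-1', Exposable: true },
  { ServiceName: '', CheckID: 'serf-health', Node: 'node-1', Exposable: false },
];

// The duplicate 'serf-health' node check is only included once, and because
// the second argument is true, the Exposable checks come back with Exposed set.
const merged = mergeChecks([serviceChecks, proxyChecks], true);
// merged.length === 3; merged[0].Exposed === true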
@@ -0,0 +1,147 @@
+import mergeChecks from 'consul-ui/utils/merge-checks';
+import { module, test } from 'qunit';
+
+module('Unit | Utility | merge-checks', function() {
+  test('it works', function(assert) {
+    [
+      {
+        desc: 'One list of checks, not exposed',
+        exposed: false,
+        checks: [
+          [
+            {
+              ServiceName: 'service-0',
+              CheckID: 'service-0-check-0',
+              Node: 'node-0',
+              Exposable: false,
+            },
+          ],
+        ],
+        expected: [
+          {
+            ServiceName: 'service-0',
+            CheckID: 'service-0-check-0',
+            Node: 'node-0',
+            Exposable: false,
+          },
+        ],
+      },
+      {
+        desc: 'One list of checks, exposed',
+        exposed: true,
+        checks: [
+          [
+            {
+              ServiceName: 'service-0',
+              CheckID: 'service-0-check-0',
+              Node: 'node-0',
+              Exposable: true,
+            },
+          ],
+        ],
+        expected: [
+          {
+            ServiceName: 'service-0',
+            CheckID: 'service-0-check-0',
+            Node: 'node-0',
+            Exposable: true,
+            Exposed: true,
+          },
+        ],
+      },
+      {
+        desc: 'Two lists of checks, not exposed',
+        exposed: false,
+        checks: [
+          [
+            {
+              ServiceName: 'service-0',
+              CheckID: 'service-0-check-0',
+              Node: 'node-0',
+              Exposable: true,
+            },
+          ],
+          [
+            {
+              ServiceName: 'service-0-proxy',
+              CheckID: 'service-0-proxy-check-0',
+              Node: 'node-0',
+              Exposable: true,
+            },
+          ],
+        ],
+        expected: [
+          {
+            ServiceName: 'service-0',
+            CheckID: 'service-0-check-0',
+            Node: 'node-0',
+            Exposable: true,
+          },
+          {
+            ServiceName: 'service-0-proxy',
+            CheckID: 'service-0-proxy-check-0',
+            Node: 'node-0',
+            Exposable: true,
+          },
+        ],
+      },
+      {
+        desc: 'Two lists of checks, with one duplicate node checks, not exposed',
+        exposed: false,
+        checks: [
+          [
+            {
+              ServiceName: 'service-0',
+              CheckID: 'service-0-check-0',
+              Node: 'node-0',
+              Exposable: true,
+            },
+            {
+              ServiceName: '',
+              CheckID: 'service-0-check-1',
+              Node: 'node-0',
+              Exposable: true,
+            },
+          ],
+          [
+            {
+              ServiceName: 'service-0-proxy',
+              CheckID: 'service-0-proxy-check-0',
+              Node: 'node-0',
+              Exposable: true,
+            },
+            {
+              ServiceName: '',
+              CheckID: 'service-0-check-1',
+              Node: 'node-0',
+              Exposable: true,
+            },
+          ],
+        ],
+        expected: [
+          {
+            ServiceName: 'service-0',
+            CheckID: 'service-0-check-0',
+            Node: 'node-0',
+            Exposable: true,
+          },
+          {
+            ServiceName: '',
+            CheckID: 'service-0-check-1',
+            Node: 'node-0',
+            Exposable: true,
+          },
+          {
+            ServiceName: 'service-0-proxy',
+            CheckID: 'service-0-proxy-check-0',
+            Node: 'node-0',
+            Exposable: true,
+          },
+        ],
+      },
+    ].forEach(spec => {
+      const actual = mergeChecks(spec.checks, spec.exposed);
+      assert.deepEqual(actual, spec.expected);
+    });
+  });
+});
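The optional third MMap argument isn't exercised by these tests, but the util's JSDoc notes it exists so a different MultiMap implementation could be injected during testing. A rough sketch of what such a stand-in might look like, assuming only the set/get calls that merge-checks actually makes (FakeMultiMap is a hypothetical name, not part of the codebase):

// Hypothetical stand-in for mnemonist's MultiMap, covering only the
// set(key, value) and get(key) calls that merge-checks uses.
class FakeMultiMap {
  constructor() {
    this.map = new Map();
  }
  set(key, value) {
    if (!this.map.has(key)) {
      this.map.set(key, []);
    }
    this.map.get(key).push(value);
  }
  get(key) {
    // returns undefined for unknown keys, matching how merge-checks
    // falls back to an empty array via `|| []`
    return this.map.get(key);
  }
}

// e.g. in a test: mergeChecks(spec.checks, spec.exposed, FakeMultiMap)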