From 4ced2b51c3a229b8c7efb37e0355ba014c752d41 Mon Sep 17 00:00:00 2001 From: Konstantinos Kagkelidis Date: Thu, 22 Jul 2021 14:05:57 +0300 Subject: [PATCH 001/112] Bump missed junit dep --- flink_jobs/old-models/ams_ingest_sync/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/flink_jobs/old-models/ams_ingest_sync/pom.xml b/flink_jobs/old-models/ams_ingest_sync/pom.xml index 05dc7ab0..c455c1a5 100644 --- a/flink_jobs/old-models/ams_ingest_sync/pom.xml +++ b/flink_jobs/old-models/ams_ingest_sync/pom.xml @@ -100,7 +100,7 @@ junit junit - 4.11 + 4.13.1 test From 112e25db30adb0b6a58e83b295513a527dcd36c9 Mon Sep 17 00:00:00 2001 From: cthermolia-grnet Date: Thu, 17 Jun 2021 09:03:31 +0300 Subject: [PATCH 002/112] profile managers to parse and store profile data --- .gitignore | 2 + .../java/operations/OperationsManager.java | 3 - .../operations/OperationsManagerTest.java | 3 +- .../src/test/java/operations/Utils.java | 1 - .../nbproject/project.properties | 0 flink_jobs/ProfilesManager/pom.xml | 60 + .../main/java/argo/avro/GroupEndpoint.java | 337 ++ .../src/main/java/argo/avro/GroupGroup.java | 286 ++ .../main/java/argo/avro/MetricProfile.java | 304 ++ .../AggregationProfileManager.java | 611 ++++ .../profilesmanager/EndpointGroupManager.java | 476 +++ .../profilesmanager/GroupGroupManager.java | 352 ++ .../profilesmanager/MetricProfileManager.java | 434 +++ .../src/main/resources/log4j.properties | 23 + .../main/resources/profiles/aggregations.json | 68 + .../resources/profiles/metricprofile.avro | Bin 0 -> 583 bytes .../resources/profiles/metricprofile.json | 67 + .../resources/profiles/topgroupendpoint.json | 2960 +++++++++++++++++ .../resources/profiles/topgroupgroup.json | 318 ++ .../AggregationProfileManagerTest.java | 558 ++++ .../EndpointGroupManagerTest.java | 339 ++ .../GroupGroupManagerTest.java | 274 ++ .../MetricProfileManagerTest.java | 379 +++ .../java/timelines/TimelineAggregator.java | 5 +- .../src/main/java/timelines/Utils.java | 20 +- .../timelines/TimelineAggregatorTest.java | 501 +++ .../flink/jobs/timelines/TimelineTest.java | 574 ++++ .../flink/jobs/timelines/TimelineUtils.java | 188 ++ .../src/main/java/ops/OpsManager.java | 2 +- .../java/sync/AggregationProfileManager.java | 847 +++-- .../main/java/sync/MetricProfileManager.java | 484 +-- .../java/argo/batch/PickDataPointsTest.java | 4 - .../status_trends/avro/group_endpoint.avsc | 14 + .../status_trends/avro/group_group.avsc | 14 + .../status_trends/avro/metric_data.avsc | 18 + .../status_trends/avro/metric_profile.avsc | 13 + .../main/java/argo/avro/GroupEndpoint.java | 336 ++ .../src/main/java/argo/avro/GroupGroup.java | 286 ++ .../main/java/argo/avro/MetricProfile.java | 304 ++ .../batch/BatchEndpointFlipFlopTrends.java | 13 +- .../argo/batch/BatchFlipFlopCollection.java | 15 +- .../argo/batch/BatchGroupFlipFlopTrends.java | 55 +- .../argo/batch/BatchMetricFlipFlopTrends.java | 48 +- .../batch/BatchServiceFlipFlopTrends.java | 4 +- .../java/argo/batch/BatchStatusTrends.java | 1 + .../java/argo/batch/MongoTrendsOutput.java | 1 - .../flipflops/ZeroEndpointTrendsFilter.java | 6 +- .../flipflops/ZeroGroupFunctionFilter.java | 4 +- .../zero/flipflops/ZeroGroupTrendsFilter.java | 3 +- .../flipflops/ZeroMetricTrendsFilter.java | 7 +- .../flipflops/ZeroServiceTrendsFilter.java | 4 +- .../calctimelines/CalcLastTimeStatus.java | 2 - .../functions/calctimelines/MapServices.java | 24 +- .../calctimelines/ServiceFilter.java | 26 +- .../functions/calctimelines/StatusFilter.java | 3 - 
.../calctimelines/TopologyMetricFilter.java | 42 +- .../CalcEndpointFlipFlopTrends.java | 14 +- .../calctrends/CalcGroupFlipFlop.java | 18 +- .../calctrends/CalcGroupFunctionFlipFlop.java | 32 +- .../calctrends/CalcMetricFlipFlopTrends.java | 46 +- .../calctrends/CalcServiceFlipFlop.java | 17 +- .../calctrends/CalcStatusTrends.java | 61 +- .../main/java/argo/pojos/EndpointTrends.java | 11 +- .../java/argo/pojos/GroupFunctionTrends.java | 10 +- .../src/main/java/argo/pojos/GroupTrends.java | 8 +- .../main/java/argo/pojos/MetricTrends.java | 20 +- .../main/java/argo/pojos/ServiceTrends.java | 11 +- .../profiles/AggregationProfileManager.java | 605 ++++ .../profiles/AggregationProfileParser.java | 5 +- .../argo/profiles/EndpointGroupManager.java | 477 +++ .../java/argo/profiles/GroupGroupManager.java | 352 ++ .../argo/profiles/MetricProfileManager.java | 367 ++ .../argo/profiles/MetricProfileParser.java | 42 - .../java/argo/profiles/OperationsParser.java | 4 - .../java/argo/profiles/ProfilesLoader.java | 112 +- .../main/java/argo/profiles/ReportParser.java | 15 - .../argo/profiles/TopologyEndpointParser.java | 9 +- .../argo/profiles/TopologyGroupParser.java | 3 - .../src/main/java/argo/utils/EnumStatus.java | 4 - .../main/java/argo/utils/RequestManager.java | 69 +- .../src/main/java/argo/utils/Utils.java | 24 - .../java/timelines/TimelineAggregator.java | 3 - 82 files changed, 13083 insertions(+), 979 deletions(-) create mode 100644 flink_jobs/ProfilesManager/nbproject/project.properties create mode 100644 flink_jobs/ProfilesManager/pom.xml create mode 100644 flink_jobs/ProfilesManager/src/main/java/argo/avro/GroupEndpoint.java create mode 100644 flink_jobs/ProfilesManager/src/main/java/argo/avro/GroupGroup.java create mode 100644 flink_jobs/ProfilesManager/src/main/java/argo/avro/MetricProfile.java create mode 100644 flink_jobs/ProfilesManager/src/main/java/profilesmanager/AggregationProfileManager.java create mode 100644 flink_jobs/ProfilesManager/src/main/java/profilesmanager/EndpointGroupManager.java create mode 100644 flink_jobs/ProfilesManager/src/main/java/profilesmanager/GroupGroupManager.java create mode 100644 flink_jobs/ProfilesManager/src/main/java/profilesmanager/MetricProfileManager.java create mode 100644 flink_jobs/ProfilesManager/src/main/resources/log4j.properties create mode 100644 flink_jobs/ProfilesManager/src/main/resources/profiles/aggregations.json create mode 100644 flink_jobs/ProfilesManager/src/main/resources/profiles/metricprofile.avro create mode 100644 flink_jobs/ProfilesManager/src/main/resources/profiles/metricprofile.json create mode 100644 flink_jobs/ProfilesManager/src/main/resources/profiles/topgroupendpoint.json create mode 100644 flink_jobs/ProfilesManager/src/main/resources/profiles/topgroupgroup.json create mode 100644 flink_jobs/ProfilesManager/src/test/java/profilesmanager/AggregationProfileManagerTest.java create mode 100644 flink_jobs/ProfilesManager/src/test/java/profilesmanager/EndpointGroupManagerTest.java create mode 100644 flink_jobs/ProfilesManager/src/test/java/profilesmanager/GroupGroupManagerTest.java create mode 100644 flink_jobs/ProfilesManager/src/test/java/profilesmanager/MetricProfileManagerTest.java create mode 100644 flink_jobs/Timelines/src/test/java/flink/jobs/timelines/TimelineAggregatorTest.java create mode 100644 flink_jobs/Timelines/src/test/java/flink/jobs/timelines/TimelineTest.java create mode 100644 flink_jobs/Timelines/src/test/java/flink/jobs/timelines/TimelineUtils.java create mode 100644 
flink_jobs/status_trends/avro/group_endpoint.avsc create mode 100644 flink_jobs/status_trends/avro/group_group.avsc create mode 100644 flink_jobs/status_trends/avro/metric_data.avsc create mode 100644 flink_jobs/status_trends/avro/metric_profile.avsc create mode 100644 flink_jobs/status_trends/src/main/java/argo/avro/GroupEndpoint.java create mode 100644 flink_jobs/status_trends/src/main/java/argo/avro/GroupGroup.java create mode 100644 flink_jobs/status_trends/src/main/java/argo/avro/MetricProfile.java create mode 100644 flink_jobs/status_trends/src/main/java/argo/profiles/AggregationProfileManager.java create mode 100644 flink_jobs/status_trends/src/main/java/argo/profiles/EndpointGroupManager.java create mode 100644 flink_jobs/status_trends/src/main/java/argo/profiles/GroupGroupManager.java create mode 100644 flink_jobs/status_trends/src/main/java/argo/profiles/MetricProfileManager.java diff --git a/.gitignore b/.gitignore index a51fb346..52f175f5 100644 --- a/.gitignore +++ b/.gitignore @@ -23,3 +23,5 @@ conf.cfg .classpath .project .settings/ + +/flink_jobs/ProfilesManager/target/ diff --git a/flink_jobs/OperationsManager/src/main/java/operations/OperationsManager.java b/flink_jobs/OperationsManager/src/main/java/operations/OperationsManager.java index 8a89d992..df2401a3 100644 --- a/flink_jobs/OperationsManager/src/main/java/operations/OperationsManager.java +++ b/flink_jobs/OperationsManager/src/main/java/operations/OperationsManager.java @@ -389,7 +389,4 @@ public static Logger getLOG() { return LOG; } -// public String getUrl() { -// return url; -// } } diff --git a/flink_jobs/OperationsManager/src/test/java/operations/OperationsManagerTest.java b/flink_jobs/OperationsManager/src/test/java/operations/OperationsManagerTest.java index 1e7f2cdf..4d0680f8 100644 --- a/flink_jobs/OperationsManager/src/test/java/operations/OperationsManagerTest.java +++ b/flink_jobs/OperationsManager/src/test/java/operations/OperationsManagerTest.java @@ -21,7 +21,6 @@ import org.junit.Test; /* - * * A unit test class to test OperationsManager */ public class OperationsManagerTest { @@ -311,7 +310,7 @@ public void testAvailableStates() throws IOException { OperationsManager instance = new OperationsManager(); instance.loadJson(new File(OperationsManagerTest.class.getResource("/operations/operations.json").getFile())); - ArrayList expResult = new ArrayList<>(); + ArrayList expResult = new ArrayList(); expResult.add("OK"); expResult.add("WARNING"); expResult.add("UNKNOWN"); diff --git a/flink_jobs/OperationsManager/src/test/java/operations/Utils.java b/flink_jobs/OperationsManager/src/test/java/operations/Utils.java index 750b175b..0eeffc70 100644 --- a/flink_jobs/OperationsManager/src/test/java/operations/Utils.java +++ b/flink_jobs/OperationsManager/src/test/java/operations/Utils.java @@ -19,7 +19,6 @@ import java.util.HashMap; /** - * * A utils class to process resource files for tests and provide the information */ public class Utils { diff --git a/flink_jobs/ProfilesManager/nbproject/project.properties b/flink_jobs/ProfilesManager/nbproject/project.properties new file mode 100644 index 00000000..e69de29b diff --git a/flink_jobs/ProfilesManager/pom.xml b/flink_jobs/ProfilesManager/pom.xml new file mode 100644 index 00000000..f3c19baf --- /dev/null +++ b/flink_jobs/ProfilesManager/pom.xml @@ -0,0 +1,60 @@ + + + 4.0.0 + flink.jobs + ProfilesManager + 1.0-SNAPSHOT + jar + + + junit-addons + junit-addons + 1.4 + test + + + junit + junit + 4.13.1 + test + + + joda-time + joda-time + 1.6 + + + log4j + log4j 
+ ${log4j.version} + + + org.slf4j + slf4j-log4j12 + ${slf4j.version} + + + org.apache.avro + avro + 1.10.2 + + + com.google.code.gson + gson + 2.2.4 + + + commons-io + commons-io + 2.10.0 + + + + + UTF-8 + 1.8 + 1.8 + 1.7.7 + 1.2.17 + + \ No newline at end of file diff --git a/flink_jobs/ProfilesManager/src/main/java/argo/avro/GroupEndpoint.java b/flink_jobs/ProfilesManager/src/main/java/argo/avro/GroupEndpoint.java new file mode 100644 index 00000000..631aefb5 --- /dev/null +++ b/flink_jobs/ProfilesManager/src/main/java/argo/avro/GroupEndpoint.java @@ -0,0 +1,337 @@ +package argo.avro; +/** + * Autogenerated by Avro + * + * DO NOT EDIT DIRECTLY + */ + +@SuppressWarnings("all") +@org.apache.avro.specific.AvroGenerated +public class GroupEndpoint extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord { + public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"GroupEndpoint\",\"namespace\":\"argo.avro\",\"fields\":[{\"name\":\"type\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"group\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"service\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"hostname\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"tags\",\"type\":[\"null\",{\"type\":\"map\",\"values\":{\"type\":\"string\",\"avro.java.string\":\"String\"},\"avro.java.string\":\"String\"}]}]}"); + public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; } + @Deprecated public java.lang.String type; + @Deprecated public java.lang.String group; + @Deprecated public java.lang.String service; + @Deprecated public java.lang.String hostname; + @Deprecated public java.util.Map tags; + + /** + * Default constructor. + */ + public GroupEndpoint() {} + + /** + * All-args constructor. + */ + public GroupEndpoint(java.lang.String type, java.lang.String group, java.lang.String service, java.lang.String hostname, java.util.Map tags) { + this.type = type; + this.group = group; + this.service = service; + this.hostname = hostname; + this.tags = tags; + } + + public org.apache.avro.Schema getSchema() { return SCHEMA$; } + // Used by DatumWriter. Applications should not call. + public java.lang.Object get(int field$) { + switch (field$) { + case 0: return type; + case 1: return group; + case 2: return service; + case 3: return hostname; + case 4: return tags; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + // Used by DatumReader. Applications should not call. + @SuppressWarnings(value="unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: type = (java.lang.String)value$; break; + case 1: group = (java.lang.String)value$; break; + case 2: service = (java.lang.String)value$; break; + case 3: hostname = (java.lang.String)value$; break; + case 4: tags = (java.util.Map)value$; break; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + + /** + * Gets the value of the 'type' field. + */ + public java.lang.String getType() { + return type; + } + + /** + * Sets the value of the 'type' field. + * @param value the value to set. + */ + public void setType(java.lang.String value) { + this.type = value; + } + + /** + * Gets the value of the 'group' field. + */ + public java.lang.String getGroup() { + return group; + } + + /** + * Sets the value of the 'group' field. 
+ * @param value the value to set. + */ + public void setGroup(java.lang.String value) { + this.group = value; + } + + /** + * Gets the value of the 'service' field. + */ + public java.lang.String getService() { + return service; + } + + /** + * Sets the value of the 'service' field. + * @param value the value to set. + */ + public void setService(java.lang.String value) { + this.service = value; + } + + /** + * Gets the value of the 'hostname' field. + */ + public java.lang.String getHostname() { + return hostname; + } + + /** + * Sets the value of the 'hostname' field. + * @param value the value to set. + */ + public void setHostname(java.lang.String value) { + this.hostname = value; + } + + /** + * Gets the value of the 'tags' field. + */ + public java.util.Map getTags() { + return tags; + } + + /** + * Sets the value of the 'tags' field. + * @param value the value to set. + */ + public void setTags(java.util.Map value) { + this.tags = value; + } + + /** Creates a new GroupEndpoint RecordBuilder */ + public static argo.avro.GroupEndpoint.Builder newBuilder() { + return new argo.avro.GroupEndpoint.Builder(); + } + + /** Creates a new GroupEndpoint RecordBuilder by copying an existing Builder */ + public static argo.avro.GroupEndpoint.Builder newBuilder(argo.avro.GroupEndpoint.Builder other) { + return new argo.avro.GroupEndpoint.Builder(other); + } + + /** Creates a new GroupEndpoint RecordBuilder by copying an existing GroupEndpoint instance */ + public static argo.avro.GroupEndpoint.Builder newBuilder(argo.avro.GroupEndpoint other) { + return new argo.avro.GroupEndpoint.Builder(other); + } + + /** + * RecordBuilder for GroupEndpoint instances. + */ + public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private java.lang.String type; + private java.lang.String group; + private java.lang.String service; + private java.lang.String hostname; + private java.util.Map tags; + + /** Creates a new Builder */ + private Builder() { + super(argo.avro.GroupEndpoint.SCHEMA$); + } + + /** Creates a Builder by copying an existing Builder */ + private Builder(argo.avro.GroupEndpoint.Builder other) { + super(other); + } + + /** Creates a Builder by copying an existing GroupEndpoint instance */ + private Builder(argo.avro.GroupEndpoint other) { + super(argo.avro.GroupEndpoint.SCHEMA$); + if (isValidValue(fields()[0], other.type)) { + this.type = data().deepCopy(fields()[0].schema(), other.type); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.group)) { + this.group = data().deepCopy(fields()[1].schema(), other.group); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.service)) { + this.service = data().deepCopy(fields()[2].schema(), other.service); + fieldSetFlags()[2] = true; + } + if (isValidValue(fields()[3], other.hostname)) { + this.hostname = data().deepCopy(fields()[3].schema(), other.hostname); + fieldSetFlags()[3] = true; + } + if (isValidValue(fields()[4], other.tags)) { + this.tags = data().deepCopy(fields()[4].schema(), other.tags); + fieldSetFlags()[4] = true; + } + } + + /** Gets the value of the 'type' field */ + public java.lang.String getType() { + return type; + } + + /** Sets the value of the 'type' field */ + public argo.avro.GroupEndpoint.Builder setType(java.lang.String value) { + validate(fields()[0], value); + this.type = value; + fieldSetFlags()[0] = true; + return this; + } + + /** Checks whether the 'type' field has been set */ + public boolean 
hasType() { + return fieldSetFlags()[0]; + } + + /** Clears the value of the 'type' field */ + public argo.avro.GroupEndpoint.Builder clearType() { + type = null; + fieldSetFlags()[0] = false; + return this; + } + + /** Gets the value of the 'group' field */ + public java.lang.String getGroup() { + return group; + } + + /** Sets the value of the 'group' field */ + public argo.avro.GroupEndpoint.Builder setGroup(java.lang.String value) { + validate(fields()[1], value); + this.group = value; + fieldSetFlags()[1] = true; + return this; + } + + /** Checks whether the 'group' field has been set */ + public boolean hasGroup() { + return fieldSetFlags()[1]; + } + + /** Clears the value of the 'group' field */ + public argo.avro.GroupEndpoint.Builder clearGroup() { + group = null; + fieldSetFlags()[1] = false; + return this; + } + + /** Gets the value of the 'service' field */ + public java.lang.String getService() { + return service; + } + + /** Sets the value of the 'service' field */ + public argo.avro.GroupEndpoint.Builder setService(java.lang.String value) { + validate(fields()[2], value); + this.service = value; + fieldSetFlags()[2] = true; + return this; + } + + /** Checks whether the 'service' field has been set */ + public boolean hasService() { + return fieldSetFlags()[2]; + } + + /** Clears the value of the 'service' field */ + public argo.avro.GroupEndpoint.Builder clearService() { + service = null; + fieldSetFlags()[2] = false; + return this; + } + + /** Gets the value of the 'hostname' field */ + public java.lang.String getHostname() { + return hostname; + } + + /** Sets the value of the 'hostname' field */ + public argo.avro.GroupEndpoint.Builder setHostname(java.lang.String value) { + validate(fields()[3], value); + this.hostname = value; + fieldSetFlags()[3] = true; + return this; + } + + /** Checks whether the 'hostname' field has been set */ + public boolean hasHostname() { + return fieldSetFlags()[3]; + } + + /** Clears the value of the 'hostname' field */ + public argo.avro.GroupEndpoint.Builder clearHostname() { + hostname = null; + fieldSetFlags()[3] = false; + return this; + } + + /** Gets the value of the 'tags' field */ + public java.util.Map getTags() { + return tags; + } + + /** Sets the value of the 'tags' field */ + public argo.avro.GroupEndpoint.Builder setTags(java.util.Map value) { + validate(fields()[4], value); + this.tags = value; + fieldSetFlags()[4] = true; + return this; + } + + /** Checks whether the 'tags' field has been set */ + public boolean hasTags() { + return fieldSetFlags()[4]; + } + + /** Clears the value of the 'tags' field */ + public argo.avro.GroupEndpoint.Builder clearTags() { + tags = null; + fieldSetFlags()[4] = false; + return this; + } + + @Override + public GroupEndpoint build() { + try { + GroupEndpoint record = new GroupEndpoint(); + record.type = fieldSetFlags()[0] ? this.type : (java.lang.String) defaultValue(fields()[0]); + record.group = fieldSetFlags()[1] ? this.group : (java.lang.String) defaultValue(fields()[1]); + record.service = fieldSetFlags()[2] ? this.service : (java.lang.String) defaultValue(fields()[2]); + record.hostname = fieldSetFlags()[3] ? this.hostname : (java.lang.String) defaultValue(fields()[3]); + record.tags = fieldSetFlags()[4] ? 
this.tags : (java.util.Map) defaultValue(fields()[4]); + return record; + } catch (Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } +} diff --git a/flink_jobs/ProfilesManager/src/main/java/argo/avro/GroupGroup.java b/flink_jobs/ProfilesManager/src/main/java/argo/avro/GroupGroup.java new file mode 100644 index 00000000..a7712d67 --- /dev/null +++ b/flink_jobs/ProfilesManager/src/main/java/argo/avro/GroupGroup.java @@ -0,0 +1,286 @@ +/** + * Autogenerated by Avro + * + * DO NOT EDIT DIRECTLY + */ +package argo.avro; +@SuppressWarnings("all") +@org.apache.avro.specific.AvroGenerated +public class GroupGroup extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord { + public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"GroupGroup\",\"namespace\":\"argo.avro\",\"fields\":[{\"name\":\"type\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"group\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"subgroup\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"tags\",\"type\":[\"null\",{\"type\":\"map\",\"values\":{\"type\":\"string\",\"avro.java.string\":\"String\"},\"avro.java.string\":\"String\"}]}]}"); + public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; } + @Deprecated public java.lang.String type; + @Deprecated public java.lang.String group; + @Deprecated public java.lang.String subgroup; + @Deprecated public java.util.Map tags; + + /** + * Default constructor. + */ + public GroupGroup() {} + + /** + * All-args constructor. + */ + public GroupGroup(java.lang.String type, java.lang.String group, java.lang.String subgroup, java.util.Map tags) { + this.type = type; + this.group = group; + this.subgroup = subgroup; + this.tags = tags; + } + + public org.apache.avro.Schema getSchema() { return SCHEMA$; } + // Used by DatumWriter. Applications should not call. + public java.lang.Object get(int field$) { + switch (field$) { + case 0: return type; + case 1: return group; + case 2: return subgroup; + case 3: return tags; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + // Used by DatumReader. Applications should not call. + @SuppressWarnings(value="unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: type = (java.lang.String)value$; break; + case 1: group = (java.lang.String)value$; break; + case 2: subgroup = (java.lang.String)value$; break; + case 3: tags = (java.util.Map)value$; break; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + + /** + * Gets the value of the 'type' field. + */ + public java.lang.String getType() { + return type; + } + + /** + * Sets the value of the 'type' field. + * @param value the value to set. + */ + public void setType(java.lang.String value) { + this.type = value; + } + + /** + * Gets the value of the 'group' field. + */ + public java.lang.String getGroup() { + return group; + } + + /** + * Sets the value of the 'group' field. + * @param value the value to set. + */ + public void setGroup(java.lang.String value) { + this.group = value; + } + + /** + * Gets the value of the 'subgroup' field. + */ + public java.lang.String getSubgroup() { + return subgroup; + } + + /** + * Sets the value of the 'subgroup' field. + * @param value the value to set. 
+ */ + public void setSubgroup(java.lang.String value) { + this.subgroup = value; + } + + /** + * Gets the value of the 'tags' field. + */ + public java.util.Map getTags() { + return tags; + } + + /** + * Sets the value of the 'tags' field. + * @param value the value to set. + */ + public void setTags(java.util.Map value) { + this.tags = value; + } + + /** Creates a new GroupGroup RecordBuilder */ + public static argo.avro.GroupGroup.Builder newBuilder() { + return new argo.avro.GroupGroup.Builder(); + } + + /** Creates a new GroupGroup RecordBuilder by copying an existing Builder */ + public static argo.avro.GroupGroup.Builder newBuilder(argo.avro.GroupGroup.Builder other) { + return new argo.avro.GroupGroup.Builder(other); + } + + /** Creates a new GroupGroup RecordBuilder by copying an existing GroupGroup instance */ + public static argo.avro.GroupGroup.Builder newBuilder(argo.avro.GroupGroup other) { + return new argo.avro.GroupGroup.Builder(other); + } + + /** + * RecordBuilder for GroupGroup instances. + */ + public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private java.lang.String type; + private java.lang.String group; + private java.lang.String subgroup; + private java.util.Map tags; + + /** Creates a new Builder */ + private Builder() { + super(argo.avro.GroupGroup.SCHEMA$); + } + + /** Creates a Builder by copying an existing Builder */ + private Builder(argo.avro.GroupGroup.Builder other) { + super(other); + } + + /** Creates a Builder by copying an existing GroupGroup instance */ + private Builder(argo.avro.GroupGroup other) { + super(argo.avro.GroupGroup.SCHEMA$); + if (isValidValue(fields()[0], other.type)) { + this.type = data().deepCopy(fields()[0].schema(), other.type); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.group)) { + this.group = data().deepCopy(fields()[1].schema(), other.group); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.subgroup)) { + this.subgroup = data().deepCopy(fields()[2].schema(), other.subgroup); + fieldSetFlags()[2] = true; + } + if (isValidValue(fields()[3], other.tags)) { + this.tags = data().deepCopy(fields()[3].schema(), other.tags); + fieldSetFlags()[3] = true; + } + } + + /** Gets the value of the 'type' field */ + public java.lang.String getType() { + return type; + } + + /** Sets the value of the 'type' field */ + public argo.avro.GroupGroup.Builder setType(java.lang.String value) { + validate(fields()[0], value); + this.type = value; + fieldSetFlags()[0] = true; + return this; + } + + /** Checks whether the 'type' field has been set */ + public boolean hasType() { + return fieldSetFlags()[0]; + } + + /** Clears the value of the 'type' field */ + public argo.avro.GroupGroup.Builder clearType() { + type = null; + fieldSetFlags()[0] = false; + return this; + } + + /** Gets the value of the 'group' field */ + public java.lang.String getGroup() { + return group; + } + + /** Sets the value of the 'group' field */ + public argo.avro.GroupGroup.Builder setGroup(java.lang.String value) { + validate(fields()[1], value); + this.group = value; + fieldSetFlags()[1] = true; + return this; + } + + /** Checks whether the 'group' field has been set */ + public boolean hasGroup() { + return fieldSetFlags()[1]; + } + + /** Clears the value of the 'group' field */ + public argo.avro.GroupGroup.Builder clearGroup() { + group = null; + fieldSetFlags()[1] = false; + return this; + } + + /** Gets the value of the 
'subgroup' field */ + public java.lang.String getSubgroup() { + return subgroup; + } + + /** Sets the value of the 'subgroup' field */ + public argo.avro.GroupGroup.Builder setSubgroup(java.lang.String value) { + validate(fields()[2], value); + this.subgroup = value; + fieldSetFlags()[2] = true; + return this; + } + + /** Checks whether the 'subgroup' field has been set */ + public boolean hasSubgroup() { + return fieldSetFlags()[2]; + } + + /** Clears the value of the 'subgroup' field */ + public argo.avro.GroupGroup.Builder clearSubgroup() { + subgroup = null; + fieldSetFlags()[2] = false; + return this; + } + + /** Gets the value of the 'tags' field */ + public java.util.Map getTags() { + return tags; + } + + /** Sets the value of the 'tags' field */ + public argo.avro.GroupGroup.Builder setTags(java.util.Map value) { + validate(fields()[3], value); + this.tags = value; + fieldSetFlags()[3] = true; + return this; + } + + /** Checks whether the 'tags' field has been set */ + public boolean hasTags() { + return fieldSetFlags()[3]; + } + + /** Clears the value of the 'tags' field */ + public argo.avro.GroupGroup.Builder clearTags() { + tags = null; + fieldSetFlags()[3] = false; + return this; + } + + @Override + public GroupGroup build() { + try { + GroupGroup record = new GroupGroup(); + record.type = fieldSetFlags()[0] ? this.type : (java.lang.String) defaultValue(fields()[0]); + record.group = fieldSetFlags()[1] ? this.group : (java.lang.String) defaultValue(fields()[1]); + record.subgroup = fieldSetFlags()[2] ? this.subgroup : (java.lang.String) defaultValue(fields()[2]); + record.tags = fieldSetFlags()[3] ? this.tags : (java.util.Map) defaultValue(fields()[3]); + return record; + } catch (Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } +} diff --git a/flink_jobs/ProfilesManager/src/main/java/argo/avro/MetricProfile.java b/flink_jobs/ProfilesManager/src/main/java/argo/avro/MetricProfile.java new file mode 100644 index 00000000..5055e779 --- /dev/null +++ b/flink_jobs/ProfilesManager/src/main/java/argo/avro/MetricProfile.java @@ -0,0 +1,304 @@ +/** + * Autogenerated by Avro + * + * DO NOT EDIT DIRECTLY + */ +package argo.avro; +@SuppressWarnings("all") +@org.apache.avro.specific.AvroGenerated +public class MetricProfile extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord { + public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"MetricProfile\",\"namespace\":\"argo.avro\",\"fields\":[{\"name\":\"profile\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"service\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"metric\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"tags\",\"type\":[\"null\",{\"type\":\"map\",\"values\":{\"type\":\"string\",\"avro.java.string\":\"String\"},\"avro.java.string\":\"String\"}]}]}"); + public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; } + @Deprecated public java.lang.String profile; + @Deprecated public java.lang.String service; + @Deprecated public java.lang.String metric; + @Deprecated public java.util.Map tags; + + /** + * Default constructor. Note that this does not initialize fields + * to their default values from the schema. If that is desired then + * one should use newBuilder(). + */ + public MetricProfile() {} + + /** + * All-args constructor. 
+ */ + public MetricProfile(java.lang.String profile, java.lang.String service, java.lang.String metric, java.util.Map tags) { + this.profile = profile; + this.service = service; + this.metric = metric; + this.tags = tags; + } + + public org.apache.avro.Schema getSchema() { return SCHEMA$; } + // Used by DatumWriter. Applications should not call. + public java.lang.Object get(int field$) { + switch (field$) { + case 0: return profile; + case 1: return service; + case 2: return metric; + case 3: return tags; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + // Used by DatumReader. Applications should not call. + @SuppressWarnings(value="unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: profile = (java.lang.String)value$; break; + case 1: service = (java.lang.String)value$; break; + case 2: metric = (java.lang.String)value$; break; + case 3: tags = (java.util.Map)value$; break; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + + /** + * Gets the value of the 'profile' field. + */ + public java.lang.String getProfile() { + return profile; + } + + /** + * Sets the value of the 'profile' field. + * @param value the value to set. + */ + public void setProfile(java.lang.String value) { + this.profile = value; + } + + /** + * Gets the value of the 'service' field. + */ + public java.lang.String getService() { + return service; + } + + /** + * Sets the value of the 'service' field. + * @param value the value to set. + */ + public void setService(java.lang.String value) { + this.service = value; + } + + /** + * Gets the value of the 'metric' field. + */ + public java.lang.String getMetric() { + return metric; + } + + /** + * Sets the value of the 'metric' field. + * @param value the value to set. + */ + public void setMetric(java.lang.String value) { + this.metric = value; + } + + /** + * Gets the value of the 'tags' field. + */ + public java.util.Map getTags() { + return tags; + } + + /** + * Sets the value of the 'tags' field. + * @param value the value to set. + */ + public void setTags(java.util.Map value) { + this.tags = value; + } + + /** Creates a new MetricProfile RecordBuilder */ + public static argo.avro.MetricProfile.Builder newBuilder() { + return new argo.avro.MetricProfile.Builder(); + } + + /** Creates a new MetricProfile RecordBuilder by copying an existing Builder */ + public static argo.avro.MetricProfile.Builder newBuilder(argo.avro.MetricProfile.Builder other) { + return new argo.avro.MetricProfile.Builder(other); + } + + /** Creates a new MetricProfile RecordBuilder by copying an existing MetricProfile instance */ + public static argo.avro.MetricProfile.Builder newBuilder(argo.avro.MetricProfile other) { + return new argo.avro.MetricProfile.Builder(other); + } + + /** + * RecordBuilder for MetricProfile instances. 
+ */ + public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private java.lang.String profile; + private java.lang.String service; + private java.lang.String metric; + private java.util.Map tags; + + /** Creates a new Builder */ + private Builder() { + super(argo.avro.MetricProfile.SCHEMA$); + } + + /** Creates a Builder by copying an existing Builder */ + private Builder(argo.avro.MetricProfile.Builder other) { + super(other); + if (isValidValue(fields()[0], other.profile)) { + this.profile = data().deepCopy(fields()[0].schema(), other.profile); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.service)) { + this.service = data().deepCopy(fields()[1].schema(), other.service); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.metric)) { + this.metric = data().deepCopy(fields()[2].schema(), other.metric); + fieldSetFlags()[2] = true; + } + if (isValidValue(fields()[3], other.tags)) { + this.tags = data().deepCopy(fields()[3].schema(), other.tags); + fieldSetFlags()[3] = true; + } + } + + /** Creates a Builder by copying an existing MetricProfile instance */ + private Builder(argo.avro.MetricProfile other) { + super(argo.avro.MetricProfile.SCHEMA$); + if (isValidValue(fields()[0], other.profile)) { + this.profile = data().deepCopy(fields()[0].schema(), other.profile); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.service)) { + this.service = data().deepCopy(fields()[1].schema(), other.service); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.metric)) { + this.metric = data().deepCopy(fields()[2].schema(), other.metric); + fieldSetFlags()[2] = true; + } + if (isValidValue(fields()[3], other.tags)) { + this.tags = data().deepCopy(fields()[3].schema(), other.tags); + fieldSetFlags()[3] = true; + } + } + + /** Gets the value of the 'profile' field */ + public java.lang.String getProfile() { + return profile; + } + + /** Sets the value of the 'profile' field */ + public argo.avro.MetricProfile.Builder setProfile(java.lang.String value) { + validate(fields()[0], value); + this.profile = value; + fieldSetFlags()[0] = true; + return this; + } + + /** Checks whether the 'profile' field has been set */ + public boolean hasProfile() { + return fieldSetFlags()[0]; + } + + /** Clears the value of the 'profile' field */ + public argo.avro.MetricProfile.Builder clearProfile() { + profile = null; + fieldSetFlags()[0] = false; + return this; + } + + /** Gets the value of the 'service' field */ + public java.lang.String getService() { + return service; + } + + /** Sets the value of the 'service' field */ + public argo.avro.MetricProfile.Builder setService(java.lang.String value) { + validate(fields()[1], value); + this.service = value; + fieldSetFlags()[1] = true; + return this; + } + + /** Checks whether the 'service' field has been set */ + public boolean hasService() { + return fieldSetFlags()[1]; + } + + /** Clears the value of the 'service' field */ + public argo.avro.MetricProfile.Builder clearService() { + service = null; + fieldSetFlags()[1] = false; + return this; + } + + /** Gets the value of the 'metric' field */ + public java.lang.String getMetric() { + return metric; + } + + /** Sets the value of the 'metric' field */ + public argo.avro.MetricProfile.Builder setMetric(java.lang.String value) { + validate(fields()[2], value); + this.metric = value; + fieldSetFlags()[2] = true; + return this; + } + + /** Checks whether the 
'metric' field has been set */ + public boolean hasMetric() { + return fieldSetFlags()[2]; + } + + /** Clears the value of the 'metric' field */ + public argo.avro.MetricProfile.Builder clearMetric() { + metric = null; + fieldSetFlags()[2] = false; + return this; + } + + /** Gets the value of the 'tags' field */ + public java.util.Map getTags() { + return tags; + } + + /** Sets the value of the 'tags' field */ + public argo.avro.MetricProfile.Builder setTags(java.util.Map value) { + validate(fields()[3], value); + this.tags = value; + fieldSetFlags()[3] = true; + return this; + } + + /** Checks whether the 'tags' field has been set */ + public boolean hasTags() { + return fieldSetFlags()[3]; + } + + /** Clears the value of the 'tags' field */ + public argo.avro.MetricProfile.Builder clearTags() { + tags = null; + fieldSetFlags()[3] = false; + return this; + } + + @Override + public MetricProfile build() { + try { + MetricProfile record = new MetricProfile(); + record.profile = fieldSetFlags()[0] ? this.profile : (java.lang.String) defaultValue(fields()[0]); + record.service = fieldSetFlags()[1] ? this.service : (java.lang.String) defaultValue(fields()[1]); + record.metric = fieldSetFlags()[2] ? this.metric : (java.lang.String) defaultValue(fields()[2]); + record.tags = fieldSetFlags()[3] ? this.tags : (java.util.Map) defaultValue(fields()[3]); + return record; + } catch (Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } +} diff --git a/flink_jobs/ProfilesManager/src/main/java/profilesmanager/AggregationProfileManager.java b/flink_jobs/ProfilesManager/src/main/java/profilesmanager/AggregationProfileManager.java new file mode 100644 index 00000000..aad0cd2c --- /dev/null +++ b/flink_jobs/ProfilesManager/src/main/java/profilesmanager/AggregationProfileManager.java @@ -0,0 +1,611 @@ +package profilesmanager; + +import org.apache.commons.io.IOUtils; +import org.apache.log4j.Logger; + +import java.io.BufferedReader; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileReader; +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; + +import com.google.gson.JsonArray; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import com.google.gson.JsonParseException; +import com.google.gson.JsonParser; +import java.io.Serializable; +import java.util.Map.Entry; + +/** + * + * + * AggregationProfileManager class implements objects that store the information + * parsed from a json object containing aggregation profile data or loaded from + * an json file + * + * The AggregationProfileManager keeps info of a list of AvProfileItem objects + * each one representing the aggregation profile information that corresponds to + * a specific client's report + */ +public class AggregationProfileManager implements Serializable { + + private HashMap list; + private static final Logger LOG = Logger.getLogger(AggregationProfileManager.class.getName()); + private HashMap profileNameIdMap; + + public AggregationProfileManager() { + + this.list = new HashMap(); + this.profileNameIdMap = new HashMap(); + + } + /** + * + * A AvProfileItem class implements an object containing info of an aggregation profile element + */ + public class AvProfileItem implements Serializable { + + private String id; + private String name; + private String namespace; + private String metricProfile; + private String metricOp; + private String groupType; + private String op; + + private 
HashMap groups; // a map of function(group) as key and a ServGroupItem as value. ServGroupItem keeps info of each service, operation pair in the profile data + private HashMap serviceIndex; // a map of service as key and an ArrayList of functions(groups) as value that correspond to the specific service + private HashMap serviceOperations; //a map of service as key and operation as value +// private HashMap> serviceFunctions; + private HashMap groupOperations; //a map of function(group) as key and operation as value, corresponding to the specific group + + AvProfileItem() { + this.groups = new HashMap(); + this.serviceIndex = new HashMap(); + this.serviceOperations = new HashMap(); + this.groupOperations = new HashMap(); + } + + public class ServGroupItem implements Serializable { + + String op; + HashMap services; + + ServGroupItem(String op) { + this.op = op; + this.services = new HashMap(); + } + } + + /** + * creates a ServGroupItem object and keeps in a map the pair of group, + * ServGroupItem + * + * @param group, a function e.g "compute" + * @param op , the operation that corresponds to the function e.g "AND" + */ + public void insertGroup(String group, String op) { + if (!this.groups.containsKey(group)) { + this.groups.put(group, new ServGroupItem(op)); + } + } + + /** + * keeps in a map for each function the pair of service, operation + * + * @param group , a function e.g "compute" + * @param service , a service e.g "ARC-CE" + * @param op , an operation e.g "AND" + */ + public void insertService(String group, String service, String op) { + if (this.groups.containsKey(group)) { + this.groups.get(group).services.put(service, op); + + this.serviceIndex.put(service, group); + } + } + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getNamespace() { + return namespace; + } + + public void setNamespace(String namespace) { + this.namespace = namespace; + } + + public String getMetricProfile() { + return metricProfile; + } + + public void setMetricProfile(String metricProfile) { + this.metricProfile = metricProfile; + } + + public String getMetricOp() { + return metricOp; + } + + public void setMetricOp(String metricOp) { + this.metricOp = metricOp; + } + + public String getGroupType() { + return groupType; + } + + public void setGroupType(String groupType) { + this.groupType = groupType; + } + + public String getOp() { + return op; + } + + public void setOp(String op) { + this.op = op; + } + + public HashMap getGroups() { + return groups; + } + + public void setGroups(HashMap groups) { + this.groups = groups; + } + + public HashMap getServiceIndex() { + return serviceIndex; + } + + public void setServiceIndex(HashMap serviceIndex) { + this.serviceIndex = serviceIndex; + } + + public HashMap getServiceOperations() { + return serviceOperations; + } + + public void setServiceOperations(HashMap serviceOperations) { + this.serviceOperations = serviceOperations; + } + + public HashMap getGroupOperations() { + return groupOperations; + } + + public void setGroupOperationss(HashMap groupOperations) { + this.groupOperations = groupOperations; + } + + } + + /** + * clears the list containing the AvProfileItem + */ + public void clearProfiles() { + this.list.clear(); + } + + /** + * returns the profile operation as exists in the profile data field + * profile_operation + * + * @param avProfile , the name of the aggregation 
profile + * @return the profile operation + */ + public String getTotalOp(String avProfile) { + if (this.list.containsKey(avProfile)) { + return this.list.get(avProfile).op; + } + + return ""; + } + + /** + * returns the metric operation as exists in the profile data field + * metric_operation + * + * @param avProfile , the profile's name + * @return the metric operation + */ + public String getMetricOp(String avProfile) { + if (this.list.containsKey(avProfile)) { + return this.list.get(avProfile).metricOp; + } + + return ""; + } + + /** + * Return the available Group Names of a profile + * + * @param avProfile , the profile's name + * @return a list with the groups (functions) + */ + public ArrayList getProfileGroups(String avProfile) { + + if (this.list.containsKey(avProfile)) { + ArrayList result = new ArrayList(); + Iterator groupIterator = this.list.get(avProfile).groups.keySet().iterator(); + + while (groupIterator.hasNext()) { + result.add(groupIterator.next()); + } + + return result; + } + + return null; + } + + /** + * Return the available group operation + * + * @param avProfile , the profile's name + * @param groupName , the group + * @return the operation corresponding to the group + */ + public String getProfileGroupOp(String avProfile, String groupName) { + if (this.list.containsKey(avProfile)) { + if (this.list.get(avProfile).groups.containsKey(groupName)) { + return this.list.get(avProfile).groups.get(groupName).op; + } + } + + return null; + } + + /** + * Return the available services for the group + * + * @param avProfile , the profile's name + * @param groupName , the group + * @return a list containing the services corresponding to the group + */ + public ArrayList getProfileGroupServices(String avProfile, String groupName) { + if (this.list.containsKey(avProfile)) { + if (this.list.get(avProfile).groups.containsKey(groupName)) { + ArrayList result = new ArrayList(); + Iterator srvIterator = this.list.get(avProfile).groups.get(groupName).services.keySet() + .iterator(); + + while (srvIterator.hasNext()) { + result.add(srvIterator.next()); + } + + return result; + } + } + + return null; + } + + public String getProfileGroupServiceOp(String avProfile, String groupName, String service) { + + if (this.list.containsKey(avProfile)) { + if (this.list.get(avProfile).groups.containsKey(groupName)) { + if (this.list.get(avProfile).groups.get(groupName).services.containsKey(service)) { + return this.list.get(avProfile).groups.get(groupName).services.get(service); + } + } + } + + return null; + } + + public ArrayList getAvProfiles() { + + if (this.list.size() > 0) { + ArrayList result = new ArrayList(); + Iterator avpIterator = this.list.keySet().iterator(); + while (avpIterator.hasNext()) { + result.add(avpIterator.next()); + } + + return result; + + } + + return null; + } + + public String getProfileNamespace(String avProfile) { + + if (this.list.containsKey(avProfile)) { + return this.list.get(avProfile).namespace; + } + + return null; + } + + public String getProfileMetricProfile(String avProfile) { + + if (this.list.containsKey(avProfile)) { + return this.list.get(avProfile).metricProfile; + } + + return null; + } + + public String getProfileGroupType(String avProfile) { + + if (this.list.containsKey(avProfile)) { + return this.list.get(avProfile).groupType; + } + + return null; + } + + public String getGroupByService(String avProfile, String service) { + + if
(this.list.containsKey(avProfile)) { + + return this.list.get(avProfile).serviceIndex.get(service); + + } + return null; + + } + + public boolean checkService(String avProfile, String service) { + + if (this.list.containsKey(avProfile)) { + + if (this.list.get(avProfile).serviceIndex.containsKey(service)) { + return true; + } + + } + return false; + + } + + public void loadJson(File jsonFile) throws IOException { + + BufferedReader br = null; + try { + + br = new BufferedReader(new FileReader(jsonFile)); + + JsonParser json_parser = new JsonParser(); + JsonElement j_element = json_parser.parse(br); + readJson(j_element); + } catch (FileNotFoundException ex) { + LOG.error("Could not open file:" + jsonFile.getName()); + throw ex; + + } catch (JsonParseException ex) { + LOG.error("File is not valid json:" + jsonFile.getName()); + throw ex; + } finally { + // Close quietly without exceptions the buffered reader + IOUtils.closeQuietly(br); + } + + } + + public void loadJsonString(List apsJson) throws IOException { + + try { + + JsonParser jsonParser = new JsonParser(); + JsonElement jRootElement = jsonParser.parse(apsJson.get(0)); + readJson(jRootElement); + + } catch (JsonParseException ex) { + LOG.error("Contents are not valid json"); + throw ex; + } + + } + + /** + * reads from a JsonElement and stores the necessary information to the + * AggregationProfileManager object fields + * + * @param j_element , a JsonElement containing the aggregations profiles data + */ + public void readJson(JsonElement j_element) { + JsonObject jRootObj = j_element.getAsJsonObject(); + + // Create new entry for this availability profile + AvProfileItem tmpAvp = new AvProfileItem(); + + JsonArray apGroups = jRootObj.getAsJsonArray("groups"); + tmpAvp.id = jRootObj.get("id").getAsString(); + tmpAvp.name = jRootObj.get("name").getAsString(); + tmpAvp.namespace = jRootObj.get("namespace").getAsString(); + tmpAvp.metricProfile = jRootObj.get("metric_profile").getAsJsonObject().get("name").getAsString(); + tmpAvp.metricOp = jRootObj.get("metric_operation").getAsString(); + tmpAvp.groupType = jRootObj.get("endpoint_group").getAsString(); + tmpAvp.op = jRootObj.get("profile_operation").getAsString(); + + for (JsonElement item : apGroups) { + // service name + JsonObject itemObj = item.getAsJsonObject(); + String itemName = itemObj.get("name").getAsString(); + String itemOp = itemObj.get("operation").getAsString(); + tmpAvp.groupOperations.put(itemName, itemOp); + JsonArray itemServices = itemObj.get("services").getAsJsonArray(); + tmpAvp.insertGroup(itemName, itemOp); + + for (JsonElement subItem : itemServices) { + JsonObject subObj = subItem.getAsJsonObject(); + String serviceName = subObj.get("name").getAsString(); + String serviceOp = subObj.get("operation").getAsString(); + tmpAvp.insertService(itemName, serviceName, serviceOp); + } + + } + buildExtraData(tmpAvp); + + // Add profile to the list + this.list.put(tmpAvp.name, tmpAvp); + this.profileNameIdMap.put(tmpAvp.id, tmpAvp.name); + + } + + /** + * populates extra information by processing the parsed aggregation profile + * data + * + * @param item, the AvProfileItem created while parsing the aggregation + * profile data input + */ + private void buildExtraData(AvProfileItem item) { + + HashMap groupOperations = new HashMap<>(); + + HashMap serviceOperations = new HashMap<>(); + HashMap> serviceFunctions = new HashMap<>(); + + for (String group : item.groups.keySet()) { + AvProfileItem.ServGroupItem servGroupItem = item.groups.get(group); + + if 
(!groupOperations.containsKey(group)) { + groupOperations.put(group, servGroupItem.op); + } + + for (Entry entry : servGroupItem.services.entrySet()) { + serviceOperations.put(entry.getKey(), entry.getValue()); + + ArrayList functions = new ArrayList<>(); + if (serviceFunctions.containsKey(entry.getKey())) { + functions = serviceFunctions.get(entry.getKey()); + } + functions.add(group); + serviceFunctions.put(entry.getKey(), functions); + } + } + item.serviceOperations = serviceOperations; + // item.serviceFunctions = serviceFunctions; + } + + /** + * Return the available function that corresponds to the service input + * + * @param service a service + * @return a function corresponding to the service + */ + public String retrieveFunctionsByService(String service) { + + AvProfileItem item = getAvProfileItem(); + return item.serviceIndex.get(service); + } + + /** + * Return a map containing all the pairs of service , operation that appear + * in the aggregation profile data + * + * @return the map containing the service , operation pairs + */ + public HashMap retrieveServiceOperations() { + + AvProfileItem item = getAvProfileItem(); + return item.serviceOperations; + } + + /** + * Return a map containing all the pairs of service , group (function) that + * appear in the aggregation profile data + * + * @return a map containing service, group (function) pairs + */ + public HashMap retrieveServiceFunctions() { + + AvProfileItem item = getAvProfileItem(); + return item.serviceIndex; + } + + /** + * Checks the service, group (function) pairs for the existence of the + * service input + * + * @param service a service + * @return true if the service exists in the map, false otherwise + */ + public boolean checkServiceExistance(String service) { + AvProfileItem item = getAvProfileItem(); + + return item.serviceIndex.keySet().contains(service); + } + + public AvProfileItem getAvProfileItem() { + String name = this.list.keySet().iterator().next(); + return this.list.get(name); + + } + + /** + * Return the profile's metric operation as it exists in the aggregation profile + * data field metric_operation + * + * @return the profile's metric operation + */ + public String getMetricOpByProfile() { + AvProfileItem item = getAvProfileItem(); + + return item.metricOp; + + } + + /** + * Return a map containing all the pairs of group (function) , operation + * that appear in the aggregation profile data + * + * @return the map containing all the pairs of group (function), operation + */ + public HashMap retrieveGroupOperations() { + + AvProfileItem item = getAvProfileItem(); + + return item.groupOperations; + } + + /** + * Return the operation that corresponds to the group input + * + * @param group , a group (function) + * @return the operation corresponding to the group + */ + public String retrieveGroupOperation(String group) { + AvProfileItem item = getAvProfileItem(); + + return item.groups.get(group).op; + } + + /** + * Return the profile's profile_operation + * + * @return the profile's operation + */ + public String retrieveProfileOperation() { + AvProfileItem item = getAvProfileItem(); + + return item.op; + } + +} diff --git a/flink_jobs/ProfilesManager/src/main/java/profilesmanager/EndpointGroupManager.java b/flink_jobs/ProfilesManager/src/main/java/profilesmanager/EndpointGroupManager.java new file mode 100644 index 00000000..606739bd --- /dev/null +++ b/flink_jobs/ProfilesManager/src/main/java/profilesmanager/EndpointGroupManager.java @@ -0,0 +1,476 @@ +package profilesmanager; + +import java.io.File;
+import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.TreeMap; +import java.util.Map.Entry; + +import org.apache.avro.Schema; +import org.apache.avro.file.DataFileReader; +import org.apache.avro.generic.GenericData; +import org.apache.avro.generic.GenericDatumReader; +import org.apache.avro.generic.GenericRecord; +import org.apache.avro.io.DatumReader; +import org.apache.avro.util.Utf8; +import org.apache.commons.io.IOUtils; +import org.apache.log4j.Logger; + +import argo.avro.GroupEndpoint; +import com.google.gson.JsonArray; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import java.io.Serializable; +import java.util.Arrays; +import java.util.Map; +import java.util.Objects; + +/** + * + * + * EndpointGroupManager class implements objects that store the information + * parsed from a json object containing topology group-endpoint profile data or + * loaded from an avro file + * + * The EndpointGroupManager keeps info of a list of EndpointItem objects each + * one representing the current topology information that corresponds to a + * specific client's report or the topology information that is valid for a + * given date , for a specific client's report + * + * Also stores a list of EndpointItem objects that are filtered on the tags + * information, given some criteria on the tags + */ +public class EndpointGroupManager implements Serializable { + + private static final Logger LOG = Logger.getLogger(EndpointGroupManager.class.getName()); + + private ArrayList list; // the list of EndpointItems that are included in the profile data + private ArrayList fList; // the list of the filter EndpointItems that correspond to the criteria + private HashMap> topologyEndpoint = new HashMap<>(); + + //* A EndpointItem class implements an object containing info of an group endpoint included in the topology + protected class EndpointItem implements Serializable { // the object + + String type; // type of group + String group; // name of the group + String service; // type of the service + String hostname; // name of host + HashMap tags; // Tag list + + public EndpointItem() { + // Initializations + this.type = ""; + this.group = ""; + this.service = ""; + this.hostname = ""; + this.tags = new HashMap(); + } + + public EndpointItem(String type, String group, String service, String hostname, HashMap tags) { + this.type = type; + this.group = group; + this.service = service; + this.hostname = hostname; + this.tags = tags; + + } + + @Override + public int hashCode() { + int hash = 7; + hash = 17 * hash + Objects.hashCode(this.type); + hash = 17 * hash + Objects.hashCode(this.group); + hash = 17 * hash + Objects.hashCode(this.service); + hash = 17 * hash + Objects.hashCode(this.hostname); + hash = 17 * hash + Objects.hashCode(this.tags); + return hash; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + final EndpointItem other = (EndpointItem) obj; + if (!Objects.equals(this.type, other.type)) { + return false; + } + if (!Objects.equals(this.group, other.group)) { + return false; + } + if (!Objects.equals(this.service, other.service)) { + return false; + } + if (!Objects.equals(this.hostname, other.hostname)) { + return false; + } + if (!Objects.equals(this.tags, other.tags)) { + return false; + } + return true; + } + + } + + public EndpointGroupManager() { + this.list = 
new ArrayList<>(); + this.fList = new ArrayList<>(); + + } + + /** + * Inserts into the list a new EndpointItem , that contains the information + * of a group endpoint of the topology + * + * @param type the type of the endpoint group + * @param group the group of the endpoint group + * @param service the service of the endpoint group + * @param hostname the hostname of the endpoint group + * @param tags the tags of the endpoint group + * @return 0 if the item is inserted successfully to the list of the + * EndpointItems + */ + public int insert(String type, String group, String service, String hostname, HashMap tags) { + EndpointItem new_item = new EndpointItem(type, group, service, hostname, tags); + this.list.add(new_item); + return 0; // All good + } + + /** + * Checks the filtered list if group endpoint with a hostname , service + * exists + * + * @param hostname + * @param service + * @return true if the group endpoint exists, else elsewhere + */ + public boolean checkEndpoint(String hostname, String service) { + + for (EndpointItem item : fList) { + if (item.hostname.equals(hostname) && item.service.equals(service)) { + return true; + } + } + + return false; + } + + /** + * Returns the list of groups each one containing the group endpoint with + * the specific info + * + * @param type the type + * @param hostname the hostname + * @param service the service + * @return a list of groups that contain the group endpoint with the + * specific type, hostname, service + */ + public ArrayList getGroup(String type, String hostname, String service) { + + ArrayList results = new ArrayList(); + + for (EndpointItem item : fList) { + if (item.type.equals(type) && item.hostname.equals(hostname) && item.service.equals(service)) { + results.add(item.group); + } + } + + return results; + } + + /** + * Searches the tags of a group endpoint with the specific group, type, + * hostname, service and if the tags map contains keys with "info." in them + * then it returns the values combinations + * + * @param group the group + * @param type the type + * @param hostname the hostname + * @param service the service + * @return a string where all values of a "info." tag are combined and + * represented + */ + public String getInfo(String group, String type, String hostname, String service) { + String info = ""; + boolean first = true; + HashMap tags = this.getGroupTags(group, type, hostname, service); + + if (tags == null) { + return info; + } + + for (String tName : tags.keySet()) { + + if (tName.startsWith("info.")) { + + String infoName = tName.replaceFirst("info.", ""); + + String value = tags.get(tName); + + if (!value.equalsIgnoreCase("")) { + + if (!first) { + info = info + ","; + } else { + first = false; + } + info = info + infoName + ":" + value; + + } + } + } + return info; + } + + /** + * Finds in the filtered EndpointItem fList a EndpointItem with specific + * group, type. 
service, hostname and return the tags if it is found + * + * @param group a group + * @param type a type + * @param hostname a hostname + * @param service a service + * @return a map of tags for a specific group endpoint or null if there are + * no tags + */ + public HashMap getGroupTags(String group, String type, String hostname, String service) { + + for (EndpointItem item : fList) { + if (item.group.equals(group) && item.type.equals(type) && item.hostname.equals(hostname) && item.service.equals(service)) { + return item.tags; + } + } + + return null; + } + + /** + * counts the items in the filtered list + * + * @return + */ + public int count() { + return this.fList.size(); + } + /** + * Clear the filtered fList and initialize with all the EndpointItems the list includes + */ + public void unfilter() { + this.fList.clear(); + for (EndpointItem item : this.list) { + this.fList.add(item); + } + } +/** + * Applies filter on the tags of the EndpointItems included in the list, based on a map of criteria + * each criteria containing a pair of the tags field name and the value it searches . e.g ("monitored","1") searches tags + * with the specific pair of criteria + * @param fTags a map of criteria + */ + public void filter(TreeMap fTags) { + this.fList.clear(); + boolean trim; + for (EndpointItem item : this.list) { + trim = false; + HashMap itemTags = item.tags; + for (Entry fTagItem : fTags.entrySet()) { + + if (itemTags.containsKey(fTagItem.getKey())) { + // First Check binary tags as Y/N 0/1 + + if (fTagItem.getValue().equalsIgnoreCase("y") || fTagItem.getValue().equalsIgnoreCase("n")) { + String binValue = ""; + if (fTagItem.getValue().equalsIgnoreCase("y")) { + binValue = "1"; + } + if (fTagItem.getValue().equalsIgnoreCase("n")) { + binValue = "0"; + } + + if (itemTags.get(fTagItem.getKey()).equalsIgnoreCase(binValue) == false) { + trim = true; + } + } else if (itemTags.get(fTagItem.getKey()).equalsIgnoreCase(fTagItem.getValue()) == false) { + trim = true; + } + + } + } + + if (trim == false) { + fList.add(item); + } + } + } + + /** + * Loads endpoint grouping information from an avro file + *

+ * This method loads endpoint grouping information contained in an .avro
+ * file with specific avro schema.
+ *
+ * The following fields are expected to be found in each avro row:
+ * <ol>
+ * <li>type: string (describes the type of grouping)</li>
+ * <li>group: string</li>
+ * <li>service: string</li>
+ * <li>hostname: string</li>
+ * <li>tags: hashmap (contains a map of arbitrary key values)</li>
+ * </ol>
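+ *
+ * A minimal usage sketch (illustrative only; the avro file name and the
+ * tag criteria below are hypothetical):
+ * <pre>{@code
+ * EndpointGroupManager egMgr = new EndpointGroupManager();
+ * egMgr.loadAvro(new File("group_endpoints.avro"));
+ * TreeMap<String, String> criteria = new TreeMap<String, String>();
+ * criteria.put("monitored", "1");
+ * egMgr.filter(criteria); // keep only endpoint items whose tags match
+ * boolean known = egMgr.checkEndpoint("host.example.org", "SRM");
+ * }</pre>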
+ * + * @param avroFile a File object of the avro file that will be opened + * @throws IOException if there is an error during opening of the avro file + */ + @SuppressWarnings("unchecked") + public void loadAvro(File avroFile) throws IOException { + + // Prepare Avro File Readers + DatumReader datumReader = new GenericDatumReader(); + DataFileReader dataFileReader = null; + try { + dataFileReader = new DataFileReader(avroFile, datumReader); + + // Grab Avro schema + Schema avroSchema = dataFileReader.getSchema(); + + // Generate 1st level generic record reader (rows) + GenericRecord avroRow = new GenericData.Record(avroSchema); + + // For all rows in file repeat + while (dataFileReader.hasNext()) { + // read the row + avroRow = dataFileReader.next(avroRow); + HashMap tagMap = new HashMap(); + + // Generate 2nd level generic record reader (tags) + HashMap tags = (HashMap) (avroRow.get("tags")); + + if (tags != null) { + for (Utf8 item : tags.keySet()) { + tagMap.put(item.toString(), String.valueOf(tags.get(item))); + } + } + + // Grab 1st level mandatory fields + String type = avroRow.get("type").toString(); + String group = avroRow.get("group").toString(); + String service = avroRow.get("service").toString(); + String hostname = avroRow.get("hostname").toString(); + + // Insert data to list + this.insert(type, group, service, hostname, tagMap); +// populateTopologyEndpoint(hostname, service, type, group); + } // end of avro rows + + this.unfilter(); + + } catch (IOException ex) { + LOG.error("Could not open avro file:" + avroFile.getName()); + throw ex; + } finally { + // Close quietly without exceptions the buffered reader + IOUtils.closeQuietly(dataFileReader); + } + + } + + public ArrayList getList() { + return this.list; + } + + public ArrayList getfList() { + return fList; + } + + public void setfList(ArrayList fList) { + this.fList = fList; + } + + /** + * Loads information from a list of EndpointGroup objects + * + */ + @SuppressWarnings("unchecked") + public void loadFromList(List egp) { + + // For each endpoint group record + for (GroupEndpoint item : egp) { + String type = item.getType(); + String group = item.getGroup(); + String service = item.getService(); + String hostname = item.getHostname(); + HashMap tagMap = new HashMap(); + HashMap tags = (HashMap) item.getTags(); + + if (tags != null) { + for (String key : tags.keySet()) { + tagMap.put(key, tags.get(key)); + } + } + + // Insert data to list + this.insert(type, group, service, hostname, tagMap); +// populateTopologyEndpoint(hostname, service, type, group); + } + + this.unfilter(); + + } + +// private void populateTopologyEndpoint(String hostname, String service, String type, String group) { +// +// String topologyEndpointKey = hostname + "-" + service; +// +// HashMap endpMap = new HashMap(); +// if (topologyEndpoint.get(type) != null) { +// endpMap = topologyEndpoint.get(type); +// } +// +// endpMap.put(topologyEndpointKey, group); +// topologyEndpoint.put(type, endpMap); +// } + + public void loadGroupEndpointProfile(JsonArray element) throws IOException { + GroupEndpoint[] groupEndpoints = readJson(element); + loadFromList(Arrays.asList(groupEndpoints)); + } +/** + * reads from a JsonElement array and stores the necessary information to the + * GroupEndpoint objects and add them to the list + * + * @param jElement , a JsonElement containing the topology group endpoint profiles data + * @return + */ + public GroupEndpoint[] readJson(JsonArray jElement) { + List results = new ArrayList<>(); + + for (int i = 0; i < 
jElement.size(); i++) { + JsonObject jItem = jElement.get(i).getAsJsonObject(); + String group = jItem.get("group").getAsString(); + String gType = jItem.get("type").getAsString(); + String service = jItem.get("service").getAsString(); + String hostname = jItem.get("hostname").getAsString(); + JsonObject jTags = jItem.get("tags").getAsJsonObject(); + Map tags = new HashMap(); + for (Entry kv : jTags.entrySet()) { + tags.put(kv.getKey(), kv.getValue().getAsString()); + } + GroupEndpoint ge = new GroupEndpoint(gType, group, service, hostname, tags); + results.add(ge); + } + + GroupEndpoint[] rArr = new GroupEndpoint[results.size()]; + rArr = results.toArray(rArr); + return rArr; + } + +} diff --git a/flink_jobs/ProfilesManager/src/main/java/profilesmanager/GroupGroupManager.java b/flink_jobs/ProfilesManager/src/main/java/profilesmanager/GroupGroupManager.java new file mode 100644 index 00000000..7ab24b3c --- /dev/null +++ b/flink_jobs/ProfilesManager/src/main/java/profilesmanager/GroupGroupManager.java @@ -0,0 +1,352 @@ +package profilesmanager; + +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map.Entry; +import java.util.TreeMap; + +import org.apache.avro.Schema; +import org.apache.avro.file.DataFileReader; +import org.apache.avro.generic.GenericData; +import org.apache.avro.generic.GenericDatumReader; +import org.apache.avro.generic.GenericRecord; +import org.apache.avro.io.DatumReader; +import org.apache.commons.io.IOUtils; +import org.apache.log4j.Logger; + +import argo.avro.GroupGroup; +import com.google.gson.JsonArray; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import java.io.Serializable; +import java.util.Arrays; +import java.util.Map; + +/** + * + * + * GroupGroupManager class implements objects that store the information parsed + * from a json object containing topology group-endpoint profile data or loaded + * from an avro file + * + * The GroupGroupManager keeps info of a list of GroupItem objects each one + * representing the current topology information that corresponds to a specific + * client's report or the topology information that is valid for a given date , + * for a specific client's report + * + * Also stores a list of GroupItem objects that are filtered on the tags + * information, given some criteria on the tags + */ +public class GroupGroupManager implements Serializable { + + static Logger log = Logger.getLogger(GroupGroupManager.class.getName()); + + private ArrayList list; + private ArrayList fList; + + //* A GroupItem class implements an object containing info of an group endpoint included in the topology + protected class GroupItem implements Serializable { + + String type; // type of group + String group; // name of the group + String subgroup; // name of sub-group + HashMap tags; // Tag list + + public GroupItem() { + // Initializations + this.type = ""; + this.group = ""; + this.subgroup = ""; + this.tags = new HashMap(); + } + + public GroupItem(String type, String group, String subgroup, HashMap tags) { + this.type = type; + this.group = group; + this.subgroup = subgroup; + this.tags = tags; + + } + + } + + public GroupGroupManager() { + this.list = new ArrayList(); + this.fList = new ArrayList(); + } + + /** + * Inserts into the list a new EndpointItem , that contains the information + * of a group endpoint of the topology + * + * @param type the type of the group group + * @param group the group of the group group + * @param 
subgroup the subgroup of the group group + * @param tags the tags of the endpoint group + * @return 0 if the item is inserted successfully to the list of the + * GroupItems + */ + public int insert(String type, String group, String subgroup, HashMap tags) { + GroupItem new_item = new GroupItem(type, group, subgroup, tags); + this.list.add(new_item); + return 0; // All good + } + + /** + * Finds in the filtered GroupItem fList a GroupItem with specific type. + * subgroup and return the tags if it is found + * + * @param type a type + * @param subgroup a subgroup + * @return a map of tags for a specific group group or null if there are no + * tags + */ + public HashMap getGroupTags(String type, String subgroup) { + for (GroupItem item : this.fList) { + if (item.type.equals(type) && item.subgroup.equals(subgroup)) { + return item.tags; + } + } + + return null; + } + + /** + * counts the items in the filtered list + * + * @return + */ + + public int count() { + return this.fList.size(); + } + + /** + * Returns the group corresponding to the specific type, subgroup info + * + * @param type the type + * @param subgroup the subgroup + * @return a group group with the specific type, subgroup + */ + public String getGroup(String type, String subgroup) { + for (GroupItem item : this.fList) { + if (item.type.equals(type) && item.subgroup.equals(subgroup)) { + return item.group; + } + } + + return null; + } + + /** + * Clear the filtered fList and initialize with all the GroupItems the + * list includes + */ + public void unfilter() { + this.fList.clear(); + for (GroupItem item : this.list) { + this.fList.add(item); + } + } + + /** + * Applies filter on the tags of the GroupItems included in the list, based + * on a map of criteria each criteria containing a pair of the tags field + * name and the value it searches . e.g ("monitored","1") searches tags with + * the specific pair of criteria + * + * @param fTags a map of criteria + */ + public void filter(TreeMap fTags) { + this.fList.clear(); + boolean trim; + for (GroupItem item : this.list) { + trim = false; + HashMap itemTags = item.tags; + for (Entry fTagItem : fTags.entrySet()) { + + if (itemTags.containsKey(fTagItem.getKey())) { + if (itemTags.get(fTagItem.getKey()).equalsIgnoreCase(fTagItem.getValue()) == false) { + trim = true; + } + + } + } + + if (trim == false) { + fList.add(item); + } + } + } + + /** + * Checks the filtered list if group group with a specific subgroup exists + * + * @param subgroup + * @return true if the group group exists, else elsewhere + */ + public boolean checkSubGroup(String subgroup) { + for (GroupItem item : fList) { + if (item.subgroup.equals(subgroup)) { + return true; + } + } + + return false; + } + + /** + * Loads groups of groups information from an avro file + *

+ * This method loads groups of groups information contained in an .avro file
+ * with specific avro schema.
+ *
+ * The following fields are expected to be found in each avro row:
+ * <ol>
+ * <li>type: string (describes the type of grouping)</li>
+ * <li>group: string</li>
+ * <li>subgroup: string</li>
+ * <li>tags: hashmap (contains a map of arbitrary key values)</li>
+ * </ol>
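+ *
+ * A minimal usage sketch (illustrative only; the avro file name and the
+ * type/subgroup values below are hypothetical):
+ * <pre>{@code
+ * GroupGroupManager ggMgr = new GroupGroupManager();
+ * ggMgr.loadAvro(new File("group_groups.avro"));
+ * String group = ggMgr.getGroup("NGI", "SITE_A");   // group that owns the subgroup
+ * boolean exists = ggMgr.checkSubGroup("SITE_A");   // true if the subgroup was loaded
+ * }</pre>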
+ * + * @param avroFile a File object of the avro file that will be opened + * @throws IOException if there is an error during opening of the avro file + */ + public void loadAvro(File avroFile) throws IOException { + + // Prepare Avro File Readers + DatumReader datumReader = new GenericDatumReader(); + DataFileReader dataFileReader = null; + try { + dataFileReader = new DataFileReader(avroFile, datumReader); + + // Grab avro schema + Schema avroSchema = dataFileReader.getSchema(); + + // Generate 1st level generic record reader (rows) + GenericRecord avroRow = new GenericData.Record(avroSchema); + + // For all rows in file repeat + while (dataFileReader.hasNext()) { + // read the row + avroRow = dataFileReader.next(avroRow); + HashMap tagMap = new HashMap(); + + // Generate 2nd level generic record reader (tags) + HashMap tags = (HashMap) avroRow.get("tags"); + if (tags != null) { + for (Object item : tags.keySet()) { + tagMap.put(item.toString(), tags.get(item).toString()); + } + } + + // Grab 1st level mandatory fields + String type = avroRow.get("type").toString(); + String group = avroRow.get("group").toString(); + String subgroup = avroRow.get("subgroup").toString(); + + // Insert data to list + this.insert(type, group, subgroup, tagMap); + + } // end of avro rows + + this.unfilter(); + + dataFileReader.close(); + + } catch (IOException ex) { + log.error("Could not open avro file:" + avroFile.getName()); + throw ex; + } finally { + // Close quietly without exceptions the buffered reader + IOUtils.closeQuietly(dataFileReader); + } + + } + + /** + * Loads group of group information from a list of GroupGroup objects + * + */ + @SuppressWarnings("unchecked") + public void loadFromList(List ggp) { + + // For each group of groups record + for (GroupGroup item : ggp) { + String type = item.getType(); + String group = item.getGroup(); + String subgroup = item.getSubgroup(); + + HashMap tagMap = new HashMap(); + HashMap tags = (HashMap) item.getTags(); + + if (tags != null) { + for (String key : tags.keySet()) { + tagMap.put(key, tags.get(key)); + } + } + + // Insert data to list + this.insert(type, group, subgroup, tagMap); + } + + this.unfilter(); + + } + + public void loadGroupGroupProfile(JsonArray element) throws IOException { + GroupGroup[] groupGroups = readJson(element); + loadFromList(Arrays.asList(groupGroups)); + } + + /** + * reads from a JsonElement array and stores the necessary information to + * the GroupGroup objects and add them to the list + * + * @param jElement , a JsonElement containing the topology group endpoint + * profiles data + * @return + */ + public GroupGroup[] readJson(JsonArray jElement) { + List results = new ArrayList<>(); + + for (int i = 0; i < jElement.size(); i++) { + JsonObject jItem = jElement.get(i).getAsJsonObject(); + String group = jItem.get("group").getAsString(); + String gType = jItem.get("type").getAsString(); + String subgroup = jItem.get("subgroup").getAsString(); + JsonObject jTags = jItem.get("tags").getAsJsonObject(); + Map tags = new HashMap(); + for (Entry kv : jTags.entrySet()) { + tags.put(kv.getKey(), kv.getValue().getAsString()); + } + GroupGroup gg = new GroupGroup(gType, group, subgroup, tags); + results.add(gg); + } + + GroupGroup[] rArr = new GroupGroup[results.size()]; + rArr = results.toArray(rArr); + return rArr; + } + + public ArrayList getList() { + return list; + } + + public void setList(ArrayList list) { + this.list = list; + } + + public ArrayList getfList() { + return fList; + } + + public void setfList(ArrayList fList) 
{ + this.fList = fList; + } + +} diff --git a/flink_jobs/ProfilesManager/src/main/java/profilesmanager/MetricProfileManager.java b/flink_jobs/ProfilesManager/src/main/java/profilesmanager/MetricProfileManager.java new file mode 100644 index 00000000..bfe0dea0 --- /dev/null +++ b/flink_jobs/ProfilesManager/src/main/java/profilesmanager/MetricProfileManager.java @@ -0,0 +1,434 @@ +package profilesmanager; + +import argo.avro.MetricProfile; +import com.google.gson.Gson; +import com.google.gson.JsonArray; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import java.io.File; +import java.io.IOException; +import java.io.Serializable; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.apache.avro.Schema; +import org.apache.avro.file.DataFileReader; +import org.apache.avro.generic.GenericData; +import org.apache.avro.generic.GenericDatumReader; +import org.apache.avro.generic.GenericRecord; +import org.apache.avro.io.DatumReader; +import org.apache.avro.util.Utf8; +import org.apache.commons.io.IOUtils; +import org.apache.log4j.Logger; + +/** + * + * MetricProfileManager class implements objects that store the information + * parsed from a json object containing metric profile data or loaded from an + * avro file + * + * The MetricProfileManager keeps info of a list of ProfileItems each one having + * a name(profile), service, metric and a set of tags , also it convert string + * operations and statuses to integer based on their position in the list + * storage + */ +public class MetricProfileManager implements Serializable { + + private static final Logger LOG = Logger.getLogger(MetricProfileManager.class.getName()); + + private ArrayList list; // a list of ProfileItem objects + private Map>> index; //a map that stores as key a profile and as value a map of pairs of service (as key) and list of metrics (as value) + + /** + * + * A ProfileItem class implements an object containing info of a profile, + * service, metric , tags + */ + private class ProfileItem implements Serializable { + + String profile; // Name of the profile + String service; // Name of the service type + String metric; // Name of the metric + HashMap tags; // Tag list + + public ProfileItem() { + // Initializations + this.profile = ""; + this.service = ""; + this.metric = ""; + this.tags = new HashMap(); + } + + public ProfileItem(String profile, String service, String metric, HashMap tags) { + this.profile = profile; + this.service = service; + this.metric = metric; + this.tags = tags; + } + + public String getProfile() { + return profile; + } + + public void setProfile(String profile) { + this.profile = profile; + } + + public String getService() { + return service; + } + + public void setService(String service) { + this.service = service; + } + + public String getMetric() { + return metric; + } + + public void setMetric(String metric) { + this.metric = metric; + } + + public HashMap getTags() { + return tags; + } + + public void setTags(HashMap tags) { + this.tags = tags; + } + + } + + /** + * A constructor of a MetricProfileManager + */ + + public MetricProfileManager() { + this.list = new ArrayList(); + this.index = new HashMap>>(); + } + + // Clear all profile data (both list and indexes) + public void clear() { + this.list = new ArrayList(); + this.index = new HashMap>>(); + } + + /** + * Inserts a profile in the map + * + * @param profile, a profile + * @return 0 if profile is added and -1 if the profile 
already exists + */ + public int indexInsertProfile(String profile) { + if (!index.containsKey(profile)) { + index.put(profile, new HashMap>()); + return 0; + } + return -1; + } + + /** + * Constructs a ProfileItem, add it at the list and insert into the index + * map + * + * @param profile, a profile + * @param service , a service + * @param metric , a metric + * @param tags , a map of tags + */ + public void insert(String profile, String service, String metric, HashMap tags) { + ProfileItem tmpProfile = new ProfileItem(profile, service, metric, tags); + this.list.add(tmpProfile); + this.indexInsertMetric(profile, service, metric); + } + + /** + * Inserts a service at the index map , based on the profile as a key + * + * @param profile , a profile + * @param service , a service + * @return 0 if the service is added successfully at the index map, -1 if + * the service already exists at the index map + */ + public int indexInsertService(String profile, String service) { + if (index.containsKey(profile)) { + if (index.get(profile).containsKey(service)) { + return -1; + } else { + index.get(profile).put(service, new ArrayList()); + return 0; + } + + } + + index.put(profile, new HashMap>()); + index.get(profile).put(service, new ArrayList()); + return 0; + + } + + /** + * Inserts a metric at the index map , based on the profile and service as + * keys + * + * @param profile, a profile + * @param service , a service + * @param metric , a metric + * @return0 if the metric is added successfully at the index map, -1 if the + * service already exists at the index map + */ + public int indexInsertMetric(String profile, String service, String metric) { + if (index.containsKey(profile)) { + if (index.get(profile).containsKey(service)) { + if (index.get(profile).get(service).contains(metric)) { + // Metric exists so no insertion + return -1; + } + // Metric doesn't exist and must be added + index.get(profile).get(service).add(metric); + return 0; + } else { + // Create the service and the metric + index.get(profile).put(service, new ArrayList()); + index.get(profile).get(service).add(metric); + return 0; + } + + } + // No profile - service - metric so add them all + index.put(profile, new HashMap>()); + index.get(profile).put(service, new ArrayList()); + index.get(profile).get(service).add(metric); + return 0; + + } + + // Getter Functions + public ArrayList getProfileServices(String profile) { + if (index.containsKey(profile)) { + ArrayList ans = new ArrayList(); + ans.addAll(index.get(profile).keySet()); + return ans; + } + return null; + + } + + public ArrayList getProfiles() { + if (index.size() > 0) { + ArrayList ans = new ArrayList(); + ans.addAll(index.keySet()); + return ans; + } + return null; + } + + public ArrayList getProfileServiceMetrics(String profile, String service) { + if (index.containsKey(profile)) { + if (index.get(profile).containsKey(service)) { + return index.get(profile).get(service); + } + } + return null; + } + + /** + * Checks if a combination of profile, service, metric exists in the index + * map + * + * @param profile , a profile + * @param service , a service + * @param metric , a metric + * @return true if the combination of profile , service, metric exists and + * false if it does not + */ + public boolean checkProfileServiceMetric(String profile, String service, String metric) { + if (index.containsKey(profile)) { + if (index.get(profile).containsKey(service)) { + if (index.get(profile).get(service).contains(metric)) { + return true; + } + } + } + + return false; + } 
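+
+ // A minimal usage sketch of the profile/service/metric index (illustrative
+ // only; the profile, service and metric names below are hypothetical):
+ //
+ //   MetricProfileManager mpMgr = new MetricProfileManager();
+ //   mpMgr.insert("ARGO_MON", "SRM", "org.sam.SRM-Put", new HashMap<String, String>());
+ //   boolean ok = mpMgr.checkProfileServiceMetric("ARGO_MON", "SRM", "org.sam.SRM-Put"); // true
+ //   ArrayList metrics = mpMgr.getProfileServiceMetrics("ARGO_MON", "SRM");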
+ + /** + * Loads metric profile information from an avro file + *

+ * This method loads metric profile information contained in an .avro file
+ * with specific avro schema.
+ *
+ * The following fields are expected to be found in each avro row:
+ * <ol>
+ * <li>profile: string</li>
+ * <li>service: string</li>
+ * <li>metric: string</li>
+ * <li>[optional] tags: hashmap (contains a map of arbitrary key values)</li>
+ * </ol>
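+ *
+ * A minimal usage sketch (illustrative only; the avro file name is hypothetical):
+ * <pre>{@code
+ * MetricProfileManager mpMgr = new MetricProfileManager();
+ * mpMgr.loadAvro(new File("metric_profiles.avro"));
+ * ArrayList profiles = mpMgr.getProfiles(); // names of the loaded profiles
+ * }</pre>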
+ * + * @param avroFile a File object of the avro file that will be opened + * @throws IOException if there is an error during opening of the avro file + */ + @SuppressWarnings("unchecked") + public void loadAvro(File avroFile) throws IOException { + + // Prepare Avro File Readers + DatumReader datumReader = new GenericDatumReader(); + DataFileReader dataFileReader = null; + try { + dataFileReader = new DataFileReader(avroFile, datumReader); + + // Grab avro schema + Schema avroSchema = dataFileReader.getSchema(); + + // Generate 1st level generic record reader (rows) + GenericRecord avroRow = new GenericData.Record(avroSchema); + + // For all rows in file repeat + while (dataFileReader.hasNext()) { + // read the row + avroRow = dataFileReader.next(avroRow); + HashMap tagMap = new HashMap(); + + // Generate 2nd level generic record reader (tags) + HashMap tags = (HashMap) (avroRow.get("tags")); + + if (tags != null) { + for (Utf8 item : tags.keySet()) { + tagMap.put(item.toString(), String.valueOf(tags.get(item))); + } + } + + // Grab 1st level mandatory fields + String profile = avroRow.get("profile").toString(); + String service = avroRow.get("service").toString(); + String metric = avroRow.get("metric").toString(); + + // Insert data to list + this.insert(profile, service, metric, tagMap); + + } // end of avro rows + + dataFileReader.close(); + + } catch (IOException ex) { + LOG.error("Could not open avro file:" + avroFile.getName()); + throw ex; + } finally { + // Close quietly without exceptions the buffered reader + IOUtils.closeQuietly(dataFileReader); + } + + } + + /** + * Loads metric profile information from a list of MetricProfile objects + * + * @param mps , the list of MetricProfile objects + */ + @SuppressWarnings("unchecked") + public void loadFromList(List mps) { + + // For each metric profile object in list + for (MetricProfile item : mps) { + String profile = item.getProfile(); + String service = item.getService(); + String metric = item.getMetric(); + HashMap tagMap = new HashMap(); + HashMap tags = (HashMap) item.getTags(); + + if (tags != null) { + for (String key : tags.keySet()) { + tagMap.put(key, tags.get(key)); + } + } + + // Insert data to list + this.insert(profile, service, metric, tagMap); + } + + } + + public void loadMetricProfile(JsonElement element) throws IOException { + + MetricProfile[] metrics = readJson(element); + loadFromList(Arrays.asList(metrics)); + } + + /** + * Reads from a json element and stores the information to an array of + * MetricProfile objects + * + * @param jElement , a JsonElement containing the metric profile data info + * @return + */ + public MetricProfile[] readJson(JsonElement jElement) { + List results = new ArrayList(); + + JsonObject jRoot = jElement.getAsJsonObject(); + String profileName = jRoot.get("name").getAsString(); + JsonArray jElements = jRoot.get("services").getAsJsonArray(); + for (int i = 0; i < jElements.size(); i++) { + JsonObject jItem = jElements.get(i).getAsJsonObject(); + String service = jItem.get("service").getAsString(); + JsonArray jMetrics = jItem.get("metrics").getAsJsonArray(); + for (int j = 0; j < jMetrics.size(); j++) { + String metric = jMetrics.get(j).getAsString(); + + Map tags = new HashMap(); + MetricProfile mp = new MetricProfile(profileName, service, metric, tags); + results.add(mp); + } + + } + + MetricProfile[] rArr = new MetricProfile[results.size()]; + rArr = results.toArray(rArr); + return rArr; + } + + /** + * Checks if a combination of service, metric exists ing the list of + * 
ProfileItems + * + * @param service, a service + * @param metric, a metric + * @return true if the combination of a service, metric exists in the + * ProfileItem list or false if it does not + */ + public boolean containsMetric(String service, String metric) { + + for (ProfileItem profIt : list) { + if (profIt.service.equals(service) && profIt.metric.equals(metric)) { + return true; + } + } + return false; + } + + public ArrayList getList() { + return list; + } + + public void setList(ArrayList list) { + this.list = list; + } + + public Map>> getIndex() { + return index; + } + + public void setIndex(Map>> index) { + this.index = index; + } + +} diff --git a/flink_jobs/ProfilesManager/src/main/resources/log4j.properties b/flink_jobs/ProfilesManager/src/main/resources/log4j.properties new file mode 100644 index 00000000..da32ea0f --- /dev/null +++ b/flink_jobs/ProfilesManager/src/main/resources/log4j.properties @@ -0,0 +1,23 @@ +################################################################################ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+################################################################################ + +log4j.rootLogger=INFO, console + +log4j.appender.console=org.apache.log4j.ConsoleAppender +log4j.appender.console.layout=org.apache.log4j.PatternLayout +log4j.appender.console.layout.ConversionPattern=%d{HH:mm:ss,SSS} %-5p %-60c %x - %m%n diff --git a/flink_jobs/ProfilesManager/src/main/resources/profiles/aggregations.json b/flink_jobs/ProfilesManager/src/main/resources/profiles/aggregations.json new file mode 100644 index 00000000..0e044b1f --- /dev/null +++ b/flink_jobs/ProfilesManager/src/main/resources/profiles/aggregations.json @@ -0,0 +1,68 @@ + { + "id": "00272336-7199-4ea4-bbe9-043aca02838c", + "date": "2021-02-03", + "name": "sla_test", + "namespace": "", + "endpoint_group": "servicegroups", + "metric_operation": "AND", + "profile_operation": "AND", + "metric_profile": { + "name": "ARGO_MON_CRITICAL", + "id": "5850d7fe-75f5-4aa4-bacc-9a5c10280b59" + }, + "groups": [ + { + "name": "compute", + "operation": "OR", + "services": [ + { + "name": "ARC-CE", + "operation": "OR" + }, + { + "name": "GRAM5", + "operation": "OR" + }, + { + "name": "QCG.Computing", + "operation": "OR" + }, + { + "name": "org.opensciencegrid.htcondorce", + "operation": "OR" + } + ] + }, + { + "name": "storage", + "operation": "OR", + "services": [ + { + "name": "SRM", + "operation": "OR" + } + ] + }, + { + "name": "information", + "operation": "OR", + "services": [ + { + "name": "Site-BDII", + "operation": "OR" + } + ] + }, + { + "name": "cloud", + "operation": "OR", + "services": [ + { + "name": "org.openstack.nova", + "operation": "OR" + } + ] + } + ] + } + \ No newline at end of file diff --git a/flink_jobs/ProfilesManager/src/main/resources/profiles/metricprofile.avro b/flink_jobs/ProfilesManager/src/main/resources/profiles/metricprofile.avro new file mode 100644 index 0000000000000000000000000000000000000000..ef77c88b6e12dc49753a5c77cff006f188120112 GIT binary patch literal 583 zcmeZI%3@>^ODrqO*DFrWNX<<=!B(wQQdy9yWTjM;nw(#hqNJmgmzWFU`=*u@WhMs{ z<)>xlq(X&@3lft-a*0Lh`FbE@fZ}PHsW~adN>O^wQ$p!m_Vq;1Y(~IGTL@VW$=Hw^=9SaS< z+{6N)WLaWPX(})<;KIas8E<&R*2dN{6g{4w-mq@!fhi}a)K`?+@Uq=t6mSf3_mB7W z_hU;;%v8%SO4rLvOwY_O)(cNfa?VIi&SpSW0gf{7)XL expResult = new ArrayList<>(); + expResult.add("compute"); + expResult.add("cloud"); + expResult.add("information"); + expResult.add("storage"); + + ArrayList result = instance.getProfileGroups(avProfileItem.getName()); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + //fail("The test case is a prototype."); + } + + /** + * Test of getProfileGroupOp method, of class AggregationProfileManager. + */ + @Test + public void testGetProfileGroupOp() throws IOException { + System.out.println("getProfileGroupOp"); + AggregationProfileManager instance = new AggregationProfileManager(); + instance.loadJson(new File(AggregationProfileManagerTest.class.getResource("/profiles/aggregations.json").getFile())); + AggregationProfileManager.AvProfileItem avProfileItem = instance.getAvProfileItem(); + String avProfile = avProfileItem.getName(); + String groupName = "compute"; + + String expResult = "OR"; + String result = instance.getProfileGroupOp(avProfile, groupName); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of getProfileGroupServices method, of class + * AggregationProfileManager. 
+ */ + @Test + public void testGetProfileGroupServices() throws IOException { + System.out.println("getProfileGroupServices"); + + AggregationProfileManager instance = new AggregationProfileManager(); + instance.loadJson(new File(AggregationProfileManagerTest.class.getResource("/profiles/aggregations.json").getFile())); + AggregationProfileManager.AvProfileItem avProfileItem = instance.getAvProfileItem(); + String avProfile = avProfileItem.getName(); + String groupName = "compute"; + ArrayList expResult = new ArrayList<>(); + expResult.add("ARC-CE"); + expResult.add("GRAM5"); + expResult.add("QCG.Computing"); + expResult.add("org.opensciencegrid.htcondorce"); + + ArrayList result = instance.getProfileGroupServices(avProfile, groupName); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of getProfileGroupServiceOp method, of class + * AggregationProfileManager. + */ + @Test + public void testGetProfileGroupServiceOp() throws IOException { + System.out.println("getProfileGroupServiceOp"); + AggregationProfileManager instance = new AggregationProfileManager(); + instance.loadJson(new File(AggregationProfileManagerTest.class.getResource("/profiles/aggregations.json").getFile())); + AggregationProfileManager.AvProfileItem avProfileItem = instance.getAvProfileItem(); + String avProfile = avProfileItem.getName(); + String groupName = "compute"; + String service = "ARC-CE"; + + String expResult = "OR"; + String result = instance.getProfileGroupServiceOp(avProfile, groupName, service); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of getAvProfiles method, of class AggregationProfileManager. + */ + @Test + public void testGetAvProfiles() throws IOException { + System.out.println("getAvProfiles"); + AggregationProfileManager instance = new AggregationProfileManager(); + instance.loadJson(new File(AggregationProfileManagerTest.class.getResource("/profiles/aggregations.json").getFile())); + + ArrayList expResult = new ArrayList<>(); + expResult.add("sla_test"); + ArrayList result = instance.getAvProfiles(); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of getProfileNamespace method, of class AggregationProfileManager. + */ + @Test + public void testGetProfileNamespace() throws IOException { + System.out.println("getProfileNamespace"); + String avProfile = "sla_test"; + AggregationProfileManager instance = new AggregationProfileManager(); + instance.loadJson(new File(AggregationProfileManagerTest.class.getResource("/profiles/aggregations.json").getFile())); + + String expResult = ""; + String result = instance.getProfileNamespace(avProfile); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + //fail("The test case is a prototype."); + } + + /** + * Test of getProfileMetricProfile method, of class + * AggregationProfileManager. 
+ */ + @Test + public void testGetProfileMetricProfile() throws IOException { + System.out.println("getProfileMetricProfile"); + String avProfile = "sla_test"; + AggregationProfileManager instance = new AggregationProfileManager(); + instance.loadJson(new File(AggregationProfileManagerTest.class.getResource("/profiles/aggregations.json").getFile())); + + String expResult = "ARGO_MON_CRITICAL"; + String result = instance.getProfileMetricProfile(avProfile); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of getProfileGroupType method, of class AggregationProfileManager. + */ + @Test + public void testGetProfileGroupType() throws IOException { + System.out.println("getProfileGroupType"); + String avProfile = "sla_test"; + AggregationProfileManager instance = new AggregationProfileManager(); + instance.loadJson(new File(AggregationProfileManagerTest.class.getResource("/profiles/aggregations.json").getFile())); + + String expResult = "servicegroups"; + String result = instance.getProfileGroupType(avProfile); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + //fail("The test case is a prototype."); + } + + /** + * Test of getGroupByService method, of class AggregationProfileManager. + */ + @Test + public void testGetGroupByService() throws IOException { + System.out.println("getGroupByService"); + String avProfile = "sla_test"; + String service = "ARC-CE"; + AggregationProfileManager instance = new AggregationProfileManager(); + instance.loadJson(new File(AggregationProfileManagerTest.class.getResource("/profiles/aggregations.json").getFile())); + + String expResult = "compute"; + String result = instance.getGroupByService(avProfile, service); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of checkService method, of class AggregationProfileManager. + */ + @Test + public void testCheckService() throws IOException { + System.out.println("checkService"); + String avProfile = "sla_test"; + String service = "ARC-CE"; + AggregationProfileManager instance = new AggregationProfileManager(); + instance.loadJson(new File(AggregationProfileManagerTest.class.getResource("/profiles/aggregations.json").getFile())); + + boolean expResult = true; + boolean result = instance.checkService(avProfile, service); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of loadJson method, of class AggregationProfileManager. + */ + @Test + public void testLoadJson() throws Exception { + System.out.println("loadJson"); + AggregationProfileManager instance = new AggregationProfileManager(); + instance.loadJson(new File(AggregationProfileManagerTest.class.getResource("/profiles/aggregations.json").getFile())); + // TODO review the generated test code and remove the default call to fail. + //fail("The test case is a prototype."); + } + +// /** +// * Test of loadJsonString method, of class AggregationProfileManager. 
+// */ +// @Test +// public void testLoadJsonString() throws Exception { +// System.out.println("loadJsonString"); +// List apsJson = null; +// AggregationProfileManager instance = new AggregationProfileManager(); +// JsonElement jsonElement=instance.loadJson(new File(MetricProfileManagerTest.class.getResource("/profiles/aggregations.json").getFile())); +// MetricProfile[] metricProfile=instance.readJson(jsonElement); +// List mps = Arrays.asList(metricProfile); +// +// instance.loadJsonString(apsJson); +// //// TODO review the generated test code and remove the default call to fail. +// // fail("The test case is a prototype."); +// } +// /** +// * Test of readJson method, of class AggregationProfileManager. +// */ +// @Test +// public void testReadJson() { +// System.out.println("readJson"); +// JsonElement j_element = null; +// AggregationProfileManager instance = new AggregationProfileManager(); +// instance.loadJson(new File(AggregationProfileManagerTest.class.getResource("/profiles/aggregations.json").getFile())); +// +// instance.readJson(j_element); +// // TODO review the generated test code and remove the default call to fail. +// // fail("The test case is a prototype."); +// } + /** + * Test of getEndpointGroup method, of class AggregationProfileManager. + */ + @Test + public void testGetEndpointGroup() throws IOException { + System.out.println("getEndpointGroup"); + AggregationProfileManager instance = new AggregationProfileManager(); + instance.loadJson(new File(AggregationProfileManagerTest.class.getResource("/profiles/aggregations.json").getFile())); + + String expResult = "servicegroups"; + String result = instance.getProfileGroupType("sla_test"); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of retrieveFunctionsByService method, of class + * AggregationProfileManager. + */ +// @Test +// public void testRetrieveFunctionsByService() throws IOException { +// System.out.println("retrieveFunctionsByService"); +// String service = "ARC-CE"; +// AggregationProfileManager instance = new AggregationProfileManager(); +// instance.loadJson(new File(AggregationProfileManagerTest.class.getResource("/profiles/aggregations.json").getFile())); +// +// ArrayList expResult = new ArrayList<>(); +// expResult.add("compute"); +// ArrayList result = instance.r(service); +// assertEquals(expResult, result); +// // TODO review the generated test code and remove the default call to fail. +// // fail("The test case is a prototype."); +// } + /** + * Test of retrieveServiceOperations method, of class + * AggregationProfileManager. + */ + @Test + public void testRetrieveServiceOperations() throws IOException { + System.out.println("retrieveServiceOperations"); + AggregationProfileManager instance = new AggregationProfileManager(); + instance.loadJson(new File(AggregationProfileManagerTest.class.getResource("/profiles/aggregations.json").getFile())); + + HashMap expResult = new HashMap(); + expResult.put("ARC-CE", "OR"); + expResult.put("GRAM5", "OR"); + expResult.put("QCG.Computing", "OR"); + expResult.put("org.opensciencegrid.htcondorce", "OR"); + expResult.put("SRM", "OR"); + expResult.put("Site-BDII", "OR"); + expResult.put("org.openstack.nova", "OR"); + + HashMap result = instance.retrieveServiceOperations(); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. 
+ //fail("The test case is a prototype."); + } + + /** + * Test of retrieveServiceFunctions method, of class + * AggregationProfileManager. + */ + @Test + public void testRetrieveServiceFunctions() throws IOException { + System.out.println("retrieveServiceFunctions"); + AggregationProfileManager instance = new AggregationProfileManager(); + instance.loadJson(new File(AggregationProfileManagerTest.class.getResource("/profiles/aggregations.json").getFile())); + + HashMap expResult = new HashMap<>(); + expResult.put("ARC-CE", "compute"); + expResult.put("GRAM5", "compute"); + expResult.put("QCG.Computing", "compute"); + expResult.put("org.opensciencegrid.htcondorce", "compute"); + expResult.put("SRM", "storage"); + expResult.put("Site-BDII", "information"); + expResult.put("org.openstack.nova", "cloud"); + + HashMap result = instance.retrieveServiceFunctions(); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of checkServiceExistance method, of class AggregationProfileManager. + */ + @Test + public void testCheckServiceExistance() throws IOException { + System.out.println("checkServiceExistance"); + String service = "ARC-CE"; + AggregationProfileManager instance = new AggregationProfileManager(); + instance.loadJson(new File(AggregationProfileManagerTest.class.getResource("/profiles/aggregations.json").getFile())); + + boolean expResult = true; + boolean result = instance.checkServiceExistance(service); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of getAvProfileItem method, of class AggregationProfileManager. + */ + @Test + public void testGetAvProfileItem() throws IOException { + System.out.println("getAvProfileItem"); + AggregationProfileManager instance = new AggregationProfileManager(); + instance.loadJson(new File(AggregationProfileManagerTest.class.getResource("/profiles/aggregations.json").getFile())); + AggregationProfileManager.AvProfileItem expResult = instance.getAvProfileItem(); + AggregationProfileManager.AvProfileItem result = instance.getAvProfileItem(); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of getMetricOpByProfile method, of class AggregationProfileManager. + */ + @Test + public void testGetMetricOpByProfile() throws IOException { + System.out.println("getMetricOpByProfile"); + AggregationProfileManager instance = new AggregationProfileManager(); + instance.loadJson(new File(AggregationProfileManagerTest.class.getResource("/profiles/aggregations.json").getFile())); + + String expResult = "AND"; + String result = instance.getMetricOpByProfile(); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of retrieveGroupFunctions method, of class + * AggregationProfileManager. 
+ */ + @Test + public void testRetrieveGroupOperations() throws IOException { + System.out.println("retrieveGroupFunctions"); + AggregationProfileManager instance = new AggregationProfileManager(); + instance.loadJson(new File(AggregationProfileManagerTest.class.getResource("/profiles/aggregations.json").getFile())); + + HashMap expResult = new HashMap<>(); + expResult.put("compute", "OR"); + expResult.put("cloud", "OR"); + expResult.put("information", "OR"); + expResult.put("storage", "OR"); + + HashMap result = instance.retrieveGroupOperations(); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of retrieveGroupOperation method, of class + * AggregationProfileManager. + */ + @Test + public void testRetrieveGroupOperation() throws IOException { + System.out.println("retrieveGroupOperation"); + String group = "compute"; + AggregationProfileManager instance = new AggregationProfileManager(); + instance.loadJson(new File(AggregationProfileManagerTest.class.getResource("/profiles/aggregations.json").getFile())); + + String expResult = "OR"; + String result = instance.retrieveGroupOperation(group); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of retrieveProfileOperation method, of class + * AggregationProfileManager. + */ + @Test + public void testRetrieveProfileOperation() throws IOException { + System.out.println("retrieveProfileOperation"); + AggregationProfileManager instance = new AggregationProfileManager(); + instance.loadJson(new File(AggregationProfileManagerTest.class.getResource("/profiles/aggregations.json").getFile())); + + String expResult = "AND"; + String result = instance.retrieveProfileOperation(); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + +// /** +// * Test of loadJsonString method, of class AggregationProfileManager. +// */ +// @Test +// public void testLoadJsonString() throws Exception { +// System.out.println("loadJsonString"); +// List apsJson = null; +// AggregationProfileManager instance = new AggregationProfileManager(); +// instance.loadJsonString(apsJson); +// // TODO review the generated test code and remove the default call to fail. +// fail("The test case is a prototype."); +// } +// +// /** +// * Test of readJson method, of class AggregationProfileManager. +// */ +// @Test +// public void testReadJson() { +// System.out.println("readJson"); +// JsonElement j_element = null; +// AggregationProfileManager instance = new AggregationProfileManager(); +// instance.readJson(j_element); +// // TODO review the generated test code and remove the default call to fail. +// fail("The test case is a prototype."); +// } +// +// /** +// * Test of retrieveFunctionsByService method, of class AggregationProfileManager. +// */ +// @Test +// public void testRetrieveFunctionsByService() { +// System.out.println("retrieveFunctionsByService"); +// String service = ""; +// AggregationProfileManager instance = new AggregationProfileManager(); +// String expResult = ""; +// String result = instance.retrieveFunctionsByService(service); +// assertEquals(expResult, result); +// // TODO review the generated test code and remove the default call to fail. 
+// fail("The test case is a prototype."); +// } +} diff --git a/flink_jobs/ProfilesManager/src/test/java/profilesmanager/EndpointGroupManagerTest.java b/flink_jobs/ProfilesManager/src/test/java/profilesmanager/EndpointGroupManagerTest.java new file mode 100644 index 00000000..933d0e90 --- /dev/null +++ b/flink_jobs/ProfilesManager/src/test/java/profilesmanager/EndpointGroupManagerTest.java @@ -0,0 +1,339 @@ +/* + * To change this license header, choose License Headers in Project Properties. + * To change this template file, choose Tools | Templates + * and open the template in the editor. + */ +package profilesmanager; + +import argo.avro.GroupEndpoint; +import com.google.gson.JsonArray; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import com.google.gson.JsonParseException; +import com.google.gson.JsonParser; +import java.io.BufferedReader; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileReader; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.TreeMap; +import static junit.framework.Assert.assertNotNull; +import org.apache.commons.io.IOUtils; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; +import static org.junit.Assert.*; + + +public class EndpointGroupManagerTest { + + public EndpointGroupManagerTest() { + } + + @BeforeClass + public static void setUpClass() { + assertNotNull("Test file missing", EndpointGroupManagerTest.class.getResource("/profiles/topgroupendpoint.json")); + } + + @AfterClass + public static void tearDownClass() { + } + + @Before + public void setUp() { + } + + @After + public void tearDown() { + } + + /** + * Test of insert method, of class EndpointGroupManager. + */ + @Test + public void testInsert() throws IOException { + System.out.println("insert"); + String type = "test"; + String group = "test"; + String service = "test"; + String hostname = "test"; + HashMap tags = new HashMap(); + tags.put("scope", "test"); + EndpointGroupManager instance = new EndpointGroupManager(); + int expResult = 0; + int result = instance.insert(type, group, service, hostname, tags); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of checkEndpoint method, of class EndpointGroupManager. + */ + @Test + public void testCheckEndpoint() throws IOException { + System.out.println("checkEndpoint"); + String hostname = "bdii.marie.hellasgrid.gr"; + String service = "Top-BDII"; + EndpointGroupManager instance = new EndpointGroupManager(); + JsonElement jsonElement = loadJson(new File(EndpointGroupManagerTest.class.getResource("/profiles/topgroupendpoint.json").getFile())); + instance.loadGroupEndpointProfile(jsonElement.getAsJsonArray()); + + boolean expResult = true; + boolean result = instance.checkEndpoint(hostname, service); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of getGroup method, of class EndpointGroupManager. 
+ */ + @Test + public void testGetGroup() throws IOException { + System.out.println("getGroup"); + String type = "SERVICEGROUPS"; + String hostname = "bdii.marie.hellasgrid.gr"; + String service = "Top-BDII"; + + EndpointGroupManager instance = new EndpointGroupManager(); + JsonElement jsonElement = loadJson(new File(EndpointGroupManagerTest.class.getResource("/profiles/topgroupendpoint.json").getFile())); + instance.loadGroupEndpointProfile(jsonElement.getAsJsonArray()); + + ArrayList expResult = new ArrayList<>(); + expResult.add("SLA_TEST"); + ArrayList result = instance.getGroup(type, hostname, service); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of getInfo method, of class EndpointGroupManager. + */ + @Test + public void testGetInfo() throws IOException { + System.out.println("getInfo"); + String group = "SLA_TEST"; + String type = "SERVICEGROUPS"; + String hostname = "bdii.marie.hellasgrid.gr"; + String service = "Top-BDII"; + + EndpointGroupManager instance = new EndpointGroupManager(); + JsonElement jsonElement = loadJson(new File(EndpointGroupManagerTest.class.getResource("/profiles/topgroupendpoint.json").getFile())); + instance.loadGroupEndpointProfile(jsonElement.getAsJsonArray()); + + String expResult = ""; + String result = instance.getInfo(group, type, hostname, service); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of getGroupTags method, of class EndpointGroupManager. + */ + @Test + public void testGetGroupTags() throws IOException { + System.out.println("getGroupTags"); + String group = "SLA_TEST"; + String type = "SERVICEGROUPS"; + String hostname = "bdii.marie.hellasgrid.gr"; + String service = "Top-BDII"; + + EndpointGroupManager instance = new EndpointGroupManager(); + + JsonElement jsonElement = loadJson(new File(EndpointGroupManagerTest.class.getResource("/profiles/topgroupendpoint.json").getFile())); + instance.loadGroupEndpointProfile(jsonElement.getAsJsonArray()); + + HashMap expResult = new HashMap(); + expResult.put("monitored", "1"); + expResult.put("production", "1"); + expResult.put("scope", "EGI"); + HashMap result = instance.getGroupTags(group, type, hostname, service); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of count method, of class EndpointGroupManager. + */ + @Test + public void testCount() throws IOException { + System.out.println("count"); + EndpointGroupManager instance = new EndpointGroupManager(); + JsonElement jsonElement = loadJson(new File(EndpointGroupManagerTest.class.getResource("/profiles/topgroupendpoint.json").getFile())); + instance.loadGroupEndpointProfile(jsonElement.getAsJsonArray()); + + int expResult = 246; + int result = instance.count(); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of unfilter method, of class EndpointGroupManager. 
+ */ + @Test + public void testUnfilter() throws IOException { + System.out.println("unfilter"); + EndpointGroupManager instance = new EndpointGroupManager(); + JsonElement jsonElement = loadJson(new File(EndpointGroupManagerTest.class.getResource("/profiles/topgroupendpoint.json").getFile())); + instance.loadGroupEndpointProfile(jsonElement.getAsJsonArray()); + + instance.unfilter(); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of filter method, of class EndpointGroupManager. + */ + @Test + public void testFilter() throws IOException { + System.out.println("filter"); + TreeMap fTags = new TreeMap(); + fTags.put("monitored", "1"); + fTags.put("production", "1"); + fTags.put("scope", "EGI, wlcg, tier2, alice, FedCloud"); + EndpointGroupManager instance = new EndpointGroupManager(); + JsonElement jsonElement = loadJson(new File(EndpointGroupManagerTest.class.getResource("/profiles/topgroupendpoint.json").getFile())); + instance.loadGroupEndpointProfile(jsonElement.getAsJsonArray()); + HashMap tags1 = new HashMap(); + tags1.put("monitored", "1"); + tags1.put("production", "1"); + tags1.put("scope", "EGI, wlcg, tier2, alice, FedCloud"); + EndpointGroupManager.EndpointItem groupEndpoint1 = instance.new EndpointItem("SERVICEGROUPS", "EGI_WENMR_SLA", "org.openstack.nova", "openstack.bitp.kiev.ua", tags1); + EndpointGroupManager.EndpointItem groupEndpoint2 = instance.new EndpointItem("SERVICEGROUPS", "EGI_AoD_SLA", "org.openstack.nova", "openstack.bitp.kiev.ua", tags1); + EndpointGroupManager.EndpointItem groupEndpoint3 = instance.new EndpointItem("SERVICEGROUPS", "EGI_AoD_SLA", "eu.egi.cloud.vm-metadata.vmcatcher", "openstack.bitp.kiev.ua", tags1); + + //ArrayList expResult=instance.getList(); + ArrayList expResult=new ArrayList<>(); + expResult.add(groupEndpoint1); + expResult.add(groupEndpoint2); + expResult.add(groupEndpoint3); + +// Iterator iter=expResult.iterator(); +// while(iter.hasNext()){ +// if(iter.next().equals(groupEndpoint1)|| iter.next().equals(groupEndpoint2)|| iter.next().equals(groupEndpoint3)){ +// iter.remove(); +// } +// } + + instance.filter(fTags); + + ArrayList< EndpointGroupManager.EndpointItem> result = instance.getfList(); + + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + +// /** +// * Test of loadAvro method, of class EndpointGroupManager. +// */ +// @Test +// public void testLoadAvro() throws Exception { +// System.out.println("loadAvro"); +// File avroFile = null; +// EndpointGroupManager instance = new EndpointGroupManager(); +// instance.loadAvro(avroFile); +// // TODO review the generated test code and remove the default call to fail. +// fail("The test case is a prototype."); +// } + + /** + * Test of getList method, of class EndpointGroupManager. + */ +// @Test +// public void testGetList() { +// System.out.println("getList"); +// EndpointGroupManager instance = new EndpointGroupManager(); +// ArrayList expResult = null; +// ArrayList result = instance.getList(); +// assertEquals(expResult, result); +// // TODO review the generated test code and remove the default call to fail. +// fail("The test case is a prototype."); +// } + + /** + * Test of loadFromList method, of class EndpointGroupManager. 
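+     * loadFromList ingests GroupEndpoint records that were already parsed
+     * (here produced by readJson on the same topology fixture), as an
+     * alternative to reading the avro file directly.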
+ */ + @Test + public void testLoadFromList() throws IOException { + System.out.println("loadFromList"); + EndpointGroupManager instance = new EndpointGroupManager(); + JsonElement jsonElement = loadJson(new File(EndpointGroupManagerTest.class.getResource("/profiles/topgroupendpoint.json").getFile())); + + GroupEndpoint[] groupEndpoint=instance.readJson(jsonElement.getAsJsonArray()); + List egp = Arrays.asList(groupEndpoint); + instance.loadFromList(egp); + + // fail("The test case is a prototype."); + + } + + /** + * Test of loadGroupEndpointProfile method, of class EndpointGroupManager. + */ + @Test + public void testLoadGroupEndpointProfile() throws Exception { + System.out.println("loadGroupEndpointProfile"); + EndpointGroupManager instance = new EndpointGroupManager(); + JsonElement jsonElement = loadJson(new File(EndpointGroupManagerTest.class.getResource("/profiles/topgroupendpoint.json").getFile())); + + instance.loadGroupEndpointProfile(jsonElement.getAsJsonArray()); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + +// /** +// * Test of readJson method, of class EndpointGroupManager. +// */ +// @Test +// public void testReadJson() { +// System.out.println("readJson"); +// JsonArray jElement = null; +// EndpointGroupManager instance = new EndpointGroupManager(); +// GroupEndpoint[] expResult = null; +// GroupEndpoint[] result = instance.readJson(jElement); +// assertArrayEquals(expResult, result); +// // TODO review the generated test code and remove the default call to fail. +// fail("The test case is a prototype."); +// } + + private JsonElement loadJson(File jsonFile) throws IOException { + + BufferedReader br = null; + try { + br = new BufferedReader(new FileReader(jsonFile)); + + JsonParser json_parser = new JsonParser(); + JsonElement j_element = json_parser.parse(br); + JsonObject jRoot = j_element.getAsJsonObject(); + JsonArray jData = jRoot.get("data").getAsJsonArray(); +// JsonElement jItem = jData.get(0); + + return jData; + } catch (FileNotFoundException ex) { + // LOG.error("Could not open file:" + jsonFile.getName()); + throw ex; + + } catch (JsonParseException ex) { + // LOG.error("File is not valid json:" + jsonFile.getName()); + throw ex; + } finally { + // Close quietly without exceptions the buffered reader + IOUtils.closeQuietly(br); + } + + } +} diff --git a/flink_jobs/ProfilesManager/src/test/java/profilesmanager/GroupGroupManagerTest.java b/flink_jobs/ProfilesManager/src/test/java/profilesmanager/GroupGroupManagerTest.java new file mode 100644 index 00000000..a4e109d3 --- /dev/null +++ b/flink_jobs/ProfilesManager/src/test/java/profilesmanager/GroupGroupManagerTest.java @@ -0,0 +1,274 @@ +/* + * To change this license header, choose License Headers in Project Properties. + * To change this template file, choose Tools | Templates + * and open the template in the editor. 
+ */ +package profilesmanager; + +import argo.avro.GroupEndpoint; +import argo.avro.GroupGroup; +import com.google.gson.JsonArray; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import com.google.gson.JsonParseException; +import com.google.gson.JsonParser; +import java.io.BufferedReader; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileReader; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.TreeMap; +import static junit.framework.Assert.assertNotNull; +import org.apache.commons.io.IOUtils; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; +import static org.junit.Assert.*; + +/** + * + * @author cthermolia + */ +public class GroupGroupManagerTest { + + public GroupGroupManagerTest() { + } + + @BeforeClass + public static void setUpClass() { + assertNotNull("Test file missing", GroupGroupManagerTest.class.getResource("/profiles/topgroupgroup.json")); + } + + @AfterClass + public static void tearDownClass() { + } + + @Before + public void setUp() { + } + + @After + public void tearDown() { + } + + /** + * Test of insert method, of class GroupGroupManager. + */ + @Test + public void testInsert() { + System.out.println("insert"); + String type = "test"; + String group = "test"; + String subgroup = "test"; + HashMap tags = new HashMap<>(); + GroupGroupManager instance = new GroupGroupManager(); + + int expResult = 0; + int result = instance.insert(type, group, subgroup, tags); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of getGroupTags method, of class GroupGroupManager. + */ + @Test + public void testGetGroupTags() throws IOException { + System.out.println("getGroupTags"); + String type = "PROJECT"; + String subgroup = "SLA_TEST"; + GroupGroupManager instance = new GroupGroupManager(); + JsonElement jsonElement = loadJson(new File(GroupGroupManagerTest.class.getResource("/profiles/topgroupgroup.json").getFile())); + instance.loadGroupGroupProfile(jsonElement.getAsJsonArray()); + + HashMap expResult = new HashMap<>(); + expResult.put("monitored", "1"); + expResult.put("scope", "EGI, SLA"); + HashMap result = instance.getGroupTags(type, subgroup); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + //fail("The test case is a prototype."); + } + + /** + * Test of count method, of class GroupGroupManager. + */ + @Test + public void testCount() throws IOException { + System.out.println("count"); + GroupGroupManager instance = new GroupGroupManager(); + JsonElement jsonElement = loadJson(new File(GroupGroupManagerTest.class.getResource("/profiles/topgroupgroup.json").getFile())); + instance.loadGroupGroupProfile(jsonElement.getAsJsonArray()); + + int expResult = 31; + int result = instance.count(); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of getGroup method, of class GroupGroupManager. 
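+     * getGroup resolves the parent group of a subgroup for the given grouping
+     * type; the SLA_TEST subgroup is expected to roll up to the EGI project.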
+ */ + @Test + public void testGetGroup() throws IOException { + System.out.println("getGroup"); + String type = "PROJECT"; + String subgroup = "SLA_TEST"; + GroupGroupManager instance = new GroupGroupManager(); + JsonElement jsonElement = loadJson(new File(GroupGroupManagerTest.class.getResource("/profiles/topgroupgroup.json").getFile())); + instance.loadGroupGroupProfile(jsonElement.getAsJsonArray()); + + String expResult = "EGI"; + String result = instance.getGroup(type, subgroup); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of unfilter method, of class GroupGroupManager. + */ + @Test + public void testUnfilter() throws IOException { + System.out.println("unfilter"); + GroupGroupManager instance = new GroupGroupManager(); + JsonElement jsonElement = loadJson(new File(GroupGroupManagerTest.class.getResource("/profiles/topgroupgroup.json").getFile())); + instance.loadGroupGroupProfile(jsonElement.getAsJsonArray()); + + instance.unfilter(); + // TODO review the generated test code and remove the default call to fail. + //fail("The test case is a prototype."); + } + + /** + * Test of filter method, of class GroupGroupManager. + */ + @Test + public void testFilter() throws IOException { + System.out.println("filter"); + TreeMap fTags = new TreeMap<>(); + fTags.put("scope", "test"); + GroupGroupManager instance = new GroupGroupManager(); + JsonElement jsonElement = loadJson(new File(GroupGroupManagerTest.class.getResource("/profiles/topgroupgroup.json").getFile())); + instance.loadGroupGroupProfile(jsonElement.getAsJsonArray()); + + instance.filter(fTags); + + ArrayList expResult = new ArrayList<>(); + ArrayList result = instance.getfList(); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of checkSubGroup method, of class GroupGroupManager. + */ + @Test + public void testCheckSubGroup() throws IOException { + System.out.println("checkSubGroup"); + String subgroup = "EGI_WENMR_SLA"; + GroupGroupManager instance = new GroupGroupManager(); + JsonElement jsonElement = loadJson(new File(GroupGroupManagerTest.class.getResource("/profiles/topgroupgroup.json").getFile())); + instance.loadGroupGroupProfile(jsonElement.getAsJsonArray()); + + boolean expResult = true; + boolean result = instance.checkSubGroup(subgroup); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + //fail("The test case is a prototype."); + } + + /** + * Test of loadAvro method, of class GroupGroupManager. + */ +// @Test +// public void testLoadAvro() throws Exception { +// System.out.println("loadAvro"); +// File avroFile = null; +// GroupGroupManager instance = new GroupGroupManager(); +// instance.loadAvro(avroFile); +// // TODO review the generated test code and remove the default call to fail. +// //fail("The test case is a prototype."); +// } + /** + * Test of loadFromList method, of class GroupGroupManager. 
+ */
+    @Test
+    public void testLoadFromList() throws IOException {
+        System.out.println("loadFromList");
+        GroupGroupManager instance = new GroupGroupManager();
+        JsonElement jsonElement = loadJson(new File(GroupGroupManagerTest.class.getResource("/profiles/topgroupgroup.json").getFile()));
+
+        GroupGroup[] groupGroup = instance.readJson(jsonElement.getAsJsonArray());
+        List ggp = Arrays.asList(groupGroup);
+        instance.loadFromList(ggp);
+
+    }
+
+    /**
+     * Test of loadGroupGroupProfile method, of class GroupGroupManager.
+     */
+    @Test
+    public void testLoadGroupGroupProfile() throws Exception {
+        System.out.println("loadGroupGroupProfile");
+        GroupGroupManager instance = new GroupGroupManager();
+        JsonElement jsonElement = loadJson(new File(GroupGroupManagerTest.class.getResource("/profiles/topgroupgroup.json").getFile()));
+
+        instance.loadGroupGroupProfile(jsonElement.getAsJsonArray());
+
+        // TODO review the generated test code and remove the default call to fail.
+        // fail("The test case is a prototype.");
+    }
+
+    /**
+     * Test of readJson method, of class GroupGroupManager.
+     */
+//    @Test
+//    public void testReadJson() {
+//        System.out.println("readJson");
+//        JsonArray jElement = null;
+//        GroupGroupManager instance = new GroupGroupManager();
+//        GroupGroup[] expResult = null;
+//        GroupGroup[] result = instance.readJson(jElement);
+//        assertArrayEquals(expResult, result);
+//        // TODO review the generated test code and remove the default call to fail.
+//        fail("The test case is a prototype.");
+//    }
+
+    private JsonElement loadJson(File jsonFile) throws IOException {
+
+        BufferedReader br = null;
+        try {
+            br = new BufferedReader(new FileReader(jsonFile));
+
+            JsonParser json_parser = new JsonParser();
+            JsonElement j_element = json_parser.parse(br);
+            JsonObject jRoot = j_element.getAsJsonObject();
+            JsonArray jData = jRoot.get("data").getAsJsonArray();
+//            JsonElement jItem = jData.get(0);
+
+            return jData;
+        } catch (FileNotFoundException ex) {
+            // LOG.error("Could not open file:" + jsonFile.getName());
+            throw ex;
+
+        } catch (JsonParseException ex) {
+            // LOG.error("File is not valid json:" + jsonFile.getName());
+            throw ex;
+        } finally {
+            // Close quietly without exceptions the buffered reader
+            IOUtils.closeQuietly(br);
+        }
+
+    }
+
+}
diff --git a/flink_jobs/ProfilesManager/src/test/java/profilesmanager/MetricProfileManagerTest.java b/flink_jobs/ProfilesManager/src/test/java/profilesmanager/MetricProfileManagerTest.java
new file mode 100644
index 00000000..faca2949
--- /dev/null
+++ b/flink_jobs/ProfilesManager/src/test/java/profilesmanager/MetricProfileManagerTest.java
@@ -0,0 +1,379 @@
+/*
+ * To change this license header, choose License Headers in Project Properties.
+ * To change this template file, choose Tools | Templates
+ * and open the template in the editor.
+ */
+package profilesmanager;
+
+import argo.avro.MetricProfile;
+import com.google.gson.JsonArray;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+import com.google.gson.JsonParseException;
+import com.google.gson.JsonParser;
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import static junit.framework.Assert.assertNotNull;
+import org.apache.commons.io.IOUtils;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import static org.junit.Assert.*;
+
+/**
+ *
+ * @author cthermolia
+ */
+public class MetricProfileManagerTest {
+
+    public MetricProfileManagerTest() {
+    }
+
+    @BeforeClass
+    public static void setUpClass() {
+        assertNotNull("Test file missing", MetricProfileManagerTest.class.getResource("/profiles/metricprofile.avro"));
+    }
+
+    @AfterClass
+    public static void tearDownClass() {
+    }
+
+    @Before
+    public void setUp() {
+    }
+
+    @After
+    public void tearDown() {
+    }
+
+    /**
+     * Test of clear method, of class MetricProfileManager.
+     */
+    @Test
+    public void testClear() throws IOException {
+        System.out.println("clear");
+        MetricProfileManager instance = new MetricProfileManager();
+
+        instance.loadAvro(new File(MetricProfileManagerTest.class.getResource("/profiles/metricprofile.avro").getFile()));
+        instance.clear();
+        // TODO review the generated test code and remove the default call to fail.
+        // fail("The test case is a prototype.");
+    }
+
+    /**
+     * Test of indexInsertProfile method, of class MetricProfileManager.
+     */
+    @Test
+    public void testIndexInsertProfile() throws IOException {
+        System.out.println("indexInsertProfile");
+        String profile = "ARGO_MON";
+        MetricProfileManager instance = new MetricProfileManager();
+        // instance.loadAvro(new File(MetricProfileManagerTest.class.getResource("/profiles/metricprofile.avro").getFile()));
+
+        int expResult = 0;
+        int result = instance.indexInsertProfile(profile);
+        assertEquals(expResult, result);
+        // TODO review the generated test code and remove the default call to fail.
+        // fail("The test case is a prototype.");
+    }
+
+    /**
+     * Test of insert method, of class MetricProfileManager.
+     */
+    @Test
+    public void testInsert() {
+        System.out.println("insert");
+        String profile = "Test_Profile";
+        String service = "Test_Service";
+        String metric = "Test_Metric";
+        HashMap tags = new HashMap();
+        tags.put("scope", "test");
+        tags.put("comment", "to test");
+        MetricProfileManager instance = new MetricProfileManager();
+        instance.insert(profile, service, metric, tags);
+        // TODO review the generated test code and remove the default call to fail.
+        // fail("The test case is a prototype.");
+    }
+
+    /**
+     * Test of indexInsertService method, of class MetricProfileManager.
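+     * indexInsertService appears to return the index position of the service
+     * under the given profile; 0 is expected here because the index starts
+     * empty (no avro file is loaded).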
+ */ + @Test + public void testIndexInsertService() throws IOException { + System.out.println("indexInsertService"); + MetricProfileManager instance = new MetricProfileManager(); + // instance.loadAvro(new File(MetricProfileManagerTest.class.getResource("/profiles/metricprofile.avro").getFile())); + + String profile = "ARGO_MON"; + String service = "aai"; + int expResult = 0; + int result = instance.indexInsertService(profile, service); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of indexInsertMetric method, of class MetricProfileManager. + */ + @Test + public void testIndexInsertMetric() throws IOException { + System.out.println("indexInsertMetric"); + MetricProfileManager instance = new MetricProfileManager(); + // instance.loadAvro(new File(MetricProfileManagerTest.class.getResource("/profiles/metricprofile.avro").getFile())); + + String profile = "ARGO_MON"; + String service = "aai"; + String metric = "org.nagios.WebCheck"; + int expResult = 0; + int result = instance.indexInsertMetric(profile, service, metric); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of getProfileServices method, of class MetricProfileManager. + */ + @Test + public void testGetProfileServices() throws IOException { + System.out.println("getProfileServices"); + MetricProfileManager instance = new MetricProfileManager(); + instance.loadAvro(new File(MetricProfileManagerTest.class.getResource("/profiles/metricprofile.avro").getFile())); + + String profile = "ARGO_MON"; + ArrayList expResult = new ArrayList<>(); + expResult.add("nextcloud"); + expResult.add("aai"); + + ArrayList result = instance.getProfileServices(profile); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of getProfiles method, of class MetricProfileManager. + */ + @Test + public void testGetProfiles() throws IOException { + System.out.println("getProfiles"); + MetricProfileManager instance = new MetricProfileManager(); + instance.loadAvro(new File(MetricProfileManagerTest.class.getResource("/profiles/metricprofile.avro").getFile())); + + ArrayList expResult = new ArrayList<>(); + expResult.add("ARGO_MON"); + ArrayList result = instance.getProfiles(); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of getProfileServiceMetrics method, of class MetricProfileManager. + */ + @Test + public void testGetProfileServiceMetrics() throws IOException { + System.out.println("getProfileServiceMetrics"); + MetricProfileManager instance = new MetricProfileManager(); + instance.loadAvro(new File(MetricProfileManagerTest.class.getResource("/profiles/metricprofile.avro").getFile())); + + String profile = "ARGO_MON"; + String service = "aai"; + + ArrayList expResult = new ArrayList<>(); + expResult.add("org.nagios.WebCheck"); + expResult.add("argo.Keycloak-Check"); + + ArrayList result = instance.getProfileServiceMetrics(profile, service); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. 
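+        // Note: assertEquals on ArrayLists is order-sensitive, so the expected
+        // metric order must mirror the record order in the avro fixture.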
+ // fail("The test case is a prototype."); + } + + /** + * Test of checkProfileServiceMetric method, of class MetricProfileManager. + */ + @Test + public void testCheckProfileServiceMetric() throws IOException { + System.out.println("checkProfileServiceMetric"); + String profile = "ARGO_MON"; + String service = "aai"; + String metric = "org.nagios.WebCheck"; + MetricProfileManager instance = new MetricProfileManager(); + instance.loadAvro(new File(MetricProfileManagerTest.class.getResource("/profiles/metricprofile.avro").getFile())); + + boolean expResult = true; + boolean result = instance.checkProfileServiceMetric(profile, service, metric); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of loadAvro method, of class MetricProfileManager. + */ + @Test + public void testLoadAvro() throws Exception { + System.out.println("loadAvro"); + + MetricProfileManager instance = new MetricProfileManager(); + instance.loadAvro(new File(MetricProfileManagerTest.class.getResource("/profiles/metricprofile.avro").getFile())); + + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of loadFromList method, of class MetricProfileManager. + */ + @Test + public void testLoadFromList() throws IOException { + System.out.println("loadFromList"); + MetricProfileManager instance = new MetricProfileManager(); + JsonElement jsonElement=loadJson(new File(MetricProfileManagerTest.class.getResource("/profiles/metricprofile.json").getFile())); + MetricProfile[] metricProfile=instance.readJson(jsonElement); + List mps = Arrays.asList(metricProfile); + instance.loadFromList(mps); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of loadMetricProfile method, of class MetricProfileManager. + */ + @Test + public void testLoadMetricProfile() throws Exception { + System.out.println("loadMetricProfile"); + JsonElement jsonElement=loadJson(new File(MetricProfileManagerTest.class.getResource("/profiles/metricprofile.json").getFile())); + // MetricProfile[] metricProfile=instance.readJson(jsonElement); + + MetricProfileManager instance = new MetricProfileManager(); + instance.loadMetricProfile(jsonElement); + // // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of readJson method, of class MetricProfileManager. + */ +// @Test +// public void testReadJson() { +// System.out.println("readJson"); +// JsonElement jElement = null; +// MetricProfileManager instance = new MetricProfileManager(); +// MetricProfile[] expResult = null; +// MetricProfile[] result = instance.readJson(jElement); +// assertArrayEquals(expResult, result); +// // TODO review the generated test code and remove the default call to fail. +// fail("The test case is a prototype."); +// } + + /** + * Test of containsMetric method, of class MetricProfileManager. 
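+     * containsMetric apparently checks the flat service/metric pairs across
+     * all loaded profiles, without requiring a profile name.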
+ */ + @Test + public void testContainsMetric() throws IOException { + System.out.println("containsMetric"); + MetricProfileManager instance = new MetricProfileManager(); + instance.loadAvro(new File(MetricProfileManagerTest.class.getResource("/profiles/metricprofile.avro").getFile())); + + String service = "aai"; + String metric = "org.nagios.WebCheck"; + + boolean expResult = true; + boolean result = instance.containsMetric(service, metric); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + //fail("The test case is a prototype."); + } + + /** + * Test of getList method, of class MetricProfileManager. + */ +// @Test +// public void testGetList() { +// System.out.println("getList"); +// MetricProfileManager instance = new MetricProfileManager(); +// ArrayList expResult = null; +// ArrayList result = instance.getList(); +// assertEquals(expResult, result); +// // TODO review the generated test code and remove the default call to fail. +// fail("The test case is a prototype."); +// } + /** + * Test of setList method, of class MetricProfileManager. + */ +// @Test +// public void testSetList() { +// System.out.println("setList"); +// ArrayList list = null; +// MetricProfileManager instance = new MetricProfileManager(); +// instance.setList(list); +// // TODO review the generated test code and remove the default call to fail. +// fail("The test case is a prototype."); +// } + /** + * Test of getIndex method, of class MetricProfileManager. + */ +// @Test +// public void testGetIndex() { +// System.out.println("getIndex"); +// MetricProfileManager instance = new MetricProfileManager(); +// Map>> expResult = null; +// Map>> result = instance.getIndex(); +// assertEquals(expResult, result); +// // TODO review the generated test code and remove the default call to fail. +// fail("The test case is a prototype."); +// } +// +// /** +// * Test of setIndex method, of class MetricProfileManager. +// */ +// @Test +// public void testSetIndex() { +// System.out.println("setIndex"); +// Map>> index = null; +// MetricProfileManager instance = new MetricProfileManager(); +// instance.setIndex(index); +// // TODO review the generated test code and remove the default call to fail. 
+// fail("The test case is a prototype."); +// } + + private JsonElement loadJson(File jsonFile) throws IOException { + + BufferedReader br = null; + try { + br = new BufferedReader(new FileReader(jsonFile)); + + JsonParser json_parser = new JsonParser(); + JsonElement j_element = json_parser.parse(br); +// JsonObject jRoot = j_element.getAsJsonObject(); +// JsonArray jData = jRoot.get("data").getAsJsonArray(); +// JsonElement jItem = jData.get(0); + + return j_element; + } catch (FileNotFoundException ex) { + // LOG.error("Could not open file:" + jsonFile.getName()); + throw ex; + + } catch (JsonParseException ex) { + // LOG.error("File is not valid json:" + jsonFile.getName()); + throw ex; + } finally { + // Close quietly without exceptions the buffered reader + IOUtils.closeQuietly(br); + } + + } +} diff --git a/flink_jobs/Timelines/src/main/java/timelines/TimelineAggregator.java b/flink_jobs/Timelines/src/main/java/timelines/TimelineAggregator.java index 32eb8998..12be1dc8 100644 --- a/flink_jobs/Timelines/src/main/java/timelines/TimelineAggregator.java +++ b/flink_jobs/Timelines/src/main/java/timelines/TimelineAggregator.java @@ -12,10 +12,11 @@ import org.joda.time.format.DateTimeFormatter; /** - TImelineAggregator class implements an aggregator which is able to receive a list of different timelines - and concude into one timeline by aggregating all the timestamps and the statuses +* TimelineAggregator class implements an aggregator which is able to receive a list of different timelines +* and conclude into one timeline by aggregating all the timestamps and the statuses */ + public class TimelineAggregator { private Timeline output; diff --git a/flink_jobs/Timelines/src/main/java/timelines/Utils.java b/flink_jobs/Timelines/src/main/java/timelines/Utils.java index d6ee8f6a..cbddd3b7 100644 --- a/flink_jobs/Timelines/src/main/java/timelines/Utils.java +++ b/flink_jobs/Timelines/src/main/java/timelines/Utils.java @@ -5,14 +5,10 @@ */ package timelines; -import java.io.IOException; import java.text.ParseException; import java.text.SimpleDateFormat; -import java.util.ArrayList; import java.util.Calendar; import java.util.Date; -import java.util.HashMap; -import java.util.Iterator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.TimeZone; @@ -26,18 +22,18 @@ public class Utils { static Logger LOG = LoggerFactory.getLogger(Utils.class); - public static String convertDateToString(String format, DateTime date) throws ParseException { //String format = "yyyy-MM-dd'T'HH:mm:ss'Z'"; DateTimeFormatter dtf = DateTimeFormat.forPattern(format); String dateString = date.toString(dtf); return dateString; - } public static DateTime convertStringtoDate(String format, String dateStr) throws ParseException { + DateTimeFormatter formatter = DateTimeFormat.forPattern(format); + DateTime dt = formatter.parseDateTime(dateStr); return dt; @@ -55,8 +51,9 @@ public static DateTime createDate(String format, Date dateStr, int hour, int min newCalendar.set(Calendar.MINUTE, min); newCalendar.set(Calendar.SECOND, sec); newCalendar.set(Calendar.MILLISECOND, 0); - return new DateTime(newCalendar.getTime()); - } + + return new DateTime( newCalendar.getTime()); + } public static boolean isPreviousDate(String format, Date nowDate, Date firstDate) throws ParseException { // String format = "yyyy-MM-dd'T'HH:mm:ss'Z'"; @@ -76,7 +73,8 @@ public static boolean isPreviousDate(String format, Date nowDate, Date firstDate } } - public static DateTime createDate(String format, int year, int month, int day, 
int hour, int min, int sec) throws ParseException { + + public static DateTime createDate(String format, int year, int month, int day, int hour, int min, int sec) throws ParseException { // String format = "yyyy-MM-dd'T'HH:mm:ss'Z'"; SimpleDateFormat sdf = new SimpleDateFormat(format); @@ -90,8 +88,6 @@ public static DateTime createDate(String format, int year, int month, int day, i newCalendar.set(Calendar.MINUTE, min); newCalendar.set(Calendar.SECOND, sec); newCalendar.set(Calendar.MILLISECOND, 0); - - return new DateTime(newCalendar.getTime()); + return new DateTime(newCalendar.getTime()); } - } diff --git a/flink_jobs/Timelines/src/test/java/flink/jobs/timelines/TimelineAggregatorTest.java b/flink_jobs/Timelines/src/test/java/flink/jobs/timelines/TimelineAggregatorTest.java new file mode 100644 index 00000000..a989634b --- /dev/null +++ b/flink_jobs/Timelines/src/test/java/flink/jobs/timelines/TimelineAggregatorTest.java @@ -0,0 +1,501 @@ +/* + * To change this license header, choose License Headers in Project Properties. + * To change this template file, choose Tools | Templates + * and open the template in the editor. + */ +package flink.jobs.timelines; + + +import timelines.TimelineAggregator; +import timelines.Utils; +import timelines.Timeline; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.text.ParseException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.TreeMap; +import org.joda.time.DateTime; +import org.joda.time.LocalDate; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; +import static org.junit.Assert.*; + +/** + * + * @author cthermolia + */ +public class TimelineAggregatorTest { + + public TimelineAggregatorTest() { + } + + @BeforeClass + public static void setUpClass() { + } + + @AfterClass + public static void tearDownClass() { + } + + @Before + public void setUp() { + } + + @After + public void tearDown() { + } + + /** + * Test of clear method, of class TimelineAggregator. + */ + @Test + public void testClear() { + System.out.println("clear"); + TimelineAggregator instance = new TimelineAggregator(); + instance.clear(); + // TODO review the generated test code and remove the default call to fail. + + } + + /** + * Test of tsFromDate method, of class TimelineAggregator. + */ +// @Test +// public void testTsFromDate() { +// System.out.println("tsFromDate"); +// String date = ""; +// TimelineAggregator instance = new TimelineAggregator(); +// String expResult = ""; +// String result = instance.tsFromDate(date); +// assertEquals(expResult, result); +// // TODO review the generated test code and remove the default call to fail. +// +// } + /** + * Test of createTimeline method, of class TimelineAggregator. 
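+     * createTimeline(name, timestamp, prevState) is expected to register a
+     * new named input timeline seeded at the start of the day (00:00) with
+     * the previous day's state, which is what expRes reconstructs below.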
+ */ + @Test + public void testCreateTimeline() throws ParseException { + System.out.println("createTimeline"); + String name = "test"; + String timestamp = Utils.convertDateToString("yyyy-MM-dd'T'HH:mm:ss'Z'", Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 31, 12, 50, 0)); + int prevState = 0; + TimelineAggregator instance = new TimelineAggregator(); + instance.createTimeline(name, timestamp, prevState); + HashMap expRes = new HashMap<>(); + Timeline exptimeline = new Timeline(timestamp); + exptimeline.insert(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 31, 0, 0, 0), 0); + expRes.put(name, exptimeline); + + assertEquals(expRes.toString(), instance.getInputs().toString()); + // TODO review the generated test code and remove the default call to fail. + + } + + /** + * Test of insert method, of class TimelineAggregator. + */ + @Test + public void testInsert() throws ParseException { + System.out.println("insert"); + String name = "test"; + String timestamp = Utils.convertDateToString("yyyy-MM-dd'T'HH:mm:ss'Z'", Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 31, 12, 50, 0)); + + int status = 0; + TimelineAggregator instance = new TimelineAggregator(); + instance.insert(name, timestamp, status); + HashMap expRes = new HashMap<>(); + Timeline exptimeline = new Timeline(timestamp); + exptimeline.insert(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 31, 0, 0, 0), 0); + expRes.put(name, exptimeline); + + assertEquals(expRes.toString(), instance.getInputs().toString()); + + // TODO review the generated test code and remove the default call to fail. + //fail("The test case is a prototype."); + } + + /** + * Test of setFirst method, of class TimelineAggregator. + */ + @Test + public void testSetFirst() throws ParseException { + System.out.println("setFirst"); + String name = "test1"; + String timestamp = Utils.convertDateToString("yyyy-MM-dd'T'HH:mm:ss'Z'", Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 31, 12, 50, 0)); + String name2 = "test2"; + String timestamp2 = Utils.convertDateToString("yyyy-MM-dd'T'HH:mm:ss'Z'", Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 31, 21, 50, 0)); + HashMap map = new HashMap(); + map.put(name, new Timeline(timestamp)); + map.put(name2, new Timeline(timestamp2)); + + int status = 0; + TimelineAggregator instance = new TimelineAggregator(map); + instance.insert(name, timestamp, status); + instance.setFirst(name2, timestamp2, status); + // TODO review the generated test code and remove the default call to fail. + + HashMap expRes = new HashMap<>(); + Timeline exptimeline = new Timeline(timestamp); + exptimeline.insert(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 31, 0, 0, 0), 0); + Timeline exptimeline2 = new Timeline(timestamp); + + exptimeline2.insert(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 31, 12, 50, 0), 0); + expRes.put(name2, exptimeline); + expRes.put(name, exptimeline2); + + assertEquals(expRes, instance.getInputs()); + } + + /** + * Test of getDate method, of class TimelineAggregator. 
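+     * getDate returns the calendar date the aggregator operates on, here
+     * derived from the timestamp passed to the constructor.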
+ */ + @Test + public void testGetDate() throws ParseException { + System.out.println("getDate"); + String name = "test1"; + String timestamp = Utils.convertDateToString("yyyy-MM-dd'T'HH:mm:ss'Z'", Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 31, 12, 50, 0)); + int status = 0; + TimelineAggregator instance = new TimelineAggregator(timestamp); + instance.insert(name, timestamp, status); + + LocalDate expResult = Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 31, 12, 50, 0).toLocalDate(); + LocalDate result = instance.getDate(); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + //fail("The test case is a prototype."); + } + + /** + * Test of getSamples method, of class TimelineAggregator. + */ + @Test + public void testGetSamples() throws ParseException { + System.out.println("getSamples"); + String name = "test1"; + String timestamp = Utils.convertDateToString("yyyy-MM-dd'T'HH:mm:ss'Z'", Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 31, 12, 50, 0)); + String name2 = "test2"; + String timestamp2 = Utils.convertDateToString("yyyy-MM-dd'T'HH:mm:ss'Z'", Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 31, 21, 50, 0)); + HashMap map = new HashMap(); + map.put(name, new Timeline(timestamp)); + map.put(name2, new Timeline(timestamp2)); + + TimelineAggregator instance = new TimelineAggregator(map); + instance.aggregate(createTruthTable(), 0); + TreeMap expRes = new TreeMap<>(); + Timeline exptimeline = new Timeline(); + exptimeline.insert(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 31, 12, 50, 0), 0); + Set> expResult = expRes.entrySet(); + Set> result = instance.getSamples(); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + //fail("The test case is a prototype."); + } + + /** + * Test of clearAndSetDate method, of class TimelineAggregator. + */ +// @Test +// public void testClearAndSetDate() { +// System.out.println("clearAndSetDate"); +// String timestamp = ""; +// TimelineAggregator instance = new TimelineAggregator(); +// instance.clearAndSetDate(timestamp); +// // TODO review the generated test code and remove the default call to fail. +// +// } + /** + * Test of aggregate method, of class TimelineAggregator. + */ + @Test + public void testAggregate() throws IOException, FileNotFoundException, org.json.simple.parser.ParseException, ParseException { + System.out.println("aggregate"); + TimelineUtils timelineUtils = new TimelineUtils(); + TimelineUtils.TimelineJson timelinejson = timelineUtils.readTimelines(); + + ArrayList inputTimelines = timelinejson.getInputTimelines(); + int op = timelinejson.getOperation(); + int[][][] truthTable = timelinejson.getTruthTable(); + ArrayList states = timelinejson.getStates(); + + TimelineAggregator instance = new TimelineAggregator(); + + HashMap inputs = new HashMap(); + int counter = 1; + for (TreeMap map : inputTimelines) { + Timeline timeline = new Timeline(); + checkForMissingMidnightStatus(map, states.indexOf("MISSING")); + + timeline.insertDateTimeStamps(map); + inputs.put(timeline + "_" + counter, timeline); + counter++; + } + instance.setInputs(inputs); + + instance.aggregate(truthTable, op); + + Set> expRes = timelinejson.getOutputTimeline().entrySet(); + Set> res = instance.getOutput().getSamples(); + assertEquals(expRes, res); + // TODO review the generated test code and remove the default call to fail. 
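+        // The expected output comes from the same JSON fixture as the inputs,
+        // so this is a data-driven check of aggregate() over all input timelines.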
+ // fail("The test case is a prototype."); + } + + /** + * Test of getOutput method, of class TimelineAggregator. + */ + @Test + public void testGetOutput() { + System.out.println("getOutput"); + TimelineAggregator instance = new TimelineAggregator(); + Timeline expResult = null; + Timeline result = instance.getOutput(); + //assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of setOutput method, of class TimelineAggregator. + */ + @Test + public void testSetOutput() { + System.out.println("setOutput"); + Timeline output = null; + TimelineAggregator instance = new TimelineAggregator(); + instance.setOutput(output); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of getInputs method, of class TimelineAggregator. + */ + @Test + public void testGetInputs() { + System.out.println("getInputs"); + TimelineAggregator instance = new TimelineAggregator(); + Map expResult = null; + Map result = instance.getInputs(); +// assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of setInputs method, of class TimelineAggregator. + */ + @Test + public void testSetInputs() { + System.out.println("setInputs"); + Map inputs = null; + TimelineAggregator instance = new TimelineAggregator(); + instance.setInputs(inputs); + // TODO review the generated test code and remove the default call to fail. + //fail("The test case is a prototype."); + } + + private int[][][] createTruthTable() { + + int[][][] truthtable = new int[2][6][6]; + + truthtable[0][0][0] = 0; + truthtable[0][0][1] = 0; + truthtable[0][0][2] = 0; + truthtable[0][0][3] = 0; + truthtable[0][0][4] = 0; + truthtable[0][0][5] = 0; + + truthtable[0][1][0] = -1; + truthtable[0][1][1] = 1; + truthtable[0][1][2] = 1; + truthtable[0][1][3] = 1; + truthtable[0][1][4] = 1; + truthtable[0][1][5] = 1; + + truthtable[0][2][0] = -1; + truthtable[0][2][1] = -1; + truthtable[0][2][2] = 2; + truthtable[0][2][3] = 2; + truthtable[0][2][4] = 4; + truthtable[0][2][5] = 2; + + truthtable[0][3][0] = -1; + truthtable[0][3][1] = -1; + truthtable[0][3][2] = -1; + truthtable[0][3][3] = 3; + truthtable[0][3][4] = 4; + truthtable[0][3][5] = 5; + + truthtable[0][4][0] = -1; + truthtable[0][4][1] = -1; + truthtable[0][4][2] = -1; + truthtable[0][4][3] = -1; + truthtable[0][4][4] = 4; + truthtable[0][4][5] = 5; + + truthtable[0][5][0] = -1; + truthtable[0][5][1] = -1; + truthtable[0][5][2] = -1; + truthtable[0][5][3] = -1; + truthtable[0][5][4] = -1; + truthtable[0][5][5] = 5; + + truthtable[1][0][0] = 0; + truthtable[1][0][1] = 1; + truthtable[1][0][2] = 2; + truthtable[1][0][3] = 3; + truthtable[1][0][4] = 4; + truthtable[1][0][5] = 5; + + truthtable[1][1][0] = -1; + truthtable[1][1][1] = 1; + truthtable[1][1][2] = 2; + truthtable[1][1][3] = 3; + truthtable[1][1][4] = 4; + truthtable[1][1][5] = 5; + + truthtable[1][2][0] = -1; + truthtable[1][2][1] = -1; + truthtable[1][2][2] = 2; + truthtable[1][2][3] = 3; + truthtable[1][2][4] = 4; + truthtable[1][2][5] = 5; + + truthtable[1][3][0] = -1; + truthtable[1][3][1] = -1; + truthtable[1][3][2] = -1; + truthtable[1][3][3] = 3; + truthtable[1][3][4] = 4; + truthtable[1][3][5] = 5; + + truthtable[1][4][0] = -1; + truthtable[1][4][1] = -1; + truthtable[1][4][2] = -1; + truthtable[1][4][3] = -1; + 
truthtable[1][4][4] = 4; + truthtable[1][4][5] = 4; + + truthtable[1][5][0] = -1; + truthtable[1][5][1] = -1; + truthtable[1][5][2] = -1; + truthtable[1][5][3] = -1; + truthtable[1][5][4] = -1; + truthtable[1][5][5] = 5; + + return truthtable; + + } + + private void checkForMissingMidnightStatus(TreeMap map, int missingStatus) throws ParseException { + DateTime midnight = Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 0, 0, 00); + if (!map.containsKey(midnight)) { + map.put(midnight, missingStatus); + } + } + + private TreeMap buildTimeline1() throws ParseException { + System.out.println("timeline 1 :"); + TreeMap map = new TreeMap(); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 0, 0, 00), 0); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 0, 15, 50), 1); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 1, 15, 50), 1); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 2, 15, 50), 0); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 3, 15, 50), 1); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 15, 15, 50), 0); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 20, 16, 50), 1); + for (Entry entry : map.entrySet()) { + System.out.println(entry.getKey() + " --- " + entry.getValue()); + } + return map; + } + + private TreeMap buildTimeline2() throws ParseException { + System.out.println("timeline 2 :"); + TreeMap map = new TreeMap(); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 0, 0, 00), 0); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 0, 5, 00), 0); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 12, 0, 00), 1); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 14, 0, 00), 0); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 23, 05, 00), 1); + for (Entry entry : map.entrySet()) { + System.out.println(entry.getKey() + " --- " + entry.getValue()); + } + return map; + } + + private TreeMap buildTimeline3() throws ParseException { + System.out.println("timeline 3 :"); + TreeMap map = new TreeMap(); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 0, 0, 00), 0); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 0, 5, 00), 2); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 2, 0, 00), 1); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 3, 0, 00), 0); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 6, 00, 00), 0); + for (Entry entry : map.entrySet()) { + System.out.println(entry.getKey() + " --- " + entry.getValue()); + } + return map; + } + + private TreeMap buildTimeline4() throws ParseException { + System.out.println("timeline 4 :"); + TreeMap map = new TreeMap(); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 0, 0, 00), 0); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 20, 0, 00), 4); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 21, 0, 00), 0); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 22, 0, 00), 4); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 23, 00, 00), 0); + for (Entry entry : map.entrySet()) { + System.out.println(entry.getKey() + " --- " + entry.getValue()); + } + return map; + } + + private TreeMap buildAggregated() throws ParseException { + System.out.println("timeline all :"); + TreeMap map = new TreeMap(); + + 
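+        // Appears to be the hand-built expected merge of timelines 1-4: at each
+        // change point the value is the operator-combined status of all inputs.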
map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 0, 0, 00), 0); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 0, 5, 00), 2); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 2, 0, 00), 1); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 3, 0, 00), 0); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 3, 15, 50), 1); + + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 15, 15, 50), 0); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 20, 0, 00), 4); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 21, 0, 00), 1); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 22, 0, 00), 4); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 23, 00, 00), 1); + + for (Entry entry : map.entrySet()) { + System.out.println(entry.getKey() + " --- " + entry.getValue()); + } + return map; + } + + private TreeMap buildAggregated2() throws ParseException { + + TreeMap map = new TreeMap(); + + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 0, 0, 00), 0); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 0, 5, 00), 2); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 2, 0, 00), 1); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 3, 0, 00), 0); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 3, 15, 50), 1); + + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 15, 15, 50), 0); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 20, 0, 00), 4); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 21, 0, 00), 1); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 22, 0, 00), 4); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 23, 00, 00), 1); + return map; + } +} diff --git a/flink_jobs/Timelines/src/test/java/flink/jobs/timelines/TimelineTest.java b/flink_jobs/Timelines/src/test/java/flink/jobs/timelines/TimelineTest.java new file mode 100644 index 00000000..a3a5e26c --- /dev/null +++ b/flink_jobs/Timelines/src/test/java/flink/jobs/timelines/TimelineTest.java @@ -0,0 +1,574 @@ +/* + * To change this license header, choose License Headers in Project Properties. + * To change this template file, choose Tools | Templates + * and open the template in the editor. + */ +package flink.jobs.timelines; + +import timelines.Utils; +import timelines.Timeline; +import java.text.ParseException; +import java.util.ArrayList; +import java.util.Map; +import java.util.Set; +import java.util.TreeMap; +import java.util.TreeSet; +import org.joda.time.DateTime; +import org.joda.time.LocalDate; +import org.joda.time.format.DateTimeFormat; +import org.joda.time.format.DateTimeFormatter; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; +import static org.junit.Assert.*; + +/** + * + * @author cthermolia + */ +public class TimelineTest { + + public TimelineTest() { + } + + @BeforeClass + public static void setUpClass() { + } + + @AfterClass + public static void tearDownClass() { + } + + @Before + public void setUp() { + } + + @After + public void tearDown() { + } + + /** + * Test of get method, of class Timeline. 
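+     * Judging by the expected values, the String overload of get() requires
+     * an exact timestamp match and returns -1 when the key is absent, unlike
+     * the DateTime overload tested below, which resolves to the governing
+     * earlier sample.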
+ */ + @Test + public void testGet_String() throws ParseException { + System.out.println("get"); + + DateTimeFormatter dtf = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss'Z'"); + DateTime timestamp = Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 4, 31, 1); + String timestampStr = timestamp.toString(dtf); + Timeline instance = new Timeline(); + instance.insertDateTimeStamps(createTimestampList()); + int expResult = -1; + int result = instance.get(timestampStr); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + //fail("The test case is a prototype."); + } + + /** + * Test of get method, of class Timeline. + */ + @Test + public void testGet_DateTime() throws ParseException { + System.out.println("get"); + DateTime point = Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 4, 31, 1); + Timeline instance = new Timeline(); + instance.insertDateTimeStamps(createTimestampList()); + int expResult = 1; + int result = instance.get(point); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + //fail("The test case is a prototype."); + } + + /** + * Test of insert method, of class Timeline. + */ + @Test + public void testInsert_String_int() throws ParseException { + System.out.println("insert"); + DateTimeFormatter dtf = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss'Z'"); + + DateTime timestamp = Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 4, 31, 1); + String timestampStr = timestamp.toString(dtf); + + int status = 1; + Timeline instance = new Timeline(); + instance.insert(timestampStr, status); + // TODO review the generated test code and remove the default call to fail. + //fail("The test case is a prototype."); + } + + /** + * Test of insert method, of class Timeline. + */ + @Test + public void testInsert_DateTime_int() throws ParseException { + System.out.println("insert"); + DateTime timestamp = Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 4, 31, 1); + + int status = 0; + Timeline instance = new Timeline(); + instance.insert(timestamp, status); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of insertStringTimeStamps method, of class Timeline. + */ + @Test + public void testInsertStringTimeStamps() throws ParseException { + System.out.println("insertStringTimeStamps"); + TreeMap timestamps = createStringTimestampList(); + Timeline instance = new Timeline(); + instance.insertStringTimeStamps(timestamps); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of insertDateTimeStamps method, of class Timeline. + */ + @Test + public void testInsertDateTimeStamps() throws ParseException { + System.out.println("insertDateTimeStamps"); + TreeMap timestamps = createTimestampList(); + Timeline instance = new Timeline(); + instance.insertDateTimeStamps(timestamps); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of setFirst method, of class Timeline. 
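+     * setFirst is expected to pin the timeline's initial sample at the start
+     * of the day derived from the given timestamp.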
+ */ + @Test + public void testSetFirst() throws ParseException { + System.out.println("setFirst"); + DateTimeFormatter dtf = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss'Z'"); + + DateTime timestamp = Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 0, 0, 0, 1); + String timestampStr = timestamp.toString(dtf); + + int state = 0; + Timeline instance = new Timeline(); + instance.setFirst(timestampStr, state); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of clear method, of class Timeline. + */ + @Test + public void testClear() throws ParseException { + System.out.println("clear"); + Timeline instance = new Timeline(); + instance.insertDateTimeStamps(createTimestampList()); + instance.clear(); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of bulkInsert method, of class Timeline. + */ + @Test + public void testBulkInsert() throws ParseException { + System.out.println("bulkInsert"); + Set> samples = createTimestampList().entrySet(); + Timeline instance = new Timeline(); + instance.bulkInsert(samples); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of getSamples method, of class Timeline. + */ + @Test + public void testGetSamples() throws ParseException { + System.out.println("getSamples"); + Timeline instance = new Timeline(); + instance.insertDateTimeStamps(createTimestampList()); + Set> expResult = instance.getSamples(); + Set> result = instance.getSamples(); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + //fail("The test case is a prototype."); + } + + /** + * Test of getDate method, of class Timeline. + */ + @Test + public void testGetDate() throws ParseException { + System.out.println("getDate"); + DateTimeFormatter dtf = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss'Z'"); + + Timeline instance = new Timeline(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 1, 0, 0, 0, 0).toString(dtf)); + + LocalDate expResult = new LocalDate(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 1, 0, 0, 0, 0)); + LocalDate result = instance.getDate(); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + //fail("The test case is a prototype."); + } + + /** + * Test of getLength method, of class Timeline. + */ + @Test + public void testGetLength() throws ParseException { + System.out.println("getLength"); + Timeline instance = new Timeline(); + instance.insertDateTimeStamps(createTimestampList()); + int expResult = 2; + int result = instance.getLength(); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of isEmpty method, of class Timeline. + */ + @Test + public void testIsEmpty() throws ParseException { + System.out.println("isEmpty"); + Timeline instance = new Timeline(); + instance.insertDateTimeStamps(createTimestampList()); + + boolean expResult = false; + boolean result = instance.isEmpty(); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of optimize method, of class Timeline. 
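+     * optimize() is expected to collapse consecutive samples that share the
+     * same status, leaving only the status-change points.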
+ */ + @Test + public void testOptimize() throws ParseException { + System.out.println("optimize"); + Timeline instance = new Timeline(); + instance.insertDateTimeStamps(createTimestampList()); + instance.optimize(); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of getPoints method, of class Timeline. + */ + @Test + public void testGetPoints() throws ParseException { + System.out.println("getPoints"); + Timeline instance = new Timeline(); + TreeMap map = createTimestampList(); + instance.insertDateTimeStamps(map); + Set expResult=new TreeSet<>(); + expResult.add(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 5, 20, 15)); + expResult.add(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 0, 12, 23)); + + Set result = instance.getPoints(); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of aggregate method, of class Timeline. + */ + @Test + public void testAggregate() throws ParseException { + System.out.println("aggregate"); + Timeline second = new Timeline(); + second.insertDateTimeStamps(createSecondTimeline()); + int[][][] truthTable = createTruthTable(); + int op = 0; + Timeline instance = new Timeline(); + instance.aggregate(second, truthTable, op); + Set> expResult = createMerged().entrySet(); + Set> result = instance.getSamples(); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of buildStringTimeStampMap method, of class Timeline. + */ +// @Test +// public void testBuildStringTimeStampMap() { +// System.out.println("buildStringTimeStampMap"); +// ArrayList timestampList = null; +// OperationsParser op = null; +// Timeline instance = new Timeline(); +// TreeMap expResult = null; +// TreeMap result = instance.buildStringTimeStampMap(timestampList, op); +// assertEquals(expResult, result); +// // TODO review the generated test code and remove the default call to fail. +// //fail("The test case is a prototype."); +// } +// +// /** +// * Test of buildDateTimeStampMap method, of class Timeline. +// */ +// @Test +// public void testBuildDateTimeStampMap() { +// System.out.println("buildDateTimeStampMap"); +// ArrayList timestampList = null; +// OperationsParser op = null; +// Timeline instance = new Timeline(); +// TreeMap expResult = null; +// TreeMap result = instance.buildDateTimeStampMap(timestampList, op); +// assertEquals(expResult, result); +// // TODO review the generated test code and remove the default call to fail. +// fail("The test case is a prototype."); +// } +// +// /** +// * Test of removeTimeStamp method, of class Timeline. +// */ +// @Test +// public void testRemoveTimeStamp() { +// System.out.println("removeTimeStamp"); +// DateTime timestamp = null; +// Timeline instance = new Timeline(); +// instance.removeTimeStamp(timestamp); +// // TODO review the generated test code and remove the default call to fail. +// fail("The test case is a prototype."); +// } + /** + * Test of calcStatusChanges method, of class Timeline. 
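testAggregate merges a fresh Timeline with `second` under operation 0 of the truth table and expects the collapsed result of createMerged(). In sketch form, using the helper names of this test class:

Timeline base = new Timeline();
Timeline second = new Timeline();
second.insertDateTimeStamps(createSecondTimeline());
base.aggregate(second, createTruthTable(), 0);         // combine point-wise via the table
Set<Map.Entry<DateTime, Integer>> merged = base.getSamples();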
+ */
+ @Test
+ public void testCalcStatusChanges() throws ParseException {
+ System.out.println("calcStatusChanges");
+ Timeline instance = new Timeline();
+ instance.insertDateTimeStamps(createTimestampList());
+ int expResult = 1;
+ int result = instance.calcStatusChanges();
+ assertEquals(expResult, result);
+ // TODO review the generated test code and remove the default call to fail.
+ // fail("The test case is a prototype.");
+ }
+
+ /**
+ * Test of replacePreviousDateStatus method, of class Timeline.
+ */
+ @Test
+ public void testReplacePreviousDateStatus() throws ParseException {
+ System.out.println("replacePreviousDateStatus");
+ DateTime date = Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 0, 0, 0);
+ Timeline instance = new Timeline();
+ instance.insertDateTimeStamps(createTimestampList());
+ ArrayList<String> availStates = new ArrayList<>();
+ availStates.add("OK");
+ availStates.add("WARNING");
+ availStates.add("UNKNOWN");
+ availStates.add("MISSING");
+ availStates.add("CRITICAL");
+ availStates.add("DOWNTIME");
+
+ instance.replacePreviousDateStatus(date, availStates);
+ // TODO review the generated test code and remove the default call to fail.
+ // fail("The test case is a prototype.");
+ }
+
+ /**
+ * Test of hashCode method, of class Timeline.
+ */
+// @Test
+// public void testHashCode() {
+// System.out.println("hashCode");
+// Timeline instance = new Timeline();
+// int expResult = 0;
+// int result = instance.hashCode();
+// assertEquals(expResult, result);
+// // TODO review the generated test code and remove the default call to fail.
+// fail("The test case is a prototype.");
+// }
+ /**
+ * Test of equals method, of class Timeline.
+ */
+// @Test
+// public void testEquals() {
+// System.out.println("equals");
+// Object obj = null;
+// Timeline instance = new Timeline();
+// boolean expResult = false;
+// boolean result = instance.equals(obj);
+// assertEquals(expResult, result);
+// // TODO review the generated test code and remove the default call to fail.
+// fail("The test case is a prototype.");
+// }
+ /**
+ * Test of opInt method, of class Timeline.
+ */
+ @Test
+ public void testOpInt() throws ParseException {
+ System.out.println("opInt");
+ int[][][] truthTable = createTruthTable();
+ int op = 0;
+ int a = 0;
+ int b = 0;
+ Timeline instance = new Timeline();
+ instance.insertDateTimeStamps(createTimestampList());
+ int expResult = 0;
+ int result = instance.opInt(truthTable, op, a, b);
+ assertEquals(expResult, result);
+ // TODO review the generated test code and remove the default call to fail.
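The hand-built table below encodes each operation as truthTable[op][a][b] -> result, where a, b and the result are indices into the available-states list (0=OK, 1=WARNING, 2=UNKNOWN, 3=MISSING, 4=CRITICAL, 5=DOWNTIME in these tests) and -1 marks an unfilled cell. Both slices leave the a > b cells at -1, which suggests operand order is normalized before lookup. A hedged sketch of the constant-time lookup opInt performs:

int[][][] truthTable = createTruthTable();   // [operation][stateA][stateB] -> result state
Timeline t = new Timeline();
int combined = t.opInt(truthTable, 0, 0, 4); // slice 0: OK (0) combined with CRITICAL (4) -> 0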
+ // fail("The test case is a prototype."); + } + + private TreeMap createTimestampList() throws ParseException { + TreeMap map = new TreeMap<>(); + + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 0, 12, 23), 1); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 1, 5, 10), 1); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 5, 20, 15), 0); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 4, 31, 1), 1); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 3, 50, 4), 1); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 21, 3, 5), 0); + return map; +// + } + + private TreeMap createStringTimestampList() throws ParseException { + TreeMap map = new TreeMap<>(); + + DateTimeFormatter dtf = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss'Z'"); + + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 0, 12, 23).toString(dtf), 1); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 1, 5, 10).toString(dtf), 1); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 5, 20, 15).toString(dtf), 0); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 4, 31, 1).toString(dtf), 1); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 3, 50, 4).toString(dtf), 1); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 21, 3, 5).toString(dtf), 0); + return map; +// + } + + private TreeMap createSecondTimeline() throws ParseException { + TreeMap map = new TreeMap<>(); + + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 0, 15, 50), 1); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 2, 5, 10), 0); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 5, 20, 15), 0); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 4, 31, 1), 1); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 3, 50, 4), 1); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 21, 3, 5), 0); + return map; +// + } + + private int[][][] createTruthTable() { + + int[][][] truthtable = new int[2][6][6]; + + truthtable[0][0][0] = 0; + truthtable[0][0][1] = 0; + truthtable[0][0][2] = 0; + truthtable[0][0][3] = 0; + truthtable[0][0][4] = 0; + truthtable[0][0][5] = 0; + + truthtable[0][1][0] = -1; + truthtable[0][1][1] = 1; + truthtable[0][1][2] = 1; + truthtable[0][1][3] = 1; + truthtable[0][1][4] = 1; + truthtable[0][1][5] = 1; + + truthtable[0][2][0] = -1; + truthtable[0][2][1] = -1; + truthtable[0][2][2] = 2; + truthtable[0][2][3] = 2; + truthtable[0][2][4] = 4; + truthtable[0][2][5] = 2; + + truthtable[0][3][0] = -1; + truthtable[0][3][1] = -1; + truthtable[0][3][2] = -1; + truthtable[0][3][3] = 3; + truthtable[0][3][4] = 4; + truthtable[0][3][5] = 5; + + truthtable[0][4][0] = -1; + truthtable[0][4][1] = -1; + truthtable[0][4][2] = -1; + truthtable[0][4][3] = -1; + truthtable[0][4][4] = 4; + truthtable[0][4][5] = 5; + + truthtable[0][5][0] = -1; + truthtable[0][5][1] = -1; + truthtable[0][5][2] = -1; + truthtable[0][5][3] = -1; + truthtable[0][5][4] = -1; + truthtable[0][5][5] = 5; + + truthtable[1][0][0] = 0; + truthtable[1][0][1] = 1; + truthtable[1][0][2] = 2; + truthtable[1][0][3] = 3; + truthtable[1][0][4] = 4; + truthtable[1][0][5] = 5; + + truthtable[1][1][0] = -1; + truthtable[1][1][1] = 1; + truthtable[1][1][2] = 2; + truthtable[1][1][3] = 3; + truthtable[1][1][4] = 4; + truthtable[1][1][5] = 5; + + truthtable[1][2][0] = -1; + 
truthtable[1][2][1] = -1; + truthtable[1][2][2] = 2; + truthtable[1][2][3] = 3; + truthtable[1][2][4] = 4; + truthtable[1][2][5] = 5; + + truthtable[1][3][0] = -1; + truthtable[1][3][1] = -1; + truthtable[1][3][2] = -1; + truthtable[1][3][3] = 3; + truthtable[1][3][4] = 4; + truthtable[1][3][5] = 5; + + truthtable[1][4][0] = -1; + truthtable[1][4][1] = -1; + truthtable[1][4][2] = -1; + truthtable[1][4][3] = -1; + truthtable[1][4][4] = 4; + truthtable[1][4][5] = 4; + + truthtable[1][5][0] = -1; + truthtable[1][5][1] = -1; + truthtable[1][5][2] = -1; + truthtable[1][5][3] = -1; + truthtable[1][5][4] = -1; + truthtable[1][5][5] = 5; + + return truthtable; + + } + + private TreeMap createMerged() throws ParseException { + TreeMap map = new TreeMap(); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 0, 15, 50), 1); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 2, 5, 10), 0); + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 3, 50, 4), 1); + + map.put(Utils.createDate("yyyy-MM-dd'T'HH:mm:ss'Z'", 2021, 0, 15, 5, 20, 15), 0); + return map; + } + +} diff --git a/flink_jobs/Timelines/src/test/java/flink/jobs/timelines/TimelineUtils.java b/flink_jobs/Timelines/src/test/java/flink/jobs/timelines/TimelineUtils.java new file mode 100644 index 00000000..d065dd7e --- /dev/null +++ b/flink_jobs/Timelines/src/test/java/flink/jobs/timelines/TimelineUtils.java @@ -0,0 +1,188 @@ +/* + * To change this license header, choose License Headers in Project Properties. + * To change this template file, choose Tools | Templates + * and open the template in the editor. + */ +package flink.jobs.timelines; + +import timelines.Utils; +import java.io.FileNotFoundException; +import java.io.FileReader; +import java.io.IOException; +import java.net.URL; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Iterator; +import java.util.TreeMap; +import org.joda.time.DateTime; +import org.json.simple.JSONArray; +import org.json.simple.JSONObject; +import org.json.simple.parser.JSONParser; +import org.json.simple.parser.ParseException; + +/** + * + * @author cthermolia + */ +public class TimelineUtils { + + public TimelineJson readTimelines() throws IOException, FileNotFoundException, ParseException, java.text.ParseException { + + JSONObject timelineJSONObj = readJsonFromFile(TimelineUtils.class.getResource("/timelines/timeline.json").getFile()); + TimelineJson timelinejson = buildTimelines(timelineJSONObj); + return timelinejson; + } + + private JSONObject readJsonFromFile(String path) throws FileNotFoundException, IOException, org.json.simple.parser.ParseException { + JSONParser parser = new JSONParser(); + URL url = TimelineUtils.class.getResource(path); + Object obj = parser.parse(new FileReader(path)); + + JSONObject jsonObject = (JSONObject) obj; + + return jsonObject; + } + + public TimelineJson buildTimelines(JSONObject jsonObject) throws java.text.ParseException { + + ArrayList states = new ArrayList<>(); + ArrayList operations = new ArrayList<>(); + ArrayList inputTimelines = new ArrayList<>(); + TreeMap outputTimeline = new TreeMap(); + JSONObject dataObject = (JSONObject) jsonObject.get("data"); + + JSONArray stateList = (JSONArray) dataObject.get("available_states"); + JSONArray operationList = (JSONArray) dataObject.get("operations"); + String operation = (String) dataObject.get("operation"); + Iterator operationsIter = operationList.iterator(); + + while (operationsIter.hasNext()) { + String op = operationsIter.next(); + 
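buildTimelines below implies the shape of the fixture it reads; a hedged reconstruction of what /timelines/timeline.json (presumably under src/test/resources) looks like, with illustrative values only:

{
  "data": {
    "available_states": ["OK", "WARNING", "UNKNOWN", "MISSING", "CRITICAL", "DOWNTIME"],
    "operations": ["AND", "OR"],
    "operation": "AND",
    "inputs": [
      {"timestamps": [{"timestamp": "2021-01-15T00:12:23Z", "status": "OK"}]}
    ],
    "output": {"timestamps": [{"timestamp": "2021-01-15T00:12:23Z", "status": "OK"}]},
    "operation_truth_table": [
      {"name": "AND", "truth_table": [{"a": "OK", "b": "WARNING", "x": "WARNING"}]}
    ]
  }
}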
operations.add(op);
+ }
+ JSONArray inputs = (JSONArray) dataObject.get("inputs");
+ JSONObject output = (JSONObject) dataObject.get("output");
+ Iterator<String> stateIter = stateList.iterator();
+ while (stateIter.hasNext()) {
+ String state = stateIter.next();
+ states.add(state);
+ }
+
+ Iterator<JSONObject> inputIter = inputs.iterator();
+ while (inputIter.hasNext()) {
+ JSONObject timelineJSONObj = inputIter.next();
+ JSONArray timestampList = (JSONArray) timelineJSONObj.get("timestamps");
+ Iterator timeIter = timestampList.iterator();
+ TreeMap<DateTime, Integer> map = new TreeMap<>();
+ while (timeIter.hasNext()) {
+ JSONObject timestatus = (JSONObject) timeIter.next();
+ String time = (String) timestatus.get("timestamp");
+ String status = (String) timestatus.get("status");
+ map.put(Utils.convertStringtoDate("yyyy-MM-dd'T'HH:mm:ss'Z'", time), states.indexOf(status));
+ }
+ // add each input timeline once, after all of its timestamps have been parsed
+ inputTimelines.add(map);
+
+ }
+
+ JSONArray timestampList = (JSONArray) output.get("timestamps");
+ Iterator timeIter = timestampList.iterator();
+
+ while (timeIter.hasNext()) {
+ JSONObject timestatus = (JSONObject) timeIter.next();
+ String time = (String) timestatus.get("timestamp");
+ String status = (String) timestatus.get("status");
+ outputTimeline.put(Utils.convertStringtoDate("yyyy-MM-dd'T'HH:mm:ss'Z'", time), states.indexOf(status));
+
+ }
+
+ JSONArray opTruthTable = (JSONArray) dataObject.get("operation_truth_table");
+
+ Iterator opTruthTableIter = opTruthTable.iterator();
+ int[][][] table = new int[operations.size()][states.size()][states.size()];
+ for (int[][] surface : table) {
+ for (int[] line : surface) {
+ Arrays.fill(line, -1);
+ }
+ }
+ while (opTruthTableIter.hasNext()) {
+ JSONObject truthOperationObj = (JSONObject) opTruthTableIter.next();
+ String truthOp = (String) truthOperationObj.get("name");
+ int truthOpInt = operations.indexOf(truthOp);
+ JSONArray truthTable = (JSONArray) truthOperationObj.get("truth_table");
+ Iterator truthTableIter = truthTable.iterator();
+ while (truthTableIter.hasNext()) {
+
+ JSONObject truthTableObj = (JSONObject) truthTableIter.next();
+ String a = (String) truthTableObj.get("a");
+ int aInt = states.indexOf(a);
+ String b = (String) truthTableObj.get("b");
+ int bInt = states.indexOf(b);
+ String x = (String) truthTableObj.get("x"); // read the result state from "x", not "b"
+ int xInt = states.indexOf(x);
+ table[truthOpInt][aInt][bInt] = xInt;
+
+ }
+ }
+ TimelineJson timelineJsonObject = new TimelineJson(inputTimelines, outputTimeline, operations.indexOf(operation), table, states);
+ return timelineJsonObject;
+ }
+
+ public class TimelineJson {
+
+ private ArrayList<TreeMap<DateTime, Integer>> inputTimelines;
+ private TreeMap<DateTime, Integer> outputTimeline;
+ private Integer operation;
+ private int[][][] truthTable;
+ private ArrayList<String> states;
+
+ public TimelineJson(ArrayList<TreeMap<DateTime, Integer>> inputTimelines, TreeMap<DateTime, Integer> outputTimeline, Integer operation, int[][][] truthTable, ArrayList<String> states) {
+ this.inputTimelines = inputTimelines;
+ this.outputTimeline = outputTimeline;
+ this.operation = operation;
+ this.truthTable = truthTable;
+ this.states = states;
+ }
+
+ public ArrayList<TreeMap<DateTime, Integer>> getInputTimelines() {
+ return inputTimelines;
+ }
+
+ public void setInputTimelines(ArrayList<TreeMap<DateTime, Integer>> inputTimelines) {
+ this.inputTimelines = inputTimelines;
+ }
+
+ public TreeMap<DateTime, Integer> getOutputTimeline() {
+ return outputTimeline;
+ }
+
+ public void setOutputTimeline(TreeMap<DateTime, Integer> outputTimeline) {
+ this.outputTimeline = outputTimeline;
+ }
+
+ public Integer getOperation() {
+ return operation;
+ }
+
+ public void setOperation(Integer operation) {
+ this.operation = operation;
+ }
+
+ public int[][][] getTruthTable() { 
+ return truthTable; + } + + public void setTruthTable(int[][][] truthTable) { + this.truthTable = truthTable; + } + + public ArrayList getStates() { + return states; + } + + public void setStates(ArrayList states) { + this.states = states; + } + } + + +} diff --git a/flink_jobs/batch_status/src/main/java/ops/OpsManager.java b/flink_jobs/batch_status/src/main/java/ops/OpsManager.java index f0a262bc..a141cdf2 100644 --- a/flink_jobs/batch_status/src/main/java/ops/OpsManager.java +++ b/flink_jobs/batch_status/src/main/java/ops/OpsManager.java @@ -9,7 +9,6 @@ import java.util.Arrays; import java.util.HashMap; import java.util.List; -import java.util.Map.Entry; import org.apache.commons.io.IOUtils; import org.apache.log4j.Logger; @@ -246,6 +245,7 @@ public void loadJsonString(List opsJson) throws JsonParseException { JsonElement j_element = json_parser.parse(opsJson.get(0)); readJson(j_element); } + public int[][][] getTruthTable() { return truthTable; } diff --git a/flink_jobs/batch_status/src/main/java/sync/AggregationProfileManager.java b/flink_jobs/batch_status/src/main/java/sync/AggregationProfileManager.java index 11648956..daa4b8fd 100644 --- a/flink_jobs/batch_status/src/main/java/sync/AggregationProfileManager.java +++ b/flink_jobs/batch_status/src/main/java/sync/AggregationProfileManager.java @@ -1,7 +1,6 @@ package sync; - import org.apache.commons.io.IOUtils; import org.apache.log4j.Logger; @@ -14,329 +13,607 @@ import java.util.HashMap; import java.util.Iterator; import java.util.List; -import java.util.Map.Entry; import com.google.gson.JsonArray; import com.google.gson.JsonElement; import com.google.gson.JsonObject; import com.google.gson.JsonParseException; import com.google.gson.JsonParser; +import java.io.Serializable; +import java.util.Map.Entry; -public class AggregationProfileManager { - - private HashMap list; - private static final Logger LOG = Logger.getLogger(AggregationProfileManager.class.getName()); - - public AggregationProfileManager() { - - this.list = new HashMap(); - - } - - private class AvProfileItem { - - private String name; - private String namespace; - private String metricProfile; - private String metricOp; - private String groupType; - private String op; - - private HashMap groups; - private HashMap serviceIndex; - - AvProfileItem() { - this.groups = new HashMap(); - this.serviceIndex = new HashMap(); - } - - private class ServGroupItem { - - String op; - HashMap services; - - ServGroupItem(String op) { - this.op = op; - this.services = new HashMap(); - } - } - - // ServGroupItem Declaration Ends Here - - public void insertGroup(String group, String op) { - if (!this.groups.containsKey(group)) { - this.groups.put(group, new ServGroupItem(op)); - } - } - - public void insertService(String group, String service, String op) { - if (this.groups.containsKey(group)) { - this.groups.get(group).services.put(service, op); - this.serviceIndex.put(service, group); - } - } - } - - // AvProfileItem Declaration Ends Here - - public void clearProfiles() { - this.list.clear(); - } - - public String getTotalOp(String avProfile) { - if (this.list.containsKey(avProfile)) { - return this.list.get(avProfile).op; - } - - return ""; - } - - public String getMetricOp(String avProfile) { - if (this.list.containsKey(avProfile)) { - return this.list.get(avProfile).metricOp; - } - - return ""; - } - - // Return the available Group Names of a profile - public ArrayList getProfileGroups(String avProfile) { - - if (this.list.containsKey(avProfile)) { - ArrayList result = new 
ArrayList(); - Iterator groupIterator = this.list.get(avProfile).groups.keySet().iterator(); - - while (groupIterator.hasNext()) { - result.add(groupIterator.next()); - } - - return result; - } - - return null; - } - - // Return the available group operation - public String getProfileGroupOp(String avProfile, String groupName) { - if (this.list.containsKey(avProfile)) { - if (this.list.get(avProfile).groups.containsKey(groupName)) { - return this.list.get(avProfile).groups.get(groupName).op; - } - } - - return null; - } - - public ArrayList getProfileGroupServices(String avProfile, String groupName) { - if (this.list.containsKey(avProfile)) { - if (this.list.get(avProfile).groups.containsKey(groupName)) { - ArrayList result = new ArrayList(); - Iterator srvIterator = this.list.get(avProfile).groups.get(groupName).services.keySet() - .iterator(); - - while (srvIterator.hasNext()) { - result.add(srvIterator.next()); - } - - return result; - } - } - - return null; - } - - public String getProfileGroupServiceOp(String avProfile, String groupName, String service) { - - if (this.list.containsKey(avProfile)) { - if (this.list.get(avProfile).groups.containsKey(groupName)) { - if (this.list.get(avProfile).groups.get(groupName).services.containsKey(service)) { - return this.list.get(avProfile).groups.get(groupName).services.get(service); - } - } - } - - return null; - } - - public ArrayList getAvProfiles() { - - if (this.list.size() > 0) { - ArrayList result = new ArrayList(); - Iterator avpIterator = this.list.keySet().iterator(); - while (avpIterator.hasNext()) { - result.add(avpIterator.next()); - } - - return result; - - } - - return null; - } - - public String getProfileNamespace(String avProfile) { - - if (this.list.containsKey(avProfile)) { - return this.list.get(avProfile).namespace; - } - - return null; - } - - public String getProfileMetricProfile(String avProfile) { - - if (this.list.containsKey(avProfile)) { - return this.list.get(avProfile).metricProfile; - } - - return null; - } - - public String getProfileGroupType(String avProfile) { - - if (this.list.containsKey(avProfile)) { - return this.list.get(avProfile).groupType; - } - - return null; - } - - public String getGroupByService(String avProfile, String service) { - - if (this.list.containsKey(avProfile)) { - - return this.list.get(avProfile).serviceIndex.get(service); - - } - return null; +/** + * + * + * AggregationProfileManager class implements objects that store the information + * parsed from a json object containing aggregation profile data or loaded from + * an json file + * + * The AggregationProfileManager keeps info of a list of AvProfileItem objects + * each one representing the aggregation profile information that corresponds to + * a specific client's report + */ +public class AggregationProfileManager implements Serializable { + + private HashMap list; + private static final Logger LOG = Logger.getLogger(AggregationProfileManager.class.getName()); + private HashMap profileNameIdMap; + + public AggregationProfileManager() { + + this.list = new HashMap(); + this.profileNameIdMap = new HashMap(); + + } + + public class AvProfileItem implements Serializable { + + private String id; + private String name; + private String namespace; + private String metricProfile; + private String metricOp; + private String groupType; + private String op; + + private HashMap groups; // a map of function(group) as key and a ServGroupItem as value. 
ServGroupItem keeps info of each service, operation pair in the profile data + private HashMap serviceIndex; // a map of service as key and an ArrayList of functions(groups) as value that correspond to the specific service + private HashMap serviceOperations; //a map of service as key and operation as value +// private HashMap> serviceFunctions; + private HashMap groupOperations; //a map of function(group) as key and operation as value, corresponding to the specific group + + AvProfileItem() { + this.groups = new HashMap(); + this.serviceIndex = new HashMap(); + this.serviceOperations = new HashMap(); + this.groupOperations = new HashMap(); + } + + public class ServGroupItem implements Serializable { + + String op; + HashMap services; + + ServGroupItem(String op) { + this.op = op; + this.services = new HashMap(); + } + } + + /** + * creates a ServGroupItem object and keeps in a map the pair of group, + * ServGroupItem + * + * @param group, a function e.g "compute" + * @param op , the operation that corresponds to the function e.g "AND" + */ + public void insertGroup(String group, String op) { + if (!this.groups.containsKey(group)) { + this.groups.put(group, new ServGroupItem(op)); + } + } + + /** + * keeps in a map for each function the pair of service, operation + * + * @param group , a function e.g "compute" + * @param service , a service e.g "ARC-CE" + * @param op , an operation e.g "AND" + */ + public void insertService(String group, String service, String op) { + if (this.groups.containsKey(group)) { + this.groups.get(group).services.put(service, op); + + this.serviceIndex.put(service, group); + } + } + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getNamespace() { + return namespace; + } + + public void setNamespace(String namespace) { + this.namespace = namespace; + } + + public String getMetricProfile() { + return metricProfile; + } + + public void setMetricProfile(String metricProfile) { + this.metricProfile = metricProfile; + } + + public String getMetricOp() { + return metricOp; + } + + public void setMetricOp(String metricOp) { + this.metricOp = metricOp; + } + + public String getGroupType() { + return groupType; + } + + public void setGroupType(String groupType) { + this.groupType = groupType; + } + + public String getOp() { + return op; + } + + public void setOp(String op) { + this.op = op; + } + + public HashMap getGroups() { + return groups; + } + + public void setGroups(HashMap groups) { + this.groups = groups; + } + + public HashMap getServiceIndex() { + return serviceIndex; + } + + public void setServiceIndex(HashMap serviceIndex) { + this.serviceIndex = serviceIndex; + } + + public HashMap getServiceOperations() { + return serviceOperations; + } + + public void setServiceOperations(HashMap serviceOperations) { + this.serviceOperations = serviceOperations; + } + +// public HashMap> getServiceFunctions() { +// return serviceFunctions; +// } +// +// public void setServiceFunctions(HashMap> serviceFunctions) { +// this.serviceFunctions = serviceFunctions; +// } + public HashMap getGroupOperations() { + return groupOperations; + } + + public void setGroupOperationss(HashMap groupOperations) { + this.groupOperations = groupOperations; + } + + } + + /** + * clears the list containing the AvProfileItem + */ + public void clearProfiles() { + this.list.clear(); + } + + /** + * returns the profile 
operation as exists in the profile data field + * profile_operation + * + * @param avProfile , the name of the aggregation profile name + * @return , the profile operation + */ + public String getTotalOp(String avProfile) { + if (this.list.containsKey(avProfile)) { + return this.list.get(avProfile).op; + } + + return ""; + } + + /** + * returns the metric operation as exists in the profile data field + * metric_operation returns the metric operation as exists in the profile + * data field metric_operation + * + * @param avProfile + * @return + */ + public String getMetricOp(String avProfile) { + if (this.list.containsKey(avProfile)) { + return this.list.get(avProfile).metricOp; + } + + return ""; + } + + /** + * Return the available Group Names of a profile + * + * @param avProfile , the profile's name + * @return , a list with the groups (functions) + */ + // Return the available Group Names of a profile + public ArrayList getProfileGroups(String avProfile) { + + if (this.list.containsKey(avProfile)) { + ArrayList result = new ArrayList(); + Iterator groupIterator = this.list.get(avProfile).groups.keySet().iterator(); + + while (groupIterator.hasNext()) { + result.add(groupIterator.next()); + } + + return result; + } + + return null; + } + + /** + * Return the available group operation + * + * @param avProfile , the profile's name + * @param groupName , the group + * @return the operation corresponding to the group + */ + public String getProfileGroupOp(String avProfile, String groupName) { + if (this.list.containsKey(avProfile)) { + if (this.list.get(avProfile).groups.containsKey(groupName)) { + return this.list.get(avProfile).groups.get(groupName).op; + } + } + + return null; + } + + /** + * Return the available services for the group + * + * @param avProfile , the profile's name + * @param groupName , the group + * @return a list containing the services corresponding to the group + */ + public ArrayList getProfileGroupServices(String avProfile, String groupName) { + if (this.list.containsKey(avProfile)) { + if (this.list.get(avProfile).groups.containsKey(groupName)) { + ArrayList result = new ArrayList(); + Iterator srvIterator = this.list.get(avProfile).groups.get(groupName).services.keySet() + .iterator(); + + while (srvIterator.hasNext()) { + result.add(srvIterator.next()); + } + + return result; + } + } + + return null; + } + + public String getProfileGroupServiceOp(String avProfile, String groupName, String service) { + + if (this.list.containsKey(avProfile)) { + if (this.list.get(avProfile).groups.containsKey(groupName)) { + if (this.list.get(avProfile).groups.get(groupName).services.containsKey(service)) { + return this.list.get(avProfile).groups.get(groupName).services.get(service); + } + } + } + + return null; + } - } + public ArrayList getAvProfiles() { - public boolean checkService(String avProfile, String service) { + if (this.list.size() > 0) { + ArrayList result = new ArrayList(); + Iterator avpIterator = this.list.keySet().iterator(); + while (avpIterator.hasNext()) { + result.add(avpIterator.next()); + } - if (this.list.containsKey(avProfile)) { + return result; - if (this.list.get(avProfile).serviceIndex.containsKey(service)) { - return true; - } + } - } - return false; + return null; + } - } + public String getProfileNamespace(String avProfile) { - public void loadJson(File jsonFile) throws IOException { + if (this.list.containsKey(avProfile)) { + return this.list.get(avProfile).namespace; + } - BufferedReader br = null; - try { + return null; + } - br = new 
BufferedReader(new FileReader(jsonFile)); + public String getProfileMetricProfile(String avProfile) { - JsonParser jsonParser = new JsonParser(); - JsonElement jRootElement = jsonParser.parse(br); - JsonObject jRootObj = jRootElement.getAsJsonObject(); + if (this.list.containsKey(avProfile)) { + return this.list.get(avProfile).metricProfile; + } - JsonArray apGroups = jRootObj.getAsJsonArray("groups"); + return null; + } - // Create new entry for this availability profile - AvProfileItem tmpAvp = new AvProfileItem(); + public String getProfileGroupType(String avProfile) { - tmpAvp.name = jRootObj.get("name").getAsString(); - tmpAvp.namespace = jRootObj.get("namespace").getAsString(); - tmpAvp.metricProfile = jRootObj.get("metric_profile").getAsJsonObject().get("name").getAsString(); - tmpAvp.metricOp = jRootObj.get("metric_operation").getAsString(); - tmpAvp.groupType = jRootObj.get("endpoint_group").getAsString(); - tmpAvp.op = jRootObj.get("profile_operation").getAsString(); + if (this.list.containsKey(avProfile)) { + return this.list.get(avProfile).groupType; + } - for ( JsonElement item : apGroups) { - // service name - JsonObject itemObj = item.getAsJsonObject(); - String itemName = itemObj.get("name").getAsString(); - String itemOp = itemObj.get("operation").getAsString(); - JsonArray itemServices = itemObj.get("services").getAsJsonArray(); - tmpAvp.insertGroup(itemName, itemOp); + return null; + } + + public String getGroupByService(String avProfile, String service) { + + if (this.list.containsKey(avProfile)) { + + return this.list.get(avProfile).serviceIndex.get(service); - for (JsonElement subItem : itemServices) { - JsonObject subObj = subItem.getAsJsonObject(); - String serviceName = subObj.get("name").getAsString(); - String serviceOp = subObj.get("operation").getAsString(); - tmpAvp.insertService(itemName, serviceName,serviceOp); - } + } + return null; - } + } - // Add profile to the list - this.list.put(tmpAvp.name, tmpAvp); + public boolean checkService(String avProfile, String service) { - } catch (FileNotFoundException ex) { - LOG.error("Could not open file:" + jsonFile.getName()); - throw ex; + if (this.list.containsKey(avProfile)) { - } catch (JsonParseException ex) { - LOG.error("File is not valid json:" + jsonFile.getName()); - throw ex; - } finally { - // Close quietly without exceptions the buffered reader - IOUtils.closeQuietly(br); - } + if (this.list.get(avProfile).serviceIndex.containsKey(service)) { + return true; + } - } - - public void loadJsonString(List apsJson) throws IOException { + } + return false; - - try { + } - + public void loadJson(File jsonFile) throws IOException { - JsonParser jsonParser = new JsonParser(); - JsonElement jRootElement = jsonParser.parse(apsJson.get(0)); - JsonObject jRootObj = jRootElement.getAsJsonObject(); + BufferedReader br = null; + try { + br = new BufferedReader(new FileReader(jsonFile)); - // Create new entry for this availability profile - AvProfileItem tmpAvp = new AvProfileItem(); + JsonParser json_parser = new JsonParser(); + JsonElement j_element = json_parser.parse(br); +// JsonObject jRoot = j_element.getAsJsonObject(); +// JsonArray jData = jRoot.get("data").getAsJsonArray(); +// JsonElement jItem = jData.get(0); + readJson(j_element); + } catch (FileNotFoundException ex) { + LOG.error("Could not open file:" + jsonFile.getName()); + throw ex; - JsonArray apGroups = jRootObj.getAsJsonArray("groups"); + } catch (JsonParseException ex) { + LOG.error("File is not valid json:" + jsonFile.getName()); + throw ex; + } 
finally { + // Close quietly without exceptions the buffered reader + IOUtils.closeQuietly(br); + } - tmpAvp.name = jRootObj.get("name").getAsString(); - tmpAvp.namespace = jRootObj.get("namespace").getAsString(); - tmpAvp.metricProfile = jRootObj.get("metric_profile").getAsJsonObject().get("name").getAsString(); - tmpAvp.metricOp = jRootObj.get("metric_operation").getAsString(); - tmpAvp.groupType = jRootObj.get("endpoint_group").getAsString(); - tmpAvp.op = jRootObj.get("profile_operation").getAsString(); + } - for ( JsonElement item : apGroups) { - // service name - JsonObject itemObj = item.getAsJsonObject(); - String itemName = itemObj.get("name").getAsString(); - String itemOp = itemObj.get("operation").getAsString(); - JsonArray itemServices = itemObj.get("services").getAsJsonArray(); - tmpAvp.insertGroup(itemName, itemOp); + public void loadJsonString(List apsJson) throws IOException { - for (JsonElement subItem : itemServices) { - JsonObject subObj = subItem.getAsJsonObject(); - String serviceName = subObj.get("name").getAsString(); - String serviceOp = subObj.get("operation").getAsString(); - tmpAvp.insertService(itemName, serviceName,serviceOp); - } + try { - } + JsonParser jsonParser = new JsonParser(); + JsonElement jRootElement = jsonParser.parse(apsJson.get(0)); + readJson(jRootElement); - // Add profile to the list - this.list.put(tmpAvp.name, tmpAvp); + } catch (JsonParseException ex) { + LOG.error("Contents are not valid json"); + throw ex; + } - + } - } catch (JsonParseException ex) { - LOG.error("Contents are not valid json"); - throw ex; - } - - } - - - + /** + * reads from a JsonElement and stores the necessary information to the + * OperationsManager object fields + * + * @param j_element , a JsonElement containing the operations profiles data + */ + public void readJson(JsonElement j_element) { + JsonObject jRootObj = j_element.getAsJsonObject(); + + // Create new entry for this availability profile + AvProfileItem tmpAvp = new AvProfileItem(); + + JsonArray apGroups = jRootObj.getAsJsonArray("groups"); + tmpAvp.id = jRootObj.get("id").getAsString(); + tmpAvp.name = jRootObj.get("name").getAsString(); + tmpAvp.namespace = jRootObj.get("namespace").getAsString(); + tmpAvp.metricProfile = jRootObj.get("metric_profile").getAsJsonObject().get("name").getAsString(); + tmpAvp.metricOp = jRootObj.get("metric_operation").getAsString(); + tmpAvp.groupType = jRootObj.get("endpoint_group").getAsString(); + tmpAvp.op = jRootObj.get("profile_operation").getAsString(); + + for (JsonElement item : apGroups) { + // service name + JsonObject itemObj = item.getAsJsonObject(); + String itemName = itemObj.get("name").getAsString(); + String itemOp = itemObj.get("operation").getAsString(); + tmpAvp.groupOperations.put(itemName, itemOp); + JsonArray itemServices = itemObj.get("services").getAsJsonArray(); + tmpAvp.insertGroup(itemName, itemOp); + + for (JsonElement subItem : itemServices) { + JsonObject subObj = subItem.getAsJsonObject(); + String serviceName = subObj.get("name").getAsString(); + String serviceOp = subObj.get("operation").getAsString(); + tmpAvp.insertService(itemName, serviceName, serviceOp); + } + + } + buildExtraData(tmpAvp); + + // Add profile to the list + this.list.put(tmpAvp.name, tmpAvp); + this.profileNameIdMap.put(tmpAvp.id, tmpAvp.name); + + } + + /** + * populates extra information by processing the parsed aggregation profile + * data + * + * @param item, the AvProfileItem created while parsing the aggregation + * profile data input + */ + private void 
buildExtraData(AvProfileItem item) {
+
+ HashMap<String, String> groupOperations = new HashMap<>();
+
+ HashMap<String, String> serviceOperations = new HashMap<>();
+ HashMap<String, ArrayList<String>> serviceFunctions = new HashMap<>();
+
+ for (String group : item.groups.keySet()) {
+ AvProfileItem.ServGroupItem servGroupItem = item.groups.get(group);
+
+ if (!groupOperations.containsKey(group)) { // record the group's operation the first time it is seen
+ groupOperations.put(group, servGroupItem.op);
+ }
+
+ for (Entry<String, String> entry : servGroupItem.services.entrySet()) {
+ serviceOperations.put(entry.getKey(), entry.getValue());
+
+ ArrayList<String> functions = new ArrayList<>();
+ if (serviceFunctions.containsKey(entry.getKey())) {
+ functions = serviceFunctions.get(entry.getKey());
+ }
+ functions.add(group);
+ serviceFunctions.put(entry.getKey(), functions);
+ }
+ }
+ item.serviceOperations = serviceOperations;
+ // item.serviceFunctions = serviceFunctions;
+ }
+
+ /**
+ * Return the function (group) that corresponds to the service input
+ *
+ * @param service a service
+ * @return the function corresponding to the service
+ */
+ public String retrieveFunctionsByService(String service) {
+
+ AvProfileItem item = getAvProfileItem();
+ return item.serviceIndex.get(service);
+ }
+
+ /**
+ * Return a map containing all the pairs of service, operation that appear
+ * in the aggregation profile data
+ *
+ * @return the map containing the service, operation pairs
+ */
+ public HashMap<String, String> retrieveServiceOperations() {
+
+ AvProfileItem item = getAvProfileItem();
+ return item.serviceOperations;
+ }
+
+ /**
+ * Return a map containing all the pairs of service, group (function) that
+ * appear in the aggregation profile data
+ *
+ * @return a map containing service, group (function) pairs
+ */
+ public HashMap<String, String> retrieveServiceFunctions() {
+
+ AvProfileItem item = getAvProfileItem();
+ return item.serviceIndex;
+ }
+
+ /**
+ * Checks the service, group (function) pairs for the existence of the
+ * service input
+ *
+ * @param service a service
+ * @return true if the service exists in the map, false otherwise
+ */
+ public boolean checkServiceExistance(String service) {
+ AvProfileItem item = getAvProfileItem();
+
+ return item.serviceIndex.keySet().contains(service);
+ }
+
+ public AvProfileItem getAvProfileItem() {
+ String name = this.list.keySet().iterator().next();
+ return this.list.get(name);
+
+ }
+
+ /**
+ * Return the profile's metric operation as it exists in the aggregation
+ * profile data field metric_operation
+ *
+ * @return the profile's metric operation
+ */
+ public String getMetricOpByProfile() {
+ AvProfileItem item = getAvProfileItem();
+
+ return item.metricOp;
+
+ }
+
+ /**
+ * Return a map containing all the pairs of group (function), operation
+ * that appear in the aggregation profile data
+ *
+ * @return the map containing all the pairs of group (function), operation
+ */
+ public HashMap<String, String> retrieveGroupOperations() {
+
+ AvProfileItem item = getAvProfileItem();
+
+ return item.groupOperations;
+ }
+
+ /**
+ * Return the operation that corresponds to the group input
+ *
+ * @param group a group (function)
+ * @return the operation corresponding to the group
+ */
+ public String retrieveGroupOperation(String group) {
+ AvProfileItem item = getAvProfileItem();
+
+ return item.groups.get(group).op;
+ }
+
+ /**
+ * Return the profile's profile_operation
+ *
+ * @return the profile's operation
+ */
+ public String retrieveProfileOperation() {
+ AvProfileItem item = getAvProfileItem();
+
+ return item.op;
+ }
 }
-
diff --git a/flink_jobs/batch_status/src/main/java/sync/MetricProfileManager.java 
b/flink_jobs/batch_status/src/main/java/sync/MetricProfileManager.java index 12ea8143..56432f3a 100644 --- a/flink_jobs/batch_status/src/main/java/sync/MetricProfileManager.java +++ b/flink_jobs/batch_status/src/main/java/sync/MetricProfileManager.java @@ -22,244 +22,250 @@ public class MetricProfileManager { - private static final Logger LOG = Logger.getLogger(MetricProfileManager.class.getName()); - - private ArrayList list; - private Map>> index; - - private class ProfileItem { - String profile; // Name of the profile - String service; // Name of the service type - String metric; // Name of the metric - HashMap tags; // Tag list - - public ProfileItem() { - // Initializations - this.profile = ""; - this.service = ""; - this.metric = ""; - this.tags = new HashMap(); - } - - public ProfileItem(String profile, String service, String metric, HashMap tags) { - this.profile = profile; - this.service = service; - this.metric = metric; - this.tags = tags; - } - } - - public MetricProfileManager() { - this.list = new ArrayList(); - this.index = new HashMap>>(); - } - - // Clear all profile data (both list and indexes) - public void clear() { - this.list = new ArrayList(); - this.index = new HashMap>>(); - } - - // Indexed List Functions - public int indexInsertProfile(String profile) { - if (!index.containsKey(profile)) { - index.put(profile, new HashMap>()); - return 0; - } - return -1; - } - - public void insert(String profile, String service, String metric, HashMap tags) { - ProfileItem tmpProfile = new ProfileItem(profile, service, metric, tags); - this.list.add(tmpProfile); - this.indexInsertMetric(profile, service, metric); - } - - public int indexInsertService(String profile, String service) { - if (index.containsKey(profile)) { - if (index.get(profile).containsKey(service)) { - return -1; - } else { - index.get(profile).put(service, new ArrayList()); - return 0; - } - - } - - index.put(profile, new HashMap>()); - index.get(profile).put(service, new ArrayList()); - return 0; - - } - - public int indexInsertMetric(String profile, String service, String metric) { - if (index.containsKey(profile)) { - if (index.get(profile).containsKey(service)) { - if (index.get(profile).get(service).contains(metric)) { - // Metric exists so no insertion - return -1; - } - // Metric doesn't exist and must be added - index.get(profile).get(service).add(metric); - return 0; - } else { - // Create the service and the metric - index.get(profile).put(service, new ArrayList()); - index.get(profile).get(service).add(metric); - return 0; - } - - } - // No profile - service - metric so add them all - index.put(profile, new HashMap>()); - index.get(profile).put(service, new ArrayList()); - index.get(profile).get(service).add(metric); - return 0; - - } - - // Getter Functions - - public ArrayList getProfileServices(String profile) { - if (index.containsKey(profile)) { - ArrayList ans = new ArrayList(); - ans.addAll(index.get(profile).keySet()); - return ans; - } - return null; - - } - - public ArrayList getProfiles() { - if (index.size() > 0) { - ArrayList ans = new ArrayList(); - ans.addAll(index.keySet()); - return ans; - } - return null; - } - - public ArrayList getProfileServiceMetrics(String profile, String service) { - if (index.containsKey(profile)) { - if (index.get(profile).containsKey(service)) { - return index.get(profile).get(service); - } - } - return null; - } - - public boolean checkProfileServiceMetric(String profile, String service, String metric) { - if (index.containsKey(profile)) { - if 
(index.get(profile).containsKey(service)) { - if (index.get(profile).get(service).contains(metric)) - return true; - } - } - - return false; - } - - /** - * Loads metric profile information from an avro file - *

- * This method loads metric profile information contained in an .avro file
- * with specific avro schema.
- *
- * <p>
- * The following fields are expected to be found in each avro row:
- * <ol>
- * <li>profile: string</li>
- * <li>service: string</li>
- * <li>metric: string</li>
- * <li>[optional] tags: hashmap (contains a map of arbitrary key values)</li>
- * </ol>
- * - * @param avroFile - * a File object of the avro file that will be opened - * @throws IOException - * if there is an error during opening of the avro file - */ - @SuppressWarnings("unchecked") - public void loadAvro(File avroFile) throws IOException { - - // Prepare Avro File Readers - DatumReader datumReader = new GenericDatumReader(); - DataFileReader dataFileReader = null; - try { - dataFileReader = new DataFileReader(avroFile, datumReader); - - // Grab avro schema - Schema avroSchema = dataFileReader.getSchema(); - - // Generate 1st level generic record reader (rows) - GenericRecord avroRow = new GenericData.Record(avroSchema); - - // For all rows in file repeat - while (dataFileReader.hasNext()) { - // read the row - avroRow = dataFileReader.next(avroRow); - HashMap tagMap = new HashMap(); - - // Generate 2nd level generic record reader (tags) - - HashMap tags = (HashMap) (avroRow.get("tags")); - - if (tags != null) { - for (Utf8 item : tags.keySet()) { - tagMap.put(item.toString(), String.valueOf(tags.get(item))); - } - } - - // Grab 1st level mandatory fields - String profile = avroRow.get("profile").toString(); - String service = avroRow.get("service").toString(); - String metric = avroRow.get("metric").toString(); - - // Insert data to list - this.insert(profile, service, metric, tagMap); - - } // end of avro rows - - dataFileReader.close(); - - } catch (IOException ex) { - LOG.error("Could not open avro file:" + avroFile.getName()); - throw ex; - } finally { - // Close quietly without exceptions the buffered reader - IOUtils.closeQuietly(dataFileReader); - } - - } - - /** - * Loads metric profile information from a list of MetricProfile objects - * - */ - @SuppressWarnings("unchecked") - public void loadFromList( List mps) { - - // For each metric profile object in list - for (MetricProfile item : mps){ - String profile = item.getProfile(); - String service = item.getService(); - String metric = item.getMetric(); - HashMap tagMap = new HashMap(); - HashMap tags = (HashMap) item.getTags(); - - if (tags != null) { - for (String key : tags.keySet()) { - tagMap.put(key, tags.get(key)); - } - } - - // Insert data to list - this.insert(profile, service, metric, tagMap); - } - - - } - + private static final Logger LOG = Logger.getLogger(MetricProfileManager.class.getName()); + + private ArrayList list; + private Map>> index; + + private class ProfileItem { + + String profile; // Name of the profile + String service; // Name of the service type + String metric; // Name of the metric + HashMap tags; // Tag list + + public ProfileItem() { + // Initializations + this.profile = ""; + this.service = ""; + this.metric = ""; + this.tags = new HashMap(); + } + + public ProfileItem(String profile, String service, String metric, HashMap tags) { + this.profile = profile; + this.service = service; + this.metric = metric; + this.tags = tags; + } + } + + public MetricProfileManager() { + this.list = new ArrayList(); + this.index = new HashMap>>(); + } + + // Clear all profile data (both list and indexes) + public void clear() { + this.list = new ArrayList(); + this.index = new HashMap>>(); + } + + // Indexed List Functions + public int indexInsertProfile(String profile) { + if (!index.containsKey(profile)) { + index.put(profile, new HashMap>()); + return 0; + } + return -1; + } + + public void insert(String profile, String service, String metric, HashMap tags) { + ProfileItem tmpProfile = new ProfileItem(profile, service, metric, tags); + this.list.add(tmpProfile); + 
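+ // Hedged usage sketch (profile/service/metric names hypothetical): insert()
+ // appends to the flat list and, through indexInsertMetric below, keeps the
+ // profile -> service -> metric index that the getter methods query:
+ //   MetricProfileManager mpm = new MetricProfileManager();
+ //   mpm.insert("ARGO_MON", "CREAM-CE", "emi.cream.CREAMCE-JobSubmit", new HashMap<String, String>());
+ //   mpm.checkProfileServiceMetric("ARGO_MON", "CREAM-CE", "emi.cream.CREAMCE-JobSubmit"); // true
+ //   mpm.getProfileServiceMetrics("ARGO_MON", "CREAM-CE"); // [emi.cream.CREAMCE-JobSubmit]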
this.indexInsertMetric(profile, service, metric); + } + + public int indexInsertService(String profile, String service) { + if (index.containsKey(profile)) { + if (index.get(profile).containsKey(service)) { + return -1; + } else { + index.get(profile).put(service, new ArrayList()); + return 0; + } + + } + + index.put(profile, new HashMap>()); + index.get(profile).put(service, new ArrayList()); + return 0; + + } + + public int indexInsertMetric(String profile, String service, String metric) { + if (index.containsKey(profile)) { + if (index.get(profile).containsKey(service)) { + if (index.get(profile).get(service).contains(metric)) { + // Metric exists so no insertion + return -1; + } + // Metric doesn't exist and must be added + index.get(profile).get(service).add(metric); + return 0; + } else { + // Create the service and the metric + index.get(profile).put(service, new ArrayList()); + index.get(profile).get(service).add(metric); + return 0; + } + + } + // No profile - service - metric so add them all + index.put(profile, new HashMap>()); + index.get(profile).put(service, new ArrayList()); + index.get(profile).get(service).add(metric); + return 0; + + } + + // Getter Functions + public ArrayList getProfileServices(String profile) { + if (index.containsKey(profile)) { + ArrayList ans = new ArrayList(); + ans.addAll(index.get(profile).keySet()); + return ans; + } + return null; + + } + + public ArrayList getProfiles() { + if (index.size() > 0) { + ArrayList ans = new ArrayList(); + ans.addAll(index.keySet()); + return ans; + } + return null; + } + + public ArrayList getProfileServiceMetrics(String profile, String service) { + if (index.containsKey(profile)) { + if (index.get(profile).containsKey(service)) { + return index.get(profile).get(service); + } + } + return null; + } + + public boolean checkProfileServiceMetric(String profile, String service, String metric) { + if (index.containsKey(profile)) { + if (index.get(profile).containsKey(service)) { + if (index.get(profile).get(service).contains(metric)) { + return true; + } + } + } + + return false; + } + + /** + * Loads metric profile information from an avro file + *

+ * This method loads metric profile information contained in an .avro file
+ * with specific avro schema.
+ *
+ * <p>
+ * The following fields are expected to be found in each avro row:
+ * <ol>
+ * <li>profile: string</li>
+ * <li>service: string</li>
+ * <li>metric: string</li>
+ * <li>[optional] tags: hashmap (contains a map of arbitrary key values)</li>
+ * </ol>
+ *
+ * @param avroFile a File object of the avro file that will be opened
+ * @throws IOException if there is an error during opening of the avro file
+ */
+ @SuppressWarnings("unchecked")
+ public void loadAvro(File avroFile) throws IOException {
+
+ // Prepare Avro File Readers
+ DatumReader<GenericRecord> datumReader = new GenericDatumReader<GenericRecord>();
+ DataFileReader<GenericRecord> dataFileReader = null;
+ try {
+ dataFileReader = new DataFileReader<GenericRecord>(avroFile, datumReader);
+
+ // Grab avro schema
+ Schema avroSchema = dataFileReader.getSchema();
+
+ // Generate 1st level generic record reader (rows)
+ GenericRecord avroRow = new GenericData.Record(avroSchema);
+
+ // For all rows in file repeat
+ while (dataFileReader.hasNext()) {
+ // read the row
+ avroRow = dataFileReader.next(avroRow);
+ HashMap<String, String> tagMap = new HashMap<String, String>();
+
+ // Generate 2nd level generic record reader (tags)
+ HashMap<Utf8, String> tags = (HashMap<Utf8, String>) (avroRow.get("tags"));
+
+ if (tags != null) {
+ for (Utf8 item : tags.keySet()) {
+ tagMap.put(item.toString(), String.valueOf(tags.get(item)));
+ }
+ }
+
+ // Grab 1st level mandatory fields
+ String profile = avroRow.get("profile").toString();
+ String service = avroRow.get("service").toString();
+ String metric = avroRow.get("metric").toString();
+
+ // Insert data to list
+ this.insert(profile, service, metric, tagMap);
+
+ } // end of avro rows
+
+ dataFileReader.close();
+
+ } catch (IOException ex) {
+ LOG.error("Could not open avro file:" + avroFile.getName());
+ throw ex;
+ } finally {
+ // Close quietly without exceptions the avro file reader
+ IOUtils.closeQuietly(dataFileReader);
+ }
+
+ }
+
+ /**
+ * Loads metric profile information from a list of MetricProfile objects
+ *
+ */
+ @SuppressWarnings("unchecked")
+ public void loadFromList(List<MetricProfile> mps) {
+
+ // For each metric profile object in list
+ for (MetricProfile item : mps) {
+ String profile = item.getProfile();
+ String service = item.getService();
+ String metric = item.getMetric();
+ HashMap<String, String> tagMap = new HashMap<String, String>();
+ HashMap<String, String> tags = (HashMap<String, String>) item.getTags();
+
+ if (tags != null) {
+ for (String key : tags.keySet()) {
+ tagMap.put(key, tags.get(key));
+ }
+ }
+
+ // Insert data to list
+ this.insert(profile, service, metric, tagMap);
+ }
+
+ }
+
+ public boolean containsMetric(String service, String metric) {
+
+ for (ProfileItem profIt : list) {
+ // compare the item's service and metric fields, not the item itself
+ if (profIt.service.equals(service) && profIt.metric.equals(metric)) {
+ return true;
+ }
+ }
+ return false;
+ }
 }
diff --git a/flink_jobs/batch_status/src/test/java/argo/batch/PickDataPointsTest.java b/flink_jobs/batch_status/src/test/java/argo/batch/PickDataPointsTest.java
index db8e19a1..d7439655 100644
--- a/flink_jobs/batch_status/src/test/java/argo/batch/PickDataPointsTest.java
+++ b/flink_jobs/batch_status/src/test/java/argo/batch/PickDataPointsTest.java
@@ -7,7 +7,6 @@
 import java.io.FileReader;
 import java.net.URL;
-import java.nio.file.Paths;
 import java.util.List;
 import org.apache.flink.api.java.DataSet;
@@ -22,9 +21,6 @@
 import argo.avro.MetricData;
 import argo.avro.MetricProfile;
 import junit.framework.Assert;
-import ops.ConfigManager;
-import ops.OpsManager;
-import ops.ThresholdManager;
 import ops.ThresholdManagerTest;
 import sync.AggregationProfileManager;
 import sync.EndpointGroupManager;
diff --git a/flink_jobs/status_trends/avro/group_endpoint.avsc b/flink_jobs/status_trends/avro/group_endpoint.avsc
new file mode 100644
index 00000000..4ed8818e
--- /dev/null
+++ b/flink_jobs/status_trends/avro/group_endpoint.avsc
@@ -0,0 +1,14 @@
+{"namespace": "argo.avro",
+ "type": "record",
+ "name": "GroupEndpoint", 
+ "fields": [ + {"name": "type", "type": "string"}, + {"name": "group", "type": "string"}, + {"name": "service", "type": "string"}, + {"name": "hostname", "type": "string"}, + {"name": "tags", "type" : ["null", { "name" : "Tags", + "type" : "map", + "values" : "string" + }] + }] +} diff --git a/flink_jobs/status_trends/avro/group_group.avsc b/flink_jobs/status_trends/avro/group_group.avsc new file mode 100644 index 00000000..f23f439d --- /dev/null +++ b/flink_jobs/status_trends/avro/group_group.avsc @@ -0,0 +1,14 @@ +{"namespace": "argo.avro", + "type": "record", + "name": "GroupGroup", + "fields": [ + {"name": "type", "type": "string"}, + {"name": "group", "type": "string"}, + {"name": "subgroup", "type": "string"}, + {"name": "tags", "type" : ["null", { "name" : "Tags", + "type" : "map", + "values" : "string" + }] + }] +} + diff --git a/flink_jobs/status_trends/avro/metric_data.avsc b/flink_jobs/status_trends/avro/metric_data.avsc new file mode 100644 index 00000000..ff3d7a56 --- /dev/null +++ b/flink_jobs/status_trends/avro/metric_data.avsc @@ -0,0 +1,18 @@ +{"namespace": "argo.avro", + "type": "record", + "name": "MetricData", + "fields": [ + {"name": "timestamp", "type": "string"}, + {"name": "service", "type": "string"}, + {"name": "hostname", "type": "string"}, + {"name": "metric", "type": "string"}, + {"name": "status", "type": "string"}, + {"name": "monitoring_host", "type": ["null", "string"]}, + {"name": "summary", "type": ["null", "string"]}, + {"name": "message", "type": ["null", "string"]}, + {"name": "tags", "type" : ["null", {"name" : "Tags", + "type" : "map", + "values" : ["null", "string"] + }] + }] +} diff --git a/flink_jobs/status_trends/avro/metric_profile.avsc b/flink_jobs/status_trends/avro/metric_profile.avsc new file mode 100644 index 00000000..df6eb2a8 --- /dev/null +++ b/flink_jobs/status_trends/avro/metric_profile.avsc @@ -0,0 +1,13 @@ +{"namespace": "argo.avro", + "type": "record", + "name": "MetricProfile", + "fields": [ + {"name": "profile", "type": "string"}, + {"name": "service", "type": "string"}, + {"name": "metric", "type": "string"}, + {"name": "tags", "type" : ["null", {"name" : "Tags", + "type" : "map", + "values" : "string" + }] + }] +} diff --git a/flink_jobs/status_trends/src/main/java/argo/avro/GroupEndpoint.java b/flink_jobs/status_trends/src/main/java/argo/avro/GroupEndpoint.java new file mode 100644 index 00000000..2386b1d2 --- /dev/null +++ b/flink_jobs/status_trends/src/main/java/argo/avro/GroupEndpoint.java @@ -0,0 +1,336 @@ +/** + * Autogenerated by Avro + * + * DO NOT EDIT DIRECTLY + */ +package argo.avro; +@SuppressWarnings("all") +@org.apache.avro.specific.AvroGenerated +public class GroupEndpoint extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord { + public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"GroupEndpoint\",\"namespace\":\"argo.avro\",\"fields\":[{\"name\":\"type\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"group\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"service\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"hostname\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"tags\",\"type\":[\"null\",{\"type\":\"map\",\"values\":{\"type\":\"string\",\"avro.java.string\":\"String\"},\"avro.java.string\":\"String\"}]}]}"); + public static org.apache.avro.Schema getClassSchema() { return 
SCHEMA$; } + @Deprecated public java.lang.String type; + @Deprecated public java.lang.String group; + @Deprecated public java.lang.String service; + @Deprecated public java.lang.String hostname; + @Deprecated public java.util.Map tags; + + /** + * Default constructor. + */ + public GroupEndpoint() {} + + /** + * All-args constructor. + */ + public GroupEndpoint(java.lang.String type, java.lang.String group, java.lang.String service, java.lang.String hostname, java.util.Map tags) { + this.type = type; + this.group = group; + this.service = service; + this.hostname = hostname; + this.tags = tags; + } + + public org.apache.avro.Schema getSchema() { return SCHEMA$; } + // Used by DatumWriter. Applications should not call. + public java.lang.Object get(int field$) { + switch (field$) { + case 0: return type; + case 1: return group; + case 2: return service; + case 3: return hostname; + case 4: return tags; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + // Used by DatumReader. Applications should not call. + @SuppressWarnings(value="unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: type = (java.lang.String)value$; break; + case 1: group = (java.lang.String)value$; break; + case 2: service = (java.lang.String)value$; break; + case 3: hostname = (java.lang.String)value$; break; + case 4: tags = (java.util.Map)value$; break; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + + /** + * Gets the value of the 'type' field. + */ + public java.lang.String getType() { + return type; + } + + /** + * Sets the value of the 'type' field. + * @param value the value to set. + */ + public void setType(java.lang.String value) { + this.type = value; + } + + /** + * Gets the value of the 'group' field. + */ + public java.lang.String getGroup() { + return group; + } + + /** + * Sets the value of the 'group' field. + * @param value the value to set. + */ + public void setGroup(java.lang.String value) { + this.group = value; + } + + /** + * Gets the value of the 'service' field. + */ + public java.lang.String getService() { + return service; + } + + /** + * Sets the value of the 'service' field. + * @param value the value to set. + */ + public void setService(java.lang.String value) { + this.service = value; + } + + /** + * Gets the value of the 'hostname' field. + */ + public java.lang.String getHostname() { + return hostname; + } + + /** + * Sets the value of the 'hostname' field. + * @param value the value to set. + */ + public void setHostname(java.lang.String value) { + this.hostname = value; + } + + /** + * Gets the value of the 'tags' field. + */ + public java.util.Map getTags() { + return tags; + } + + /** + * Sets the value of the 'tags' field. + * @param value the value to set. 
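+   * (Editor's note, illustrative only: instances are typically produced through the
+   * generated builder rather than direct field access, e.g.
+   * {@code GroupEndpoint.newBuilder().setType("SITES").setGroup("SITE_A")
+   *        .setService("CREAM-CE").setHostname("ce01.example.org").build()};
+   * the literal values here are hypothetical.)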
+ */ + public void setTags(java.util.Map value) { + this.tags = value; + } + + /** Creates a new GroupEndpoint RecordBuilder */ + public static argo.avro.GroupEndpoint.Builder newBuilder() { + return new argo.avro.GroupEndpoint.Builder(); + } + + /** Creates a new GroupEndpoint RecordBuilder by copying an existing Builder */ + public static argo.avro.GroupEndpoint.Builder newBuilder(argo.avro.GroupEndpoint.Builder other) { + return new argo.avro.GroupEndpoint.Builder(other); + } + + /** Creates a new GroupEndpoint RecordBuilder by copying an existing GroupEndpoint instance */ + public static argo.avro.GroupEndpoint.Builder newBuilder(argo.avro.GroupEndpoint other) { + return new argo.avro.GroupEndpoint.Builder(other); + } + + /** + * RecordBuilder for GroupEndpoint instances. + */ + public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private java.lang.String type; + private java.lang.String group; + private java.lang.String service; + private java.lang.String hostname; + private java.util.Map tags; + + /** Creates a new Builder */ + private Builder() { + super(argo.avro.GroupEndpoint.SCHEMA$); + } + + /** Creates a Builder by copying an existing Builder */ + private Builder(argo.avro.GroupEndpoint.Builder other) { + super(other); + } + + /** Creates a Builder by copying an existing GroupEndpoint instance */ + private Builder(argo.avro.GroupEndpoint other) { + super(argo.avro.GroupEndpoint.SCHEMA$); + if (isValidValue(fields()[0], other.type)) { + this.type = data().deepCopy(fields()[0].schema(), other.type); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.group)) { + this.group = data().deepCopy(fields()[1].schema(), other.group); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.service)) { + this.service = data().deepCopy(fields()[2].schema(), other.service); + fieldSetFlags()[2] = true; + } + if (isValidValue(fields()[3], other.hostname)) { + this.hostname = data().deepCopy(fields()[3].schema(), other.hostname); + fieldSetFlags()[3] = true; + } + if (isValidValue(fields()[4], other.tags)) { + this.tags = data().deepCopy(fields()[4].schema(), other.tags); + fieldSetFlags()[4] = true; + } + } + + /** Gets the value of the 'type' field */ + public java.lang.String getType() { + return type; + } + + /** Sets the value of the 'type' field */ + public argo.avro.GroupEndpoint.Builder setType(java.lang.String value) { + validate(fields()[0], value); + this.type = value; + fieldSetFlags()[0] = true; + return this; + } + + /** Checks whether the 'type' field has been set */ + public boolean hasType() { + return fieldSetFlags()[0]; + } + + /** Clears the value of the 'type' field */ + public argo.avro.GroupEndpoint.Builder clearType() { + type = null; + fieldSetFlags()[0] = false; + return this; + } + + /** Gets the value of the 'group' field */ + public java.lang.String getGroup() { + return group; + } + + /** Sets the value of the 'group' field */ + public argo.avro.GroupEndpoint.Builder setGroup(java.lang.String value) { + validate(fields()[1], value); + this.group = value; + fieldSetFlags()[1] = true; + return this; + } + + /** Checks whether the 'group' field has been set */ + public boolean hasGroup() { + return fieldSetFlags()[1]; + } + + /** Clears the value of the 'group' field */ + public argo.avro.GroupEndpoint.Builder clearGroup() { + group = null; + fieldSetFlags()[1] = false; + return this; + } + + /** Gets the value of the 'service' field */ + public 
java.lang.String getService() { + return service; + } + + /** Sets the value of the 'service' field */ + public argo.avro.GroupEndpoint.Builder setService(java.lang.String value) { + validate(fields()[2], value); + this.service = value; + fieldSetFlags()[2] = true; + return this; + } + + /** Checks whether the 'service' field has been set */ + public boolean hasService() { + return fieldSetFlags()[2]; + } + + /** Clears the value of the 'service' field */ + public argo.avro.GroupEndpoint.Builder clearService() { + service = null; + fieldSetFlags()[2] = false; + return this; + } + + /** Gets the value of the 'hostname' field */ + public java.lang.String getHostname() { + return hostname; + } + + /** Sets the value of the 'hostname' field */ + public argo.avro.GroupEndpoint.Builder setHostname(java.lang.String value) { + validate(fields()[3], value); + this.hostname = value; + fieldSetFlags()[3] = true; + return this; + } + + /** Checks whether the 'hostname' field has been set */ + public boolean hasHostname() { + return fieldSetFlags()[3]; + } + + /** Clears the value of the 'hostname' field */ + public argo.avro.GroupEndpoint.Builder clearHostname() { + hostname = null; + fieldSetFlags()[3] = false; + return this; + } + + /** Gets the value of the 'tags' field */ + public java.util.Map getTags() { + return tags; + } + + /** Sets the value of the 'tags' field */ + public argo.avro.GroupEndpoint.Builder setTags(java.util.Map value) { + validate(fields()[4], value); + this.tags = value; + fieldSetFlags()[4] = true; + return this; + } + + /** Checks whether the 'tags' field has been set */ + public boolean hasTags() { + return fieldSetFlags()[4]; + } + + /** Clears the value of the 'tags' field */ + public argo.avro.GroupEndpoint.Builder clearTags() { + tags = null; + fieldSetFlags()[4] = false; + return this; + } + + @Override + public GroupEndpoint build() { + try { + GroupEndpoint record = new GroupEndpoint(); + record.type = fieldSetFlags()[0] ? this.type : (java.lang.String) defaultValue(fields()[0]); + record.group = fieldSetFlags()[1] ? this.group : (java.lang.String) defaultValue(fields()[1]); + record.service = fieldSetFlags()[2] ? this.service : (java.lang.String) defaultValue(fields()[2]); + record.hostname = fieldSetFlags()[3] ? this.hostname : (java.lang.String) defaultValue(fields()[3]); + record.tags = fieldSetFlags()[4] ? 
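+      /* editor's note: build() keeps the builder's value for each field whose
+         fieldSetFlags() entry is set, and falls back to the schema default otherwise */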
this.tags : (java.util.Map) defaultValue(fields()[4]); + return record; + } catch (Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } +} diff --git a/flink_jobs/status_trends/src/main/java/argo/avro/GroupGroup.java b/flink_jobs/status_trends/src/main/java/argo/avro/GroupGroup.java new file mode 100644 index 00000000..a7712d67 --- /dev/null +++ b/flink_jobs/status_trends/src/main/java/argo/avro/GroupGroup.java @@ -0,0 +1,286 @@ +/** + * Autogenerated by Avro + * + * DO NOT EDIT DIRECTLY + */ +package argo.avro; +@SuppressWarnings("all") +@org.apache.avro.specific.AvroGenerated +public class GroupGroup extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord { + public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"GroupGroup\",\"namespace\":\"argo.avro\",\"fields\":[{\"name\":\"type\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"group\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"subgroup\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"tags\",\"type\":[\"null\",{\"type\":\"map\",\"values\":{\"type\":\"string\",\"avro.java.string\":\"String\"},\"avro.java.string\":\"String\"}]}]}"); + public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; } + @Deprecated public java.lang.String type; + @Deprecated public java.lang.String group; + @Deprecated public java.lang.String subgroup; + @Deprecated public java.util.Map tags; + + /** + * Default constructor. + */ + public GroupGroup() {} + + /** + * All-args constructor. + */ + public GroupGroup(java.lang.String type, java.lang.String group, java.lang.String subgroup, java.util.Map tags) { + this.type = type; + this.group = group; + this.subgroup = subgroup; + this.tags = tags; + } + + public org.apache.avro.Schema getSchema() { return SCHEMA$; } + // Used by DatumWriter. Applications should not call. + public java.lang.Object get(int field$) { + switch (field$) { + case 0: return type; + case 1: return group; + case 2: return subgroup; + case 3: return tags; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + // Used by DatumReader. Applications should not call. + @SuppressWarnings(value="unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: type = (java.lang.String)value$; break; + case 1: group = (java.lang.String)value$; break; + case 2: subgroup = (java.lang.String)value$; break; + case 3: tags = (java.util.Map)value$; break; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + + /** + * Gets the value of the 'type' field. + */ + public java.lang.String getType() { + return type; + } + + /** + * Sets the value of the 'type' field. + * @param value the value to set. + */ + public void setType(java.lang.String value) { + this.type = value; + } + + /** + * Gets the value of the 'group' field. + */ + public java.lang.String getGroup() { + return group; + } + + /** + * Sets the value of the 'group' field. + * @param value the value to set. + */ + public void setGroup(java.lang.String value) { + this.group = value; + } + + /** + * Gets the value of the 'subgroup' field. + */ + public java.lang.String getSubgroup() { + return subgroup; + } + + /** + * Sets the value of the 'subgroup' field. + * @param value the value to set. 
+ */ + public void setSubgroup(java.lang.String value) { + this.subgroup = value; + } + + /** + * Gets the value of the 'tags' field. + */ + public java.util.Map getTags() { + return tags; + } + + /** + * Sets the value of the 'tags' field. + * @param value the value to set. + */ + public void setTags(java.util.Map value) { + this.tags = value; + } + + /** Creates a new GroupGroup RecordBuilder */ + public static argo.avro.GroupGroup.Builder newBuilder() { + return new argo.avro.GroupGroup.Builder(); + } + + /** Creates a new GroupGroup RecordBuilder by copying an existing Builder */ + public static argo.avro.GroupGroup.Builder newBuilder(argo.avro.GroupGroup.Builder other) { + return new argo.avro.GroupGroup.Builder(other); + } + + /** Creates a new GroupGroup RecordBuilder by copying an existing GroupGroup instance */ + public static argo.avro.GroupGroup.Builder newBuilder(argo.avro.GroupGroup other) { + return new argo.avro.GroupGroup.Builder(other); + } + + /** + * RecordBuilder for GroupGroup instances. + */ + public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private java.lang.String type; + private java.lang.String group; + private java.lang.String subgroup; + private java.util.Map tags; + + /** Creates a new Builder */ + private Builder() { + super(argo.avro.GroupGroup.SCHEMA$); + } + + /** Creates a Builder by copying an existing Builder */ + private Builder(argo.avro.GroupGroup.Builder other) { + super(other); + } + + /** Creates a Builder by copying an existing GroupGroup instance */ + private Builder(argo.avro.GroupGroup other) { + super(argo.avro.GroupGroup.SCHEMA$); + if (isValidValue(fields()[0], other.type)) { + this.type = data().deepCopy(fields()[0].schema(), other.type); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.group)) { + this.group = data().deepCopy(fields()[1].schema(), other.group); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.subgroup)) { + this.subgroup = data().deepCopy(fields()[2].schema(), other.subgroup); + fieldSetFlags()[2] = true; + } + if (isValidValue(fields()[3], other.tags)) { + this.tags = data().deepCopy(fields()[3].schema(), other.tags); + fieldSetFlags()[3] = true; + } + } + + /** Gets the value of the 'type' field */ + public java.lang.String getType() { + return type; + } + + /** Sets the value of the 'type' field */ + public argo.avro.GroupGroup.Builder setType(java.lang.String value) { + validate(fields()[0], value); + this.type = value; + fieldSetFlags()[0] = true; + return this; + } + + /** Checks whether the 'type' field has been set */ + public boolean hasType() { + return fieldSetFlags()[0]; + } + + /** Clears the value of the 'type' field */ + public argo.avro.GroupGroup.Builder clearType() { + type = null; + fieldSetFlags()[0] = false; + return this; + } + + /** Gets the value of the 'group' field */ + public java.lang.String getGroup() { + return group; + } + + /** Sets the value of the 'group' field */ + public argo.avro.GroupGroup.Builder setGroup(java.lang.String value) { + validate(fields()[1], value); + this.group = value; + fieldSetFlags()[1] = true; + return this; + } + + /** Checks whether the 'group' field has been set */ + public boolean hasGroup() { + return fieldSetFlags()[1]; + } + + /** Clears the value of the 'group' field */ + public argo.avro.GroupGroup.Builder clearGroup() { + group = null; + fieldSetFlags()[1] = false; + return this; + } + + /** Gets the value of the 
'subgroup' field */ + public java.lang.String getSubgroup() { + return subgroup; + } + + /** Sets the value of the 'subgroup' field */ + public argo.avro.GroupGroup.Builder setSubgroup(java.lang.String value) { + validate(fields()[2], value); + this.subgroup = value; + fieldSetFlags()[2] = true; + return this; + } + + /** Checks whether the 'subgroup' field has been set */ + public boolean hasSubgroup() { + return fieldSetFlags()[2]; + } + + /** Clears the value of the 'subgroup' field */ + public argo.avro.GroupGroup.Builder clearSubgroup() { + subgroup = null; + fieldSetFlags()[2] = false; + return this; + } + + /** Gets the value of the 'tags' field */ + public java.util.Map getTags() { + return tags; + } + + /** Sets the value of the 'tags' field */ + public argo.avro.GroupGroup.Builder setTags(java.util.Map value) { + validate(fields()[3], value); + this.tags = value; + fieldSetFlags()[3] = true; + return this; + } + + /** Checks whether the 'tags' field has been set */ + public boolean hasTags() { + return fieldSetFlags()[3]; + } + + /** Clears the value of the 'tags' field */ + public argo.avro.GroupGroup.Builder clearTags() { + tags = null; + fieldSetFlags()[3] = false; + return this; + } + + @Override + public GroupGroup build() { + try { + GroupGroup record = new GroupGroup(); + record.type = fieldSetFlags()[0] ? this.type : (java.lang.String) defaultValue(fields()[0]); + record.group = fieldSetFlags()[1] ? this.group : (java.lang.String) defaultValue(fields()[1]); + record.subgroup = fieldSetFlags()[2] ? this.subgroup : (java.lang.String) defaultValue(fields()[2]); + record.tags = fieldSetFlags()[3] ? this.tags : (java.util.Map) defaultValue(fields()[3]); + return record; + } catch (Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } +} diff --git a/flink_jobs/status_trends/src/main/java/argo/avro/MetricProfile.java b/flink_jobs/status_trends/src/main/java/argo/avro/MetricProfile.java new file mode 100644 index 00000000..5055e779 --- /dev/null +++ b/flink_jobs/status_trends/src/main/java/argo/avro/MetricProfile.java @@ -0,0 +1,304 @@ +/** + * Autogenerated by Avro + * + * DO NOT EDIT DIRECTLY + */ +package argo.avro; +@SuppressWarnings("all") +@org.apache.avro.specific.AvroGenerated +public class MetricProfile extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord { + public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"MetricProfile\",\"namespace\":\"argo.avro\",\"fields\":[{\"name\":\"profile\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"service\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"metric\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"tags\",\"type\":[\"null\",{\"type\":\"map\",\"values\":{\"type\":\"string\",\"avro.java.string\":\"String\"},\"avro.java.string\":\"String\"}]}]}"); + public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; } + @Deprecated public java.lang.String profile; + @Deprecated public java.lang.String service; + @Deprecated public java.lang.String metric; + @Deprecated public java.util.Map tags; + + /** + * Default constructor. Note that this does not initialize fields + * to their default values from the schema. If that is desired then + * one should use newBuilder(). + */ + public MetricProfile() {} + + /** + * All-args constructor. 
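+   * (Illustrative, editor's sketch with hypothetical values:
+   * {@code new MetricProfile("ARGO_MON_CRITICAL", "CREAM-CE", "emi.cream.CREAMCE-JobSubmit", null)})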
+ */ + public MetricProfile(java.lang.String profile, java.lang.String service, java.lang.String metric, java.util.Map tags) { + this.profile = profile; + this.service = service; + this.metric = metric; + this.tags = tags; + } + + public org.apache.avro.Schema getSchema() { return SCHEMA$; } + // Used by DatumWriter. Applications should not call. + public java.lang.Object get(int field$) { + switch (field$) { + case 0: return profile; + case 1: return service; + case 2: return metric; + case 3: return tags; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + // Used by DatumReader. Applications should not call. + @SuppressWarnings(value="unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: profile = (java.lang.String)value$; break; + case 1: service = (java.lang.String)value$; break; + case 2: metric = (java.lang.String)value$; break; + case 3: tags = (java.util.Map)value$; break; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + + /** + * Gets the value of the 'profile' field. + */ + public java.lang.String getProfile() { + return profile; + } + + /** + * Sets the value of the 'profile' field. + * @param value the value to set. + */ + public void setProfile(java.lang.String value) { + this.profile = value; + } + + /** + * Gets the value of the 'service' field. + */ + public java.lang.String getService() { + return service; + } + + /** + * Sets the value of the 'service' field. + * @param value the value to set. + */ + public void setService(java.lang.String value) { + this.service = value; + } + + /** + * Gets the value of the 'metric' field. + */ + public java.lang.String getMetric() { + return metric; + } + + /** + * Sets the value of the 'metric' field. + * @param value the value to set. + */ + public void setMetric(java.lang.String value) { + this.metric = value; + } + + /** + * Gets the value of the 'tags' field. + */ + public java.util.Map getTags() { + return tags; + } + + /** + * Sets the value of the 'tags' field. + * @param value the value to set. + */ + public void setTags(java.util.Map value) { + this.tags = value; + } + + /** Creates a new MetricProfile RecordBuilder */ + public static argo.avro.MetricProfile.Builder newBuilder() { + return new argo.avro.MetricProfile.Builder(); + } + + /** Creates a new MetricProfile RecordBuilder by copying an existing Builder */ + public static argo.avro.MetricProfile.Builder newBuilder(argo.avro.MetricProfile.Builder other) { + return new argo.avro.MetricProfile.Builder(other); + } + + /** Creates a new MetricProfile RecordBuilder by copying an existing MetricProfile instance */ + public static argo.avro.MetricProfile.Builder newBuilder(argo.avro.MetricProfile other) { + return new argo.avro.MetricProfile.Builder(other); + } + + /** + * RecordBuilder for MetricProfile instances. 
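+   * (Editor's note, descriptive: each setter below validates the value against the
+   * field schema and records it in fieldSetFlags(); build() then substitutes schema
+   * defaults for any field left unset.)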
+ */ + public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private java.lang.String profile; + private java.lang.String service; + private java.lang.String metric; + private java.util.Map tags; + + /** Creates a new Builder */ + private Builder() { + super(argo.avro.MetricProfile.SCHEMA$); + } + + /** Creates a Builder by copying an existing Builder */ + private Builder(argo.avro.MetricProfile.Builder other) { + super(other); + if (isValidValue(fields()[0], other.profile)) { + this.profile = data().deepCopy(fields()[0].schema(), other.profile); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.service)) { + this.service = data().deepCopy(fields()[1].schema(), other.service); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.metric)) { + this.metric = data().deepCopy(fields()[2].schema(), other.metric); + fieldSetFlags()[2] = true; + } + if (isValidValue(fields()[3], other.tags)) { + this.tags = data().deepCopy(fields()[3].schema(), other.tags); + fieldSetFlags()[3] = true; + } + } + + /** Creates a Builder by copying an existing MetricProfile instance */ + private Builder(argo.avro.MetricProfile other) { + super(argo.avro.MetricProfile.SCHEMA$); + if (isValidValue(fields()[0], other.profile)) { + this.profile = data().deepCopy(fields()[0].schema(), other.profile); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.service)) { + this.service = data().deepCopy(fields()[1].schema(), other.service); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.metric)) { + this.metric = data().deepCopy(fields()[2].schema(), other.metric); + fieldSetFlags()[2] = true; + } + if (isValidValue(fields()[3], other.tags)) { + this.tags = data().deepCopy(fields()[3].schema(), other.tags); + fieldSetFlags()[3] = true; + } + } + + /** Gets the value of the 'profile' field */ + public java.lang.String getProfile() { + return profile; + } + + /** Sets the value of the 'profile' field */ + public argo.avro.MetricProfile.Builder setProfile(java.lang.String value) { + validate(fields()[0], value); + this.profile = value; + fieldSetFlags()[0] = true; + return this; + } + + /** Checks whether the 'profile' field has been set */ + public boolean hasProfile() { + return fieldSetFlags()[0]; + } + + /** Clears the value of the 'profile' field */ + public argo.avro.MetricProfile.Builder clearProfile() { + profile = null; + fieldSetFlags()[0] = false; + return this; + } + + /** Gets the value of the 'service' field */ + public java.lang.String getService() { + return service; + } + + /** Sets the value of the 'service' field */ + public argo.avro.MetricProfile.Builder setService(java.lang.String value) { + validate(fields()[1], value); + this.service = value; + fieldSetFlags()[1] = true; + return this; + } + + /** Checks whether the 'service' field has been set */ + public boolean hasService() { + return fieldSetFlags()[1]; + } + + /** Clears the value of the 'service' field */ + public argo.avro.MetricProfile.Builder clearService() { + service = null; + fieldSetFlags()[1] = false; + return this; + } + + /** Gets the value of the 'metric' field */ + public java.lang.String getMetric() { + return metric; + } + + /** Sets the value of the 'metric' field */ + public argo.avro.MetricProfile.Builder setMetric(java.lang.String value) { + validate(fields()[2], value); + this.metric = value; + fieldSetFlags()[2] = true; + return this; + } + + /** Checks whether the 
'metric' field has been set */ + public boolean hasMetric() { + return fieldSetFlags()[2]; + } + + /** Clears the value of the 'metric' field */ + public argo.avro.MetricProfile.Builder clearMetric() { + metric = null; + fieldSetFlags()[2] = false; + return this; + } + + /** Gets the value of the 'tags' field */ + public java.util.Map getTags() { + return tags; + } + + /** Sets the value of the 'tags' field */ + public argo.avro.MetricProfile.Builder setTags(java.util.Map value) { + validate(fields()[3], value); + this.tags = value; + fieldSetFlags()[3] = true; + return this; + } + + /** Checks whether the 'tags' field has been set */ + public boolean hasTags() { + return fieldSetFlags()[3]; + } + + /** Clears the value of the 'tags' field */ + public argo.avro.MetricProfile.Builder clearTags() { + tags = null; + fieldSetFlags()[3] = false; + return this; + } + + @Override + public MetricProfile build() { + try { + MetricProfile record = new MetricProfile(); + record.profile = fieldSetFlags()[0] ? this.profile : (java.lang.String) defaultValue(fields()[0]); + record.service = fieldSetFlags()[1] ? this.service : (java.lang.String) defaultValue(fields()[1]); + record.metric = fieldSetFlags()[2] ? this.metric : (java.lang.String) defaultValue(fields()[2]); + record.tags = fieldSetFlags()[3] ? this.tags : (java.util.Map) defaultValue(fields()[3]); + return record; + } catch (Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } +} diff --git a/flink_jobs/status_trends/src/main/java/argo/batch/BatchEndpointFlipFlopTrends.java b/flink_jobs/status_trends/src/main/java/argo/batch/BatchEndpointFlipFlopTrends.java index 5c8adb18..7f90ea15 100644 --- a/flink_jobs/status_trends/src/main/java/argo/batch/BatchEndpointFlipFlopTrends.java +++ b/flink_jobs/status_trends/src/main/java/argo/batch/BatchEndpointFlipFlopTrends.java @@ -43,7 +43,8 @@ public class BatchEndpointFlipFlopTrends { private static DataSet yesterdayData; private static DataSet todayData; private static Integer rankNum; - private static final String endpointTrends = "flipflop_trends_endpoints"; + + private static final String endpointTrends = "flipflop_trends_endpoints"; private static String mongoUri; private static ProfilesLoader profilesLoader; @@ -65,7 +66,6 @@ public static void main(String[] args) throws Exception { System.exit(0); } - if (params.get("clearMongo") != null && params.getBoolean("clearMongo") == true) { clearMongo = true; } @@ -82,7 +82,7 @@ public static void main(String[] args) throws Exception { todayData = readInputData(env, params, "todayData"); calcFlipFlops(); -// execute program + StringBuilder jobTitleSB = new StringBuilder(); jobTitleSB.append("Group Endpoint Flip Flops for: "); jobTitleSB.append(profilesLoader.getReportParser().getTenantReport().getTenant()); @@ -105,10 +105,10 @@ private static void calcFlipFlops() { //group data by service enpoint metric and return for each group , the necessary info and a treemap containing timestamps and status - DataSet serviceEndpointMetricGroupData = filteredTodayData.union(filteredYesterdayData).groupBy("hostname", "service", "metric").reduceGroup(new CalcMetricFlipFlopTrends(profilesLoader.getOperationParser(), profilesLoader.getTopologyEndpointParser(), profilesLoader.getAggregationProfileParser(), profilesDate)); + DataSet serviceEndpointMetricGroupData = filteredTodayData.union(filteredYesterdayData).groupBy("hostname", "service", "metric").reduceGroup(new CalcMetricFlipFlopTrends(profilesLoader.getOperationParser(), 
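+                /* editor's note: this revision also threads the group topology parser
+                   (getTopolGroupParser()) into the reducer */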
profilesLoader.getTopologyEndpointParser(),profilesLoader.getTopolGroupParser(), profilesLoader.getAggregationProfileParser(), profilesDate)); //group data by service endpoint and count flip flops - DataSet serviceEndpointGroupData = serviceEndpointMetricGroupData.groupBy("group", "endpoint", "service").reduceGroup(new CalcEndpointFlipFlopTrends(profilesLoader.getAggregationProfileParser().getMetricOp(), profilesLoader.getOperationParser())); + DataSet serviceEndpointGroupData = serviceEndpointMetricGroupData.groupBy("group", "endpoint", "service").reduceGroup(new CalcEndpointFlipFlopTrends(profilesLoader.getAggregationProfileParser().getMetricOpByProfile(), profilesLoader.getOperationParser())); if (rankNum != null) { //sort and rank data serviceEndpointGroupData = serviceEndpointGroupData.sortPartition("flipflops", Order.DESCENDING).setParallelism(1).first(rankNum); @@ -126,8 +126,7 @@ public Trends map(EndpointTrends in) throws Exception { }); trends.output(metricMongoOut); - // return serviceEndpointGroupData; - } //read input from file + } private static DataSet readInputData(ExecutionEnvironment env, ParameterTool params, String path) { DataSet inputData; diff --git a/flink_jobs/status_trends/src/main/java/argo/batch/BatchFlipFlopCollection.java b/flink_jobs/status_trends/src/main/java/argo/batch/BatchFlipFlopCollection.java index 1817c004..7422e49a 100644 --- a/flink_jobs/status_trends/src/main/java/argo/batch/BatchFlipFlopCollection.java +++ b/flink_jobs/status_trends/src/main/java/argo/batch/BatchFlipFlopCollection.java @@ -44,7 +44,7 @@ * --key: ARGO web api token * --reportId: the id of the report the job will need to process * --apiUri: ARGO wep api to connect to e.g msg.example.com -*Optional: +* Optional: * -- clearMongo: option to clear the mongo db before saving the new result or not, e.g true * -- N : the number of the result the job will provide, if the parameter exists , e.g 10 * @@ -84,8 +84,6 @@ public static void main(String[] args) throws Exception { profilesDate = Utils.convertStringtoDate(format, params.getRequired("date")); profilesDateStr = Utils.convertDateToString(format, profilesDate); - - if (params.get("N") != null) { rankNum = params.getInt("N"); } @@ -120,8 +118,7 @@ private static void calcFlipFlops() { //group data by service enpoint metric and return for each group , the necessary info and a treemap containing timestamps and status - DataSet serviceEndpointMetricGroupData = filteredTodayData.union(filteredYesterdayData).groupBy("hostname", "service", "metric").reduceGroup(new CalcMetricFlipFlopTrends(profilesLoader.getOperationParser(), profilesLoader.getTopologyEndpointParser(), profilesLoader.getAggregationProfileParser(), profilesDate)); - + DataSet serviceEndpointMetricGroupData = filteredTodayData.union(filteredYesterdayData).groupBy("hostname", "service", "metric").reduceGroup(new CalcMetricFlipFlopTrends(profilesLoader.getOperationParser(), profilesLoader.getTopologyEndpointParser(),profilesLoader.getTopolGroupParser(), profilesLoader.getAggregationProfileParser(), profilesDate)); DataSet noZeroServiceEndpointMetricGroupData = serviceEndpointMetricGroupData.filter(new ZeroMetricTrendsFilter()); if (rankNum != null) { //sort and rank data @@ -131,8 +128,6 @@ private static void calcFlipFlops() { } MongoTrendsOutput metricMongoOut = new MongoTrendsOutput(mongoUri, metricTrends, MongoTrendsOutput.TrendsType.TRENDS_METRIC, reportId, profilesDateStr, clearMongo); - - DataSet trends = noZeroServiceEndpointMetricGroupData.map(new MapFunction() { 
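            // editor's note: maps each surviving (non-zero flip-flop) MetricTrends
            // entry to a generic Trends record for the MongoTrendsOutput sink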
@Override @@ -143,7 +138,7 @@ public Trends map(MetricTrends in) throws Exception { trends.output(metricMongoOut); //group data by service endpoint and count flip flops - DataSet serviceEndpointGroupData = serviceEndpointMetricGroupData.groupBy("group", "endpoint", "service").reduceGroup(new CalcEndpointFlipFlopTrends(profilesLoader.getAggregationProfileParser().getMetricOp(), profilesLoader.getOperationParser())); + DataSet serviceEndpointGroupData = serviceEndpointMetricGroupData.groupBy("group", "endpoint", "service").reduceGroup(new CalcEndpointFlipFlopTrends(profilesLoader.getAggregationProfileParser().getMetricOpByProfile(), profilesLoader.getOperationParser())); DataSet noZeroserviceEndpointGroupData = serviceEndpointGroupData.filter(new ZeroEndpointTrendsFilter()); if (rankNum != null) { //sort and rank data @@ -182,7 +177,7 @@ public Trends map(ServiceTrends in) throws Exception { }); trends.output(metricMongoOut); -//flat map data to add function as described in aggregation profile groups + //flat map data to add function as described in aggregation profile groups serviceGroupData = serviceGroupData.flatMap(new MapServices(profilesLoader.getAggregationProfileParser())); //group data by group,function and count flip flops @@ -198,9 +193,7 @@ public Trends map(ServiceTrends in) throws Exception { } else { noZerogroupData = noZerogroupData.sortPartition("flipflops", Order.DESCENDING).setParallelism(1); } - metricMongoOut = new MongoTrendsOutput(mongoUri, groupTrends, MongoTrendsOutput.TrendsType.TRENDS_GROUP, reportId, profilesDateStr, clearMongo); - trends = noZerogroupData.map(new MapFunction() { @Override diff --git a/flink_jobs/status_trends/src/main/java/argo/batch/BatchGroupFlipFlopTrends.java b/flink_jobs/status_trends/src/main/java/argo/batch/BatchGroupFlipFlopTrends.java index 623226ea..918eb557 100644 --- a/flink_jobs/status_trends/src/main/java/argo/batch/BatchGroupFlipFlopTrends.java +++ b/flink_jobs/status_trends/src/main/java/argo/batch/BatchGroupFlipFlopTrends.java @@ -25,25 +25,25 @@ import org.apache.flink.api.java.utils.ParameterTool; import org.apache.flink.core.fs.Path; import org.joda.time.DateTime; + + /** - * Implements an ARGO Status Trends Job in flink , to count the number of status changes - * that occur to the level of group of the topology hierarchy - * - * Submit job in flink cluster using the following parameters - -* --date:the date for which the job runs and need to return results , yyyy-MM-dd -* --yesterdayData: path to the metric profile data, of the previous day , for which the jobs runs profile (For hdfs use: hdfs://namenode:port/path/to/file) -* --todayData: path to the metric profile data, of the current day , for which the jobs runs profile (For hdfs use: hdfs://namenode:port/path/to/file) -* --mongoUri: path to MongoDB destination (eg mongodb://localhost:27017/database -* --apiUri: path to the mongo db the , for which the jobs runs profile (For hdfs use: hdfs://namenode:port/path/to/file) -* --key: ARGO web api token -* --reportId: the id of the report the job will need to process -* --apiUri: ARGO wep api to connect to msg.example.com -*Optional: -* -- clearMongo: option to clear the mongo db before saving the new result or not, e.g true -* -- N : the number of the result the job will provide, if the parameter exists , e.g 10 -* -*/ + * Implements an ARGO Status Trends Job in flink , to count the number of status + * changes that occur to the level of group of the topology hierarchy + * + * Submit job in flink cluster using the following 
parameters:
+ * --date: the date for which the job runs and needs to return results, yyyy-MM-dd
+ * --yesterdayData: path to the metric data of the previous day (For hdfs use: hdfs://namenode:port/path/to/file)
+ * --todayData: path to the metric data of the current day (For hdfs use: hdfs://namenode:port/path/to/file)
+ * --mongoUri: path to MongoDB destination (e.g. mongodb://localhost:27017/database)
+ * --key: ARGO web api token
+ * --reportId: the id of the report the job will need to process
+ * --apiUri: ARGO web api to connect to, e.g. msg.example.com
+ * Optional:
+ * -- clearMongo: option to clear the mongo db before saving the new result or not, e.g. true
+ * -- N : the number of results the job will provide, if the parameter exists, e.g. 10
+ */
+
 public class BatchGroupFlipFlopTrends {
 
     private static DataSet yesterdayData;
@@ -56,8 +56,10 @@ public class BatchGroupFlipFlopTrends {
     private static String format = "yyyy-MM-dd";
     private static String reportId;
-    private static boolean clearMongo=false;
+
+    private static boolean clearMongo = false;
     private static String profilesDateStr;
+
     public static void main(String[] args) throws Exception {
         // set up the batch execution environment
         final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
@@ -67,15 +69,14 @@ public static void main(String[] args) throws Exception {
         if (!Utils.checkParameters(params, "yesterdayData", "todayData", "mongoUri", "apiUri", "key", "date", "reportId")) {
             System.exit(0);
         }
-
+
         if (params.get("clearMongo") != null && params.getBoolean("clearMongo") == true) {
             clearMongo = true;
-
         }
         reportId = params.getRequired("reportId");
-        profilesDate = Utils.convertStringtoDate(format, params.getRequired("date"));
+        profilesDate = Utils.convertStringtoDate(format, params.getRequired("date"));
         profilesDateStr = Utils.convertDateToString(format, profilesDate);
-
+
         if (params.get("N") != null) {
             rankNum = params.getInt("N");
         }
@@ -86,7 +87,6 @@ public static void main(String[] args) throws Exception {
         // calculate on data
         calcFlipFlops();
-// execute program
         StringBuilder jobTitleSB = new StringBuilder();
         jobTitleSB.append("Group Flip Flops for: ");
         jobTitleSB.append(profilesLoader.getReportParser().getTenantReport().getTenant());
@@ -96,7 +96,6 @@ public static void main(String[] args) throws Exception {
         jobTitleSB.append(profilesDate);
 
         env.execute(jobTitleSB.toString());
-
     }
 
     // filter yesterdaydata and exclude the ones not contained in topology and metric profile data and get the last timestamp data for each service endpoint metric
@@ -108,10 +107,10 @@ private static void calcFlipFlops() {
         DataSet filteredTodayData = todayData.filter(new TopologyMetricFilter(profilesLoader.getMetricProfileParser(), profilesLoader.getTopologyEndpointParser(), profilesLoader.getTopolGroupParser(), profilesLoader.getAggregationProfileParser()));
 
         //group data by service enpoint metric and return for each group , the necessary info and a treemap containing timestamps and status
-        DataSet serviceEndpointMetricGroupData = filteredTodayData.union(filteredYesterdayData).groupBy("hostname", "service", "metric").reduceGroup(new CalcMetricFlipFlopTrends(profilesLoader.getOperationParser(),profilesLoader.getTopologyEndpointParser(), profilesLoader.getAggregationProfileParser(), profilesDate));
+        DataSet serviceEndpointMetricGroupData = filteredTodayData.union(filteredYesterdayData).groupBy("hostname", "service", "metric").reduceGroup(new
CalcMetricFlipFlopTrends(profilesLoader.getOperationParser(), profilesLoader.getTopologyEndpointParser(),profilesLoader.getTopolGroupParser(), profilesLoader.getAggregationProfileParser(), profilesDate));
 
         //group data by service endpoint and count flip flops
-        DataSet serviceEndpointGroupData = serviceEndpointMetricGroupData.groupBy("group", "endpoint", "service").reduceGroup(new CalcEndpointFlipFlopTrends(profilesLoader.getAggregationProfileParser().getMetricOp(), profilesLoader.getOperationParser()));
+        DataSet serviceEndpointGroupData = serviceEndpointMetricGroupData.groupBy("group", "endpoint", "service").reduceGroup(new CalcEndpointFlipFlopTrends(profilesLoader.getAggregationProfileParser().getMetricOpByProfile(), profilesLoader.getOperationParser()));
 
         //group data by service and count flip flops
         DataSet serviceGroupData = serviceEndpointGroupData.filter(new ServiceFilter(profilesLoader.getAggregationProfileParser())).groupBy("group", "service").reduceGroup(new CalcServiceFlipFlop(profilesLoader.getOperationParser(), profilesLoader.getAggregationProfileParser()));
@@ -130,7 +129,7 @@ private static void calcFlipFlops() {
             groupData = groupData.sortPartition("flipflops", Order.DESCENDING).setParallelism(1);
         }
 
-        MongoTrendsOutput metricMongoOut = new MongoTrendsOutput(mongoUri, groupTrends, MongoTrendsOutput.TrendsType.TRENDS_GROUP, reportId, profilesDateStr, clearMongo);
+        MongoTrendsOutput metricMongoOut = new MongoTrendsOutput(mongoUri, groupTrends, MongoTrendsOutput.TrendsType.TRENDS_GROUP, reportId, profilesDateStr, clearMongo);
 
         DataSet trends = groupData.map(new MapFunction() {
 
diff --git a/flink_jobs/status_trends/src/main/java/argo/batch/BatchMetricFlipFlopTrends.java b/flink_jobs/status_trends/src/main/java/argo/batch/BatchMetricFlipFlopTrends.java
index ae890d4f..10c7b189 100644
--- a/flink_jobs/status_trends/src/main/java/argo/batch/BatchMetricFlipFlopTrends.java
+++ b/flink_jobs/status_trends/src/main/java/argo/batch/BatchMetricFlipFlopTrends.java
@@ -18,25 +18,26 @@
 import argo.profiles.ProfilesLoader;
 import org.joda.time.DateTime;
 
-
 /**
- * Implements an ARGO Status Trends Job in flink , to count the number of status changes
- * that occur to the level of group, service, endpoint, metric of the topology hierarchy
- *
- * Submit job in flink cluster using the following parameters
-
-* --date:the date for which the job runs and need to return results , yyyy-MM-dd
-* --yesterdayData: path to the metric profile data, of the previous day , for which the jobs runs profile (For hdfs use: hdfs://namenode:port/path/to/file)
-* --todayData: path to the metric profile data, of the current day , for which the jobs runs profile (For hdfs use: hdfs://namenode:port/path/to/file)
-* --mongoUri: path to MongoDB destination (eg mongodb://localhost:27017/database
-* --apiUri: path to the mongo db the , for which the jobs runs profile (For hdfs use: hdfs://namenode:port/path/to/file)
-* --key: ARGO web api token
-* --reportId: the id of the report the job will need to process
-* --apiUri: ARGO wep api to connect to msg.example.com
-*Optional:
-* -- clearMongo: option to clear the mongo db before saving the new result or not, e.g true
-* -- N : the number of the result the job will provide, if the parameter exists , e.g 10
-*/
+ * Implements an ARGO Status Trends Job in flink, to count the number of status
+ * changes that occur to the level of group, service, endpoint, metric of the
+ * topology hierarchy
+ *
+ * Submit job in flink cluster using the following parameters:
+ * --date: the date for which the job runs and needs to return results, yyyy-MM-dd
+ * --yesterdayData: path to the metric data of the previous day (For hdfs use: hdfs://namenode:port/path/to/file)
+ * --todayData: path to the metric data of the current day (For hdfs use: hdfs://namenode:port/path/to/file)
+ * --mongoUri: path to MongoDB destination (e.g. mongodb://localhost:27017/database)
+ * --key: ARGO web api token
+ * --reportId: the id of the report the job will need to process
+ * --apiUri: ARGO web api to connect to, e.g. msg.example.com
+ * Optional:
+ * -- clearMongo: option to clear the mongo db before saving the new result or not, e.g. true
+ * -- N : the number of results the job will provide, if the parameter exists, e.g. 10
+ */
 public class BatchMetricFlipFlopTrends {
 
     static Logger LOG = LoggerFactory.getLogger(BatchMetricFlipFlopTrends.class);
@@ -69,11 +70,10 @@ public static void main(String[] args) throws Exception {
             clearMongo = true;
         }
 
-
-        profilesDate = Utils.convertStringtoDate(format, params.getRequired("date"));
+        profilesDate = Utils.convertStringtoDate(format, params.getRequired("date"));
         profilesDateStr = Utils.convertDateToString(format, profilesDate);
-
         mongoUri = params.getRequired("mongoUri");
+
         if (params.get("N") != null) {
             rankNum = params.getInt("N");
         }
@@ -106,7 +106,7 @@ private static void calcFlipFlops() {
 
         DataSet filteredTodayData = todayData.filter(new TopologyMetricFilter(profilesLoader.getMetricProfileParser(), profilesLoader.getTopologyEndpointParser(), profilesLoader.getTopolGroupParser(), profilesLoader.getAggregationProfileParser()));
 
-        DataSet metricData = filteredTodayData.union(filteredYesterdayData).groupBy("hostname", "service", "metric").reduceGroup(new CalcMetricFlipFlopTrends(profilesLoader.getOperationParser(),profilesLoader.getTopologyEndpointParser(), profilesLoader.getAggregationProfileParser(),profilesDate));
+        DataSet metricData = filteredTodayData.union(filteredYesterdayData).groupBy("hostname", "service", "metric").reduceGroup(new CalcMetricFlipFlopTrends(profilesLoader.getOperationParser(), profilesLoader.getTopologyEndpointParser(),profilesLoader.getTopolGroupParser(),profilesLoader.getAggregationProfileParser(), profilesDate));
 
         if (rankNum != null) {
             metricData = metricData.sortPartition("flipflops", Order.DESCENDING).first(rankNum);
@@ -115,9 +115,9 @@ private static void calcFlipFlops() {
             metricData = metricData.sortPartition("flipflops", Order.DESCENDING).setParallelism(1);
         }
 
-
+
         MongoTrendsOutput metricMongoOut = new MongoTrendsOutput(mongoUri, metricTrends, MongoTrendsOutput.TrendsType.TRENDS_METRIC, reportId, profilesDateStr, clearMongo);
-        DataSet trends = metricData.map(new MapFunction() {
+        DataSet trends = metricData.map(new MapFunction() {
 
             @Override
             public Trends map(MetricTrends in) throws Exception {
diff --git a/flink_jobs/status_trends/src/main/java/argo/batch/BatchServiceFlipFlopTrends.java b/flink_jobs/status_trends/src/main/java/argo/batch/BatchServiceFlipFlopTrends.java
index e1604ac3..9fdb271a 100644
--- a/flink_jobs/status_trends/src/main/java/argo/batch/BatchServiceFlipFlopTrends.java
+++ b/flink_jobs/status_trends/src/main/java/argo/batch/BatchServiceFlipFlopTrends.java
@@ -110,10 +110,10 @@ private static void calcFlipFlops() {
 
         DataSet filteredTodayData = todayData.filter(new
TopologyMetricFilter(profilesLoader.getMetricProfileParser(), profilesLoader.getTopologyEndpointParser(), profilesLoader.getTopolGroupParser(), profilesLoader.getAggregationProfileParser())); //group data by service enpoint metric and return for each group , the necessary info and a treemap containing timestamps and status - DataSet serviceEndpointMetricGroupData = filteredTodayData.union(filteredYesterdayData).groupBy("hostname", "service", "metric").reduceGroup(new CalcMetricFlipFlopTrends(profilesLoader.getOperationParser(),profilesLoader.getTopologyEndpointParser(), profilesLoader.getAggregationProfileParser(),profilesDate)); + DataSet serviceEndpointMetricGroupData = filteredTodayData.union(filteredYesterdayData).groupBy("hostname", "service", "metric").reduceGroup(new CalcMetricFlipFlopTrends(profilesLoader.getOperationParser(),profilesLoader.getTopologyEndpointParser(),profilesLoader.getTopolGroupParser(), profilesLoader.getAggregationProfileParser(),profilesDate)); //group data by service endpoint and count flip flops - DataSet serviceEndpointGroupData = serviceEndpointMetricGroupData.groupBy("group", "endpoint", "service").reduceGroup(new CalcEndpointFlipFlopTrends(profilesLoader.getAggregationProfileParser().getMetricOp(), profilesLoader.getOperationParser())); + DataSet serviceEndpointGroupData = serviceEndpointMetricGroupData.groupBy("group", "endpoint", "service").reduceGroup(new CalcEndpointFlipFlopTrends(profilesLoader.getAggregationProfileParser().getMetricOpByProfile(), profilesLoader.getOperationParser())); //group data by service and count flip flops DataSet< ServiceTrends> serviceGroupData = serviceEndpointGroupData.filter(new ServiceFilter(profilesLoader.getAggregationProfileParser())).groupBy("group", "service").reduceGroup(new CalcServiceFlipFlop(profilesLoader.getOperationParser(), profilesLoader.getAggregationProfileParser())); diff --git a/flink_jobs/status_trends/src/main/java/argo/batch/BatchStatusTrends.java b/flink_jobs/status_trends/src/main/java/argo/batch/BatchStatusTrends.java index 2dbea5c2..9393358a 100644 --- a/flink_jobs/status_trends/src/main/java/argo/batch/BatchStatusTrends.java +++ b/flink_jobs/status_trends/src/main/java/argo/batch/BatchStatusTrends.java @@ -18,6 +18,7 @@ import org.slf4j.LoggerFactory; import argo.profiles.ProfilesLoader; import org.joda.time.DateTime; + /** * Implements an ARGO Status Trends Job in flink , to count the number of status appearances per status type * that occur to the level of group, service, endpoint. 
metric of the topology hierarchy diff --git a/flink_jobs/status_trends/src/main/java/argo/batch/MongoTrendsOutput.java b/flink_jobs/status_trends/src/main/java/argo/batch/MongoTrendsOutput.java index f50cd250..9ba6761c 100644 --- a/flink_jobs/status_trends/src/main/java/argo/batch/MongoTrendsOutput.java +++ b/flink_jobs/status_trends/src/main/java/argo/batch/MongoTrendsOutput.java @@ -29,7 +29,6 @@ public enum TrendsType { private int mongoPort; private String dbName; private String colName; -// private MongoMethod method; private TrendsType trendsType; private String report; private int date; diff --git a/flink_jobs/status_trends/src/main/java/argo/filter/zero/flipflops/ZeroEndpointTrendsFilter.java b/flink_jobs/status_trends/src/main/java/argo/filter/zero/flipflops/ZeroEndpointTrendsFilter.java index 79895d40..f53a98b1 100644 --- a/flink_jobs/status_trends/src/main/java/argo/filter/zero/flipflops/ZeroEndpointTrendsFilter.java +++ b/flink_jobs/status_trends/src/main/java/argo/filter/zero/flipflops/ZeroEndpointTrendsFilter.java @@ -8,16 +8,12 @@ import argo.functions.calctimelines.ServiceFilter; import argo.pojos.EndpointTrends; -import argo.pojos.MetricTrends; import org.apache.flink.api.common.functions.FilterFunction; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * - * @author cthermolia - * - * StatusFilter, filters data by status + * ZeroEndpointTrendsFilter, filters EndpointTrends data and rejects the ones with flipflop=0 */ public class ZeroEndpointTrendsFilter implements FilterFunction { diff --git a/flink_jobs/status_trends/src/main/java/argo/filter/zero/flipflops/ZeroGroupFunctionFilter.java b/flink_jobs/status_trends/src/main/java/argo/filter/zero/flipflops/ZeroGroupFunctionFilter.java index 82404094..7eda86e0 100644 --- a/flink_jobs/status_trends/src/main/java/argo/filter/zero/flipflops/ZeroGroupFunctionFilter.java +++ b/flink_jobs/status_trends/src/main/java/argo/filter/zero/flipflops/ZeroGroupFunctionFilter.java @@ -5,15 +5,13 @@ */ package argo.filter.zero.flipflops; -import argo.functions.calctimelines.ServiceFilter; import argo.pojos.GroupFunctionTrends; import org.apache.flink.api.common.functions.FilterFunction; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * - * @author cthermolia + * ZeroGroupFunctionFilter, filters GroupFunctionTrends data and rejects the ones with flipflop=0 */ public class ZeroGroupFunctionFilter implements FilterFunction { diff --git a/flink_jobs/status_trends/src/main/java/argo/filter/zero/flipflops/ZeroGroupTrendsFilter.java b/flink_jobs/status_trends/src/main/java/argo/filter/zero/flipflops/ZeroGroupTrendsFilter.java index f341c4d4..9ec897cd 100644 --- a/flink_jobs/status_trends/src/main/java/argo/filter/zero/flipflops/ZeroGroupTrendsFilter.java +++ b/flink_jobs/status_trends/src/main/java/argo/filter/zero/flipflops/ZeroGroupTrendsFilter.java @@ -11,8 +11,7 @@ import org.slf4j.LoggerFactory; /** - * - * @author cthermolia + * ZeroGroupTrendsFilter, filters GroupTrends data and rejects the ones with flipflop=0 */ diff --git a/flink_jobs/status_trends/src/main/java/argo/filter/zero/flipflops/ZeroMetricTrendsFilter.java b/flink_jobs/status_trends/src/main/java/argo/filter/zero/flipflops/ZeroMetricTrendsFilter.java index e3ffa413..6ff7d442 100644 --- a/flink_jobs/status_trends/src/main/java/argo/filter/zero/flipflops/ZeroMetricTrendsFilter.java +++ b/flink_jobs/status_trends/src/main/java/argo/filter/zero/flipflops/ZeroMetricTrendsFilter.java @@ -13,11 +13,8 @@ import org.slf4j.LoggerFactory; /** - * - * @author 
cthermolia - * - * StatusFilter, filters data by status - */ +* ZeroMetricTrendsFilter, filters MetricTrends data and rejects the ones with flipflop=0 +*/ public class ZeroMetricTrendsFilter implements FilterFunction { static Logger LOG = LoggerFactory.getLogger(ServiceFilter.class); diff --git a/flink_jobs/status_trends/src/main/java/argo/filter/zero/flipflops/ZeroServiceTrendsFilter.java b/flink_jobs/status_trends/src/main/java/argo/filter/zero/flipflops/ZeroServiceTrendsFilter.java index fbed0e37..4eb23808 100644 --- a/flink_jobs/status_trends/src/main/java/argo/filter/zero/flipflops/ZeroServiceTrendsFilter.java +++ b/flink_jobs/status_trends/src/main/java/argo/filter/zero/flipflops/ZeroServiceTrendsFilter.java @@ -5,15 +5,13 @@ */ package argo.filter.zero.flipflops; -import argo.functions.calctimelines.ServiceFilter; import argo.pojos.ServiceTrends; import org.apache.flink.api.common.functions.FilterFunction; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * - * @author cthermolia + * ZeroServiceTrendsFilter, filters ServiceTrends data and rejects the ones with flipflop=0 */ public class ZeroServiceTrendsFilter implements FilterFunction { diff --git a/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/CalcLastTimeStatus.java b/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/CalcLastTimeStatus.java index 96bf5503..3d6fe93b 100644 --- a/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/CalcLastTimeStatus.java +++ b/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/CalcLastTimeStatus.java @@ -12,8 +12,6 @@ import org.apache.flink.util.Collector; /** - * - * @author cthermolia * CalcLastTimeStatus keeps data of the latest time entry */ diff --git a/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/MapServices.java b/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/MapServices.java index 1a7ec07e..4f8cf43c 100644 --- a/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/MapServices.java +++ b/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/MapServices.java @@ -7,15 +7,13 @@ */ import argo.pojos.ServiceTrends; -import argo.profiles.AggregationProfileParser; +import argo.profiles.AggregationProfileManager; import java.util.ArrayList; import java.util.HashMap; import org.apache.flink.api.common.functions.FlatMapFunction; import org.apache.flink.util.Collector; /** - * @author cthermolia - * * MapServices produces TimelineTrends for each service,that maps to the groups * of functions as described in aggregation profile groups endpoint , metric */ @@ -23,12 +21,14 @@ public class MapServices implements FlatMapFunction { - private AggregationProfileParser aggregationProfileParser; - + //private AggregationProfileParser aggregationProfileParser; + private AggregationProfileManager aggregationProfileParser; + private HashMap> serviceFunctions; - public MapServices(AggregationProfileParser aggregationProfileParser) { + public MapServices(AggregationProfileManager aggregationProfileParser) { this.aggregationProfileParser = aggregationProfileParser; + } /** @@ -44,13 +44,13 @@ public MapServices(AggregationProfileParser aggregationProfileParser) { public void flatMap(ServiceTrends t, Collector out) throws Exception { String service = t.getService(); - ArrayList functionList = aggregationProfileParser.retrieveServiceFunctions(service); - if (functionList != null) { - for (String f : functionList) { + String function = 
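+                /* editor's note: the new AggregationProfileManager API returns a single
+                   function name per service, replacing the old per-service list */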
aggregationProfileParser.retrieveFunctionsByService(service);
 
         ServiceTrends newT = t;
-                newT.setFunction(f);
+        newT.setFunction(function);
         out.collect(newT);
-            }
-        }
     }
 }
diff --git a/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/ServiceFilter.java b/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/ServiceFilter.java
index 178c6ac7..df8fbc6b 100644
--- a/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/ServiceFilter.java
+++ b/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/ServiceFilter.java
@@ -7,36 +7,36 @@
  */
 import argo.pojos.EndpointTrends;
-import argo.profiles.AggregationProfileParser;
-import java.util.ArrayList;
+import argo.profiles.AggregationProfileManager;
 import org.apache.flink.api.common.functions.FilterFunction;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
- *
- * @author cthermolia
- *
  * StatusFilter, filters data by status
  */
 public class ServiceFilter implements FilterFunction {
 
     static Logger LOG = LoggerFactory.getLogger(ServiceFilter.class);
 
-    private AggregationProfileParser aggregationProfileParser;
-
-    public ServiceFilter(AggregationProfileParser aggregationProfileParser) {
+    private AggregationProfileManager aggregationProfileParser;
+
+    public ServiceFilter(AggregationProfileManager aggregationProfileParser) {
         this.aggregationProfileParser = aggregationProfileParser;
     }
 
     //if the status field value in Tuple equals the given status returns true, else returns false
     @Override
     public boolean filter(EndpointTrends t) throws Exception {
-        ArrayList services = new ArrayList<>(aggregationProfileParser.getServiceOperations().keySet());
-        if (services.contains(t.getService())) {
-            return true;
-        }
-        return false;
+        return aggregationProfileParser.checkServiceExistance(t.getService());
     }
 }
diff --git a/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/StatusFilter.java b/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/StatusFilter.java
index 0cc9f8b8..921d78c7 100644
--- a/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/StatusFilter.java
+++ b/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/StatusFilter.java
@@ -11,9 +11,6 @@
 import org.slf4j.LoggerFactory;
 
 /**
- *
- * @author cthermolia
- *
  * StatusFilter, filters data by status
  */
 public class StatusFilter implements FilterFunction> {
diff --git a/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/TopologyMetricFilter.java b/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/TopologyMetricFilter.java
index 40cec30a..ba545ab6 100644
--- a/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/TopologyMetricFilter.java
+++ b/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/TopologyMetricFilter.java
@@ -6,49 +6,47 @@
 package argo.functions.calctimelines;
 
 import argo.avro.MetricData;
-import argo.profiles.AggregationProfileParser;
-import argo.profiles.MetricProfileParser;
-import argo.profiles.TopologyEndpointParser;
+import argo.profiles.AggregationProfileManager;
+import argo.profiles.EndpointGroupManager;
+import argo.profiles.GroupGroupManager;
+import argo.profiles.MetricProfileManager;
 
 import argo.profiles.TopologyGroupParser;
 import
java.util.ArrayList; -import java.util.HashMap; import org.apache.flink.api.common.functions.FilterFunction; -//import org.apache.flink.api.common.functions.RichFilterFunction; /** - * - * @author cthermolia - * * TopologyMetricFilter , filters service endpoint and exclude the ones that do * not appear in topology and metric profile data inputs */ public class TopologyMetricFilter implements FilterFunction { - private MetricProfileParser metricProfileParser; - private TopologyEndpointParser topologyEndpointParser; - private TopologyGroupParser topologyGroupParser; - private AggregationProfileParser aggregationProfileParser; + private MetricProfileManager metricProfileParser; + private EndpointGroupManager topologyEndpointParser; + private GroupGroupManager topologyGroupParser; + private AggregationProfileManager aggregationProfileParser; - public TopologyMetricFilter(MetricProfileParser metricProfileParser, TopologyEndpointParser topologyEndpointParser, TopologyGroupParser topologyGroupParser, AggregationProfileParser aggregationProfileParser) { + public TopologyMetricFilter(MetricProfileManager metricProfileParser, EndpointGroupManager topologyEndpointParser, GroupGroupManager topologyGroupParser, AggregationProfileManager aggregationProfileParser) { this.metricProfileParser = metricProfileParser; this.topologyEndpointParser = topologyEndpointParser; this.topologyGroupParser = topologyGroupParser; - this.aggregationProfileParser = aggregationProfileParser; + } + @Override public boolean filter(MetricData t) throws Exception { + String avProfileName = this.aggregationProfileParser.getAvProfileItem().getName(); - String group = topologyEndpointParser.retrieveGroup(aggregationProfileParser.getEndpointGroup().toUpperCase(), t.getHostname() + "-" + t.getService()); - boolean hasGroup = false; - if (topologyGroupParser.containsGroup(group) && group != null) { - hasGroup = true; - } - if (hasGroup && metricProfileParser.containsMetric(t.getService().toString(), t.getMetric().toString())) { - + ArrayList groups = topologyEndpointParser.getGroup(aggregationProfileParser.getProfileGroupType(avProfileName).toUpperCase(), t.getHostname().toString(), t.getService().toString()); + //boolean hasGroup = false; - return true; + for (String group : groups) { + if (topologyGroupParser.checkSubGroup(group) && metricProfileParser.containsMetric(t.getService().toString(), t.getMetric().toString())) { + return true; + } } return false; + } } diff --git a/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcEndpointFlipFlopTrends.java b/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcEndpointFlipFlopTrends.java index 1584b598..ddb200da 100644 --- a/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcEndpointFlipFlopTrends.java +++ b/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcEndpointFlipFlopTrends.java @@ -17,10 +17,8 @@ import org.joda.time.DateTime; import timelines.Timeline; import timelines.TimelineAggregator; + /** - * - * @author cthermolia - * * CalcEndpointTrends, count status changes for each service endpoint group */ public class CalcEndpointFlipFlopTrends extends RichGroupReduceFunction { @@ -51,15 +49,14 @@ public void reduce(Iterable in, Collector< EndpointTrends> out) th //store the necessary info //collect all timelines in a list - HashMap timelinelist = new HashMap<>(); + HashMap timelinelist = new HashMap<>(); for (MetricTrends time : in) { group = time.getGroup(); service = time.getService(); hostname = 
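The four-manager variant above wires in the same way; a hedged sketch (class and variable names here are assumptions, while the constructor signature is the one introduced by this hunk):

    import argo.avro.MetricData;
    import argo.functions.calctimelines.TopologyMetricFilter;
    import argo.profiles.AggregationProfileManager;
    import argo.profiles.EndpointGroupManager;
    import argo.profiles.GroupGroupManager;
    import argo.profiles.MetricProfileManager;
    import org.apache.flink.api.java.DataSet;

    public class TopologyMetricFilterSketch {
        // Drop metric data whose endpoint resolves to no valid topology group,
        // or whose service/metric pair is missing from the metric profile. A
        // host may map to several groups, hence the loop over getGroup() above.
        static DataSet<MetricData> keepKnownTopology(DataSet<MetricData> mdata,
                MetricProfileManager mpm, EndpointGroupManager egm,
                GroupGroupManager ggm, AggregationProfileManager apm) {
            return mdata.filter(new TopologyMetricFilter(mpm, egm, ggm, apm));
        }
    }

The diff continues: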
time.getEndpoint(); Timeline timeline = time.getTimeline(); - - timelinelist.put(time.getMetric(),timeline); + timelinelist.put(time.getMetric(), timeline); } // merge the timelines into one timeline , @@ -67,14 +64,13 @@ public void reduce(Iterable in, Collector< EndpointTrends> out) th // as multiple status (each status exist in each timeline) correspond to each timestamp, there is a need to conclude into one status/timestamp //for each timestamp the status that prevails is concluded by the truth table that is defined for the operation TimelineAggregator timelineAggregator = new TimelineAggregator(timelinelist); - timelineAggregator.aggregate( operationsParser.getTruthTable(), operationsParser.getIntOperation(operation)); + timelineAggregator.aggregate(operationsParser.getTruthTable(), operationsParser.getIntOperation(operation)); Timeline mergedTimeline = timelineAggregator.getOutput(); //collect all timelines that correspond to the group service endpoint group , merge them in order to create one timeline Integer flipflops = mergedTimeline.calcStatusChanges();//calculate flip flops on the concluded merged timeline - if (group != null && service != null && hostname != null) { EndpointTrends endpointTrends = new EndpointTrends(group, service, hostname, mergedTimeline, flipflops); out.collect(endpointTrends); - } + } } } diff --git a/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcGroupFlipFlop.java b/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcGroupFlipFlop.java index 44daeacd..48b40d12 100644 --- a/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcGroupFlipFlop.java +++ b/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcGroupFlipFlop.java @@ -9,10 +9,8 @@ //import argo.functions.calctimelines.TimelineMerger; import argo.pojos.GroupFunctionTrends; import argo.pojos.GroupTrends; -//import argo.pojos.Timeline; -import argo.profiles.AggregationProfileParser; +import argo.profiles.AggregationProfileManager; import argo.profiles.OperationsParser; - import java.util.HashMap; import org.apache.flink.api.common.functions.GroupReduceFunction; import org.apache.flink.util.Collector; @@ -31,26 +29,24 @@ public class CalcGroupFlipFlop implements GroupReduceFunction< GroupFunctionTren private OperationsParser operationsParser; private String groupOperation; - public CalcGroupFlipFlop(OperationsParser operationsParser, AggregationProfileParser aggregationProfileParser) { + public CalcGroupFlipFlop(OperationsParser operationsParser, AggregationProfileManager aggregationProfileParser) { this.operationsParser = operationsParser; - this.groupOperation = aggregationProfileParser.getProfileOp(); + this.groupOperation = aggregationProfileParser.retrieveProfileOperation(); } @Override public void reduce(Iterable in, Collector< GroupTrends> out) throws Exception { String group = null; - HashMap timelist = new HashMap<>(); + HashMap timelist = new HashMap<>(); for (GroupFunctionTrends time : in) { group = time.getGroup(); - timelist.put(time.getFunction(),time.getTimeline()); + timelist.put(time.getFunction(), time.getTimeline()); } TimelineAggregator timelineAggregator = new TimelineAggregator(timelist); - - timelineAggregator.aggregate(operationsParser.getTruthTable(),operationsParser.getIntOperation(groupOperation)); - Timeline timeline=timelineAggregator.getOutput(); - + timelineAggregator.aggregate(operationsParser.getTruthTable(), operationsParser.getIntOperation(groupOperation)); + Timeline timeline = 
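This merge idiom recurs in every flip-flop reducer of the patch; condensed into one helper for reference, calling Timeline, TimelineAggregator and OperationsParser exactly as the hunks do (the operation name is an assumed input):

    import java.util.HashMap;
    import argo.profiles.OperationsParser;
    import timelines.Timeline;
    import timelines.TimelineAggregator;

    public class MergeSketch {
        // Merge per-key timelines into one under the given operation: for each
        // timestamp the surviving status is decided by the operation's truth
        // table, then the status transitions (flip-flops) are counted.
        static int mergeAndCount(HashMap<String, Timeline> perKey,
                OperationsParser ops, String operation) throws Exception {
            TimelineAggregator aggregator = new TimelineAggregator(perKey);
            aggregator.aggregate(ops.getTruthTable(), ops.getIntOperation(operation));
            Timeline merged = aggregator.getOutput();
            return merged.calcStatusChanges();
        }
    }

The diff continues: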
timelineAggregator.getOutput(); int flipflops = timeline.calcStatusChanges(); GroupTrends groupTrends = new GroupTrends(group, timeline, flipflops); diff --git a/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcGroupFunctionFlipFlop.java b/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcGroupFunctionFlipFlop.java index a19894d0..3b691038 100644 --- a/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcGroupFunctionFlipFlop.java +++ b/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcGroupFunctionFlipFlop.java @@ -5,13 +5,10 @@ * To change this template file, choose Tools | Templates * and open the template in the editor. */ - //import argo.functions.calctimelines.TimelineMerger; - import argo.pojos.GroupFunctionTrends; import argo.pojos.ServiceTrends; -//import argo.pojos.Timeline; -import argo.profiles.AggregationProfileParser; +import argo.profiles.AggregationProfileManager; import argo.profiles.OperationsParser; import java.util.HashMap; import org.apache.flink.api.common.functions.GroupReduceFunction; @@ -23,39 +20,38 @@ * * @author cthermolia * - * CalcGroupFunctionFlipFlop, count status changes for each group function - * group + * CalcGroupFunctionFlipFlop, count status changes for each group function group */ public class CalcGroupFunctionFlipFlop implements GroupReduceFunction< ServiceTrends, GroupFunctionTrends> { private OperationsParser operationsParser; - private HashMap functionOperations; + private HashMap functionOperations; - public CalcGroupFunctionFlipFlop(OperationsParser operationsParser, AggregationProfileParser aggregationProfileParser ) { + public CalcGroupFunctionFlipFlop(OperationsParser operationsParser, AggregationProfileManager aggregationProfileParser) { this.operationsParser = operationsParser; - this.functionOperations=aggregationProfileParser.getFunctionOperations(); + this.functionOperations = aggregationProfileParser.retrieveGroupOperations(); + } @Override public void reduce(Iterable in, Collector< GroupFunctionTrends> out) throws Exception { String group = null; String function = null; - // ArrayList list = new ArrayList<>(); + // ArrayList list = new ArrayList<>(); //construct a timeline containing all the timestamps of each metric timeline - - HashMap timelist = new HashMap<>(); + HashMap timelist = new HashMap<>(); for (ServiceTrends time : in) { group = time.getGroup(); - function=time.getFunction(); - timelist.put(time.getService(),time.getTimeline()); + function = time.getFunction(); + timelist.put(time.getService(), time.getTimeline()); } - String operation=functionOperations.get(function); //for each function an operation exists , so retrieve the corresponding truth table + String operation = functionOperations.get(function); //for each function an operation exists , so retrieve the corresponding truth table TimelineAggregator timelineAggregator = new TimelineAggregator(timelist); - timelineAggregator.aggregate( operationsParser.getTruthTable(), operationsParser.getIntOperation(operation)); - - Timeline timeline= timelineAggregator.getOutput(); + timelineAggregator.aggregate(operationsParser.getTruthTable(), operationsParser.getIntOperation(operation)); + + Timeline timeline = timelineAggregator.getOutput(); int flipflops = timeline.calcStatusChanges(); GroupFunctionTrends groupFunctionTrends = new GroupFunctionTrends(group, function, timeline, flipflops); diff --git a/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcMetricFlipFlopTrends.java 
b/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcMetricFlipFlopTrends.java index 014aec3e..740226f6 100644 --- a/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcMetricFlipFlopTrends.java +++ b/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcMetricFlipFlopTrends.java @@ -8,9 +8,11 @@ import argo.avro.MetricData; //import argo.pojos.Timeline; import argo.pojos.MetricTrends; -import argo.profiles.AggregationProfileParser; +import argo.profiles.AggregationProfileManager; +import argo.profiles.EndpointGroupManager; +import argo.profiles.GroupGroupManager; import argo.profiles.OperationsParser; -import argo.profiles.TopologyEndpointParser; +import argo.profiles.TopologyGroupParser; import argo.utils.Utils; import java.util.ArrayList; import java.util.TreeMap; @@ -27,21 +29,19 @@ */ public class CalcMetricFlipFlopTrends implements GroupReduceFunction { - //private HashMap groupEndpoints; private final String format = "yyyy-MM-dd'T'HH:mm:ss'Z'"; - private TopologyEndpointParser topologyEndpointParser; - private AggregationProfileParser aggregationProfileParser; + private EndpointGroupManager topologyEndpointParser; + // private TopologyGroupParser topologyGroupParser; + private GroupGroupManager topologyGroupParser; + private AggregationProfileManager aggregationProfileParser; private OperationsParser operationsParser; private DateTime date; -// -// public CalcMetricFlipFlopTrends(HashMap groupEndpoints) { -// this.groupEndpoints = groupEndpoints; -// } - public CalcMetricFlipFlopTrends(OperationsParser operationsParser, TopologyEndpointParser topologyEndpointParser, AggregationProfileParser aggregationProfileParser, DateTime date) { + public CalcMetricFlipFlopTrends(OperationsParser operationsParser, EndpointGroupManager topologyEndpointParser, GroupGroupManager topologyGroupParser, AggregationProfileManager aggregationProfileParser, DateTime date) { this.topologyEndpointParser = topologyEndpointParser; this.aggregationProfileParser = aggregationProfileParser; this.operationsParser = operationsParser; + this.topologyGroupParser = topologyGroupParser; this.date = date; } @@ -55,29 +55,39 @@ public CalcMetricFlipFlopTrends(OperationsParser operationsParser, TopologyEndpo @Override public void reduce(Iterable in, Collector out) throws Exception { TreeMap timeStatusMap = new TreeMap<>(); - String group = null; + ArrayList groups = new ArrayList<>(); String hostname = null; String service = null; String metric = null; for (MetricData md : in) { hostname = md.getHostname().toString(); service = md.getService().toString(); + metric = md.getMetric().toString(); // group = groupEndpoints.get(md.getHostname().toString() + "-" + md.getService()); //retrieve the group for the service, as contained in file - group = topologyEndpointParser.retrieveGroup(aggregationProfileParser.getEndpointGroup().toUpperCase(), md.getHostname().toString() + "-" + md.getService().toString()); + String avProfileName = this.aggregationProfileParser.getAvProfileItem().getName(); + + // group = topologyEndpointParser.retrieveGroup(aggregationProfileParser.getProfileGroupType(avProfileName).toUpperCase(), md.getHostname().toString() + "-" + md.getService().toString()); + groups = topologyEndpointParser.getGroup(aggregationProfileParser.getProfileGroupType(avProfileName).toUpperCase(), md.getHostname().toString(), md.getService().toString()); int st = operationsParser.getIntStatus(md.getStatus().toString()); timeStatusMap.put(Utils.convertStringtoDate(format, 
md.getTimestamp().toString()), st); + } - + Timeline timeline = new Timeline(); timeline.insertDateTimeStamps(timeStatusMap); timeline.replacePreviousDateStatus(date, new ArrayList<>(operationsParser.getStates().keySet()));//handle the first timestamp to contain the previous days timestamp status if necessary and the last timestamp to contain the status of the last timelines's entry Integer flipflop = timeline.calcStatusChanges(); - - if (group != null && service != null && hostname != null && metric != null) { - MetricTrends metricTrends = new MetricTrends(group, service, hostname, metric, timeline, flipflop); - out.collect(metricTrends); + if (service != null && hostname != null && metric != null) { + for (String group : groups) { + + if (topologyGroupParser.checkSubGroup(group)) { + MetricTrends metricTrends = new MetricTrends(group, service, hostname, metric, timeline, flipflop); + out.collect(metricTrends); + } + } } - } + } + } diff --git a/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcServiceFlipFlop.java b/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcServiceFlipFlop.java index 57e91565..b86253fd 100644 --- a/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcServiceFlipFlop.java +++ b/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcServiceFlipFlop.java @@ -5,11 +5,9 @@ * To change this template file, choose Tools | Templates * and open the template in the editor. */ -//import argo.functions.calctimelines.TimelineMerger; import argo.pojos.EndpointTrends; import argo.pojos.ServiceTrends; -//import argo.pojos.Timeline; -import argo.profiles.AggregationProfileParser; +import argo.profiles.AggregationProfileManager; import argo.profiles.OperationsParser; import java.util.ArrayList; import java.util.HashMap; @@ -29,13 +27,14 @@ public class CalcServiceFlipFlop implements GroupReduceFunction< EndpointTrends, private HashMap> operationTruthTables; - private HashMap serviceOperationMap; + private HashMap serviceFunctionsMap; private OperationsParser operationsParser; - public CalcServiceFlipFlop(OperationsParser operationsParser, AggregationProfileParser aggregationProfileParser) { + public CalcServiceFlipFlop(OperationsParser operationsParser, AggregationProfileManager aggregationProfileParser) { // this.operationTruthTables = operationParser.getOpTruthTable(); this.operationsParser = operationsParser; - this.serviceOperationMap = aggregationProfileParser.getServiceOperations(); + this.serviceFunctionsMap = aggregationProfileParser.retrieveServiceOperations(); + } @Override @@ -46,7 +45,6 @@ public void reduce(Iterable in, Collector< ServiceTrends> out) t ArrayList list = new ArrayList<>(); //construct a timeline containing all the timestamps of each metric timeline - HashMap timelineList = new HashMap<>(); for (EndpointTrends endpointTrend : in) { @@ -54,16 +52,13 @@ public void reduce(Iterable in, Collector< ServiceTrends> out) t service = endpointTrend.getService(); timelineList.put(endpointTrend.getEndpoint(), endpointTrend.getTimeline()); } - String operation = serviceOperationMap.get(service); - + String operation = serviceFunctionsMap.get(service); TimelineAggregator timelineAggregator = new TimelineAggregator(timelineList); timelineAggregator.aggregate(operationsParser.getTruthTable(), operationsParser.getIntOperation(operation)); -/// HashMap opTruthTable = operationTruthTables.get(operation); Timeline timeline = timelineAggregator.getOutput(); int flipflops = timeline.calcStatusChanges(); - if 
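The per-metric timeline construction above reduces to a few calls; a compact sketch using the same classes and padding step as the hunk (the samples map and parser are assumed inputs):

    import java.util.ArrayList;
    import java.util.TreeMap;
    import argo.profiles.OperationsParser;
    import org.joda.time.DateTime;
    import timelines.Timeline;

    public class MetricTimelineSketch {
        // Build one metric timeline from (timestamp, status-int) samples; the
        // first entry is padded with the previous day's status before counting
        // flip-flops, exactly what replacePreviousDateStatus() does above.
        static int flipFlops(TreeMap<DateTime, Integer> samples,
                OperationsParser ops, DateTime date) {
            Timeline timeline = new Timeline();
            timeline.insertDateTimeStamps(samples);
            timeline.replacePreviousDateStatus(date,
                    new ArrayList<>(ops.getStates().keySet()));
            return timeline.calcStatusChanges();
        }
    }

The diff continues: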
(group != null && service != null) { ServiceTrends serviceTrends = new ServiceTrends(group, service, timeline, flipflops); out.collect(serviceTrends); diff --git a/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcStatusTrends.java b/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcStatusTrends.java index 42fc3f0c..a040ff44 100644 --- a/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcStatusTrends.java +++ b/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcStatusTrends.java @@ -6,19 +6,15 @@ * and open the template in the editor. */ import argo.avro.MetricData; -import argo.profiles.AggregationProfileParser; -import argo.profiles.TopologyEndpointParser; +import argo.profiles.AggregationProfileManager; +import argo.profiles.EndpointGroupManager; import java.text.ParseException; -import java.util.HashMap; -import java.util.TreeMap; +import java.util.ArrayList; import org.apache.flink.api.common.functions.GroupReduceFunction; import org.apache.flink.api.java.tuple.Tuple6; import org.apache.flink.util.Collector; /** - * - * @author cthermolia - * * CalcServiceEnpointMetricStatus, for each service endpoint metric group , * keeps count for each status (CRITICAL,WARNING,UNKNOW) appearance and returns * the group information (group, service,hostname, metric, status, @@ -26,19 +22,16 @@ */ public class CalcStatusTrends implements GroupReduceFunction> { - //private HashMap groupEndpoints; - private TopologyEndpointParser topologyEndpointParser; - private AggregationProfileParser aggregationProfileParser; -// public CalcStatusTrends(HashMap groupEndpoints) { -// this.groupEndpoints = groupEndpoints; -// } + private EndpointGroupManager topologyEndpointParser; + private AggregationProfileManager aggregationProfileParser; - public CalcStatusTrends(TopologyEndpointParser topologyEndpointParser,AggregationProfileParser aggregationProfileParser) { + + public CalcStatusTrends(EndpointGroupManager topologyEndpointParser, AggregationProfileManager aggregationProfileParser) { this.topologyEndpointParser = topologyEndpointParser; - this.aggregationProfileParser=aggregationProfileParser; + this.aggregationProfileParser = aggregationProfileParser; + } - /** * for each service endpoint metric Iterable check the MetricData status , * keep counter for each status(CRITICAL, WARNING,UNKNOWN) and provide @@ -48,12 +41,10 @@ public CalcStatusTrends(TopologyEndpointParser topologyEndpointParser,Aggregatio * @param out, the collection of 3 Tuple6 for each Iterable (one for each * status) that keeps info for group ,service, hostname, metric, status, and * status counter - * @throws Exception */ @Override public void reduce(Iterable in, Collector> out) throws ParseException { - TreeMap timeStatusMap = new TreeMap<>(); - String group = null; + ArrayList groups = new ArrayList<>(); String hostname = null; String service = null; String status = null; @@ -62,28 +53,33 @@ public void reduce(Iterable in, Collector tupleCritical = new Tuple6( group, service, hostname, metric, "CRITICAL", criticalSum); out.collect(tupleCritical); @@ -95,6 +91,7 @@ public void reduce(Iterable in, Collector tupleUnknown = new Tuple6( group, service, hostname, metric, "UNKNOWN", unknownSum); out.collect(tupleUnknown); + } } } diff --git a/flink_jobs/status_trends/src/main/java/argo/pojos/EndpointTrends.java b/flink_jobs/status_trends/src/main/java/argo/pojos/EndpointTrends.java index 4873126e..e13c4c73 100644 --- 
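The counting half of CalcStatusTrends can be read in isolation; a fragment-style sketch (group, service, hostname and metric are assumed to be extracted by the surrounding reduce, as in the hunk above):

    import argo.avro.MetricData;
    import org.apache.flink.api.java.tuple.Tuple6;
    import org.apache.flink.util.Collector;

    public class StatusCountSketch {
        // Count CRITICAL/WARNING/UNKNOWN appearances for one endpoint-metric
        // group and emit one Tuple6 per status, mirroring CalcStatusTrends.
        static void count(Iterable<MetricData> in, String group, String service,
                String hostname, String metric,
                Collector<Tuple6<String, String, String, String, String, Integer>> out) {
            int critical = 0, warning = 0, unknown = 0;
            for (MetricData md : in) {
                String status = md.getStatus().toString();
                if (status.equals("CRITICAL")) critical++;
                else if (status.equals("WARNING")) warning++;
                else if (status.equals("UNKNOWN")) unknown++;
            }
            out.collect(new Tuple6<>(group, service, hostname, metric, "CRITICAL", critical));
            out.collect(new Tuple6<>(group, service, hostname, metric, "WARNING", warning));
            out.collect(new Tuple6<>(group, service, hostname, metric, "UNKNOWN", unknown));
        }
    }

The diff continues: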
a/flink_jobs/status_trends/src/main/java/argo/pojos/EndpointTrends.java +++ b/flink_jobs/status_trends/src/main/java/argo/pojos/EndpointTrends.java @@ -5,14 +5,11 @@ */ package argo.pojos; -import java.util.Date; -import java.util.TreeMap; import timelines.Timeline; + /** - * - * @author cthermolia - * - * EndpointTrends, describes the computed trend information extracted from the timelines at the level of group service endpoints groups + * EndpointTrends, describes the computed trend information extracted from the + * timelines at the level of group service endpoints groups */ public class EndpointTrends { @@ -73,6 +70,4 @@ public void setFlipflops(Integer flipflops) { this.flipflops = flipflops; } - - } diff --git a/flink_jobs/status_trends/src/main/java/argo/pojos/GroupFunctionTrends.java b/flink_jobs/status_trends/src/main/java/argo/pojos/GroupFunctionTrends.java index 29d3d6dd..da4506f2 100644 --- a/flink_jobs/status_trends/src/main/java/argo/pojos/GroupFunctionTrends.java +++ b/flink_jobs/status_trends/src/main/java/argo/pojos/GroupFunctionTrends.java @@ -4,13 +4,12 @@ * and open the template in the editor. */ package argo.pojos; + import timelines.Timeline; + /** - * - * @author cthermolia - * - * GroupFunctionTrends, describes the computed trend information extracted from the - * timelines at the level of group function groups + * GroupFunctionTrends, describes the computed trend information extracted from + * the timelines at the level of group function groups */ public class GroupFunctionTrends { @@ -61,5 +60,4 @@ public void setFlipflops(Integer flipflops) { this.flipflops = flipflops; } - } diff --git a/flink_jobs/status_trends/src/main/java/argo/pojos/GroupTrends.java b/flink_jobs/status_trends/src/main/java/argo/pojos/GroupTrends.java index b7d884de..024af24d 100644 --- a/flink_jobs/status_trends/src/main/java/argo/pojos/GroupTrends.java +++ b/flink_jobs/status_trends/src/main/java/argo/pojos/GroupTrends.java @@ -4,12 +4,12 @@ * and open the template in the editor. */ package argo.pojos; + import timelines.Timeline; + /** - * - * @author cthermolia MetricTrends, describes the computed trend information - * extracted from the set of the timelines at the level of group service - * endpoints metrics groups * + * MetricTrends, describes the computed trend information extracted from the set + * of the timelines at the level of group service endpoints metrics groups * */ public class GroupTrends { diff --git a/flink_jobs/status_trends/src/main/java/argo/pojos/MetricTrends.java b/flink_jobs/status_trends/src/main/java/argo/pojos/MetricTrends.java index a851a3c6..c2058eed 100644 --- a/flink_jobs/status_trends/src/main/java/argo/pojos/MetricTrends.java +++ b/flink_jobs/status_trends/src/main/java/argo/pojos/MetricTrends.java @@ -4,15 +4,14 @@ * and open the template in the editor. 
*/ package argo.pojos; + import timelines.Timeline; /** - * - * @author cthermolia - * MetricTrends, describes the computed trend information extracted from the set of the timelines at the level of group service endpoints metrics groups - + * MetricTrends, describes the computed trend information extracted from the set + * of the timelines at the level of group service endpoints metrics groups */ -public class MetricTrends{ +public class MetricTrends { String group; String service; @@ -24,17 +23,8 @@ public class MetricTrends{ public MetricTrends() { } -// public MetricTrends(String group, String service, String endpoint, String metric, Timeline timeline, Integer flipflops) { -// this.group = group; -// this.service = service; -// this.endpoint = endpoint; -// this.metric = metric; -// this.timeline = timeline; -// this.flipflops = flipflops; -// } - public MetricTrends(String group, String service, String hostname, String metric, timelines.Timeline timeline, Integer flipflop) { - this.group = group; + this.group = group; this.service = service; this.endpoint = hostname; this.metric = metric; diff --git a/flink_jobs/status_trends/src/main/java/argo/pojos/ServiceTrends.java b/flink_jobs/status_trends/src/main/java/argo/pojos/ServiceTrends.java index 53248c2a..fcb58100 100644 --- a/flink_jobs/status_trends/src/main/java/argo/pojos/ServiceTrends.java +++ b/flink_jobs/status_trends/src/main/java/argo/pojos/ServiceTrends.java @@ -4,12 +4,12 @@ * and open the template in the editor. */ package argo.pojos; + import timelines.Timeline; + /** - * - * @author cthermolia - * - * ServiceTrends, describes the computed trend information extracted from the timelines at the level of group service endpoints groups + * ServiceTrends, describes the computed trend information extracted from the + * timelines at the level of group service endpoints groups */ public class ServiceTrends { @@ -22,7 +22,6 @@ public class ServiceTrends { public ServiceTrends() { } - public ServiceTrends(String group, String service, Timeline timeline, Integer flipflops) { this.group = group; this.service = service; @@ -70,6 +69,4 @@ public void setFunction(String function) { this.function = function; } - - } diff --git a/flink_jobs/status_trends/src/main/java/argo/profiles/AggregationProfileManager.java b/flink_jobs/status_trends/src/main/java/argo/profiles/AggregationProfileManager.java new file mode 100644 index 00000000..2802bc9e --- /dev/null +++ b/flink_jobs/status_trends/src/main/java/argo/profiles/AggregationProfileManager.java @@ -0,0 +1,605 @@ +package argo.profiles; + +import org.apache.commons.io.IOUtils; +import org.apache.log4j.Logger; + +import java.io.BufferedReader; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileReader; +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; + +import com.google.gson.JsonArray; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import com.google.gson.JsonParseException; +import com.google.gson.JsonParser; +import java.io.Serializable; +import java.util.Map.Entry; + +/** + * AggregationProfileManager class implements objects that store the information + * parsed from a json object containing aggregation profile data or loaded from + * an json file + * + * The AggregationProfileManager keeps info of a list of AvProfileItem objects + * each one representing the aggregation profile information that corresponds to + * a specific client's report + */ 
+public class AggregationProfileManager implements Serializable { + + private HashMap list; + private static final Logger LOG = Logger.getLogger(AggregationProfileManager.class.getName()); + private HashMap profileNameIdMap; + + public AggregationProfileManager() { + + this.list = new HashMap(); + this.profileNameIdMap = new HashMap(); + + } + + public class AvProfileItem implements Serializable { + + private String id; + private String name; + private String namespace; + private String metricProfile; + private String metricOp; + private String groupType; + private String op; + + private HashMap groups; // a map of function(group) as key and a ServGroupItem as value. ServGroupItem keeps info of each service, operation pair in the profile data + private HashMap serviceIndex; // a map of service as key and an ArrayList of functions(groups) as value that correspond to the specific service + private HashMap serviceOperations; //a map of service as key and operation as value + private HashMap groupOperations; //a map of function(group) as key and operation as value, corresponding to the specific group + + AvProfileItem() { + this.groups = new HashMap(); + this.serviceIndex = new HashMap(); + this.serviceOperations = new HashMap(); + this.groupOperations = new HashMap(); + } + + public class ServGroupItem implements Serializable { + + String op; + HashMap services; + + ServGroupItem(String op) { + this.op = op; + this.services = new HashMap(); + } + } + + /** + * creates a ServGroupItem object and keeps in a map the pair of group, + * ServGroupItem + * + * @param group, a function e.g "compute" + * @param op , the operation that corresponds to the function e.g "AND" + */ + public void insertGroup(String group, String op) { + if (!this.groups.containsKey(group)) { + this.groups.put(group, new ServGroupItem(op)); + } + } + + /** + * keeps in a map for each function the pair of service, operation + * + * @param group , a function e.g "compute" + * @param service , a service e.g "ARC-CE" + * @param op , an operation e.g "AND" + */ + public void insertService(String group, String service, String op) { + if (this.groups.containsKey(group)) { + this.groups.get(group).services.put(service, op); + + this.serviceIndex.put(service, group); + } + } + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getNamespace() { + return namespace; + } + + public void setNamespace(String namespace) { + this.namespace = namespace; + } + + public String getMetricProfile() { + return metricProfile; + } + + public void setMetricProfile(String metricProfile) { + this.metricProfile = metricProfile; + } + + public String getMetricOp() { + return metricOp; + } + + public void setMetricOp(String metricOp) { + this.metricOp = metricOp; + } + + public String getGroupType() { + return groupType; + } + + public void setGroupType(String groupType) { + this.groupType = groupType; + } + + public String getOp() { + return op; + } + + public void setOp(String op) { + this.op = op; + } + + public HashMap getGroups() { + return groups; + } + + public void setGroups(HashMap groups) { + this.groups = groups; + } + + public HashMap getServiceIndex() { + return serviceIndex; + } + + public void setServiceIndex(HashMap serviceIndex) { + this.serviceIndex = serviceIndex; + } + + public HashMap getServiceOperations() { + return serviceOperations; + } + + public void 
setServiceOperations(HashMap serviceOperations) { + this.serviceOperations = serviceOperations; + } + + public HashMap getGroupOperations() { + return groupOperations; + } + + public void setGroupOperationss(HashMap groupOperations) { + this.groupOperations = groupOperations; + } + + } + + /** + * clears the list containing the AvProfileItem + */ + public void clearProfiles() { + this.list.clear(); + } + + /** + * returns the profile operation as exists in the profile data field + * profile_operation + * + * @param avProfile , the name of the aggregation profile name + * @return , the profile operation + */ + public String getTotalOp(String avProfile) { + if (this.list.containsKey(avProfile)) { + return this.list.get(avProfile).op; + } + + return ""; + } + + /** + * returns the metric operation as exists in the profile data field + * metric_operation returns the metric operation as exists in the profile + * data field metric_operation + * + * @param avProfile + * @return + */ + public String getMetricOp(String avProfile) { + if (this.list.containsKey(avProfile)) { + return this.list.get(avProfile).metricOp; + } + + return ""; + } + + /** + * Return the available Group Names of a profile + * + * @param avProfile , the profile's name + * @return , a list with the groups (functions) + */ + // Return the available Group Names of a profile + public ArrayList getProfileGroups(String avProfile) { + + if (this.list.containsKey(avProfile)) { + ArrayList result = new ArrayList(); + Iterator groupIterator = this.list.get(avProfile).groups.keySet().iterator(); + + while (groupIterator.hasNext()) { + result.add(groupIterator.next()); + } + + return result; + } + + return null; + } + + /** + * Return the available group operation + * + * @param avProfile , the profile's name + * @param groupName , the group + * @return the operation corresponding to the group + */ + public String getProfileGroupOp(String avProfile, String groupName) { + if (this.list.containsKey(avProfile)) { + if (this.list.get(avProfile).groups.containsKey(groupName)) { + return this.list.get(avProfile).groups.get(groupName).op; + } + } + + return null; + } + + /** + * Return the available services for the group + * + * @param avProfile , the profile's name + * @param groupName , the group + * @return a list containing the services corresponding to the group + */ + public ArrayList getProfileGroupServices(String avProfile, String groupName) { + if (this.list.containsKey(avProfile)) { + if (this.list.get(avProfile).groups.containsKey(groupName)) { + ArrayList result = new ArrayList(); + Iterator srvIterator = this.list.get(avProfile).groups.get(groupName).services.keySet() + .iterator(); + + while (srvIterator.hasNext()) { + result.add(srvIterator.next()); + } + + return result; + } + } + + return null; + } + + public String getProfileGroupServiceOp(String avProfile, String groupName, String service) { + + if (this.list.containsKey(avProfile)) { + if (this.list.get(avProfile).groups.containsKey(groupName)) { + if (this.list.get(avProfile).groups.get(groupName).services.containsKey(service)) { + return this.list.get(avProfile).groups.get(groupName).services.get(service); + } + } + } + + return null; + } + + public ArrayList getAvProfiles() { + + if (this.list.size() > 0) { + ArrayList result = new ArrayList(); + Iterator avpIterator = this.list.keySet().iterator(); + while (avpIterator.hasNext()) { + result.add(avpIterator.next()); + } + + return result; + + } + + return null; + } + + public String getProfileNamespace(String avProfile) 
{ + + if (this.list.containsKey(avProfile)) { + return this.list.get(avProfile).namespace; + } + + return null; + } + + public String getProfileMetricProfile(String avProfile) { + + if (this.list.containsKey(avProfile)) { + return this.list.get(avProfile).metricProfile; + } + + return null; + } + + public String getProfileGroupType(String avProfile) { + + if (this.list.containsKey(avProfile)) { + return this.list.get(avProfile).groupType; + } + + return null; + } + + public String getGroupByService(String avProfile, String service) { + + if (this.list.containsKey(avProfile)) { + + return this.list.get(avProfile).serviceIndex.get(service); + + } + return null; + + } + + public boolean checkService(String avProfile, String service) { + + if (this.list.containsKey(avProfile)) { + + if (this.list.get(avProfile).serviceIndex.containsKey(service)) { + return true; + } + + } + return false; + + } + + public void loadJson(File jsonFile) throws IOException { + + BufferedReader br = null; + try { + + br = new BufferedReader(new FileReader(jsonFile)); + + JsonParser json_parser = new JsonParser(); + JsonElement j_element = json_parser.parse(br); + readJson(j_element); + } catch (FileNotFoundException ex) { + LOG.error("Could not open file:" + jsonFile.getName()); + throw ex; + + } catch (JsonParseException ex) { + LOG.error("File is not valid json:" + jsonFile.getName()); + throw ex; + } finally { + // Close quietly without exceptions the buffered reader + IOUtils.closeQuietly(br); + } + + } + + public void loadJsonString(List apsJson) throws IOException { + + try { + + JsonParser jsonParser = new JsonParser(); + JsonElement jRootElement = jsonParser.parse(apsJson.get(0)); + readJson(jRootElement); + + } catch (JsonParseException ex) { + LOG.error("Contents are not valid json"); + throw ex; + } + + } + + /** + * reads from a JsonElement and stores the necessary information to the + * OperationsManager object fields + * + * @param j_element , a JsonElement containing the operations profiles data + */ + public void readJson(JsonElement j_element) { + JsonObject jRootObj = j_element.getAsJsonObject(); + + // Create new entry for this availability profile + AvProfileItem tmpAvp = new AvProfileItem(); + + JsonArray apGroups = jRootObj.getAsJsonArray("groups"); + tmpAvp.id = jRootObj.get("id").getAsString(); + tmpAvp.name = jRootObj.get("name").getAsString(); + tmpAvp.namespace = jRootObj.get("namespace").getAsString(); + tmpAvp.metricProfile = jRootObj.get("metric_profile").getAsJsonObject().get("name").getAsString(); + tmpAvp.metricOp = jRootObj.get("metric_operation").getAsString(); + tmpAvp.groupType = jRootObj.get("endpoint_group").getAsString(); + tmpAvp.op = jRootObj.get("profile_operation").getAsString(); + + for (JsonElement item : apGroups) { + // service name + JsonObject itemObj = item.getAsJsonObject(); + String itemName = itemObj.get("name").getAsString(); + String itemOp = itemObj.get("operation").getAsString(); + tmpAvp.groupOperations.put(itemName, itemOp); + JsonArray itemServices = itemObj.get("services").getAsJsonArray(); + tmpAvp.insertGroup(itemName, itemOp); + + for (JsonElement subItem : itemServices) { + JsonObject subObj = subItem.getAsJsonObject(); + String serviceName = subObj.get("name").getAsString(); + String serviceOp = subObj.get("operation").getAsString(); + tmpAvp.insertService(itemName, serviceName, serviceOp); + } + + } + buildExtraData(tmpAvp); + + // Add profile to the list + this.list.put(tmpAvp.name, tmpAvp); + this.profileNameIdMap.put(tmpAvp.id, tmpAvp.name); + + 
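For reference, a minimal invented profile in the exact shape readJson() consumes, loaded through loadJsonString() (all values are illustrative):

    import java.util.Collections;
    import argo.profiles.AggregationProfileManager;

    public class ApsLoadSketch {
        public static void main(String[] args) throws java.io.IOException {
            // every field below is one that readJson() reads from the root object
            String aps = "{\"id\":\"00-11\",\"name\":\"test-ap\",\"namespace\":\"test\","
                    + "\"metric_profile\":{\"name\":\"test-mp\"},\"metric_operation\":\"AND\","
                    + "\"endpoint_group\":\"SITES\",\"profile_operation\":\"AND\","
                    + "\"groups\":[{\"name\":\"compute\",\"operation\":\"OR\","
                    + "\"services\":[{\"name\":\"ARC-CE\",\"operation\":\"OR\"}]}]}";
            AggregationProfileManager apm = new AggregationProfileManager();
            apm.loadJsonString(Collections.singletonList(aps));
            apm.checkServiceExistance("ARC-CE");   // -> true
            apm.retrieveGroupOperation("compute"); // -> "OR"
            apm.retrieveProfileOperation();        // -> "AND"
        }
    }

The diff continues: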
} + + /** + * populates extra information by processing the parsed aggregation profile + * data + * + * @param item, the AvProfileItem created while parsing the aggregation + * profile data input + */ + private void buildExtraData(AvProfileItem item) { + + HashMap groupOperations = new HashMap<>(); + + HashMap serviceOperations = new HashMap<>(); + HashMap> serviceFunctions = new HashMap<>(); + + for (String group : item.groups.keySet()) { + AvProfileItem.ServGroupItem servGroupItem = item.groups.get(group); + + if (groupOperations.containsKey(group)) { + groupOperations.put(group, servGroupItem.op); + } + + for (Entry entry : servGroupItem.services.entrySet()) { + serviceOperations.put(entry.getKey(), entry.getValue()); + + ArrayList functions = new ArrayList<>(); + if (serviceFunctions.containsKey(entry.getKey())) { + functions = serviceFunctions.get(entry.getKey()); + } + functions.add(group); + serviceFunctions.put(entry.getKey(), functions); + } + } + item.serviceOperations = serviceOperations; + // item.serviceFunctions = serviceFunctions; + } + + /** + * Return the available function that correspond to the service input + * + * @param service a service + * @return a function correspondig to the service + */ + public String retrieveFunctionsByService(String service) { + + AvProfileItem item = getAvProfileItem(); + return item.serviceIndex.get(service); + } + + /** + * Return a map containing all the pairs of service , operation that appear + * in the aggregation profile data + * + * @return the map containing the service , operation pairs + */ + public HashMap retrieveServiceOperations() { + + AvProfileItem item = getAvProfileItem(); + return item.serviceOperations; + } + + /** + * Return a map containing all the pairs of service , group (function) that + * appear in the aggregation profile data + * + * @return a map containing service, group (function) pairs + */ + public HashMap retrieveServiceFunctions() { + + AvProfileItem item = getAvProfileItem(); + return item.serviceIndex; + } + + /** + * Checks the service, group (function) pairs for the existance of the + * service input + * + * @param service a service + * @return true if the service exist in the map , false elsewhere + */ + public boolean checkServiceExistance(String service) { + AvProfileItem item = getAvProfileItem(); + + return item.serviceIndex.keySet().contains(service); + } + + public AvProfileItem getAvProfileItem() { + String name = this.list.keySet().iterator().next(); + return this.list.get(name); + + } + + /** + * Return the profile's metric operation as exist in the aggregation profile + * data field : metric_operation + * + * @return the profile's metric operation + */ + public String getMetricOpByProfile() { + AvProfileItem item = getAvProfileItem(); + + return item.metricOp; + + } + + /** + * Return a map containing all the pairs of group (function) , operation + * that appear in the aggregation profile data + * + * @return the map containing all the pairs of group (function), operation + */ + public HashMap retrieveGroupOperations() { + + AvProfileItem item = getAvProfileItem(); + + return item.groupOperations; + } + + /** + * Return the operation that correspond to the group input + * + * @param group , agroup (function) + * @return the operation corresponding to the group + */ + public String retrieveGroupOperation(String group) { + AvProfileItem item = getAvProfileItem(); + + return item.groups.get(group).op; + } + + /** + * Return the profile's profile_operation + * + * @return the profile's operation 
+ */ + public String retrieveProfileOperation() { + AvProfileItem item = getAvProfileItem(); + + return item.op; + } + +} diff --git a/flink_jobs/status_trends/src/main/java/argo/profiles/AggregationProfileParser.java b/flink_jobs/status_trends/src/main/java/argo/profiles/AggregationProfileParser.java index 09932450..588df5ce 100644 --- a/flink_jobs/status_trends/src/main/java/argo/profiles/AggregationProfileParser.java +++ b/flink_jobs/status_trends/src/main/java/argo/profiles/AggregationProfileParser.java @@ -16,8 +16,7 @@ import org.json.simple.parser.ParseException; /** - * - * @author cthermolia AggregationProfileParser, collects data as described in + * AggregationProfileParser, collects data as described in * the json received from web api aggregation profiles request */ @@ -104,7 +103,7 @@ public void readApiRequestResult(){ if (serviceFunctions.get(servicename) != null) { serviceFunctionList = serviceFunctions.get(servicename); } - serviceFunctionList.add(groupname); + serviceFunctionList.add(serviceoperation); serviceFunctions.put(servicename, serviceFunctionList); } groups.add(new GroupOps(groupname, groupoperation, services)); diff --git a/flink_jobs/status_trends/src/main/java/argo/profiles/EndpointGroupManager.java b/flink_jobs/status_trends/src/main/java/argo/profiles/EndpointGroupManager.java new file mode 100644 index 00000000..e3fb2663 --- /dev/null +++ b/flink_jobs/status_trends/src/main/java/argo/profiles/EndpointGroupManager.java @@ -0,0 +1,477 @@ +package argo.profiles; + + +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.TreeMap; +import java.util.Map.Entry; + +import org.apache.avro.Schema; +import org.apache.avro.file.DataFileReader; +import org.apache.avro.generic.GenericData; +import org.apache.avro.generic.GenericDatumReader; +import org.apache.avro.generic.GenericRecord; +import org.apache.avro.io.DatumReader; +import org.apache.avro.util.Utf8; +import org.apache.commons.io.IOUtils; +import org.apache.log4j.Logger; + +import argo.avro.GroupEndpoint; +import com.google.gson.JsonArray; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import java.io.Serializable; +import java.util.Arrays; +import java.util.Map; +import java.util.Objects; + +/** + * + * + * EndpointGroupManager class implements objects that store the information + * parsed from a json object containing topology group-endpoint profile data or + * loaded from an avro file + * + * The EndpointGroupManager keeps info of a list of EndpointItem objects each + * one representing the current topology information that corresponds to a + * specific client's report or the topology information that is valid for a + * given date , for a specific client's report + * + * Also stores a list of EndpointItem objects that are filtered on the tags + * information, given some criteria on the tags + */ +public class EndpointGroupManager implements Serializable { + + private static final Logger LOG = Logger.getLogger(EndpointGroupManager.class.getName()); + + private ArrayList list; // the list of EndpointItems that are included in the profile data + private ArrayList fList; // the list of the filter EndpointItems that correspond to the criteria + private HashMap> topologyEndpoint = new HashMap<>(); + + //* A EndpointItem class implements an object containing info of an group endpoint included in the topology + protected class EndpointItem implements Serializable { // the object + + String type; // 
type of group + String group; // name of the group + String service; // type of the service + String hostname; // name of host + HashMap tags; // Tag list + + public EndpointItem() { + // Initializations + this.type = ""; + this.group = ""; + this.service = ""; + this.hostname = ""; + this.tags = new HashMap(); + } + + public EndpointItem(String type, String group, String service, String hostname, HashMap tags) { + this.type = type; + this.group = group; + this.service = service; + this.hostname = hostname; + this.tags = tags; + + } + + @Override + public int hashCode() { + int hash = 7; + hash = 17 * hash + Objects.hashCode(this.type); + hash = 17 * hash + Objects.hashCode(this.group); + hash = 17 * hash + Objects.hashCode(this.service); + hash = 17 * hash + Objects.hashCode(this.hostname); + hash = 17 * hash + Objects.hashCode(this.tags); + return hash; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + final EndpointItem other = (EndpointItem) obj; + if (!Objects.equals(this.type, other.type)) { + return false; + } + if (!Objects.equals(this.group, other.group)) { + return false; + } + if (!Objects.equals(this.service, other.service)) { + return false; + } + if (!Objects.equals(this.hostname, other.hostname)) { + return false; + } + if (!Objects.equals(this.tags, other.tags)) { + return false; + } + return true; + } + + } + + public EndpointGroupManager() { + this.list = new ArrayList<>(); + this.fList = new ArrayList<>(); + + } + + /** + * Inserts into the list a new EndpointItem , that contains the information + * of a group endpoint of the topology + * + * @param type the type of the endpoint group + * @param group the group of the endpoint group + * @param service the service of the endpoint group + * @param hostname the hostname of the endpoint group + * @param tags the tags of the endpoint group + * @return 0 if the item is inserted successfully to the list of the + * EndpointItems + */ + public int insert(String type, String group, String service, String hostname, HashMap tags) { + EndpointItem new_item = new EndpointItem(type, group, service, hostname, tags); + this.list.add(new_item); + return 0; // All good + } + + /** + * Checks the filtered list if group endpoint with a hostname , service + * exists + * + * @param hostname + * @param service + * @return true if the group endpoint exists, else elsewhere + */ + public boolean checkEndpoint(String hostname, String service) { + + for (EndpointItem item : fList) { + if (item.hostname.equals(hostname) && item.service.equals(service)) { + return true; + } + } + + return false; + } + + /** + * Returns the list of groups each one containing the group endpoint with + * the specific info + * + * @param type the type + * @param hostname the hostname + * @param service the service + * @return a list of groups that contain the group endpoint with the + * specific type, hostname, service + */ + public ArrayList getGroup(String type, String hostname, String service) { + + ArrayList results = new ArrayList(); + + for (EndpointItem item : fList) { + if (item.type.equals(type) && item.hostname.equals(hostname) && item.service.equals(service)) { + results.add(item.group); + } + } + + return results; + } + + /** + * Searches the tags of a group endpoint with the specific group, type, + * hostname, service and if the tags map contains keys with "info." 
in them + * then it returns the values combinations + * + * @param group the group + * @param type the type + * @param hostname the hostname + * @param service the service + * @return a string where all values of a "info." tag are combined and + * represented + */ + public String getInfo(String group, String type, String hostname, String service) { + String info = ""; + boolean first = true; + HashMap tags = this.getGroupTags(group, type, hostname, service); + + if (tags == null) { + return info; + } + + for (String tName : tags.keySet()) { + + if (tName.startsWith("info.")) { + + String infoName = tName.replaceFirst("info.", ""); + + String value = tags.get(tName); + + if (!value.equalsIgnoreCase("")) { + + if (!first) { + info = info + ","; + } else { + first = false; + } + info = info + infoName + ":" + value; + + } + } + } + return info; + } + + /** + * Finds in the filtered EndpointItem fList a EndpointItem with specific + * group, type. service, hostname and return the tags if it is found + * + * @param group a group + * @param type a type + * @param hostname a hostname + * @param service a service + * @return a map of tags for a specific group endpoint or null if there are + * no tags + */ + public HashMap getGroupTags(String group, String type, String hostname, String service) { + + for (EndpointItem item : fList) { + if (item.group.equals(group) && item.type.equals(type) && item.hostname.equals(hostname) && item.service.equals(service)) { + return item.tags; + } + } + + return null; + } + + /** + * counts the items in the filtered list + * + * @return + */ + public int count() { + return this.fList.size(); + } + /** + * Clear the filtered fList and initialize with all the EndpointItems the list includes + */ + public void unfilter() { + this.fList.clear(); + for (EndpointItem item : this.list) { + this.fList.add(item); + } + } +/** + * Applies filter on the tags of the EndpointItems included in the list, based on a map of criteria + * each criteria containing a pair of the tags field name and the value it searches . e.g ("monitored","1") searches tags + * with the specific pair of criteria + * @param fTags a map of criteria + */ + public void filter(TreeMap fTags) { + this.fList.clear(); + boolean trim; + for (EndpointItem item : this.list) { + trim = false; + HashMap itemTags = item.tags; + for (Entry fTagItem : fTags.entrySet()) { + + if (itemTags.containsKey(fTagItem.getKey())) { + // First Check binary tags as Y/N 0/1 + + if (fTagItem.getValue().equalsIgnoreCase("y") || fTagItem.getValue().equalsIgnoreCase("n")) { + String binValue = ""; + if (fTagItem.getValue().equalsIgnoreCase("y")) { + binValue = "1"; + } + if (fTagItem.getValue().equalsIgnoreCase("n")) { + binValue = "0"; + } + + if (itemTags.get(fTagItem.getKey()).equalsIgnoreCase(binValue) == false) { + trim = true; + } + } else if (itemTags.get(fTagItem.getKey()).equalsIgnoreCase(fTagItem.getValue()) == false) { + trim = true; + } + + } + } + + if (trim == false) { + fList.add(item); + } + } + } + + /** + * Loads endpoint grouping information from an avro file + *
+ * <p>
+ * This method loads endpoint grouping information contained in an .avro
+ * file with specific avro schema.
+ *
+ * <p>
+ * The following fields are expected to be found in each avro row:
+ * <ol>
+ * <li>type: string (describes the type of grouping)</li>
+ * <li>group: string</li>
+ * <li>service: string</li>
+ * <li>hostname: string</li>
+ * <li>tags: hashmap (contains a map of arbitrary key values)</li>
+ * </ol>
+ * + * @param avroFile a File object of the avro file that will be opened + * @throws IOException if there is an error during opening of the avro file + */ + @SuppressWarnings("unchecked") + public void loadAvro(File avroFile) throws IOException { + + // Prepare Avro File Readers + DatumReader datumReader = new GenericDatumReader(); + DataFileReader dataFileReader = null; + try { + dataFileReader = new DataFileReader(avroFile, datumReader); + + // Grab Avro schema + Schema avroSchema = dataFileReader.getSchema(); + + // Generate 1st level generic record reader (rows) + GenericRecord avroRow = new GenericData.Record(avroSchema); + + // For all rows in file repeat + while (dataFileReader.hasNext()) { + // read the row + avroRow = dataFileReader.next(avroRow); + HashMap tagMap = new HashMap(); + + // Generate 2nd level generic record reader (tags) + HashMap tags = (HashMap) (avroRow.get("tags")); + + if (tags != null) { + for (Utf8 item : tags.keySet()) { + tagMap.put(item.toString(), String.valueOf(tags.get(item))); + } + } + + // Grab 1st level mandatory fields + String type = avroRow.get("type").toString(); + String group = avroRow.get("group").toString(); + String service = avroRow.get("service").toString(); + String hostname = avroRow.get("hostname").toString(); + + // Insert data to list + this.insert(type, group, service, hostname, tagMap); +// populateTopologyEndpoint(hostname, service, type, group); + } // end of avro rows + + this.unfilter(); + + } catch (IOException ex) { + LOG.error("Could not open avro file:" + avroFile.getName()); + throw ex; + } finally { + // Close quietly without exceptions the buffered reader + IOUtils.closeQuietly(dataFileReader); + } + + } + + public ArrayList getList() { + return this.list; + } + + public ArrayList getfList() { + return fList; + } + + public void setfList(ArrayList fList) { + this.fList = fList; + } + + /** + * Loads information from a list of EndpointGroup objects + * + */ + @SuppressWarnings("unchecked") + public void loadFromList(List egp) { + + // For each endpoint group record + for (GroupEndpoint item : egp) { + String type = item.getType(); + String group = item.getGroup(); + String service = item.getService(); + String hostname = item.getHostname(); + HashMap tagMap = new HashMap(); + HashMap tags = (HashMap) item.getTags(); + + if (tags != null) { + for (String key : tags.keySet()) { + tagMap.put(key, tags.get(key)); + } + } + + // Insert data to list + this.insert(type, group, service, hostname, tagMap); +// populateTopologyEndpoint(hostname, service, type, group); + } + + this.unfilter(); + + } + +// private void populateTopologyEndpoint(String hostname, String service, String type, String group) { +// +// String topologyEndpointKey = hostname + "-" + service; +// +// HashMap endpMap = new HashMap(); +// if (topologyEndpoint.get(type) != null) { +// endpMap = topologyEndpoint.get(type); +// } +// +// endpMap.put(topologyEndpointKey, group); +// topologyEndpoint.put(type, endpMap); +// } + + public void loadGroupEndpointProfile(JsonArray element) throws IOException { + GroupEndpoint[] groupEndpoints = readJson(element); + loadFromList(Arrays.asList(groupEndpoints)); + } +/** + * reads from a JsonElement array and stores the necessary information to the + * GroupEndpoint objects and add them to the list + * + * @param jElement , a JsonElement containing the topology group endpoint profiles data + * @return + */ + public GroupEndpoint[] readJson(JsonArray jElement) { + List results = new ArrayList<>(); + + for (int i = 0; i < 
jElement.size(); i++) {
+            JsonObject jItem = jElement.get(i).getAsJsonObject();
+            String group = jItem.get("group").getAsString();
+            String gType = jItem.get("type").getAsString();
+            String service = jItem.get("service").getAsString();
+            String hostname = jItem.get("hostname").getAsString();
+            JsonObject jTags = jItem.get("tags").getAsJsonObject();
+            Map<String, String> tags = new HashMap<String, String>();
+            for (Entry<String, JsonElement> kv : jTags.entrySet()) {
+                tags.put(kv.getKey(), kv.getValue().getAsString());
+            }
+            GroupEndpoint ge = new GroupEndpoint(gType, group, service, hostname, tags);
+            results.add(ge);
+        }
+
+        GroupEndpoint[] rArr = new GroupEndpoint[results.size()];
+        rArr = results.toArray(rArr);
+        return rArr;
+    }
+
+}
diff --git a/flink_jobs/status_trends/src/main/java/argo/profiles/GroupGroupManager.java b/flink_jobs/status_trends/src/main/java/argo/profiles/GroupGroupManager.java
new file mode 100644
index 00000000..0859b3b4
--- /dev/null
+++ b/flink_jobs/status_trends/src/main/java/argo/profiles/GroupGroupManager.java
@@ -0,0 +1,352 @@
+package argo.profiles;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map.Entry;
+import java.util.TreeMap;
+
+import org.apache.avro.Schema;
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericRecord;
+import org.apache.avro.io.DatumReader;
+import org.apache.commons.io.IOUtils;
+import org.apache.log4j.Logger;
+
+import argo.avro.GroupGroup;
+import com.google.gson.JsonArray;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+import java.io.Serializable;
+import java.util.Arrays;
+import java.util.Map;
+
+/**
+ * GroupGroupManager class implements objects that store the information parsed
+ * from a json object containing topology group of groups profile data, or
+ * loaded from an avro file
+ *
+ * The GroupGroupManager keeps a list of GroupItem objects, each one
+ * representing the topology information that corresponds to a specific
+ * client's report, or the topology information that is valid for a given date
+ * for a specific client's report
+ *
+ * It also stores a list of GroupItem objects that are filtered on the tags
+ * information, given some criteria on the tags
+ */
+public class GroupGroupManager implements Serializable {
+
+    static Logger log = Logger.getLogger(GroupGroupManager.class.getName());
+
+    private ArrayList<GroupItem> list;
+    private ArrayList<GroupItem> fList;
+
+    // A GroupItem implements an object containing the info of a group of groups entry included in the topology
+    protected class GroupItem implements Serializable {
+
+        String type; // type of group
+        String group; // name of the group
+        String subgroup; // name of sub-group
+        HashMap<String, String> tags; // Tag list
+
+        public GroupItem() {
+            // Initializations
+            this.type = "";
+            this.group = "";
+            this.subgroup = "";
+            this.tags = new HashMap<String, String>();
+        }
+
+        public GroupItem(String type, String group, String subgroup, HashMap<String, String> tags) {
+            this.type = type;
+            this.group = group;
+            this.subgroup = subgroup;
+            this.tags = tags;
+
+        }
+
+    }
+
+    public GroupGroupManager() {
+        this.list = new ArrayList<GroupItem>();
+        this.fList = new ArrayList<GroupItem>();
+    }
+
+    /**
+     * Inserts into the list a new GroupItem that contains the information of a
+     * group of groups entry of the topology
+     *
+     * @param type the type of the group of groups
+     * @param group the parent group
+     * @param subgroup the subgroup
+     * @param tags the tags of the group of groups
+     * @return 0 if the item is inserted successfully to the list of GroupItems
+     */
+    public int insert(String type, String group, String subgroup, HashMap<String, String> tags) {
+        GroupItem new_item = new GroupItem(type, group, subgroup, tags);
+        this.list.add(new_item);
+        return 0; // All good
+    }
+
+    /**
+     * Finds in the filtered fList a GroupItem with a specific type and
+     * subgroup, and returns its tags if it is found
+     *
+     * @param type a type
+     * @param subgroup a subgroup
+     * @return a map of tags for the specific group of groups, or null if there
+     * are no tags
+     */
+    public HashMap<String, String> getGroupTags(String type, String subgroup) {
+        for (GroupItem item : this.fList) {
+            if (item.type.equals(type) && item.subgroup.equals(subgroup)) {
+                return item.tags;
+            }
+        }
+
+        return null;
+    }
+
+    /**
+     * Counts the items in the filtered list
+     *
+     * @return the number of items in the filtered list
+     */
+    public int count() {
+        return this.fList.size();
+    }
+
+    /**
+     * Returns the group corresponding to the specific type, subgroup info
+     *
+     * @param type the type
+     * @param subgroup the subgroup
+     * @return the group with the specific type, subgroup
+     */
+    public String getGroup(String type, String subgroup) {
+        for (GroupItem item : this.fList) {
+            if (item.type.equals(type) && item.subgroup.equals(subgroup)) {
+                return item.group;
+            }
+        }
+
+        return null;
+    }
+
+    /**
+     * Clears the filtered fList and re-initializes it with all the GroupItems
+     * the list includes
+     */
+    public void unfilter() {
+        this.fList.clear();
+        for (GroupItem item : this.list) {
+            this.fList.add(item);
+        }
+    }
+
+    /**
+     * Applies a filter on the tags of the GroupItems included in the list,
+     * based on a map of criteria, each criterion being a pair of a tag field
+     * name and the value it searches for, e.g. ("monitored","1")
+     *
+     * @param fTags a map of criteria
+     */
+    public void filter(TreeMap<String, String> fTags) {
+        this.fList.clear();
+        boolean trim;
+        for (GroupItem item : this.list) {
+            trim = false;
+            HashMap<String, String> itemTags = item.tags;
+            for (Entry<String, String> fTagItem : fTags.entrySet()) {
+
+                if (itemTags.containsKey(fTagItem.getKey())) {
+                    if (itemTags.get(fTagItem.getKey()).equalsIgnoreCase(fTagItem.getValue()) == false) {
+                        trim = true;
+                    }
+
+                }
+            }
+
+            if (trim == false) {
+                fList.add(item);
+            }
+        }
+    }
+
+    /**
+     * Checks the filtered list for a group of groups with a specific subgroup
+     *
+     * @param subgroup the subgroup to look for
+     * @return true if the group of groups exists, false otherwise
+     */
+    public boolean checkSubGroup(String subgroup) {
+        for (GroupItem item : fList) {
+            if (item.subgroup.equals(subgroup)) {
+                return true;
+            }
+        }
+
+        return false;
+    }
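The filter/unfilter pair above drives every lookup, since getGroup, getGroupTags and checkSubGroup all read the filtered fList. A minimal usage sketch of that flow, assuming the class as added by this patch; the type, group and tag values are invented sample data:

    GroupGroupManager ggMgr = new GroupGroupManager();
    HashMap<String, String> tags = new HashMap<String, String>();
    tags.put("monitored", "1");                 // hypothetical tag
    ggMgr.insert("NGI", "ngi_a", "site_x", tags);

    // filter() rebuilds fList, dropping items whose tags contradict any criterion
    TreeMap<String, String> criteria = new TreeMap<String, String>();
    criteria.put("monitored", "1");
    ggMgr.filter(criteria);

    ggMgr.getGroup("NGI", "site_x");            // returns "ngi_a"
    ggMgr.checkSubGroup("site_x");              // returns true
    ggMgr.unfilter();                           // reset fList to the full list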

+
+    /**
+     * Loads groups of groups information from an avro file
+     *
+     * This method loads groups of groups information contained in an .avro
+     * file with specific avro schema.
+     *
+     * The following fields are expected to be found in each avro row:
+     * 1. type: string (describes the type of grouping)
+     * 2. group: string
+     * 3. subgroup: string
+     * 4. tags: hashmap (contains a map of arbitrary key values)
+     *
+     * @param avroFile a File object of the avro file that will be opened
+     * @throws IOException if there is an error during opening of the avro file
+     */
+    public void loadAvro(File avroFile) throws IOException {
+
+        // Prepare Avro File Readers
+        DatumReader<GenericRecord> datumReader = new GenericDatumReader<GenericRecord>();
+        DataFileReader<GenericRecord> dataFileReader = null;
+        try {
+            dataFileReader = new DataFileReader<GenericRecord>(avroFile, datumReader);
+
+            // Grab avro schema
+            Schema avroSchema = dataFileReader.getSchema();
+
+            // Generate 1st level generic record reader (rows)
+            GenericRecord avroRow = new GenericData.Record(avroSchema);
+
+            // For all rows in file repeat
+            while (dataFileReader.hasNext()) {
+                // read the row
+                avroRow = dataFileReader.next(avroRow);
+                HashMap<String, String> tagMap = new HashMap<String, String>();
+
+                // Generate 2nd level generic record reader (tags)
+                HashMap tags = (HashMap) avroRow.get("tags");
+                if (tags != null) {
+                    for (Object item : tags.keySet()) {
+                        tagMap.put(item.toString(), tags.get(item).toString());
+                    }
+                }
+
+                // Grab 1st level mandatory fields
+                String type = avroRow.get("type").toString();
+                String group = avroRow.get("group").toString();
+                String subgroup = avroRow.get("subgroup").toString();
+
+                // Insert data to list
+                this.insert(type, group, subgroup, tagMap);
+
+            } // end of avro rows
+
+            this.unfilter();
+
+            dataFileReader.close();
+
+        } catch (IOException ex) {
+            log.error("Could not open avro file:" + avroFile.getName());
+            throw ex;
+        } finally {
+            // Close quietly without exceptions the buffered reader
+            IOUtils.closeQuietly(dataFileReader);
+        }
+
+    }
+
+    /**
+     * Loads groups of groups information from a list of GroupGroup objects
+     */
+    @SuppressWarnings("unchecked")
+    public void loadFromList(List<GroupGroup> ggp) {
+
+        // For each group of groups record
+        for (GroupGroup item : ggp) {
+            String type = item.getType();
+            String group = item.getGroup();
+            String subgroup = item.getSubgroup();
+
+            HashMap<String, String> tagMap = new HashMap<String, String>();
+            HashMap<String, String> tags = (HashMap<String, String>) item.getTags();
+
+            if (tags != null) {
+                for (String key : tags.keySet()) {
+                    tagMap.put(key, tags.get(key));
+                }
+            }
+
+            // Insert data to list
+            this.insert(type, group, subgroup, tagMap);
+        }
+
+        this.unfilter();
+
+    }
+
+    public void loadGroupGroupProfile(JsonArray element) throws IOException {
+        GroupGroup[] groupGroups = readJson(element);
+        loadFromList(Arrays.asList(groupGroups));
+    }
+
+    /**
+     * Reads from a JsonArray, stores the necessary information to GroupGroup
+     * objects and adds them to the result list
+     *
+     * @param jElement a JsonArray containing the topology groups profile data
+     * @return an array of the GroupGroup objects that were parsed
+     */
+    public GroupGroup[] readJson(JsonArray jElement) {
+        List<GroupGroup> results = new ArrayList<>();
+
+        for (int i = 0; i < jElement.size(); i++) {
+            JsonObject jItem = jElement.get(i).getAsJsonObject();
+            String group = jItem.get("group").getAsString();
+            String gType = jItem.get("type").getAsString();
+            String subgroup = jItem.get("subgroup").getAsString();
+            JsonObject jTags = jItem.get("tags").getAsJsonObject();
+            Map<String, String> tags = new HashMap<String, String>();
+            for (Entry<String, JsonElement> kv : jTags.entrySet()) {
+                tags.put(kv.getKey(), kv.getValue().getAsString());
+            }
+            GroupGroup gg = new GroupGroup(gType, group, subgroup, tags);
+            results.add(gg);
+        }
+
+        GroupGroup[] rArr = new GroupGroup[results.size()];
+        rArr = results.toArray(rArr);
+        return rArr;
+    }
+
+    public ArrayList<GroupItem> getList() {
+        return list;
+    }
+
+    public void setList(ArrayList<GroupItem> list) {
+        this.list = list;
+    }
+
+    public ArrayList<GroupItem> getfList() {
+        return fList;
+    }
+
+    public void setfList(ArrayList<GroupItem> fList) {
+        this.fList = fList;
+    }
+
+}
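For reference, a sketch of the JSON shape that readJson() above expects, and of feeding it through loadGroupGroupProfile(); the topology values are invented and exception handling is omitted:

    // Each array element mirrors one GroupGroup record: type, group, subgroup, tags
    String sample = "[{\"type\":\"NGI\",\"group\":\"ngi_a\","
            + "\"subgroup\":\"site_x\",\"tags\":{\"scope\":\"SLA\"}}]";
    JsonArray topology = new JsonParser().parse(sample).getAsJsonArray();

    GroupGroupManager mgr = new GroupGroupManager();
    mgr.loadGroupGroupProfile(topology); // parses the array, fills the list, unfilters
    mgr.getGroup("NGI", "site_x");       // returns "ngi_a"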
diff --git a/flink_jobs/status_trends/src/main/java/argo/profiles/MetricProfileManager.java b/flink_jobs/status_trends/src/main/java/argo/profiles/MetricProfileManager.java
new file mode 100644
index 00000000..aee9f43a
--- /dev/null
+++ b/flink_jobs/status_trends/src/main/java/argo/profiles/MetricProfileManager.java
@@ -0,0 +1,367 @@
+package argo.profiles;
+
+import argo.avro.MetricProfile;
+import com.google.gson.JsonArray;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+import java.io.File;
+import java.io.IOException;
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.avro.Schema;
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericRecord;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.util.Utf8;
+import org.apache.commons.io.IOUtils;
+import org.apache.log4j.Logger;
+
+public class MetricProfileManager implements Serializable {
+
+    private static final Logger LOG = Logger.getLogger(MetricProfileManager.class.getName());
+
+    private ArrayList<ProfileItem> list;
+    private Map<String, HashMap<String, ArrayList<String>>> index;
+    private HashMap<Integer, String> profileNameIdMap;
+
+    private class ProfileItem implements Serializable {
+
+        String profile; // Name of the profile
+        String service; // Name of the service type
+        String metric; // Name of the metric
+        HashMap<String, String> tags; // Tag list
+
+        public ProfileItem() {
+            // Initializations
+            this.profile = "";
+            this.service = "";
+            this.metric = "";
+            this.tags = new HashMap<String, String>();
+        }
+
+        public ProfileItem(String profile, String service, String metric, HashMap<String, String> tags) {
+            this.profile = profile;
+            this.service = service;
+            this.metric = metric;
+            this.tags = tags;
+        }
+
+        public String getProfile() {
+            return profile;
+        }
+
+        public void setProfile(String profile) {
+            this.profile = profile;
+        }
+
+        public String getService() {
+            return service;
+        }
+
+        public void setService(String service) {
+            this.service = service;
+        }
+
+        public String getMetric() {
+            return metric;
+        }
+
+        public void setMetric(String metric) {
+            this.metric = metric;
+        }
+
+        public HashMap<String, String> getTags() {
+            return tags;
+        }
+
+        public void setTags(HashMap<String, String> tags) {
+            this.tags = tags;
+        }
+
+    }
+
+    public MetricProfileManager() {
+        this.list = new ArrayList<ProfileItem>();
+        this.index = new HashMap<String, HashMap<String, ArrayList<String>>>();
+    }
+
+    // Clear all profile data (both list and indexes)
+    public void clear() {
+        this.list = new ArrayList<ProfileItem>();
+        this.index = new HashMap<String, HashMap<String, ArrayList<String>>>();
+    }
+
+    // Indexed List Functions
+    public int indexInsertProfile(String profile) {
+        if (!index.containsKey(profile)) {
+            index.put(profile, new HashMap<String, ArrayList<String>>());
+            return 0;
+        }
+        return -1;
+    }
+
+    public void insert(String profile, String service, String metric, HashMap<String, String> tags) {
+        ProfileItem tmpProfile = new ProfileItem(profile, service, metric, tags);
+        this.list.add(tmpProfile);
+        this.indexInsertMetric(profile, service, metric);
+    }
+
+    public int indexInsertService(String profile, String service) {
+        if (index.containsKey(profile)) {
+            if (index.get(profile).containsKey(service)) {
+                return -1;
+            } else {
+                index.get(profile).put(service, new ArrayList<String>());
+                return 0;
+            }
+
+        }
+
+        index.put(profile, new HashMap<String, ArrayList<String>>());
+        index.get(profile).put(service, new ArrayList<String>());
+        return 0;
+
+    }
+
+    public int indexInsertMetric(String profile, String service, String metric) {
+        if (index.containsKey(profile)) {
+            if (index.get(profile).containsKey(service)) {
+                if (index.get(profile).get(service).contains(metric)) {
+                    // Metric exists so no insertion
+                    return -1;
+                }
+                // Metric doesn't exist and must be added
+                index.get(profile).get(service).add(metric);
+                return 0;
+            } else {
+                // Create the service and the metric
+                index.get(profile).put(service, new ArrayList<String>());
+                index.get(profile).get(service).add(metric);
+                return 0;
+            }
+
+        }
+        // No profile - service - metric so add them all
+        index.put(profile, new HashMap<String, ArrayList<String>>());
+        index.get(profile).put(service, new ArrayList<String>());
+        index.get(profile).get(service).add(metric);
+        return 0;
+
+    }
+
+    // Getter Functions
+    public ArrayList<String> getProfileServices(String profile) {
+        if (index.containsKey(profile)) {
+            ArrayList<String> ans = new ArrayList<String>();
+            ans.addAll(index.get(profile).keySet());
+            return ans;
+        }
+        return null;
+
+    }
+
+    public ArrayList<String> getProfiles() {
+        if (index.size() > 0) {
+            ArrayList<String> ans = new ArrayList<String>();
+            ans.addAll(index.keySet());
+            return ans;
+        }
+        return null;
+    }
+
+    public ArrayList<String> getProfileServiceMetrics(String profile, String service) {
+        if (index.containsKey(profile)) {
+            if (index.get(profile).containsKey(service)) {
+                return index.get(profile).get(service);
+            }
+        }
+        return null;
+    }
+
+    public boolean checkProfileServiceMetric(String profile, String service, String metric) {
+        if (index.containsKey(profile)) {
+            if (index.get(profile).containsKey(service)) {
+                if (index.get(profile).get(service).contains(metric)) {
+                    return true;
+                }
+            }
+        }
+
+        return false;
+    }
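A short sketch of how the three-level index maintained above resolves profile to service to metrics; the profile, service and metric names are invented sample data:

    MetricProfileManager mpMgr = new MetricProfileManager();
    // insert() keeps the index in sync: profile -> service -> [metrics]
    mpMgr.insert("ARGO_MON", "web.portal", "http.check", new HashMap<String, String>());
    mpMgr.insert("ARGO_MON", "web.portal", "cert.check", new HashMap<String, String>());

    mpMgr.getProfileServiceMetrics("ARGO_MON", "web.portal");              // [http.check, cert.check]
    mpMgr.checkProfileServiceMetric("ARGO_MON", "web.portal", "http.check"); // true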

+
+    /**
+     * Loads metric profile information from an avro file
+     *
+     * This method loads metric profile information contained in an .avro
+     * file with specific avro schema.
+     *
+     * The following fields are expected to be found in each avro row:
+     * 1. profile: string
+     * 2. service: string
+     * 3. metric: string
+     * 4. [optional] tags: hashmap (contains a map of arbitrary key values)
+ * + * @param avroFile a File object of the avro file that will be opened + * @throws IOException if there is an error during opening of the avro file + */ + @SuppressWarnings("unchecked") + public void loadAvro(File avroFile) throws IOException { + + // Prepare Avro File Readers + DatumReader datumReader = new GenericDatumReader(); + DataFileReader dataFileReader = null; + try { + dataFileReader = new DataFileReader(avroFile, datumReader); + + // Grab avro schema + Schema avroSchema = dataFileReader.getSchema(); + + // Generate 1st level generic record reader (rows) + GenericRecord avroRow = new GenericData.Record(avroSchema); + + // For all rows in file repeat + while (dataFileReader.hasNext()) { + // read the row + avroRow = dataFileReader.next(avroRow); + HashMap tagMap = new HashMap(); + + // Generate 2nd level generic record reader (tags) + HashMap tags = (HashMap) (avroRow.get("tags")); + + if (tags != null) { + for (Utf8 item : tags.keySet()) { + tagMap.put(item.toString(), String.valueOf(tags.get(item))); + } + } + + // Grab 1st level mandatory fields + String profile = avroRow.get("profile").toString(); + String service = avroRow.get("service").toString(); + String metric = avroRow.get("metric").toString(); + + // Insert data to list + this.insert(profile, service, metric, tagMap); + + } // end of avro rows + + dataFileReader.close(); + + } catch (IOException ex) { + LOG.error("Could not open avro file:" + avroFile.getName()); + throw ex; + } finally { + // Close quietly without exceptions the buffered reader + IOUtils.closeQuietly(dataFileReader); + } + + } + + /** + * Loads metric profile information from a list of MetricProfile objects + * + */ + @SuppressWarnings("unchecked") + public void loadFromList(List mps) { + + // For each metric profile object in list + for (MetricProfile item : mps) { + String profile = item.getProfile(); + String service = item.getService(); + String metric = item.getMetric(); + HashMap tagMap = new HashMap(); + HashMap tags = (HashMap) item.getTags(); + + if (tags != null) { + for (String key : tags.keySet()) { + tagMap.put(key, tags.get(key)); + } + } + + // Insert data to list + this.insert(profile, service, metric, tagMap); + } + + } + + public void loadMetricProfile(JsonElement element) throws IOException, org.json.simple.parser.ParseException { + MetricProfile[] metrics = readJson(element); + loadFromList(Arrays.asList(metrics)); + } + + public MetricProfile[] readJson(JsonElement jElement) { + List results = new ArrayList(); +// if (!this.data.containsKey(ApiResource.METRIC)) { +// MetricProfile[] rArr = new MetricProfile[results.size()]; +// rArr = results.toArray(rArr); +// return rArr; +// } +// + +// String content = this.data.get(ApiResource.METRIC); +// JsonParser jsonParser = new JsonParser(); +// JsonElement jElement = jsonParser.parse(content); + JsonObject jRoot = jElement.getAsJsonObject(); + String profileName = jRoot.get("name").getAsString(); + JsonArray jElements = jRoot.get("services").getAsJsonArray(); + for (int i = 0; i < jElements.size(); i++) { + JsonObject jItem = jElements.get(i).getAsJsonObject(); + String service = jItem.get("service").getAsString(); + JsonArray jMetrics = jItem.get("metrics").getAsJsonArray(); + for (int j = 0; j < jMetrics.size(); j++) { + String metric = jMetrics.get(j).getAsString(); + + Map tags = new HashMap(); + MetricProfile mp = new MetricProfile(profileName, service, metric, tags); + results.add(mp); + } + + } + + MetricProfile[] rArr = new MetricProfile[results.size()]; + rArr = 
results.toArray(rArr); + return rArr; + } + + public boolean containsMetric(String service, String metric) { + for (ProfileItem profIt : list) { + if (profIt.service.equals(service) && profIt.metric.equals(metric)) { + return true; + } + } + return false; + } + + public ArrayList getList() { + return list; + } + + public void setList(ArrayList list) { + this.list = list; + } + + public Map>> getIndex() { + return index; + } + + public void setIndex(Map>> index) { + this.index = index; + } + + public String getMetricProfileName(Integer id){ + + return this.profileNameIdMap.get(id); + } + +} diff --git a/flink_jobs/status_trends/src/main/java/argo/profiles/MetricProfileParser.java b/flink_jobs/status_trends/src/main/java/argo/profiles/MetricProfileParser.java index 224a87eb..601f2fb7 100644 --- a/flink_jobs/status_trends/src/main/java/argo/profiles/MetricProfileParser.java +++ b/flink_jobs/status_trends/src/main/java/argo/profiles/MetricProfileParser.java @@ -17,9 +17,6 @@ import org.json.simple.parser.ParseException; /** - * - * @author cthermolia - * * MetricProfileParser, collects data as described in the json received from web * api metric profiles request */ @@ -113,45 +110,6 @@ public void readApiRequestResult(){ } } } - - public boolean containsMetric(String service, String metric){ - - if(metricData.get(service)!=null && metricData.get(service).contains(metric)){ - return true; - } - return false; - } - - public JSONObject getJsonObject() { - return jsonObject; - } - - public void setJsonObject(JSONObject jsonObject) { - this.jsonObject = jsonObject; - } - - public String getId() { - return id; - } - - public String getDate() { - return date; - } - - public String getName() { - return name; - } - public String getDescription() { - return description; - } - - public ArrayList getServices() { - return services; - } - - public HashMap> getMetricData() { - return metricData; - } } diff --git a/flink_jobs/status_trends/src/main/java/argo/profiles/OperationsParser.java b/flink_jobs/status_trends/src/main/java/argo/profiles/OperationsParser.java index e18c4625..46470abb 100644 --- a/flink_jobs/status_trends/src/main/java/argo/profiles/OperationsParser.java +++ b/flink_jobs/status_trends/src/main/java/argo/profiles/OperationsParser.java @@ -5,7 +5,6 @@ */ package argo.profiles; -import com.google.gson.Gson; import java.io.BufferedReader; import java.io.File; import java.io.FileNotFoundException; @@ -79,8 +78,6 @@ public OperationsParser(boolean _order) { this.truthTable = null; } - - public String getDefaultDown() { return this.defaultDownState; } @@ -100,7 +97,6 @@ public int getDefaultDownInt() { public String getDefaultMissing() { return this.defaultMissingState; } - public int getDefaultMissingInt() { return this.getIntStatus(this.defaultMissingState); } diff --git a/flink_jobs/status_trends/src/main/java/argo/profiles/ProfilesLoader.java b/flink_jobs/status_trends/src/main/java/argo/profiles/ProfilesLoader.java index 7c452306..bbdd3b3d 100644 --- a/flink_jobs/status_trends/src/main/java/argo/profiles/ProfilesLoader.java +++ b/flink_jobs/status_trends/src/main/java/argo/profiles/ProfilesLoader.java @@ -4,50 +4,75 @@ * and open the template in the editor. 
*/ package argo.profiles; + import argo.utils.RequestManager; +import com.google.gson.JsonArray; import com.google.gson.JsonElement; import java.io.IOException; import org.apache.flink.api.java.utils.ParameterTool; -import org.json.simple.JSONObject; import org.json.simple.parser.ParseException; /** * * @author cthermolia - * - * ProfilesLoader, loads all the parser that will be used to collect the information from the web api + * + * ProfilesLoader, loads all the parser that will be used to collect the + * information from the web api */ public class ProfilesLoader { private ReportParser reportParser; - private TopologyEndpointParser topologyEndpointParser; - private MetricProfileParser metricProfileParser; + private EndpointGroupManager topologyEndpointParser; + private MetricProfileManager metricProfileParser; + private OperationsParser operationParser; - private AggregationProfileParser aggregationProfileParser; - private TopologyGroupParser topolGroupParser; + private AggregationProfileManager aggregationProfileParser; +// private TopologyGroupParser topolGroupParser; + private GroupGroupManager topolGroupParser; + + private String aggregationId; + private String metricId; + private String operationsId; public ProfilesLoader() { } - public ProfilesLoader(ParameterTool params) throws IOException, ParseException { reportParser = new ReportParser(params.getRequired("apiUri"), params.getRequired("key"), params.get("proxy"), params.getRequired("reportId")); String[] reportInfo = reportParser.getTenantReport().getInfo(); - topolGroupParser = new TopologyGroupParser(params.getRequired("apiUri"), params.getRequired("key"), params.get("proxy"), params.getRequired("date"),reportInfo[0]); - topologyEndpointParser = new TopologyEndpointParser(params.getRequired("apiUri"), params.getRequired("key"), params.get("proxy"), params.getRequired("date"),reportInfo[0]); - - String aggregationId = reportParser.getAggregationReportId(); - String metricId = reportParser.getMetricReportId(); - String operationsId = reportParser.getOperationReportId(); - - aggregationProfileParser = new AggregationProfileParser(params.getRequired("apiUri"), params.getRequired("key"), params.get("proxy"), aggregationId, params.get("date")); - metricProfileParser = new MetricProfileParser(params.getRequired("apiUri"), params.getRequired("key"), params.get("proxy"), metricId, params.get("date")); - JsonElement opProfileJson=RequestManager.operationsProfileRequest(params.getRequired("apiUri"), operationsId, params.getRequired("key"), params.get("proxy"), params.get("date")); - + //topolGroupParser = new TopologyGroupParser(params.getRequired("apiUri"), params.getRequired("key"), params.get("proxy"), params.getRequired("date"), reportInfo[0]); + + aggregationId = reportParser.getAggregationReportId(); + metricId = reportParser.getMetricReportId(); + operationsId = reportParser.getOperationReportId(); + + JsonElement opProfileJson = RequestManager.operationsProfileRequest(params.getRequired("apiUri"), operationsId, params.getRequired("key"), params.get("proxy"), params.get("date")); + operationParser = new OperationsParser(); operationParser.readJson(opProfileJson); + JsonElement metricProfileJson = RequestManager.metricProfileRequest(params.getRequired("apiUri"), metricId, params.getRequired("key"), params.get("proxy"), params.get("date")); + + metricProfileParser = new MetricProfileManager(); + metricProfileParser.loadMetricProfile(metricProfileJson); + + JsonElement aggregationProfileJson = 
RequestManager.aggregationProfileRequest(params.getRequired("apiUri"), aggregationId, params.getRequired("key"), params.get("proxy"), params.get("date")); + + aggregationProfileParser = new AggregationProfileManager(); + aggregationProfileParser.readJson(aggregationProfileJson); + + + JsonArray endpointGroupProfileJson = RequestManager.endpointGroupProfileRequest(params.getRequired("apiUri"), params.getRequired("key"), params.get("proxy"), reportInfo[0],params.get("date")); + + topologyEndpointParser = new EndpointGroupManager(); + topologyEndpointParser.loadGroupEndpointProfile(endpointGroupProfileJson); + + + JsonArray groupGroupProfileJson = RequestManager.groupGroupProfileRequest(params.getRequired("apiUri"), params.getRequired("key"), params.get("proxy"), reportInfo[0],params.get("date")); + + topolGroupParser = new GroupGroupManager(); + topolGroupParser.loadGroupGroupProfile(groupGroupProfileJson); } public ReportParser getReportParser() { @@ -58,19 +83,19 @@ public void setReportParser(ReportParser reportParser) { this.reportParser = reportParser; } - public TopologyEndpointParser getTopologyEndpointParser() { + public EndpointGroupManager getTopologyEndpointParser() { return topologyEndpointParser; } - public void setTopologyEndpointParser(TopologyEndpointParser topologyEndpointParser) { + public void setTopologyEndpointParser(EndpointGroupManager topologyEndpointParser) { this.topologyEndpointParser = topologyEndpointParser; } - public MetricProfileParser getMetricProfileParser() { + public MetricProfileManager getMetricProfileParser() { return metricProfileParser; } - public void setMetricProfileParser(MetricProfileParser metricProfileParser) { + public void setMetricProfileParser(MetricProfileManager metricProfileParser) { this.metricProfileParser = metricProfileParser; } @@ -82,21 +107,52 @@ public void setOperationParser(OperationsParser operationParser) { this.operationParser = operationParser; } - public AggregationProfileParser getAggregationProfileParser() { + public AggregationProfileManager getAggregationProfileParser() { return aggregationProfileParser; } - - public void setAggregationProfileParser(AggregationProfileParser aggregationProfileParser) { + public void setAggregationProfileParser(AggregationProfileManager aggregationProfileParser) { this.aggregationProfileParser = aggregationProfileParser; } - public TopologyGroupParser getTopolGroupParser() { +// public TopologyGroupParser getTopolGroupParser() { +// return topolGroupParser; +// } +// +// public void setTopolGroupParser(TopologyGroupParser topolGroupParser) { +// this.topolGroupParser = topolGroupParser; +// } + + public GroupGroupManager getTopolGroupParser() { return topolGroupParser; } - public void setTopolGroupParser(TopologyGroupParser topolGroupParser) { + public void setTopolGroupParser(GroupGroupManager topolGroupParser) { this.topolGroupParser = topolGroupParser; } + public String getAggregationId() { + return aggregationId; + } + + public void setAggregationId(String aggregationId) { + this.aggregationId = aggregationId; + } + + public String getMetricId() { + return metricId; + } + + public void setMetricId(String metricId) { + this.metricId = metricId; + } + + public String getOperationsId() { + return operationsId; + } + + public void setOperationsId(String operationsId) { + this.operationsId = operationsId; + } + } diff --git a/flink_jobs/status_trends/src/main/java/argo/profiles/ReportParser.java b/flink_jobs/status_trends/src/main/java/argo/profiles/ReportParser.java index 
eecfe50b..1a6ecb87 100644 --- a/flink_jobs/status_trends/src/main/java/argo/profiles/ReportParser.java +++ b/flink_jobs/status_trends/src/main/java/argo/profiles/ReportParser.java @@ -14,9 +14,6 @@ import org.json.simple.parser.ParseException; /** - * - * @author cthermolia - * * ReportParser, collects data as described in the json received from web api report request, that corresponds to the specific tenant */ public class ReportParser { @@ -101,18 +98,6 @@ public void readApiRequestResult(){ } -// public String getProfileId(String profileName) { -// ArrayList profiles = tenantReport.getProfiles(); -// if (profiles != null) { -// for (Profiles profile : profiles) { -// if (profile.getType().equalsIgnoreCase(profileName)) { -// return profile.id; -// } -// } -// } -// return null; -// } - public String getAggregationReportId() { ArrayList profiles = tenantReport.getProfiles(); if (profiles != null) { diff --git a/flink_jobs/status_trends/src/main/java/argo/profiles/TopologyEndpointParser.java b/flink_jobs/status_trends/src/main/java/argo/profiles/TopologyEndpointParser.java index 80f91807..7d8ba35d 100644 --- a/flink_jobs/status_trends/src/main/java/argo/profiles/TopologyEndpointParser.java +++ b/flink_jobs/status_trends/src/main/java/argo/profiles/TopologyEndpointParser.java @@ -17,10 +17,7 @@ import org.json.simple.parser.ParseException; /** - * - * @author cthermolia - * - * * TopologyEndpointParser, collects data as described in the json received + * TopologyEndpointParser, collects data as described in the json received * from web api topology endpoint request */ public class TopologyEndpointParser implements Serializable{ @@ -29,17 +26,13 @@ public class TopologyEndpointParser implements Serializable{ private HashMap> topologyEndpoint=new HashMap<>(); private final String url = "/topology/endpoints/by_report"; -// private final String url = "/topology/endpoints"; private JSONObject jsonObject; public TopologyEndpointParser() { } public TopologyEndpointParser(String apiUri, String key, String proxy, String date, String reportname) throws IOException, ParseException { - // by_report/{report-name}?date=YYYY-MM-DD String uri = apiUri + url + "/" + reportname; - // String uri = apiUri + url; - if (date != null) { uri = uri + "?date=" + date; } diff --git a/flink_jobs/status_trends/src/main/java/argo/profiles/TopologyGroupParser.java b/flink_jobs/status_trends/src/main/java/argo/profiles/TopologyGroupParser.java index ae4edfed..60c45ae1 100644 --- a/flink_jobs/status_trends/src/main/java/argo/profiles/TopologyGroupParser.java +++ b/flink_jobs/status_trends/src/main/java/argo/profiles/TopologyGroupParser.java @@ -17,9 +17,6 @@ import org.json.simple.parser.ParseException; /** - * - * @author cthermolia - * * TopologyGroupParser, collects data as described in the json received from web * api topology group request */ diff --git a/flink_jobs/status_trends/src/main/java/argo/utils/EnumStatus.java b/flink_jobs/status_trends/src/main/java/argo/utils/EnumStatus.java index d240d336..cce1fce3 100644 --- a/flink_jobs/status_trends/src/main/java/argo/utils/EnumStatus.java +++ b/flink_jobs/status_trends/src/main/java/argo/utils/EnumStatus.java @@ -5,10 +5,6 @@ */ package argo.utils; -/** - * - * @author cthermolia - */ public enum EnumStatus { CRITICAL, WARNING, diff --git a/flink_jobs/status_trends/src/main/java/argo/utils/RequestManager.java b/flink_jobs/status_trends/src/main/java/argo/utils/RequestManager.java index 48f1be4f..ebd51c67 100644 --- 
a/flink_jobs/status_trends/src/main/java/argo/utils/RequestManager.java +++ b/flink_jobs/status_trends/src/main/java/argo/utils/RequestManager.java @@ -14,15 +14,10 @@ import org.apache.http.client.fluent.Request; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; -import org.json.simple.JSONArray; import org.json.simple.JSONObject; import org.json.simple.parser.JSONParser; import org.json.simple.parser.ParseException; -/** - * - * @author cthermolia - */ public class RequestManager { public static JSONObject request(String uri, String key, String proxy) throws ParseException { @@ -66,8 +61,8 @@ public static JsonElement callRequest(String uri, String key, String proxy) thro Executor executor = Executor.newInstance(httpClient); content = executor.execute(request).returnContent().asString(); - JsonParser parser = new JsonParser(); - j_element = (JsonElement) parser.parse(content); + JsonParser parser = new JsonParser(); + j_element = (JsonElement) parser.parse(content); } catch (IOException e) { // TODO Auto-generated catch block @@ -84,18 +79,64 @@ public static JsonElement operationsProfileRequest(String apiUri, String operati } else { uri = uri + "?date=" + dateStr; } - return loadOperationProfile(uri, key, proxy); + + return loadProfile(uri, key, proxy).get(0); + } + +// public static JsonElement loadOperationProfile(String uri, String key, String proxy) throws IOException, org.json.simple.parser.ParseException { +// JsonElement jsonElement = RequestManager.callRequest(uri, key, proxy); +// JsonObject jsonObj=jsonElement.getAsJsonObject(); +// JsonArray dataObj = jsonObj.getAsJsonArray("data"); +// JsonElement dataElement=dataObj.get(0); +// +// +// return dataElement; +// } + public static JsonElement metricProfileRequest(String apiUri, String metricId, String key, String proxy, String dateStr) throws IOException, ParseException { + + String uri = apiUri + "/metric_profiles" + "/" + metricId; + if (dateStr != null) { + uri = uri + "?date=" + dateStr; + } + return loadProfile(uri, key, proxy).get(0); + + } + + public static JsonElement aggregationProfileRequest(String apiUri, String aggregationId, String key, String proxy, String dateStr) throws IOException, ParseException { + + String uri = apiUri + "/aggregation_profiles" + "/" + aggregationId; + if (dateStr != null) { + uri = uri + "?date=" + dateStr; + } + return loadProfile(uri, key, proxy).get(0); } + public static JsonArray endpointGroupProfileRequest(String apiUri, String key, String proxy, String reportname,String dateStr) throws IOException, ParseException { + + String uri = apiUri + "/topology/endpoints/by_report" + "/" + reportname; + if (dateStr != null) { + uri = uri + "?date=" + dateStr; + } + return loadProfile(uri, key, proxy); - public static JsonElement loadOperationProfile(String uri, String key, String proxy) throws IOException, org.json.simple.parser.ParseException { + } + public static JsonArray groupGroupProfileRequest(String apiUri, String key, String proxy, String reportname,String dateStr) throws IOException, ParseException { + + String uri = apiUri + "/topology/groups/by_report" + "/" + reportname; + if (dateStr != null) { + uri = uri + "?date=" + dateStr; + } + return loadProfile(uri, key, proxy); + + } + + public static JsonArray loadProfile(String uri, String key, String proxy) throws IOException, org.json.simple.parser.ParseException { JsonElement jsonElement = RequestManager.callRequest(uri, key, proxy); - JsonObject 
jsonObj=jsonElement.getAsJsonObject(); + JsonObject jsonObj = jsonElement.getAsJsonObject(); JsonArray dataObj = jsonObj.getAsJsonArray("data"); - JsonElement dataElement=dataObj.get(0); - - - return dataElement; + //JsonElement dataElement = dataObj.get(0); + + return dataObj; } } diff --git a/flink_jobs/status_trends/src/main/java/argo/utils/Utils.java b/flink_jobs/status_trends/src/main/java/argo/utils/Utils.java index abf810a8..a8aa853c 100644 --- a/flink_jobs/status_trends/src/main/java/argo/utils/Utils.java +++ b/flink_jobs/status_trends/src/main/java/argo/utils/Utils.java @@ -22,25 +22,9 @@ import org.joda.time.format.DateTimeFormat; import org.joda.time.format.DateTimeFormatter; -/** - * - * @author cthermolia - */ public class Utils { static Logger LOG = LoggerFactory.getLogger(Utils.class); - // String format = "yyyy-MM-dd'T'HH:mm:ss'Z'"; - -// public static String getParameterDate(String format, String paramDate) throws ParseException { -// DateTime date = convertStringtoDate(format, paramDate); -// -// DateTime dt = new DateTime(date); -// DateTimeFormatter dtf = DateTimeFormat.forPattern(format); -// String profileDate = dt.toString(dtf); -// -// return profileDate; -// -// } public static String convertDateToString(String format, DateTime date) throws ParseException { @@ -52,14 +36,6 @@ public static String convertDateToString(String format, DateTime date) throws Pa } public static DateTime convertStringtoDate(String format, String dateStr) throws ParseException { - - // String format = "yyyy-MM-dd'T'HH:mm:ss'Z'"; -// SimpleDateFormat sdf = new SimpleDateFormat(format); -// sdf.setTimeZone(TimeZone.getDefault()); -// Calendar cal = Calendar.getInstance(); -// cal.setTime(sdf.parse(dateStr)); -// cal.set(Calendar.MILLISECOND, 0); -// return new DateTime(cal.getTime()); DateTimeFormatter formatter = DateTimeFormat.forPattern(format); DateTime dt = formatter.parseDateTime(dateStr); diff --git a/flink_jobs/status_trends/src/main/java/timelines/TimelineAggregator.java b/flink_jobs/status_trends/src/main/java/timelines/TimelineAggregator.java index 2795399b..29d856d3 100644 --- a/flink_jobs/status_trends/src/main/java/timelines/TimelineAggregator.java +++ b/flink_jobs/status_trends/src/main/java/timelines/TimelineAggregator.java @@ -1,7 +1,5 @@ package timelines; - - import java.text.ParseException; import java.util.HashMap; import java.util.Map; @@ -12,7 +10,6 @@ import org.joda.time.LocalDate; import org.joda.time.format.DateTimeFormat; import org.joda.time.format.DateTimeFormatter; -import scala.concurrent.ops; public class TimelineAggregator { private Timeline output; From 0a4efb349e9ae765c36403e4dcc0822c2ff493f1 Mon Sep 17 00:00:00 2001 From: cthermolia-grnet Date: Thu, 17 Jun 2021 09:03:31 +0300 Subject: [PATCH 003/112] ARGO-3206 Create Report Profile Parser independent Component --- .gitignore | 1 - flink_jobs/ProfilesManager/.gitignore | 8 + .../profilesmanager/MetricProfileManager.java | 5 +- .../java/profilesmanager/ReportManager.java | 336 +++++++++++++ .../src/main/resources/profiles/report.json | 51 ++ .../profilesmanager/ReportManagerTest.java | 240 +++++++++ .../src/main/java/ops/ConfigManager.java | 471 +++++++++++------- .../batch/BatchEndpointFlipFlopTrends.java | 4 +- .../argo/batch/BatchFlipFlopCollection.java | 47 +- .../argo/batch/BatchGroupFlipFlopTrends.java | 32 +- .../argo/batch/BatchMetricFlipFlopTrends.java | 35 +- .../batch/BatchServiceFlipFlopTrends.java | 4 +- .../java/argo/batch/BatchStatusTrends.java | 46 +- .../calctimelines/ServiceFilter.java | 14 
+- .../calctrends/CalcServiceFlipFlop.java | 27 +- .../java/argo/profiles/ProfilesLoader.java | 52 +- .../java/argo/profiles/ReportManager.java | 336 +++++++++++++ .../main/java/argo/utils/RequestManager.java | 33 +- 18 files changed, 1411 insertions(+), 331 deletions(-) create mode 100644 flink_jobs/ProfilesManager/.gitignore create mode 100644 flink_jobs/ProfilesManager/src/main/java/profilesmanager/ReportManager.java create mode 100644 flink_jobs/ProfilesManager/src/main/resources/profiles/report.json create mode 100644 flink_jobs/ProfilesManager/src/test/java/profilesmanager/ReportManagerTest.java create mode 100644 flink_jobs/status_trends/src/main/java/argo/profiles/ReportManager.java diff --git a/.gitignore b/.gitignore index 52f175f5..e259e829 100644 --- a/.gitignore +++ b/.gitignore @@ -24,4 +24,3 @@ conf.cfg .project .settings/ -/flink_jobs/ProfilesManager/target/ diff --git a/flink_jobs/ProfilesManager/.gitignore b/flink_jobs/ProfilesManager/.gitignore new file mode 100644 index 00000000..6c4e323f --- /dev/null +++ b/flink_jobs/ProfilesManager/.gitignore @@ -0,0 +1,8 @@ +/target/ +.project +.settings/ +.classpath/ +.classpath +/nbproject +nbactions.xml + diff --git a/flink_jobs/ProfilesManager/src/main/java/profilesmanager/MetricProfileManager.java b/flink_jobs/ProfilesManager/src/main/java/profilesmanager/MetricProfileManager.java index bfe0dea0..c084ebc4 100644 --- a/flink_jobs/ProfilesManager/src/main/java/profilesmanager/MetricProfileManager.java +++ b/flink_jobs/ProfilesManager/src/main/java/profilesmanager/MetricProfileManager.java @@ -1,7 +1,7 @@ package profilesmanager; import argo.avro.MetricProfile; -import com.google.gson.Gson; + import com.google.gson.JsonArray; import com.google.gson.JsonElement; import com.google.gson.JsonObject; @@ -397,8 +397,7 @@ public MetricProfile[] readJson(JsonElement jElement) { } /** - * Checks if a combination of service, metric exists ing the list of - * ProfileItems + * Checks if a combination of service, metric exists in the list of ProfileItems * * @param service, a service * @param metric, a metric diff --git a/flink_jobs/ProfilesManager/src/main/java/profilesmanager/ReportManager.java b/flink_jobs/ProfilesManager/src/main/java/profilesmanager/ReportManager.java new file mode 100644 index 00000000..02158466 --- /dev/null +++ b/flink_jobs/ProfilesManager/src/main/java/profilesmanager/ReportManager.java @@ -0,0 +1,336 @@ +package profilesmanager; + +import java.io.BufferedReader; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileReader; +import java.io.IOException; +import java.util.List; +import java.util.TreeMap; + +import org.apache.commons.io.IOUtils; +import org.apache.log4j.Logger; + +import com.google.gson.JsonArray; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import com.google.gson.JsonParseException; +import com.google.gson.JsonParser; +import java.util.ArrayList; +import java.util.Iterator; + +/** + * + * + * ReportManager class implements objects that store the information parsed from + * a json object containing the profile data of a tenant's report or loaded from + * a json file + */ + +public class ReportManager { + + private static final Logger LOG = Logger.getLogger(ReportManager.class.getName()); + + public String id; // report uuid reference + public String report; + public String tenant; + public String egroup; // endpoint group + public String ggroup; // group of groups + public String weight; // weight factor type + public TreeMap egroupTags; + public 
TreeMap ggroupTags; + public TreeMap mdataTags; + private Threshold threshold; + ArrayList profiles; + + /** + * Constructor to a ReportManager object to store the tenant's report + * information + */ + public ReportManager() { + this.report = null; + this.id = null; + this.tenant = null; + this.egroup = null; + this.ggroup = null; + this.weight = null; + this.egroupTags = new TreeMap(); + this.ggroupTags = new TreeMap(); + this.mdataTags = new TreeMap(); + this.profiles = new ArrayList<>(); + + } + + /** + * Clears the stored data of a ReportManager object + */ + public void clear() { + this.id = null; + this.report = null; + this.tenant = null; + this.egroup = null; + this.ggroup = null; + this.weight = null; + this.threshold = null; + this.egroupTags.clear(); + this.ggroupTags.clear(); + this.mdataTags.clear(); + this.profiles.clear(); + + } + + public String getReportID() { + return id; + } + + public String getReport() { + return report; + } + + public String getTenant() { + return tenant; + } + + public String getEgroup() { + return egroup; + } + + /** + * loads from a json file that contain the tenant's report , and stores the + * info to the corresponding fields + * + * @param jsonFile + * @throws IOException + */ + public void loadJson(File jsonFile) throws IOException { + // Clear data + this.clear(); + + BufferedReader br = null; + try { + br = new BufferedReader(new FileReader(jsonFile)); + + JsonParser jsonParser = new JsonParser(); + JsonElement jElement = jsonParser.parse(br); + readJson(jElement); + } catch (FileNotFoundException ex) { + LOG.error("Could not open file:" + jsonFile.getName()); + throw ex; + + } catch (JsonParseException ex) { + LOG.error("File is not valid json:" + jsonFile.getName()); + throw ex; + } finally { + // Close quietly without exceptions the buffered reader + IOUtils.closeQuietly(br); + } + + } + + /** + * Loads Report config information from a config json string + * + */ + public void loadJsonString(List confJson) throws JsonParseException { + // Clear data + this.clear(); + + try { + + JsonParser jsonParser = new JsonParser(); + // Grab the first - and only line of json from ops data + JsonElement jElement = jsonParser.parse(confJson.get(0)); + readJson(jElement); + + } catch (JsonParseException ex) { + LOG.error("Not valid json contents"); + throw ex; + } + + } + + /** + * reads from a JsonElement array and stores the necessary information to + * the ReportManager objects and add them to the list + * + * @param jElement , a JsonElement containing the tenant's report data + * @return + */ + public void readJson(JsonElement jElement) { + + JsonObject jObj = jElement.getAsJsonObject(); + // Get the simple fields + this.id = jObj.get("id").getAsString(); + this.tenant = jObj.get("tenant").getAsString(); + this.report = jObj.get("info").getAsJsonObject().get("name").getAsString(); + + // get topology schema names + JsonObject topoGroup = jObj.get("topology_schema").getAsJsonObject().getAsJsonObject("group"); + this.ggroup = topoGroup.get("type").getAsString(); + this.egroup = topoGroup.get("group").getAsJsonObject().get("type").getAsString(); + + // optional weight filtering + this.weight = ""; + if (jObj.has("weight")) { + this.weight = jObj.get("weight").getAsString(); + } + // Get compound fields + JsonArray jTags = jObj.getAsJsonArray("filter_tags"); + + // Iterate tags + if (jTags != null) { + for (JsonElement tag : jTags) { + JsonObject jTag = tag.getAsJsonObject(); + String name = jTag.get("name").getAsString(); + String value = 
jTag.get("value").getAsString(); + String ctx = jTag.get("context").getAsString(); + if (ctx.equalsIgnoreCase("group_of_groups")) { + this.ggroupTags.put(name, value); + } else if (ctx.equalsIgnoreCase("endpoint_groups")) { + this.egroupTags.put(name, value); + } else if (ctx.equalsIgnoreCase("metric_data")) { + this.mdataTags.put(name, value); + } + + } + } + + JsonObject thresholdsObject = jObj.get("thresholds").getAsJsonObject(); + + this.threshold = new Threshold(thresholdsObject.get("availability").getAsLong(), thresholdsObject.get("reliability").getAsLong(), thresholdsObject.get("uptime").getAsDouble(), + thresholdsObject.get("unknown").getAsDouble(), thresholdsObject.get("downtime").getAsDouble()); + + JsonArray profilesArray = jObj.get("profiles").getAsJsonArray(); + + Iterator profileIter = profilesArray.iterator(); + while (profileIter.hasNext()) { + JsonObject profileObject = profileIter.next().getAsJsonObject(); + Profiles profile = new Profiles(profileObject.get("id").getAsString(), profileObject.get("name").getAsString(), profileObject.get("type").getAsString()); + profiles.add(profile); + } + + } + + /** + * Threshold class is an inner class that stores the info of the thresholds as described from the tenant's report + */ + public class Threshold { + + private Long availability; + private Long reliability; + private Double uptime; + private Double unknown; + private Double downtime; + + public Threshold(Long availability, Long reliability, Double uptime, Double unknown, Double downtime) { + this.availability = availability; + this.reliability = reliability; + this.uptime = uptime; + this.unknown = unknown; + this.downtime = downtime; + } + + public Long getAvailability() { + return availability; + } + + public Long getReliability() { + return reliability; + } + + public Double getUptime() { + return uptime; + } + + public Double getUnknown() { + return unknown; + } + + public Double getDowntime() { + return downtime; + } + + } +/** + * Profiles class is a class that stores the info of the profiles as described from the tenant's report + */ + private class Profiles { + + private String id; + private String name; + private String type; + + public Profiles(String id, String name, String type) { + this.id = id; + this.name = name; + this.type = type; + } + + public String getId() { + return id; + } + + public String getName() { + return name; + } + + public String getType() { + return type; + } + + } +/** + * Returns the aggregation's report id , as described in the profiles of tenant's report + * @return + */ + public String getAggregationReportId() { + if (profiles != null) { + for (Profiles profile : profiles) { + if (profile.getType().equalsIgnoreCase(ProfileType.AGGREGATION.name())) { + return profile.id; + } + } + } + return null; + } + + + /** + * Returns the metric's report id , as described in the profiles of tenant's report + * @return + */ + public String getMetricReportId() { + + if (profiles != null) { + for (Profiles profile : profiles) { + if (profile.getType().equalsIgnoreCase(ProfileType.METRIC.name())) { + return profile.id; + } + } + } + return null; + } + + /** + * Returns the operation's report id , as described in the profiles of tenant's report + * @return + */ + public String getOperationReportId() { + if (profiles != null) { + for (Profiles profile : profiles) { + if (profile.getType().equalsIgnoreCase(ProfileType.OPERATIONS.name())) { + return profile.id; + } + } + } + return null; + } + + public enum ProfileType { + + METRIC, + AGGREGATION, + 
OPERATIONS + + } +} diff --git a/flink_jobs/ProfilesManager/src/main/resources/profiles/report.json b/flink_jobs/ProfilesManager/src/main/resources/profiles/report.json new file mode 100644 index 00000000..7d22e51b --- /dev/null +++ b/flink_jobs/ProfilesManager/src/main/resources/profiles/report.json @@ -0,0 +1,51 @@ + { + "id": "04edb428-01e6-4286-87f1-050546736f7c", + "tenant": "EGI", + "disabled": false, + "info": { + "name": "SLA", + "description": "A/R report based on mapping SLAs to EGI service groups", + "created": "2018-09-28 15:08:48", + "updated": "2021-07-15 13:35:50" + }, + "thresholds": { + "availability": 80, + "reliability": 85, + "uptime": 0.8, + "unknown": 0.1, + "downtime": 0.1 + }, + "topology_schema": { + "group": { + "type": "PROJECT", + "group": { + "type": "SERVICEGROUPS" + } + } + }, + "profiles": [ + { + "id": "5850d7fe-75f5-4aa4-bacc-9a5c10280b59", + "name": "ARGO_MON_CRITICAL", + "type": "metric" + }, + { + "id": "00272336-7199-4ea4-bbe9-043aca02838c", + "name": "sla_test", + "type": "aggregation" + }, + { + "id": "8ce59c4d-3761-4f25-a364-f019e394bf8b", + "name": "egi_ops", + "type": "operations" + } + ], + "filter_tags": [ + { + "name": "scope", + "value": "SLA", + "context": "argo.group.filter.tags.array" + } + ] + } + \ No newline at end of file diff --git a/flink_jobs/ProfilesManager/src/test/java/profilesmanager/ReportManagerTest.java b/flink_jobs/ProfilesManager/src/test/java/profilesmanager/ReportManagerTest.java new file mode 100644 index 00000000..219ea052 --- /dev/null +++ b/flink_jobs/ProfilesManager/src/test/java/profilesmanager/ReportManagerTest.java @@ -0,0 +1,240 @@ +/* + * To change this license header, choose License Headers in Project Properties. + * To change this template file, choose Tools | Templates + * and open the template in the editor. + */ +package profilesmanager; + +import com.google.gson.JsonElement; +import com.google.gson.JsonParseException; +import com.google.gson.JsonParser; +import java.io.BufferedReader; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileReader; +import java.io.IOException; +import static junit.framework.Assert.assertNotNull; +import org.apache.commons.io.IOUtils; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; +import static org.junit.Assert.*; + +/** + */ +public class ReportManagerTest { + + public ReportManagerTest() { + assertNotNull("Test file missing", ReportManagerTest.class.getResource("/profiles/report.json")); + } + + @BeforeClass + public static void setUpClass() { + } + + @AfterClass + public static void tearDownClass() { + } + + @Before + public void setUp() { + } + + @After + public void tearDown() { + } + + /** + * Test of clear method, of class ReportManager. + */ + @Test + public void testClear() throws IOException { + System.out.println("clear"); + ReportManager instance = new ReportManager(); + // JsonElement jsonElement = loadJson(new File(ReportManager.class.getResource("/profiles/report.json").getFile())); + instance.loadJson(new File(ReportManagerTest.class.getResource("/profiles/report.json").getFile())); + instance.clear(); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of getReportID method, of class ReportManager. 
+ */ + @Test + public void testGetReportID() throws IOException { + System.out.println("getReportID"); + ReportManager instance = new ReportManager(); + instance.loadJson(new File(ReportManagerTest.class.getResource("/profiles/report.json").getFile())); + + String expResult = "04edb428-01e6-4286-87f1-050546736f7c"; + String result = instance.getReportID(); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + //fail("The test case is a prototype."); + } + + /** + * Test of getReport method, of class ReportManager. + */ + @Test + public void testGetReport() throws IOException { + System.out.println("getReport"); + ReportManager instance = new ReportManager(); + instance.loadJson(new File(ReportManagerTest.class.getResource("/profiles/report.json").getFile())); + + String expResult = "SLA"; + String result = instance.getReport(); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of getTenant method, of class ReportManager. + */ + @Test + public void testGetTenant() throws IOException { + System.out.println("getTenant"); + ReportManager instance = new ReportManager(); + instance.loadJson(new File(ReportManagerTest.class.getResource("/profiles/report.json").getFile())); + + String expResult = "EGI"; + String result = instance.getTenant(); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + //fail("The test case is a prototype."); + } + + /** + * Test of getEgroup method, of class ReportManager. + */ + @Test + public void testGetEgroup() throws IOException { + System.out.println("getEgroup"); + ReportManager instance = new ReportManager(); + instance.loadJson(new File(ReportManagerTest.class.getResource("/profiles/report.json").getFile())); + + String expResult = "SERVICEGROUPS"; + String result = instance.getEgroup(); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + //fail("The test case is a prototype."); + } + + /** + * Test of loadJson method, of class ReportManager. + */ + @Test + public void testLoadJson() throws Exception { + System.out.println("loadJson"); + ReportManager instance = new ReportManager(); + instance.loadJson(new File(ReportManagerTest.class.getResource("/profiles/report.json").getFile())); + + // TODO review the generated test code and remove the default call to fail. + //fail("The test case is a prototype."); + } + + /** + * Test of loadJsonString method, of class ReportManager. + */ +// @Test +// public void testLoadJsonString() { +// System.out.println("loadJsonString"); +// List confJson = null; +// ReportManager instance = new ReportManager(); +// instance.loadJsonString(confJson); +// // TODO review the generated test code and remove the default call to fail. +// fail("The test case is a prototype."); +// } + /** + * Test of readJson method, of class ReportManager. + */ + @Test + public void testReadJson() throws IOException { + System.out.println("readJson"); + JsonElement jElement = loadJson(new File(ReportManagerTest.class.getResource("/profiles/report.json").getFile())); + ReportManager instance = new ReportManager(); + instance.readJson(jElement); + // TODO review the generated test code and remove the default call to fail. + //fail("The test case is a prototype."); + } + + /** + * Test of getAggregationReportId method, of class ReportManager. 
+ */ + @Test + public void testGetAggregationReportId() throws IOException { + System.out.println("getAggregationReportId"); + ReportManager instance = new ReportManager(); + instance.loadJson(new File(ReportManagerTest.class.getResource("/profiles/report.json").getFile())); + + String expResult = "00272336-7199-4ea4-bbe9-043aca02838c"; + String result = instance.getAggregationReportId(); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of getMetricReportId method, of class ReportManager. + */ + @Test + public void testGetMetricReportId() throws IOException { + System.out.println("getMetricReportId"); + ReportManager instance = new ReportManager(); + instance.loadJson(new File(ReportManagerTest.class.getResource("/profiles/report.json").getFile())); + + String expResult = "5850d7fe-75f5-4aa4-bacc-9a5c10280b59"; + String result = instance.getMetricReportId(); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of getOperationReportId method, of class ReportManager. + */ + @Test + public void testGetOperationReportId() throws IOException { + System.out.println("getOperationReportId"); + ReportManager instance = new ReportManager(); + instance.loadJson(new File(ReportManagerTest.class.getResource("/profiles/report.json").getFile())); + + String expResult = "8ce59c4d-3761-4f25-a364-f019e394bf8b"; + String result = instance.getOperationReportId(); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + private JsonElement loadJson(File jsonFile) throws IOException { + + BufferedReader br = null; + try { + br = new BufferedReader(new FileReader(jsonFile)); + + JsonParser json_parser = new JsonParser(); + JsonElement j_element = json_parser.parse(br); +// JsonObject jRoot = j_element.getAsJsonObject(); +// JsonArray jData = jRoot.get("data").getAsJsonArray(); +// JsonElement jItem = jData.get(0); + + return j_element; + } catch (FileNotFoundException ex) { + // LOG.error("Could not open file:" + jsonFile.getName()); + throw ex; + + } catch (JsonParseException ex) { + // LOG.error("File is not valid json:" + jsonFile.getName()); + throw ex; + } finally { + // Close quietly without exceptions the buffered reader + IOUtils.closeQuietly(br); + } + + } + +} diff --git a/flink_jobs/batch_ar/src/main/java/ops/ConfigManager.java b/flink_jobs/batch_ar/src/main/java/ops/ConfigManager.java index bdc3069a..57cb0c9b 100644 --- a/flink_jobs/batch_ar/src/main/java/ops/ConfigManager.java +++ b/flink_jobs/batch_ar/src/main/java/ops/ConfigManager.java @@ -16,181 +16,308 @@ import com.google.gson.JsonObject; import com.google.gson.JsonParseException; import com.google.gson.JsonParser; +import java.util.ArrayList; +import java.util.Iterator; public class ConfigManager { - private static final Logger LOG = Logger.getLogger(ConfigManager.class.getName()); - - public String id; // report uuid reference - public String report; - public String tenant; - public String egroup; // endpoint group - public String ggroup; // group of groups - public String weight; // weight factor type - public TreeMap egroupTags; - public TreeMap ggroupTags; - public TreeMap mdataTags; - - public ConfigManager() { - this.report = null; - this.id = null; - this.tenant = null; - 
this.egroup = null; - this.ggroup = null; - this.weight = null; - this.egroupTags = new TreeMap(); - this.ggroupTags = new TreeMap(); - this.mdataTags = new TreeMap(); - - } - - public void clear() { - this.id = null; - this.report = null; - this.tenant = null; - this.egroup = null; - this.ggroup = null; - this.weight = null; - this.egroupTags.clear(); - this.ggroupTags.clear(); - this.mdataTags.clear(); - - } - - public String getReportID() { - return id; - } - - public String getReport() { - return report; - } - - public String getTenant() { - return tenant; - } - - - public String getEgroup() { - return egroup; - } - - public void loadJson(File jsonFile) throws IOException { - // Clear data - this.clear(); - - BufferedReader br = null; - try { - br = new BufferedReader(new FileReader(jsonFile)); - - JsonParser jsonParser = new JsonParser(); - JsonElement jElement = jsonParser.parse(br); - JsonObject jObj = jElement.getAsJsonObject(); - // Get the simple fields - this.id = jObj.get("id").getAsString(); - this.tenant = jObj.get("tenant").getAsString(); - this.report = jObj.get("info").getAsJsonObject().get("name").getAsString(); - - // get topology schema names - JsonObject topoGroup = jObj.get("topology_schema").getAsJsonObject().getAsJsonObject("group"); - this.ggroup = topoGroup.get("type").getAsString(); - this.egroup = topoGroup.get("group").getAsJsonObject().get("type").getAsString(); - - // optional weight filtering - this.weight = ""; - if (jObj.has("weight")){ - this.weight = jObj.get("weight").getAsString(); - } - // Get compound fields - JsonArray jTags = jObj.getAsJsonArray("filter_tags"); - - // Iterate tags - if (jTags != null) { - for (JsonElement tag : jTags) { - JsonObject jTag = tag.getAsJsonObject(); - String name = jTag.get("name").getAsString(); - String value = jTag.get("value").getAsString(); - String ctx = jTag.get("context").getAsString(); - if (ctx.equalsIgnoreCase("group_of_groups")){ - this.ggroupTags.put(name, value); - } else if (ctx.equalsIgnoreCase("endpoint_groups")){ - this.egroupTags.put(name, value); - } else if (ctx.equalsIgnoreCase("metric_data")) { - this.mdataTags.put(name, value); - } - - } - } - - - } catch (FileNotFoundException ex) { - LOG.error("Could not open file:" + jsonFile.getName()); - throw ex; - - } catch (JsonParseException ex) { - LOG.error("File is not valid json:" + jsonFile.getName()); - throw ex; - } finally { - // Close quietly without exceptions the buffered reader - IOUtils.closeQuietly(br); - } - - } - - - /** - * Loads Report config information from a config json string - * - */ - public void loadJsonString(List confJson) throws JsonParseException { - // Clear data - this.clear(); - - try { - - JsonParser jsonParser = new JsonParser(); - // Grab the first - and only line of json from ops data - JsonElement jElement = jsonParser.parse(confJson.get(0)); - JsonObject jObj = jElement.getAsJsonObject(); - // Get the simple fields - this.id = jObj.get("id").getAsString(); - this.tenant = jObj.get("tenant").getAsString(); - this.report = jObj.get("info").getAsJsonObject().get("name").getAsString(); - // get topology schema names - JsonObject topoGroup = jObj.get("topology_schema").getAsJsonObject().getAsJsonObject("group"); - this.ggroup = topoGroup.get("type").getAsString(); - this.egroup = topoGroup.get("group").getAsJsonObject().get("type").getAsString(); - // optional weight filtering - this.weight = ""; - if (jObj.has("weight")){ - this.weight = jObj.get("weight").getAsString(); - } - // Get compound fields - JsonArray jTags = 
jObj.getAsJsonArray("tags"); - - // Iterate tags - if (jTags != null) { - for (JsonElement tag : jTags) { - JsonObject jTag = tag.getAsJsonObject(); - String name = jTag.get("name").getAsString(); - String value = jTag.get("value").getAsString(); - String ctx = jTag.get("context").getAsString(); - if (ctx.equalsIgnoreCase("group_of_groups")){ - this.ggroupTags.put(name, value); - } else if (ctx.equalsIgnoreCase("endpoint_groups")){ - this.egroupTags.put(name, value); - } else if (ctx.equalsIgnoreCase("metric_data")) { - this.mdataTags.put(name, value); - } - - } - } - - } catch (JsonParseException ex) { - LOG.error("Not valid json contents"); - throw ex; - } - - } + private static final Logger LOG = Logger.getLogger(ConfigManager.class.getName()); + public String id; // report uuid reference + public String report; + public String tenant; + public String egroup; // endpoint group + public String ggroup; // group of groups + public String weight; // weight factor type + public TreeMap egroupTags; + public TreeMap ggroupTags; + public TreeMap mdataTags; + private Threshold threshold; + ArrayList profiles; + + public ConfigManager() { + this.report = null; + this.id = null; + this.tenant = null; + this.egroup = null; + this.ggroup = null; + this.weight = null; + this.egroupTags = new TreeMap(); + this.ggroupTags = new TreeMap(); + this.mdataTags = new TreeMap(); + this.profiles = new ArrayList<>(); + + } + + public void clear() { + this.id = null; + this.report = null; + this.tenant = null; + this.egroup = null; + this.ggroup = null; + this.weight = null; + this.egroupTags.clear(); + this.ggroupTags.clear(); + this.mdataTags.clear(); + + } + + public String getReportID() { + return id; + } + + public String getReport() { + return report; + } + + public String getTenant() { + return tenant; + } + + public String getEgroup() { + return egroup; + } + + public void loadJson(File jsonFile) throws IOException { + // Clear data + this.clear(); + + BufferedReader br = null; + try { + br = new BufferedReader(new FileReader(jsonFile)); + + JsonParser jsonParser = new JsonParser(); + JsonElement jElement = jsonParser.parse(br); + readJson(jElement); + } catch (FileNotFoundException ex) { + LOG.error("Could not open file:" + jsonFile.getName()); + throw ex; + + } catch (JsonParseException ex) { + LOG.error("File is not valid json:" + jsonFile.getName()); + throw ex; + } finally { + // Close quietly without exceptions the buffered reader + IOUtils.closeQuietly(br); + } + + } + + /** + * Loads Report config information from a config json string + * + */ + public void loadJsonString(List confJson) throws JsonParseException { + // Clear data + this.clear(); + + try { + + JsonParser jsonParser = new JsonParser(); + // Grab the first - and only line of json from ops data + JsonElement jElement = jsonParser.parse(confJson.get(0)); + readJson(jElement); +// JsonObject jObj = jElement.getAsJsonObject(); +// // Get the simple fields +// this.id = jObj.get("id").getAsString(); +// this.tenant = jObj.get("tenant").getAsString(); +// this.report = jObj.get("info").getAsJsonObject().get("name").getAsString(); +// // get topology schema names +// JsonObject topoGroup = jObj.get("topology_schema").getAsJsonObject().getAsJsonObject("group"); +// this.ggroup = topoGroup.get("type").getAsString(); +// this.egroup = topoGroup.get("group").getAsJsonObject().get("type").getAsString(); +// // optional weight filtering +// this.weight = ""; +// if (jObj.has("weight")){ +// this.weight = jObj.get("weight").getAsString(); +// } 
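+//            NOTE: the commented-out block above and below is the legacy inline
+//            parser; the readJson(jElement) call above now performs the same
+//            parsing in one place.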
+// // Get compound fields +// JsonArray jTags = jObj.getAsJsonArray("tags"); +// +// // Iterate tags +// if (jTags != null) { +// for (JsonElement tag : jTags) { +// JsonObject jTag = tag.getAsJsonObject(); +// String name = jTag.get("name").getAsString(); +// String value = jTag.get("value").getAsString(); +// String ctx = jTag.get("context").getAsString(); +// if (ctx.equalsIgnoreCase("group_of_groups")){ +// this.ggroupTags.put(name, value); +// } else if (ctx.equalsIgnoreCase("endpoint_groups")){ +// this.egroupTags.put(name, value); +// } else if (ctx.equalsIgnoreCase("metric_data")) { +// this.mdataTags.put(name, value); +// } +// +// } +// } + + } catch (JsonParseException ex) { + LOG.error("Not valid json contents"); + throw ex; + } + + } + + public void readJson(JsonElement jElement) { + + JsonObject jObj = jElement.getAsJsonObject(); + // Get the simple fields + this.id = jObj.get("id").getAsString(); + this.tenant = jObj.get("tenant").getAsString(); + this.report = jObj.get("info").getAsJsonObject().get("name").getAsString(); + + // get topology schema names + JsonObject topoGroup = jObj.get("topology_schema").getAsJsonObject().getAsJsonObject("group"); + this.ggroup = topoGroup.get("type").getAsString(); + this.egroup = topoGroup.get("group").getAsJsonObject().get("type").getAsString(); + + // optional weight filtering + this.weight = ""; + if (jObj.has("weight")) { + this.weight = jObj.get("weight").getAsString(); + } + // Get compound fields + JsonArray jTags = jObj.getAsJsonArray("filter_tags"); + + // Iterate tags + if (jTags != null) { + for (JsonElement tag : jTags) { + JsonObject jTag = tag.getAsJsonObject(); + String name = jTag.get("name").getAsString(); + String value = jTag.get("value").getAsString(); + String ctx = jTag.get("context").getAsString(); + if (ctx.equalsIgnoreCase("group_of_groups")) { + this.ggroupTags.put(name, value); + } else if (ctx.equalsIgnoreCase("endpoint_groups")) { + this.egroupTags.put(name, value); + } else if (ctx.equalsIgnoreCase("metric_data")) { + this.mdataTags.put(name, value); + } + + } + } + if(jObj.has("thresholds")){ + JsonObject thresholdsObject = jObj.get("thresholds").getAsJsonObject(); + + this.threshold = new Threshold(thresholdsObject.get("availability").getAsLong(), thresholdsObject.get("reliability").getAsLong(), thresholdsObject.get("uptime").getAsDouble(), + thresholdsObject.get("unknown").getAsDouble(), thresholdsObject.get("downtime").getAsDouble()); + } + JsonArray profilesArray = jObj.get("profiles").getAsJsonArray(); + + Iterator profileIter = profilesArray.iterator(); + while (profileIter.hasNext()) { + JsonObject profileObject = profileIter.next().getAsJsonObject(); + Profiles profile = new Profiles(profileObject.get("id").getAsString(), profileObject.get("name").getAsString(), profileObject.get("type").getAsString()); + profiles.add(profile); + } + + } + + public class Threshold { + + private Long availability; + private Long reliability; + private Double uptime; + private Double unknown; + private Double downtime; + + public Threshold(Long availability, Long reliability, Double uptime, Double unknown, Double downtime) { + this.availability = availability; + this.reliability = reliability; + this.uptime = uptime; + this.unknown = unknown; + this.downtime = downtime; + } + + public Long getAvailability() { + return availability; + } + + public Long getReliability() { + return reliability; + } + + public Double getUptime() { + return uptime; + } + + public Double getUnknown() { + return unknown; + } + + public 
Double getDowntime() { + return downtime; + } + + } + + private class Profiles { + + private String id; + private String name; + private String type; + + public Profiles(String id, String name, String type) { + this.id = id; + this.name = name; + this.type = type; + } + + public String getId() { + return id; + } + + public String getName() { + return name; + } + + public String getType() { + return type; + } + + } + + public String getAggregationReportId() { + if (profiles != null) { + for (Profiles profile : profiles) { + if (profile.getType().equalsIgnoreCase(ProfileType.AGGREGATION.name())) { + return profile.id; + } + } + } + return null; + } + + public String getMetricReportId() { + + if (profiles != null) { + for (Profiles profile : profiles) { + if (profile.getType().equalsIgnoreCase(ProfileType.METRIC.name())) { + return profile.id; + } + } + } + return null; + } + + public String getOperationReportId() { + if (profiles != null) { + for (Profiles profile : profiles) { + if (profile.getType().equalsIgnoreCase(ProfileType.OPERATIONS.name())) { + return profile.id; + } + } + } + return null; + } + + public enum ProfileType { + + METRIC, + AGGREGATION, + OPERATIONS + + } } diff --git a/flink_jobs/status_trends/src/main/java/argo/batch/BatchEndpointFlipFlopTrends.java b/flink_jobs/status_trends/src/main/java/argo/batch/BatchEndpointFlipFlopTrends.java index 7f90ea15..85fac953 100644 --- a/flink_jobs/status_trends/src/main/java/argo/batch/BatchEndpointFlipFlopTrends.java +++ b/flink_jobs/status_trends/src/main/java/argo/batch/BatchEndpointFlipFlopTrends.java @@ -85,9 +85,9 @@ public static void main(String[] args) throws Exception { StringBuilder jobTitleSB = new StringBuilder(); jobTitleSB.append("Group Endpoint Flip Flops for: "); - jobTitleSB.append(profilesLoader.getReportParser().getTenantReport().getTenant()); + jobTitleSB.append(profilesLoader.getReportParser().getTenant()); jobTitleSB.append("/"); - jobTitleSB.append(profilesLoader.getReportParser().getTenantReport().getInfo()[0]); + jobTitleSB.append(profilesLoader.getReportParser().getReport()); jobTitleSB.append("/"); jobTitleSB.append(profilesDate); env.execute(jobTitleSB.toString()); diff --git a/flink_jobs/status_trends/src/main/java/argo/batch/BatchFlipFlopCollection.java b/flink_jobs/status_trends/src/main/java/argo/batch/BatchFlipFlopCollection.java index 7422e49a..e8333c41 100644 --- a/flink_jobs/status_trends/src/main/java/argo/batch/BatchFlipFlopCollection.java +++ b/flink_jobs/status_trends/src/main/java/argo/batch/BatchFlipFlopCollection.java @@ -30,25 +30,28 @@ import org.apache.flink.api.java.utils.ParameterTool; import org.apache.flink.core.fs.Path; import org.joda.time.DateTime; + /** - * Implements an ARGO Status Trends Job in flink , to count the number of status changes - * that occur to all levels of the topology hierarchy - * - * Submit job in flink cluster using the following parameters - -* --date:the date for which the job runs and need to return results , e.g yyyy-MM-dd -* --yesterdayData: path to the metric profile data, of the previous day , for which the jobs runs profile (For hdfs use: hdfs://namenode:port/path/to/file) -* --todayData: path to the metric profile data, of the current day , for which the jobs runs profile (For hdfs use: hdfs://namenode:port/path/to/file) -* --mongoUri: path to MongoDB destination (eg mongodb://localhost:27017/database -* --apiUri: path to the mongo db the , for which the jobs runs profile (For hdfs use: hdfs://namenode:port/path/to/file) -* --key: ARGO web api token 
-* --reportId: the id of the report the job will need to process
-* --apiUri: ARGO wep api to connect to e.g msg.example.com
-* Optional:
-* -- clearMongo: option to clear the mongo db before saving the new result or not, e.g true
-* -- N : the number of the result the job will provide, if the parameter exists , e.g 10
-*
-*/
+
+ * Implements an ARGO Status Trends Job in Flink, to count the number of status
+ * changes that occur at all levels of the topology hierarchy.
+ *
+ * Submit the job in a flink cluster using the following parameters:
+ * --date: the date for which the job runs and needs to return results, e.g.
+ * yyyy-MM-dd --yesterdayData: path to the metric profile data of the previous
+ * day for which the job runs (For hdfs use:
+ * hdfs://namenode:port/path/to/file) --todayData: path to the metric profile
+ * data of the current day for which the job runs (For hdfs use:
+ * hdfs://namenode:port/path/to/file) --mongoUri: path to the MongoDB destination
+ * (e.g. mongodb://localhost:27017/database) --key: ARGO web api token
+ * --reportId: the id of the report the job will need to process --apiUri: ARGO
+ * web api to connect to, e.g. msg.example.com Optional: --clearMongo: option to
+ * clear the mongo db before saving the new result or not, e.g. true --N: the
+ * number of results the job will provide, if the parameter exists, e.g. 10
+ *
+ */
 public class BatchFlipFlopCollection {

@@ -98,9 +101,9 @@ public static void main(String[] args) throws Exception {
         // execute program
         StringBuilder jobTitleSB = new StringBuilder();
         jobTitleSB.append("Collection Flip Flops for: ");
-        jobTitleSB.append(profilesLoader.getReportParser().getTenantReport().getTenant());
+        jobTitleSB.append(profilesLoader.getReportParser().getTenant());
         jobTitleSB.append("/");
-        jobTitleSB.append(profilesLoader.getReportParser().getTenantReport().getInfo()[0]);
+        jobTitleSB.append(profilesLoader.getReportParser().getReport());
         jobTitleSB.append("/");
         jobTitleSB.append(profilesDate);
         env.execute(jobTitleSB.toString());
@@ -117,9 +120,7 @@ private static void calcFlipFlops() {

         DataSet filteredTodayData = todayData.filter(new TopologyMetricFilter(profilesLoader.getMetricProfileParser(), profilesLoader.getTopologyEndpointParser(), profilesLoader.getTopolGroupParser(), profilesLoader.getAggregationProfileParser()));
         //group data by service enpoint metric and return for each group , the necessary info and a treemap containing timestamps and status
-
-        DataSet serviceEndpointMetricGroupData = filteredTodayData.union(filteredYesterdayData).groupBy("hostname", "service", "metric").reduceGroup(new CalcMetricFlipFlopTrends(profilesLoader.getOperationParser(), profilesLoader.getTopologyEndpointParser(),profilesLoader.getTopolGroupParser(), profilesLoader.getAggregationProfileParser(), profilesDate));
-
+        DataSet serviceEndpointMetricGroupData = filteredTodayData.union(filteredYesterdayData).groupBy("hostname", "service", "metric").reduceGroup(new CalcMetricFlipFlopTrends(profilesLoader.getOperationParser(), profilesLoader.getTopologyEndpointParser(), profilesLoader.getTopolGroupParser(), profilesLoader.getAggregationProfileParser(), profilesDate));
         DataSet noZeroServiceEndpointMetricGroupData = serviceEndpointMetricGroupData.filter(new ZeroMetricTrendsFilter());
         if (rankNum != null) { //sort and rank data
             noZeroServiceEndpointMetricGroupData = noZeroServiceEndpointMetricGroupData.sortPartition("flipflops", Order.DESCENDING).setParallelism(1).first(rankNum);
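The zero-filter plus ranking step above recurs in all of these jobs. Reduced to a minimal, self-contained Flink sketch, it looks as follows; the Tuple2 records, sample values, and class name are illustrative stand-ins for the project's MetricTrends pojos:

    import org.apache.flink.api.common.operators.Order;
    import org.apache.flink.api.java.DataSet;
    import org.apache.flink.api.java.ExecutionEnvironment;
    import org.apache.flink.api.java.tuple.Tuple2;

    public class FlipFlopRankSketch {

        public static void main(String[] args) throws Exception {
            ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

            // (metric, flipflops) pairs standing in for MetricTrends records
            DataSet<Tuple2<String, Integer>> trends = env.fromElements(
                    Tuple2.of("httpd.check", 4),
                    Tuple2.of("ssh.check", 0),
                    Tuple2.of("gram.check", 7));

            int rankNum = 2;

            trends.filter(t -> t.f1 > 0)                 // drop zero-flip-flop entries
                  .sortPartition(1, Order.DESCENDING)    // sort by flip-flop count
                  .setParallelism(1)                     // single partition => global order
                  .first(rankNum)                        // keep the top N
                  .print();
        }
    }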
diff --git a/flink_jobs/status_trends/src/main/java/argo/batch/BatchGroupFlipFlopTrends.java b/flink_jobs/status_trends/src/main/java/argo/batch/BatchGroupFlipFlopTrends.java
index 918eb557..9a50fe4c 100644
--- a/flink_jobs/status_trends/src/main/java/argo/batch/BatchGroupFlipFlopTrends.java
+++ b/flink_jobs/status_trends/src/main/java/argo/batch/BatchGroupFlipFlopTrends.java
@@ -26,24 +26,24 @@
 import org.apache.flink.core.fs.Path;
 import org.joda.time.DateTime;

-
 /**
  * Implements an ARGO Status Trends Job in flink , to count the number of status
  * changes that occur to the level of group of the topology hierarchy
  *
- * Submit job in flink cluster using the following parameters *
- * --date:the date for which the job runs and need to return results , yyyy-MM-dd
- * --yesterdayData: path to the metric profile data, of the previous day , for which the jobs runs profile (For hdfs use: hdfs://namenode:port/path/to/file)
- * --todayData: path to the metric profile data, of the current day , for which the jobs runs profile (For hdfs use: hdfs://namenode:port/path/to/file)
- * --mongoUri: path to MongoDB destination (eg mongodb://localhost:27017/database
- * --key: ARGO web api token
- * --reportId: the id of the report the job will need to process
- * --apiUri: ARGO wep api to connect to msg.example.com
- * Optional:
- * -- clearMongo: option to clear the mongo db before saving the new result or not, e.g true
- * -- N : the number of the result the job will provide, if the parameter exists , e.g 10
- */
+ * Submit the job in a flink cluster using the following parameters:
+ * --date: the date for which the job runs and needs to return results, yyyy-MM-dd
+ * --yesterdayData: path to the metric profile data of the previous day, for
+ * which the job runs (For hdfs use: hdfs://namenode:port/path/to/file)
+ * --todayData: path to the metric profile data of the current day, for which
+ * the job runs (For hdfs use: hdfs://namenode:port/path/to/file)
+ * --mongoUri: path to MongoDB destination (e.g.
+ * mongodb://localhost:27017/database) --key: ARGO web api token --reportId: the
+ * id of the report the job will need to process --apiUri: ARGO web api to
+ * connect to, e.g. msg.example.com Optional: --clearMongo: option to clear the
+ * mongo db before saving the new result or not, e.g. true --N: the number of
+ * results the job will provide, if the parameter exists, e.g. 10
+ */
 public class BatchGroupFlipFlopTrends {

 private static DataSet yesterdayData;
@@ -89,9 +89,9 @@ public static void main(String[] args) throws Exception {
         calcFlipFlops();
         StringBuilder jobTitleSB = new StringBuilder();
         jobTitleSB.append("Group Flip Flops for: ");
-        jobTitleSB.append(profilesLoader.getReportParser().getTenantReport().getTenant());
+        jobTitleSB.append(profilesLoader.getReportParser().getTenant());
         jobTitleSB.append("/");
-        jobTitleSB.append(profilesLoader.getReportParser().getTenantReport().getInfo()[0]);
+        jobTitleSB.append(profilesLoader.getReportParser().getReport());
         jobTitleSB.append("/");
         jobTitleSB.append(profilesDate);
         env.execute(jobTitleSB.toString());
@@ -107,7 +107,7 @@ private static void calcFlipFlops() {

         DataSet filteredTodayData = todayData.filter(new TopologyMetricFilter(profilesLoader.getMetricProfileParser(), profilesLoader.getTopologyEndpointParser(), profilesLoader.getTopolGroupParser(), profilesLoader.getAggregationProfileParser()));
         //group data by service enpoint metric and return for each group , the necessary info and a treemap containing timestamps and status
-        DataSet serviceEndpointMetricGroupData = filteredTodayData.union(filteredYesterdayData).groupBy("hostname", "service", "metric").reduceGroup(new CalcMetricFlipFlopTrends(profilesLoader.getOperationParser(), profilesLoader.getTopologyEndpointParser(),profilesLoader.getTopolGroupParser(), profilesLoader.getAggregationProfileParser(), profilesDate));
+        DataSet serviceEndpointMetricGroupData = filteredTodayData.union(filteredYesterdayData).groupBy("hostname", "service", "metric").reduceGroup(new CalcMetricFlipFlopTrends(profilesLoader.getOperationParser(), profilesLoader.getTopologyEndpointParser(), profilesLoader.getTopolGroupParser(), profilesLoader.getAggregationProfileParser(), profilesDate));

         //group data by service endpoint and count flip flops
         DataSet serviceEndpointGroupData = serviceEndpointMetricGroupData.groupBy("group", "endpoint", "service").reduceGroup(new CalcEndpointFlipFlopTrends(profilesLoader.getAggregationProfileParser().getMetricOpByProfile(), profilesLoader.getOperationParser()));
diff --git a/flink_jobs/status_trends/src/main/java/argo/batch/BatchMetricFlipFlopTrends.java b/flink_jobs/status_trends/src/main/java/argo/batch/BatchMetricFlipFlopTrends.java
index 10c7b189..ab7d007b 100644
--- a/flink_jobs/status_trends/src/main/java/argo/batch/BatchMetricFlipFlopTrends.java
+++ b/flink_jobs/status_trends/src/main/java/argo/batch/BatchMetricFlipFlopTrends.java
@@ -23,20 +23,20 @@
  * changes that occur to the level of group, service, endpoint, metric of the
  * topology hierarchy
  *
- * Submit job in flink cluster using the following parameters *
- * --date:the date for which the job runs and need to return results ,
- * yyyy-MM-dd --yesterdayData: path to the metric profile data, of the previous
- * day , for which the jobs runs profile (For hdfs use:
- * hdfs://namenode:port/path/to/file) --todayData: path to the metric profile
- * data, of the current day , for which the jobs runs profile (For hdfs use:
- * hdfs://namenode:port/path/to/file) --mongoUri: path to MongoDB destination
- * (eg mongodb://localhost:27017/database --apiUri: path to the mongo db the ,
- * for which the jobs runs profile (For hdfs use:
- * hdfs://namenode:port/path/to/file) --key: ARGO web api token --reportId: the
- * id of the report the job will need to process --apiUri: ARGO wep api to
- * connect to msg.example.com Optional: -- clearMongo: option to clear the mongo
- * db before saving the new result or not, e.g true -- N : the number of the
- * result the job will provide, if the parameter exists , e.g 10
+ * Submit the job in a flink cluster using the following parameters:
+ * --date: the date for which the job runs and needs to return results, yyyy-MM-dd
+ * --yesterdayData: path to the metric profile data of the previous day, for
+ * which the job runs (For hdfs use: hdfs://namenode:port/path/to/file)
+ * --todayData: path to the metric profile data of the current day, for which
+ * the job runs (For hdfs use: hdfs://namenode:port/path/to/file)
+ * --mongoUri: path to MongoDB destination (e.g.
+ * mongodb://localhost:27017/database) --key: ARGO web api token --reportId: the
+ * id of the report the job will need to process --apiUri: ARGO web api to
+ * connect to msg.example.com Optional: --
+ * clearMongo: option to clear the mongo db before saving the
new result or not, + * e.g true -- N : the number of the result the job will provide, if the + * parameter exists , e.g 10 */ public class BatchMetricFlipFlopTrends { @@ -88,9 +88,9 @@ public static void main(String[] args) throws Exception { // execute program StringBuilder jobTitleSB = new StringBuilder(); jobTitleSB.append("Metric Flip Flops for: "); - jobTitleSB.append(profilesLoader.getReportParser().getTenantReport().getTenant()); + jobTitleSB.append(profilesLoader.getReportParser().getTenant()); jobTitleSB.append("/"); - jobTitleSB.append(profilesLoader.getReportParser().getTenantReport().getInfo()[0]); + jobTitleSB.append(profilesLoader.getReportParser().getReport()); jobTitleSB.append("/"); jobTitleSB.append(profilesDate); env.execute(jobTitleSB.toString()); @@ -105,8 +105,7 @@ private static void calcFlipFlops() { DataSet filteredYesterdayData = yesterdayData.filter(new TopologyMetricFilter(profilesLoader.getMetricProfileParser(), profilesLoader.getTopologyEndpointParser(), profilesLoader.getTopolGroupParser(), profilesLoader.getAggregationProfileParser())).groupBy("hostname", "service", "metric").reduceGroup(new CalcLastTimeStatus()); DataSet filteredTodayData = todayData.filter(new TopologyMetricFilter(profilesLoader.getMetricProfileParser(), profilesLoader.getTopologyEndpointParser(), profilesLoader.getTopolGroupParser(), profilesLoader.getAggregationProfileParser())); - - DataSet metricData = filteredTodayData.union(filteredYesterdayData).groupBy("hostname", "service", "metric").reduceGroup(new CalcMetricFlipFlopTrends(profilesLoader.getOperationParser(), profilesLoader.getTopologyEndpointParser(),profilesLoader.getTopolGroupParser(),profilesLoader.getAggregationProfileParser(), profilesDate)); + DataSet metricData = filteredTodayData.union(filteredYesterdayData).groupBy("hostname", "service", "metric").reduceGroup(new CalcMetricFlipFlopTrends(profilesLoader.getOperationParser(), profilesLoader.getTopologyEndpointParser(), profilesLoader.getTopolGroupParser(), profilesLoader.getAggregationProfileParser(), profilesDate)); if (rankNum != null) { metricData = metricData.sortPartition("flipflops", Order.DESCENDING).first(rankNum); diff --git a/flink_jobs/status_trends/src/main/java/argo/batch/BatchServiceFlipFlopTrends.java b/flink_jobs/status_trends/src/main/java/argo/batch/BatchServiceFlipFlopTrends.java index 9fdb271a..1fdee156 100644 --- a/flink_jobs/status_trends/src/main/java/argo/batch/BatchServiceFlipFlopTrends.java +++ b/flink_jobs/status_trends/src/main/java/argo/batch/BatchServiceFlipFlopTrends.java @@ -89,9 +89,9 @@ public static void main(String[] args) throws Exception { // execute program StringBuilder jobTitleSB = new StringBuilder(); jobTitleSB.append("Service Flip Flops for: "); - jobTitleSB.append(profilesLoader.getReportParser().getTenantReport().getTenant()); + jobTitleSB.append(profilesLoader.getReportParser().getTenant()); jobTitleSB.append("/"); - jobTitleSB.append(profilesLoader.getReportParser().getTenantReport().getInfo()[0]); + jobTitleSB.append(profilesLoader.getReportParser().getReport()); jobTitleSB.append("/"); jobTitleSB.append(profilesDate); env.execute(jobTitleSB.toString()); diff --git a/flink_jobs/status_trends/src/main/java/argo/batch/BatchStatusTrends.java b/flink_jobs/status_trends/src/main/java/argo/batch/BatchStatusTrends.java index 9393358a..ccbb4c03 100644 --- a/flink_jobs/status_trends/src/main/java/argo/batch/BatchStatusTrends.java +++ b/flink_jobs/status_trends/src/main/java/argo/batch/BatchStatusTrends.java @@ -20,24 +20,26 @@ 
 import org.joda.time.DateTime;

 /**
- * Implements an ARGO Status Trends Job in flink , to count the number of status appearances per status type
- * that occur to the level of group, service, endpoint. metric of the topology hierarchy
- *
- * Submit job in flink cluster using the following parameters
-
-* --date:the date for which the job runs and need to return results , yyyy-MM-dd
-* --yesterdayData: path to the metric profile data, of the previous day , for which the jobs runs profile (For hdfs use: hdfs://namenode:port/path/to/file)
-* --todayData: path to the metric profile data, of the current day , for which the jobs runs profile (For hdfs use: hdfs://namenode:port/path/to/file)
-* --mongoUri: path to MongoDB destination (eg mongodb://localhost:27017/database
-* --apiUri: path to the mongo db the , for which the jobs runs profile (For hdfs use: hdfs://namenode:port/path/to/file)
-* --key: ARGO web api token
-* --reportId: the id of the report the job will need to process
-* --apiUri: ARGO wep api to connect to msg.example.com
-*Optional:
-* -- clearMongo: option to clear the mongo db before saving the new result or not, e.g true
-* -- N : the number of the result the job will provide, if the parameter exists , e.g 10
-*
-*/
+ * Implements an ARGO Status Trends Job in Flink, to count the number of status
+ * appearances per status type that occur at the level of group, service,
+ * endpoint, metric of the topology hierarchy
+ *
+ * Submit the job in a flink cluster using the following parameters:
+ * --date: the date for which the job runs and needs to return results,
+ * yyyy-MM-dd --yesterdayData: path to the metric profile data of the previous
+ * day for which the job runs (For hdfs use:
+ * hdfs://namenode:port/path/to/file) --todayData: path to the metric profile
+ * data of the current day for which the job runs (For hdfs use:
+ * hdfs://namenode:port/path/to/file) --mongoUri: path to MongoDB destination
+ * (e.g. mongodb://localhost:27017/database) --key: ARGO web api token
+ * --reportId: the id of the report the job will need to process --apiUri: ARGO
+ * web api to connect to, e.g. msg.example.com Optional: --clearMongo: option to
+ * clear the mongo db before saving the new result or not, e.g. true --N: the
+ * number of results the job will provide, if the parameter exists, e.g. 10
+ *
+ */
 public class BatchStatusTrends {

 static Logger LOG = LoggerFactory.getLogger(BatchStatusTrends.class);
@@ -47,7 +49,6 @@ public class BatchStatusTrends {
     private static Integer rankNum;

     private static final String statusTrendsCol = "status_trends_metrics";
-    private static String mongoUri;
     private static ProfilesLoader profilesLoader;

@@ -69,7 +70,6 @@ public static void main(String[] args) throws Exception {
             System.exit(0);
         }

-
         if (params.get("clearMongo") != null && params.getBoolean("clearMongo") == true) {
             clearMongo = true;
         }
@@ -91,11 +91,11 @@ public static void main(String[] args) throws Exception {
         filterByStatusAndWrite(statusTrendsCol, rankedData, "unknown");

         // execute program
-        StringBuilder jobTitleSB = new StringBuilder();
+        StringBuilder jobTitleSB = new StringBuilder();
         jobTitleSB.append("Status Trends for: ");
-        jobTitleSB.append(profilesLoader.getReportParser().getTenantReport().getTenant());
+        jobTitleSB.append(profilesLoader.getReportParser().getTenant());
         jobTitleSB.append("/");
-        jobTitleSB.append(profilesLoader.getReportParser().getTenantReport().getInfo()[0]);
+        jobTitleSB.append(profilesLoader.getReportParser().getReport());
         jobTitleSB.append("/");
         jobTitleSB.append(profilesDate);
         env.execute(jobTitleSB.toString());
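The filterByStatusAndWrite step above applies one status-specific filter per status type (e.g. "critical", "warning", "unknown"). A minimal stand-alone sketch of such a FilterFunction follows; the Tuple2 record layout and class name are illustrative, since the project's own StatusFilter operates on its trend pojos:

    import org.apache.flink.api.common.functions.FilterFunction;
    import org.apache.flink.api.java.tuple.Tuple2;

    // Keeps only records whose status field matches the requested status,
    // e.g. "critical", "warning" or "unknown".
    public class StatusFilterSketch implements FilterFunction<Tuple2<String, String>> {

        private final String status;   // status to keep, fixed per filter instance

        public StatusFilterSketch(String status) {
            this.status = status;
        }

        @Override
        public boolean filter(Tuple2<String, String> record) {
            // f1 holds the status in this illustrative record layout
            return record.f1.equalsIgnoreCase(status);
        }
    }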
diff --git a/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/ServiceFilter.java b/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/ServiceFilter.java
index df8fbc6b..ede09526 100644
--- a/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/ServiceFilter.java
+++ b/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/ServiceFilter.java
@@ -5,7 +5,6 @@
  * To change this template file, choose Tools | Templates
  * and open the template in the editor.
  */
-
 import argo.pojos.EndpointTrends;
 import argo.profiles.AggregationProfileManager;
 import org.apache.flink.api.common.functions.FilterFunction;
@@ -19,24 +18,19 @@ public class ServiceFilter implements FilterFunction {

     static Logger LOG = LoggerFactory.getLogger(ServiceFilter.class);
-    //private AggregationProfileParser aggregationProfileParser;
     private AggregationProfileManager aggregationProfileParser;
-
+
     public ServiceFilter(AggregationProfileManager aggregationProfileParser) {
         this.aggregationProfileParser = aggregationProfileParser;
-
+
     }

     //returns true if the service of the given endpoint exists in the aggregation profile, else returns false
     @Override
     public boolean filter(EndpointTrends t) throws Exception {
-//        boolean exist= aggregationProfileParser.checkServiceExistance(t.getService());
-//        System.out.println("service : "+t.getService()+" exist: "+exist);
-//
-
-        // return exist;
+
         return aggregationProfileParser.checkServiceExistance(t.getService());
-
+
     }
 }
diff --git a/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcServiceFlipFlop.java b/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcServiceFlipFlop.java
index b86253fd..d5cd5fac 100644
--- a/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcServiceFlipFlop.java
+++ b/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcServiceFlipFlop.java
@@ -31,18 +31,14 @@ public class CalcServiceFlipFlop implements GroupReduceFunction< EndpointTrends,
     private OperationsParser operationsParser;

     public CalcServiceFlipFlop(OperationsParser operationsParser, AggregationProfileManager aggregationProfileParser) {
-//        this.operationTruthTables = operationParser.getOpTruthTable();
         this.operationsParser = operationsParser;
         this.serviceFunctionsMap = aggregationProfileParser.retrieveServiceOperations();
-
     }

     @Override
     public void reduce(Iterable in, Collector< ServiceTrends> out) throws Exception {
         String group = null;
         String service = null;
-        // String hostname = null;
-
         ArrayList list = new ArrayList<>();
         //construct a timeline containing all the timestamps of each metric timeline
         HashMap timelineList = new HashMap<>();
@@ -50,18 +46,19 @@ public void reduce(Iterable in, Collector< ServiceTrends> out) t
         for (EndpointTrends endpointTrend : in) {
             group = endpointTrend.getGroup();
             service = endpointTrend.getService();
-            timelineList.put(endpointTrend.getEndpoint(), endpointTrend.getTimeline());
+            timelineList.put(endpointTrend.getEndpoint(), endpointTrend.getTimeline());
         }
-        String operation = serviceFunctionsMap.get(service);
-        TimelineAggregator timelineAggregator = new TimelineAggregator(timelineList);
-        timelineAggregator.aggregate(operationsParser.getTruthTable(), operationsParser.getIntOperation(operation));
+        String operation
= serviceFunctionsMap.get(service); + TimelineAggregator timelineAggregator = new TimelineAggregator(timelineList); + timelineAggregator.aggregate(operationsParser.getTruthTable(), operationsParser.getIntOperation(operation)); - Timeline timeline = timelineAggregator.getOutput(); - int flipflops = timeline.calcStatusChanges(); - if (group != null && service != null) { - ServiceTrends serviceTrends = new ServiceTrends(group, service, timeline, flipflops); - out.collect(serviceTrends); - } - } + Timeline timeline = timelineAggregator.getOutput(); + int flipflops = timeline.calcStatusChanges(); + if (group != null && service != null) { + ServiceTrends serviceTrends = new ServiceTrends(group, service, timeline, flipflops); + System.out.println("group: " + group + " service: " + service + " flipflops: " + flipflops); + out.collect(serviceTrends); + } + } } diff --git a/flink_jobs/status_trends/src/main/java/argo/profiles/ProfilesLoader.java b/flink_jobs/status_trends/src/main/java/argo/profiles/ProfilesLoader.java index bbdd3b3d..ccf538c4 100644 --- a/flink_jobs/status_trends/src/main/java/argo/profiles/ProfilesLoader.java +++ b/flink_jobs/status_trends/src/main/java/argo/profiles/ProfilesLoader.java @@ -13,22 +13,21 @@ import org.json.simple.parser.ParseException; /** - * - * @author cthermolia - * * ProfilesLoader, loads all the parser that will be used to collect the * information from the web api */ public class ProfilesLoader { - private ReportParser reportParser; + + private ReportManager reportParser; + private EndpointGroupManager topologyEndpointParser; private MetricProfileManager metricProfileParser; private OperationsParser operationParser; private AggregationProfileManager aggregationProfileParser; -// private TopologyGroupParser topolGroupParser; - private GroupGroupManager topolGroupParser; + + private GroupGroupManager topolGroupParser; private String aggregationId; private String metricId; @@ -39,13 +38,16 @@ public ProfilesLoader() { public ProfilesLoader(ParameterTool params) throws IOException, ParseException { - reportParser = new ReportParser(params.getRequired("apiUri"), params.getRequired("key"), params.get("proxy"), params.getRequired("reportId")); - String[] reportInfo = reportParser.getTenantReport().getInfo(); - //topolGroupParser = new TopologyGroupParser(params.getRequired("apiUri"), params.getRequired("key"), params.get("proxy"), params.getRequired("date"), reportInfo[0]); - - aggregationId = reportParser.getAggregationReportId(); - metricId = reportParser.getMetricReportId(); - operationsId = reportParser.getOperationReportId(); + JsonElement reportProfileJson = RequestManager.reportProfileRequest(params.getRequired("apiUri"), params.getRequired("key"), params.get("proxy"), params.getRequired("reportId")); + reportParser = new ReportManager(); + reportParser.readJson(reportProfileJson); + + String reportName = reportParser.getReport(); + + aggregationId = reportParser.getAggregationReportId(); + metricId = reportParser.getMetricReportId(); + operationsId = reportParser.getOperationReportId(); + JsonElement opProfileJson = RequestManager.operationsProfileRequest(params.getRequired("apiUri"), operationsId, params.getRequired("key"), params.get("proxy"), params.get("date")); @@ -61,25 +63,25 @@ public ProfilesLoader(ParameterTool params) throws IOException, ParseException { aggregationProfileParser = new AggregationProfileManager(); aggregationProfileParser.readJson(aggregationProfileJson); - - - JsonArray endpointGroupProfileJson = 
RequestManager.endpointGroupProfileRequest(params.getRequired("apiUri"), params.getRequired("key"), params.get("proxy"), reportInfo[0],params.get("date")); + + JsonArray endpointGroupProfileJson = RequestManager.endpointGroupProfileRequest(params.getRequired("apiUri"), params.getRequired("key"), params.get("proxy"), reportName, params.get("date")); + topologyEndpointParser = new EndpointGroupManager(); topologyEndpointParser.loadGroupEndpointProfile(endpointGroupProfileJson); - - JsonArray groupGroupProfileJson = RequestManager.groupGroupProfileRequest(params.getRequired("apiUri"), params.getRequired("key"), params.get("proxy"), reportInfo[0],params.get("date")); + JsonArray groupGroupProfileJson = RequestManager.groupGroupProfileRequest(params.getRequired("apiUri"), params.getRequired("key"), params.get("proxy"), reportName, params.get("date")); + topolGroupParser = new GroupGroupManager(); topolGroupParser.loadGroupGroupProfile(groupGroupProfileJson); } - public ReportParser getReportParser() { + public ReportManager getReportParser() { return reportParser; } - public void setReportParser(ReportParser reportParser) { + public void setReportParser(ReportManager reportParser) { this.reportParser = reportParser; } @@ -113,15 +115,6 @@ public AggregationProfileManager getAggregationProfileParser() { public void setAggregationProfileParser(AggregationProfileManager aggregationProfileParser) { this.aggregationProfileParser = aggregationProfileParser; } - -// public TopologyGroupParser getTopolGroupParser() { -// return topolGroupParser; -// } -// -// public void setTopolGroupParser(TopologyGroupParser topolGroupParser) { -// this.topolGroupParser = topolGroupParser; -// } - public GroupGroupManager getTopolGroupParser() { return topolGroupParser; } @@ -129,7 +122,6 @@ public GroupGroupManager getTopolGroupParser() { public void setTopolGroupParser(GroupGroupManager topolGroupParser) { this.topolGroupParser = topolGroupParser; } - public String getAggregationId() { return aggregationId; diff --git a/flink_jobs/status_trends/src/main/java/argo/profiles/ReportManager.java b/flink_jobs/status_trends/src/main/java/argo/profiles/ReportManager.java new file mode 100644 index 00000000..91c2cb79 --- /dev/null +++ b/flink_jobs/status_trends/src/main/java/argo/profiles/ReportManager.java @@ -0,0 +1,336 @@ +package argo.profiles; + +import java.io.BufferedReader; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileReader; +import java.io.IOException; +import java.util.List; +import java.util.TreeMap; + +import org.apache.commons.io.IOUtils; +import org.apache.log4j.Logger; + +import com.google.gson.JsonArray; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import com.google.gson.JsonParseException; +import com.google.gson.JsonParser; +import java.util.ArrayList; +import java.util.Iterator; + +/** + * + * + * ReportManager class implements objects that store the information parsed from + * a json object containing the profile data of a tenant's report or loaded from + * a json file + */ + +public class ReportManager { + + private static final Logger LOG = Logger.getLogger(ReportManager.class.getName()); + + public String id; // report uuid reference + public String report; + public String tenant; + public String egroup; // endpoint group + public String ggroup; // group of groups + public String weight; // weight factor type + public TreeMap egroupTags; + public TreeMap ggroupTags; + public TreeMap mdataTags; + private Threshold threshold; + ArrayList 
profiles;
+
+    /**
+     * Constructs a ReportManager object to store the tenant's report
+     * information
+     */
+    public ReportManager() {
+        this.report = null;
+        this.id = null;
+        this.tenant = null;
+        this.egroup = null;
+        this.ggroup = null;
+        this.weight = null;
+        this.egroupTags = new TreeMap();
+        this.ggroupTags = new TreeMap();
+        this.mdataTags = new TreeMap();
+        this.profiles = new ArrayList<>();
+
+    }
+
+    /**
+     * Clears the stored data of a ReportManager object
+     */
+    public void clear() {
+        this.id = null;
+        this.report = null;
+        this.tenant = null;
+        this.egroup = null;
+        this.ggroup = null;
+        this.weight = null;
+        this.threshold = null;
+        this.egroupTags.clear();
+        this.ggroupTags.clear();
+        this.mdataTags.clear();
+        this.profiles.clear();
+
+    }
+
+    public String getReportID() {
+        return id;
+    }
+
+    public String getReport() {
+        return report;
+    }
+
+    public String getTenant() {
+        return tenant;
+    }
+
+    public String getEgroup() {
+        return egroup;
+    }
+
+    /**
+     * Loads the tenant's report from a json file and stores the info in the
+     * corresponding fields
+     *
+     * @param jsonFile
+     * @throws IOException
+     */
+    public void loadJson(File jsonFile) throws IOException {
+        // Clear data
+        this.clear();
+
+        BufferedReader br = null;
+        try {
+            br = new BufferedReader(new FileReader(jsonFile));
+
+            JsonParser jsonParser = new JsonParser();
+            JsonElement jElement = jsonParser.parse(br);
+            readJson(jElement);
+        } catch (FileNotFoundException ex) {
+            LOG.error("Could not open file:" + jsonFile.getName());
+            throw ex;
+
+        } catch (JsonParseException ex) {
+            LOG.error("File is not valid json:" + jsonFile.getName());
+            throw ex;
+        } finally {
+            // Close quietly without exceptions the buffered reader
+            IOUtils.closeQuietly(br);
+        }
+
+    }
+
+    /**
+     * Loads Report config information from a config json string
+     *
+     */
+    public void loadJsonString(List confJson) throws JsonParseException {
+        // Clear data
+        this.clear();
+
+        try {
+
+            JsonParser jsonParser = new JsonParser();
+            // Grab the first - and only line of json from ops data
+            JsonElement jElement = jsonParser.parse(confJson.get(0));
+            readJson(jElement);
+
+        } catch (JsonParseException ex) {
+            LOG.error("Not valid json contents");
+            throw ex;
+        }
+
+    }
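+    // For reference, readJson below expects a report object shaped roughly as
+    // follows (values are illustrative; the id, tenant, report name and egroup
+    // match the test fixture /profiles/report.json, the threshold numbers are made up):
+    // {
+    //   "id": "04edb428-01e6-4286-87f1-050546736f7c",
+    //   "tenant": "EGI",
+    //   "info": { "name": "SLA" },
+    //   "topology_schema": { "group": { "type": "...", "group": { "type": "SERVICEGROUPS" } } },
+    //   "weight": "...",
+    //   "filter_tags": [ { "name": "...", "value": "...", "context": "endpoint_groups" } ],
+    //   "thresholds": { "availability": 80, "reliability": 85, "uptime": 0.8, "unknown": 0.1, "downtime": 0.1 },
+    //   "profiles": [ { "id": "...", "name": "...", "type": "metric" } ]
+    // }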
+
+    /**
+     * Parses a JsonElement containing the tenant's report data and stores the
+     * necessary information in the corresponding fields
+     *
+     * @param jElement a JsonElement containing the tenant's report data
+     */
+    public void readJson(JsonElement jElement) {
+
+        JsonObject jObj = jElement.getAsJsonObject();
+        // Get the simple fields
+        this.id = jObj.get("id").getAsString();
+        this.tenant = jObj.get("tenant").getAsString();
+        this.report = jObj.get("info").getAsJsonObject().get("name").getAsString();
+
+        // get topology schema names
+        JsonObject topoGroup = jObj.get("topology_schema").getAsJsonObject().getAsJsonObject("group");
+        this.ggroup = topoGroup.get("type").getAsString();
+        this.egroup = topoGroup.get("group").getAsJsonObject().get("type").getAsString();
+
+        // optional weight filtering
+        this.weight = "";
+        if (jObj.has("weight")) {
+            this.weight = jObj.get("weight").getAsString();
+        }
+        // Get compound fields
+        JsonArray jTags = jObj.getAsJsonArray("filter_tags");
+
+        // Iterate tags
+        if (jTags != null) {
+            for (JsonElement tag : jTags) {
+                JsonObject jTag = tag.getAsJsonObject();
+                String name = jTag.get("name").getAsString();
+                String value = jTag.get("value").getAsString();
+                String ctx = jTag.get("context").getAsString();
+                if (ctx.equalsIgnoreCase("group_of_groups")) {
+                    this.ggroupTags.put(name, value);
+                } else if (ctx.equalsIgnoreCase("endpoint_groups")) {
+                    this.egroupTags.put(name, value);
+                } else if (ctx.equalsIgnoreCase("metric_data")) {
+                    this.mdataTags.put(name, value);
+                }
+
+            }
+        }
+
+        JsonObject thresholdsObject = jObj.get("thresholds").getAsJsonObject();
+
+        this.threshold = new Threshold(thresholdsObject.get("availability").getAsLong(), thresholdsObject.get("reliability").getAsLong(), thresholdsObject.get("uptime").getAsDouble(),
+                thresholdsObject.get("unknown").getAsDouble(), thresholdsObject.get("downtime").getAsDouble());
+
+        JsonArray profilesArray = jObj.get("profiles").getAsJsonArray();
+
+        Iterator profileIter = profilesArray.iterator();
+        while (profileIter.hasNext()) {
+            JsonObject profileObject = profileIter.next().getAsJsonObject();
+            Profiles profile = new Profiles(profileObject.get("id").getAsString(), profileObject.get("name").getAsString(), profileObject.get("type").getAsString());
+            profiles.add(profile);
+        }
+
+    }
+
+    /**
+     * Threshold is an inner class that stores the info of the thresholds as
+     * described in the tenant's report
+     */
+    public class Threshold {
+
+        private Long availability;
+        private Long reliability;
+        private Double uptime;
+        private Double unknown;
+        private Double downtime;
+
+        public Threshold(Long availability, Long reliability, Double uptime, Double unknown, Double downtime) {
+            this.availability = availability;
+            this.reliability = reliability;
+            this.uptime = uptime;
+            this.unknown = unknown;
+            this.downtime = downtime;
+        }
+
+        public Long getAvailability() {
+            return availability;
+        }
+
+        public Long getReliability() {
+            return reliability;
+        }
+
+        public Double getUptime() {
+            return uptime;
+        }
+
+        public Double getUnknown() {
+            return unknown;
+        }
+
+        public Double getDowntime() {
+            return downtime;
+        }
+
+    }
+
+    /**
+     * Profiles is an inner class that stores the info of the profiles as
+     * described in the tenant's report
+     */
+    private class Profiles {
+
+        private String id;
+        private String name;
+        private String type;
+
+        public Profiles(String id, String name, String type) {
+            this.id = id;
+            this.name = name;
+            this.type = type;
+        }
+
+        public String getId() {
+            return id;
+        }
+
+        public String getName() {
+            return name;
+        }
+
+        public String getType() {
+            return type;
+        }
+
+    }
+
+    /**
+     * Returns the aggregation profile id, as described in the profiles of the
+     * tenant's report
+     * @return
+     */
+    public String getAggregationReportId() {
+        if (profiles != null) {
+            for (Profiles profile : profiles) {
+                if (profile.getType().equalsIgnoreCase(ProfileType.AGGREGATION.name())) {
+                    return profile.id;
+                }
+            }
+        }
+        return null;
+    }
+
+    /**
+     * Returns the metric profile id, as described in the profiles of the
+     * tenant's report
+     * @return
+     */
+    public String getMetricReportId() {
+
+        if (profiles != null) {
+            for (Profiles profile : profiles) {
+                if (profile.getType().equalsIgnoreCase(ProfileType.METRIC.name())) {
+                    return profile.id;
+                }
+            }
+        }
+        return null;
+    }
+
+    /**
+     * Returns the operations profile id, as described in the profiles of the
+     * tenant's report
+     * @return
+     */
+    public String getOperationReportId() {
+        if (profiles != null) {
+            for (Profiles profile : profiles) {
+                if (profile.getType().equalsIgnoreCase(ProfileType.OPERATIONS.name())) {
+                    return profile.id;
+                }
+            }
+        }
+        return null;
+    }
+
+    public enum ProfileType {
+
+        METRIC,
+        AGGREGATION,
+        OPERATIONS
+
+    }
+}
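Mirroring the ReportManagerTest cases earlier in this series, the class is used roughly as follows; the fixture path is illustrative:

    import argo.profiles.ReportManager;
    import java.io.File;

    // Illustrative usage: load the report fixture and read back the ids of
    // the profiles it references.
    public class ReportManagerExample {

        public static void main(String[] args) throws Exception {
            ReportManager manager = new ReportManager();
            manager.loadJson(new File("src/test/resources/profiles/report.json"));

            System.out.println(manager.getTenant());               // "EGI" in the test fixture
            System.out.println(manager.getReport());               // "SLA" in the test fixture
            System.out.println(manager.getAggregationReportId());  // id of the profile typed AGGREGATION
            System.out.println(manager.getMetricReportId());       // id of the profile typed METRIC
            System.out.println(manager.getOperationReportId());    // id of the profile typed OPERATIONS
        }
    }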
diff --git a/flink_jobs/status_trends/src/main/java/argo/utils/RequestManager.java b/flink_jobs/status_trends/src/main/java/argo/utils/RequestManager.java
index ebd51c67..2df79a15 100644
--- a/flink_jobs/status_trends/src/main/java/argo/utils/RequestManager.java
+++ b/flink_jobs/status_trends/src/main/java/argo/utils/RequestManager.java
@@ -83,15 +83,7 @@ public static JsonElement operationsProfileRequest(String apiUri, String operati
         return loadProfile(uri, key, proxy).get(0);
     }

-//    public static JsonElement loadOperationProfile(String uri, String key, String proxy) throws IOException, org.json.simple.parser.ParseException {
-//        JsonElement jsonElement = RequestManager.callRequest(uri, key, proxy);
-//        JsonObject jsonObj=jsonElement.getAsJsonObject();
-//        JsonArray dataObj = jsonObj.getAsJsonArray("data");
-//        JsonElement dataElement=dataObj.get(0);
-//
-//
-//        return dataElement;
-//    }
+
     public static JsonElement metricProfileRequest(String apiUri, String metricId, String key, String proxy, String dateStr) throws IOException, ParseException {

         String uri = apiUri + "/metric_profiles" + "/" + metricId;
@@ -110,10 +102,12 @@ public static JsonElement aggregationProfileRequest(String apiUri, String aggreg
         }

         return loadProfile(uri, key, proxy).get(0);
+
     }

-    public static JsonArray endpointGroupProfileRequest(String apiUri, String key, String proxy, String reportname,String dateStr) throws IOException, ParseException {
-
-        String uri = apiUri + "/topology/endpoints/by_report" + "/" + reportname;
+
+    public static JsonArray endpointGroupProfileRequest(String apiUri, String key, String proxy, String reportname, String dateStr) throws IOException, ParseException {
+
+        String uri = apiUri + "/topology/endpoints/by_report" + "/" + reportname;
         if (dateStr != null) {
             uri = uri + "?date=" + dateStr;
         }
@@ -128,15 +122,22 @@ public static JsonArray groupGroupProfileRequest(String apiUri, String key, Stri
         }

         return loadProfile(uri, key, proxy);
+
+    }
+
+    public static JsonElement reportProfileRequest(String apiUri, String key, String proxy, String reportId) throws IOException, ParseException {
+
+        String uri = apiUri + "/reports/" + reportId;
+        return loadProfile(uri, key, proxy).get(0);
+
     }

     public static JsonArray loadProfile(String uri, String key, String proxy) throws IOException, org.json.simple.parser.ParseException {
         JsonElement jsonElement = RequestManager.callRequest(uri, key, proxy);
         JsonObject jsonObj = jsonElement.getAsJsonObject();
         JsonArray dataObj = jsonObj.getAsJsonArray("data");
-        //JsonElement dataElement = dataObj.get(0);
-
-        return dataObj;
+        return dataObj;
     }
-}
+}
\ No newline at end of file

From c80f1ea9d1015009ccb41193d281cd4e6d8f9cac Mon Sep 17 00:00:00 2001
From: cthermolia-grnet
Date: Thu, 17 Jun 2021 09:03:31 +0300
Subject: [PATCH 004/112] ARGO-3231 Create Threshold Profile Parser independent
 Component

---
 .../src/main/java/argo/avro/Downtime.java     | 286 +++
 .../java/profilesmanager/DowntimeManager.java | 189 +++++
 .../profilesmanager/MetricProfileManager.java |   1 -
 .../profilesmanager/OperationsManager.java    | 392 +++++++++
 .../profilesmanager/ThresholdManager.java     | 758 ++++++++++++++++++
 .../resources/profiles/EGI-algorithm.json     |   1 +
 .../main/resources/profiles/EGI-rules.json    |   1 +
 .../main/resources/profiles/operations.json   |   1 +
 .../main/resources/profiles/truthtable.json   | 587 ++++++++++++++
 .../OperationsManagerTest.java                | 672 ++++++++++++++++
 .../profilesmanager/ThresholdManagerTest.java |  91 +++
 .../src/test/java/profilesmanager/Utils.java  | 123 +++
.../argo/batch/BatchFlipFlopCollection.java | 1 - .../argo/batch/BatchGroupFlipFlopTrends.java | 1 - .../argo/batch/BatchMetricFlipFlopTrends.java | 3 +- .../calctimelines/ServiceFilter.java | 3 - .../calctrends/CalcServiceFlipFlop.java | 2 +- .../java/argo/profiles/ConfigManager.java | 336 ++++++++ .../java/argo/profiles/ProfilesLoader.java | 6 +- .../main/java/argo/utils/RequestManager.java | 22 +- 20 files changed, 3455 insertions(+), 21 deletions(-) create mode 100644 flink_jobs/ProfilesManager/src/main/java/argo/avro/Downtime.java create mode 100644 flink_jobs/ProfilesManager/src/main/java/profilesmanager/DowntimeManager.java create mode 100644 flink_jobs/ProfilesManager/src/main/java/profilesmanager/OperationsManager.java create mode 100644 flink_jobs/ProfilesManager/src/main/java/profilesmanager/ThresholdManager.java create mode 100644 flink_jobs/ProfilesManager/src/main/resources/profiles/EGI-algorithm.json create mode 100644 flink_jobs/ProfilesManager/src/main/resources/profiles/EGI-rules.json create mode 100644 flink_jobs/ProfilesManager/src/main/resources/profiles/operations.json create mode 100644 flink_jobs/ProfilesManager/src/main/resources/profiles/truthtable.json create mode 100644 flink_jobs/ProfilesManager/src/test/java/profilesmanager/OperationsManagerTest.java create mode 100644 flink_jobs/ProfilesManager/src/test/java/profilesmanager/ThresholdManagerTest.java create mode 100644 flink_jobs/ProfilesManager/src/test/java/profilesmanager/Utils.java create mode 100644 flink_jobs/status_trends/src/main/java/argo/profiles/ConfigManager.java diff --git a/flink_jobs/ProfilesManager/src/main/java/argo/avro/Downtime.java b/flink_jobs/ProfilesManager/src/main/java/argo/avro/Downtime.java new file mode 100644 index 00000000..b73e100d --- /dev/null +++ b/flink_jobs/ProfilesManager/src/main/java/argo/avro/Downtime.java @@ -0,0 +1,286 @@ +/** + * Autogenerated by Avro + * + * DO NOT EDIT DIRECTLY + */ +package argo.avro; +@SuppressWarnings("all") +@org.apache.avro.specific.AvroGenerated +public class Downtime extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord { + public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"Downtime\",\"namespace\":\"argo.avro\",\"fields\":[{\"name\":\"hostname\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"service\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"start_time\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"end_time\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}]}"); + public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; } + @Deprecated public java.lang.String hostname; + @Deprecated public java.lang.String service; + @Deprecated public java.lang.String start_time; + @Deprecated public java.lang.String end_time; + + /** + * Default constructor. + */ + public Downtime() {} + + /** + * All-args constructor. + */ + public Downtime(java.lang.String hostname, java.lang.String service, java.lang.String start_time, java.lang.String end_time) { + this.hostname = hostname; + this.service = service; + this.start_time = start_time; + this.end_time = end_time; + } + + public org.apache.avro.Schema getSchema() { return SCHEMA$; } + // Used by DatumWriter. Applications should not call. 
+ public java.lang.Object get(int field$) { + switch (field$) { + case 0: return hostname; + case 1: return service; + case 2: return start_time; + case 3: return end_time; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + // Used by DatumReader. Applications should not call. + @SuppressWarnings(value="unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: hostname = (java.lang.String)value$; break; + case 1: service = (java.lang.String)value$; break; + case 2: start_time = (java.lang.String)value$; break; + case 3: end_time = (java.lang.String)value$; break; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + + /** + * Gets the value of the 'hostname' field. + */ + public java.lang.String getHostname() { + return hostname; + } + + /** + * Sets the value of the 'hostname' field. + * @param value the value to set. + */ + public void setHostname(java.lang.String value) { + this.hostname = value; + } + + /** + * Gets the value of the 'service' field. + */ + public java.lang.String getService() { + return service; + } + + /** + * Sets the value of the 'service' field. + * @param value the value to set. + */ + public void setService(java.lang.String value) { + this.service = value; + } + + /** + * Gets the value of the 'start_time' field. + */ + public java.lang.String getStartTime() { + return start_time; + } + + /** + * Sets the value of the 'start_time' field. + * @param value the value to set. + */ + public void setStartTime(java.lang.String value) { + this.start_time = value; + } + + /** + * Gets the value of the 'end_time' field. + */ + public java.lang.String getEndTime() { + return end_time; + } + + /** + * Sets the value of the 'end_time' field. + * @param value the value to set. + */ + public void setEndTime(java.lang.String value) { + this.end_time = value; + } + + /** Creates a new Downtime RecordBuilder */ + public static argo.avro.Downtime.Builder newBuilder() { + return new argo.avro.Downtime.Builder(); + } + + /** Creates a new Downtime RecordBuilder by copying an existing Builder */ + public static argo.avro.Downtime.Builder newBuilder(argo.avro.Downtime.Builder other) { + return new argo.avro.Downtime.Builder(other); + } + + /** Creates a new Downtime RecordBuilder by copying an existing Downtime instance */ + public static argo.avro.Downtime.Builder newBuilder(argo.avro.Downtime other) { + return new argo.avro.Downtime.Builder(other); + } + + /** + * RecordBuilder for Downtime instances. 
+ */
+ public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase<Downtime.Builder>
+   implements org.apache.avro.data.RecordBuilder<Downtime> {
+
+   private java.lang.String hostname;
+   private java.lang.String service;
+   private java.lang.String start_time;
+   private java.lang.String end_time;
+
+   /** Creates a new Builder */
+   private Builder() {
+     super(argo.avro.Downtime.SCHEMA$);
+   }
+
+   /** Creates a Builder by copying an existing Builder */
+   private Builder(argo.avro.Downtime.Builder other) {
+     super(other);
+   }
+
+   /** Creates a Builder by copying an existing Downtime instance */
+   private Builder(argo.avro.Downtime other) {
+     super(argo.avro.Downtime.SCHEMA$);
+     if (isValidValue(fields()[0], other.hostname)) {
+       this.hostname = data().deepCopy(fields()[0].schema(), other.hostname);
+       fieldSetFlags()[0] = true;
+     }
+     if (isValidValue(fields()[1], other.service)) {
+       this.service = data().deepCopy(fields()[1].schema(), other.service);
+       fieldSetFlags()[1] = true;
+     }
+     if (isValidValue(fields()[2], other.start_time)) {
+       this.start_time = data().deepCopy(fields()[2].schema(), other.start_time);
+       fieldSetFlags()[2] = true;
+     }
+     if (isValidValue(fields()[3], other.end_time)) {
+       this.end_time = data().deepCopy(fields()[3].schema(), other.end_time);
+       fieldSetFlags()[3] = true;
+     }
+   }
+
+   /** Gets the value of the 'hostname' field */
+   public java.lang.String getHostname() {
+     return hostname;
+   }
+
+   /** Sets the value of the 'hostname' field */
+   public argo.avro.Downtime.Builder setHostname(java.lang.String value) {
+     validate(fields()[0], value);
+     this.hostname = value;
+     fieldSetFlags()[0] = true;
+     return this;
+   }
+
+   /** Checks whether the 'hostname' field has been set */
+   public boolean hasHostname() {
+     return fieldSetFlags()[0];
+   }
+
+   /** Clears the value of the 'hostname' field */
+   public argo.avro.Downtime.Builder clearHostname() {
+     hostname = null;
+     fieldSetFlags()[0] = false;
+     return this;
+   }
+
+   /** Gets the value of the 'service' field */
+   public java.lang.String getService() {
+     return service;
+   }
+
+   /** Sets the value of the 'service' field */
+   public argo.avro.Downtime.Builder setService(java.lang.String value) {
+     validate(fields()[1], value);
+     this.service = value;
+     fieldSetFlags()[1] = true;
+     return this;
+   }
+
+   /** Checks whether the 'service' field has been set */
+   public boolean hasService() {
+     return fieldSetFlags()[1];
+   }
+
+   /** Clears the value of the 'service' field */
+   public argo.avro.Downtime.Builder clearService() {
+     service = null;
+     fieldSetFlags()[1] = false;
+     return this;
+   }
+
+   /** Gets the value of the 'start_time' field */
+   public java.lang.String getStartTime() {
+     return start_time;
+   }
+
+   /** Sets the value of the 'start_time' field */
+   public argo.avro.Downtime.Builder setStartTime(java.lang.String value) {
+     validate(fields()[2], value);
+     this.start_time = value;
+     fieldSetFlags()[2] = true;
+     return this;
+   }
+
+   /** Checks whether the 'start_time' field has been set */
+   public boolean hasStartTime() {
+     return fieldSetFlags()[2];
+   }
+
+   /** Clears the value of the 'start_time' field */
+   public argo.avro.Downtime.Builder clearStartTime() {
+     start_time = null;
+     fieldSetFlags()[2] = false;
+     return this;
+   }
+
+   /** Gets the value of the 'end_time' field */
+   public java.lang.String getEndTime() {
+     return end_time;
+   }
+
+   /** Sets the value of the 'end_time' field */
+   public argo.avro.Downtime.Builder setEndTime(java.lang.String value) {
+     validate(fields()[3], value);
+     this.end_time = value;
+     fieldSetFlags()[3] = true;
+     return this;
+   }
+
+   /** Checks whether the 'end_time' field has been set */
+   public boolean hasEndTime() {
+     return fieldSetFlags()[3];
+   }
+
+   /** Clears the value of the 'end_time' field */
+   public argo.avro.Downtime.Builder clearEndTime() {
+     end_time = null;
+     fieldSetFlags()[3] = false;
+     return this;
+   }
+
+   @Override
+   public Downtime build() {
+     try {
+       Downtime record = new Downtime();
+       record.hostname = fieldSetFlags()[0] ? this.hostname : (java.lang.String) defaultValue(fields()[0]);
+       record.service = fieldSetFlags()[1] ? this.service : (java.lang.String) defaultValue(fields()[1]);
+       record.start_time = fieldSetFlags()[2] ? this.start_time : (java.lang.String) defaultValue(fields()[2]);
+       record.end_time = fieldSetFlags()[3] ? this.end_time : (java.lang.String) defaultValue(fields()[3]);
+       return record;
+     } catch (Exception e) {
+       throw new org.apache.avro.AvroRuntimeException(e);
+     }
+   }
+ }
+}
diff --git a/flink_jobs/ProfilesManager/src/main/java/profilesmanager/DowntimeManager.java b/flink_jobs/ProfilesManager/src/main/java/profilesmanager/DowntimeManager.java
new file mode 100644
index 00000000..72fbc7ef
--- /dev/null
+++ b/flink_jobs/ProfilesManager/src/main/java/profilesmanager/DowntimeManager.java
@@ -0,0 +1,189 @@
+package profilesmanager;
+
+import org.apache.log4j.Logger;
+
+import argo.avro.Downtime;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+
+import org.apache.avro.Schema;
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericRecord;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.util.Utf8;
+import org.apache.commons.io.IOUtils;
+
+/**
+ * DowntimeManager manages the supplementary downtime information that is needed
+ * in the computation of a/r scores for endpoint groups.
+ * Information can be loaded either directly from an avro file or from a list of avro objects.
+ */
+public class DowntimeManager {
+
+   /**
+    * List of downtime information items
+    */
+   private ArrayList<DowntimeItem> list;
+   private static final Logger LOG = Logger.getLogger(DowntimeManager.class.getName());
+
+   /**
+    * Inner class that holds information about a downtime item, which is actually
+    * a 4-tuple (hostname, service, startTime, endTime)
+    */
+   private class DowntimeItem {
+     String hostname;  // name of host
+     String service;   // name of service
+     String startTime; // declared start time of downtime
+     String endTime;   // declared end time of downtime
+
+     public DowntimeItem(String hostname, String service, String startTime, String endTime) {
+       this.hostname = hostname;
+       this.service = service;
+       this.startTime = startTime;
+       this.endTime = endTime;
+     }
+
+   }
+
+   public DowntimeManager() {
+     this.list = new ArrayList<DowntimeItem>();
+   }
+
+   /**
+    * Inserts a new downtime information item (hostname, service, startTime, endTime)
+    * into the Downtime Manager
+    */
+   public int insert(String hostname, String service, String startTime, String endTime) {
+     DowntimeItem tmpItem = new DowntimeItem(hostname, service, startTime, endTime);
+     this.list.add(tmpItem);
+     return 0; // All good
+   }
+
+   /**
+    * Returns the downtime period (if any) for a specific service endpoint: (hostname, service)
+    */
+   public ArrayList<String> getPeriod(String hostname, String service) {
+
+     ArrayList<String> period = new ArrayList<String>();
+
+     for (DowntimeItem item : this.list) {
+
+       if (item.hostname.equals(hostname)) {
+         if (item.service.equals(service)) {
+           period.add(item.startTime);
+           period.add(item.endTime);
+           return period;
+         }
+       }
+     }
+
+     return null;
+
+   }
+
+   /**
+    * Loads downtime information from an avro file.
+    * <p>
+    * This method loads downtime information contained in an .avro file with a
+    * specific avro schema.
+    * <p>
+    * The following fields are expected to be found in each avro row:
+    * <ol>
+    * <li>start_time: string</li>
+    * <li>end_time: string</li>
+    * <li>service: string</li>
+    * <li>hostname: string</li>
+    * <li>[optional] tags: hashmap (contains a map of arbitrary key values)</li>
+    * </ol>
+    *
+    * @param avroFile
+    *          a File object of the avro file that will be opened
+    * @throws IOException
+    *           if there is an error during opening of the avro file
+    */
+   @SuppressWarnings("unchecked")
+   public void loadAvro(File avroFile) throws IOException {
+
+     // Prepare Avro File Readers
+     DatumReader<GenericRecord> datumReader = new GenericDatumReader<GenericRecord>();
+     DataFileReader<GenericRecord> dataFileReader = null;
+
+     try {
+       dataFileReader = new DataFileReader<GenericRecord>(avroFile, datumReader);
+
+       // Grab avro schema
+       Schema avroSchema = dataFileReader.getSchema();
+
+       // Generate 1st level generic record reader (rows)
+       GenericRecord avroRow = new GenericData.Record(avroSchema);
+
+       // For all rows in file repeat
+       while (dataFileReader.hasNext()) {
+         // read the row
+         avroRow = dataFileReader.next(avroRow);
+         HashMap<String, String> tagMap = new HashMap<String, String>();
+
+         // Generate 2nd level generic record reader (tags)
+         HashMap<Utf8, String> tags = (HashMap<Utf8, String>) (avroRow.get("tags"));
+
+         if (tags != null) {
+           for (Utf8 item : tags.keySet()) {
+             tagMap.put(item.toString(), String.valueOf(tags.get(item)));
+           }
+         }
+
+         // Grab 1st level mandatory fields
+         String hostname = avroRow.get("hostname").toString();
+         String service = avroRow.get("service").toString();
+         String startTime = avroRow.get("start_time").toString();
+         String endTime = avroRow.get("end_time").toString();
+         // insert data to list
+         this.insert(hostname, service, startTime, endTime);
+
+       } // end of avro rows
+
+     } catch (IOException ex) {
+       LOG.error("Could not open avro file:" + avroFile.getName());
+       throw ex;
+     } finally {
+       // Close quietly without exceptions the buffered reader
+       IOUtils.closeQuietly(dataFileReader);
+     }
+
+   }
+
+   /**
+    * Loads downtime information from a list of downtime objects
+    */
+   public void loadFromList(List<Downtime> dnt) {
+     // if no downtimes were collected, return
+     if (dnt == null) return;
+
+     // For each downtime object in list
+     for (Downtime item : dnt) {
+       String hostname = item.getHostname();
+       String service = item.getService();
+       String startTime = item.getStartTime();
+       String endTime = item.getEndTime();
+       // Insert data to list
+       if (hostname != null) this.insert(hostname, service, startTime, endTime);
+     }
+
+   }
+
+}
diff --git a/flink_jobs/ProfilesManager/src/main/java/profilesmanager/MetricProfileManager.java b/flink_jobs/ProfilesManager/src/main/java/profilesmanager/MetricProfileManager.java
index c084ebc4..9d3b92be 100644
--- a/flink_jobs/ProfilesManager/src/main/java/profilesmanager/MetricProfileManager.java
+++ b/flink_jobs/ProfilesManager/src/main/java/profilesmanager/MetricProfileManager.java
@@ -1,7 +1,6 @@
 package profilesmanager;
 
 import argo.avro.MetricProfile;
-
 import com.google.gson.JsonArray;
 import com.google.gson.JsonElement;
 import com.google.gson.JsonObject;
diff --git a/flink_jobs/ProfilesManager/src/main/java/profilesmanager/OperationsManager.java b/flink_jobs/ProfilesManager/src/main/java/profilesmanager/OperationsManager.java
new file mode 100644
index 00000000..27cdb782
--- /dev/null
+++ b/flink_jobs/ProfilesManager/src/main/java/profilesmanager/OperationsManager.java
@@ -0,0 +1,392 @@
+package profilesmanager;
+
+//import argo.utils.RequestManager;
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import org.apache.log4j.Logger;
+
+import com.google.gson.JsonArray;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
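+// A minimal usage sketch of the class below (illustrative only; the file path
+// is hypothetical, and the expected results assume the egi_ops profile shipped
+// in src/main/resources/profiles/operations.json):
+//
+//   OperationsManager opsMgr = new OperationsManager();
+//   opsMgr.loadJson(new File("/path/to/operations.json")); // throws IOException
+//   opsMgr.op("AND", "OK", "CRITICAL");  // -> "CRITICAL"
+//   opsMgr.op("OR", "OK", "CRITICAL");   // -> "OK"
+//   opsMgr.getIntStatus("OK");           // -> 0 (position in available_states)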
+import com.google.gson.JsonParseException;
+import com.google.gson.JsonParser;
+import java.io.Serializable;
+import org.apache.commons.io.IOUtils;
+
+/**
+ * The OperationsManager class implements objects that store the information
+ * parsed from a json object containing operation profile data, or loaded from
+ * a json file.
+ *
+ * The OperationsManager keeps info of the defined statuses and the defined
+ * operations, creates a truth table containing all the combinations of
+ * statuses per operation, and converts operations and statuses between their
+ * string form and the integer position they hold in the list storage.
+ */
+public class OperationsManager implements Serializable {
+
+   private static final Logger LOG = Logger.getLogger(OperationsManager.class.getName());
+
+   private HashMap<String, Integer> states;
+   private HashMap<String, Integer> ops;
+   private ArrayList<String> revStates;
+   private ArrayList<String> revOps;
+
+   private int[][][] truthTable;
+
+   private String defaultDownState;
+   private String defaultMissingState;
+   private String defaultUnknownState;
+
+   private boolean order;
+   // private final String url = "/operations_profiles";
+
+   /**
+    * Constructs an OperationsManager object initializing fields
+    */
+   public OperationsManager() {
+     this.states = new HashMap<String, Integer>();
+     this.ops = new HashMap<String, Integer>();
+     this.revStates = new ArrayList<String>();
+     this.revOps = new ArrayList<String>();
+
+     this.truthTable = null;
+
+     this.order = false;
+   }
+
+   public String getDefaultDown() {
+     return this.defaultDownState;
+   }
+
+   public String getDefaultUnknown() {
+     return this.defaultUnknownState;
+   }
+
+   public int getDefaultUnknownInt() {
+     return this.getIntStatus(this.defaultUnknownState);
+   }
+
+   public int getDefaultDownInt() {
+     return this.getIntStatus(this.defaultDownState);
+   }
+
+   public String getDefaultMissing() {
+     return this.defaultMissingState;
+   }
+
+   public int getDefaultMissingInt() {
+     return this.getIntStatus(this.defaultMissingState);
+   }
+
+   /**
+    * Clears the OperationsManager fields
+    */
+   public void clear() {
+     this.states = new HashMap<String, Integer>();
+     this.ops = new HashMap<String, Integer>();
+     this.revStates = new ArrayList<String>();
+     this.revOps = new ArrayList<String>();
+
+     this.truthTable = null;
+   }
+
+   /**
+    * Retrieves the status which is a combination of 2 statuses, based on the
+    * truth table of the operation, as an int
+    * @param op the operation (e.g. 0, 1)
+    * @param a the 1st status (e.g. 3)
+    * @param b the 2nd status (e.g. 2)
+    * @return the final status, which is the combination of the two statuses
+    *         as retrieved from the operation's truth table
+    */
+   public int opInt(int op, int a, int b) {
+     int result = -1;
+     try {
+       result = this.truthTable[op][a][b];
+     } catch (IndexOutOfBoundsException ex) {
+       LOG.info(ex);
+       result = -1;
+     }
+
+     return result;
+   }
+
+   /**
+    * Retrieves the status which is a combination of 2 statuses, based on the
+    * truth table of the operation, as an int
+    * @param op the operation in the form of a string (e.g. AND, OR)
+    * @param a the 1st status in the form of a string (e.g. OK, MISSING)
+    * @param b the 2nd status in the form of a string (e.g. OK, MISSING)
+    * @return the final status, which is the combination of the two statuses
+    *         as retrieved from the operation's truth table
+    */
+   public int opInt(String op, String a, String b) {
+
+     int opInt = this.ops.get(op);
+     int aInt = this.states.get(a);
+     int bInt = this.states.get(b);
+
+     return this.truthTable[opInt][aInt][bInt];
+   }
+
+   /**
+    * Retrieves the status which is a combination of 2 statuses, based on the
+    * truth table of the operation, as a string
+    * @param op the operation as an int (e.g. 0, 1)
+    * @param a the 1st status as an int (e.g. 1, 3)
+    * @param b the 2nd status as an int (e.g. 1, 3)
+    * @return the final status, as a string, which is the combination of the
+    *         two statuses as retrieved from the operation's truth table
+    */
+   public String op(int op, int a, int b) {
+     return this.revStates.get(this.truthTable[op][a][b]);
+   }
+
+   /**
+    * Retrieves the status which is a combination of 2 statuses, based on the
+    * truth table of the operation, as a string
+    * @param op the operation as a string (e.g. AND, OR)
+    * @param a the 1st status as a string (e.g. OK, MISSING)
+    * @param b the 2nd status as a string (e.g. OK, MISSING)
+    * @return the final status, as a string, which is the combination of the
+    *         two statuses as retrieved from the operation's truth table
+    */
+   public String op(String op, String a, String b) {
+     int opInt = this.ops.get(op);
+     int aInt = this.states.get(a);
+     int bInt = this.states.get(b);
+
+     return this.revStates.get(this.truthTable[opInt][aInt][bInt]);
+   }
+
+   /**
+    * Maps a status as an int to a string, based on the position of the status
+    * in the stored list of statuses
+    * @param status a status as an int (e.g. 1, 2)
+    * @return the status as a string
+    */
+   public String getStrStatus(int status) {
+     return this.revStates.get(status);
+   }
+
+   /**
+    * Maps a status as a string to an int
+    * @param status a status as a string (e.g. OK, MISSING)
+    * @return the status as an int
+    */
+   public int getIntStatus(String status) {
+     return this.states.get(status);
+   }
+
+   /**
+    * Maps an operation as an int to a string, based on the position of the
+    * operation in the stored list of operations
+    * @param op an operation as an int
+    * @return the operation as a string
+    */
+   public String getStrOperation(int op) {
+     return this.revOps.get(op);
+   }
+
+   /**
+    * Maps an operation as a string to an int
+    * @param op an operation as a string
+    * @return the operation as an int
+    */
+   public int getIntOperation(String op) {
+     return this.ops.get(op);
+   }
+
+   public ArrayList<String> availableStates() {
+     return this.revStates;
+   }
+
+   public ArrayList<String> availableOps() {
+     return this.revOps;
+   }
+
+   /**
+    * Reads from a json file and stores the necessary information in the
+    * OperationsManager object fields
+    * @param jsonFile a json file containing information about the operations profiles
+    * @throws IOException
+    */
+   public void loadJson(File jsonFile) throws IOException {
+     // Clear data
+     this.clear();
+
+     BufferedReader br = null;
+     try {
+       br = new BufferedReader(new FileReader(jsonFile));
+
+       JsonParser json_parser = new JsonParser();
+       JsonElement j_element = json_parser.parse(br);
+       JsonObject jRoot = j_element.getAsJsonObject();
+       JsonArray jData = jRoot.get("data").getAsJsonArray();
+       JsonElement jItem = jData.get(0);
+
+       readJson(jItem);
+     } catch (FileNotFoundException ex) {
+       LOG.error("Could not open file:" + jsonFile.getName());
+       throw ex;
+
+     } catch (JsonParseException ex) {
+       LOG.error("File is not valid json:" + jsonFile.getName());
+       throw ex;
+     } finally {
+       // Close quietly without exceptions the buffered reader
+       IOUtils.closeQuietly(br);
+     }
+
+   }
+
+   /**
+    * Reads from a JsonElement and stores the necessary information in the
+    * OperationsManager object fields
+    * @param j_element a JsonElement containing the operations profiles data
+    */
+   private void readJson(JsonElement j_element) {
+     JsonObject j_obj = j_element.getAsJsonObject();
+     JsonArray j_states = j_obj.getAsJsonArray("available_states");
+     JsonArray j_ops = j_obj.getAsJsonArray("operations");
+     this.defaultMissingState = j_obj.getAsJsonObject("defaults").getAsJsonPrimitive("missing").getAsString();
+     this.defaultDownState = j_obj.getAsJsonObject("defaults").getAsJsonPrimitive("down").getAsString();
+     this.defaultUnknownState = j_obj.getAsJsonObject("defaults").getAsJsonPrimitive("unknown").getAsString();
+     // Collect the available states
+     for (int i = 0; i < j_states.size(); i++) {
+       this.states.put(j_states.get(i).getAsString(), i);
+       this.revStates.add(j_states.get(i).getAsString());
+     }
+
+     // Collect the available operations
+     int i = 0;
+     for (JsonElement item : j_ops) {
+       JsonObject jObjItem = item.getAsJsonObject();
+       this.ops.put(jObjItem.getAsJsonPrimitive("name").getAsString(), i);
+       this.revOps.add(jObjItem.getAsJsonPrimitive("name").getAsString());
+       i++;
+     }
+     // Initialize the truth table
+     int num_ops = this.revOps.size();
+     int num_states = this.revStates.size();
+     this.truthTable = new int[num_ops][num_states][num_states];
+
+     for (int[][] surface : this.truthTable) {
+       for (int[] line : surface) {
+         Arrays.fill(line, -1);
+       }
+     }
+
+     // Fill the truth table
+     for (JsonElement item : j_ops) {
+       JsonObject jObjItem = item.getAsJsonObject();
+       String opname = jObjItem.getAsJsonPrimitive("name").getAsString();
+       JsonArray tops = jObjItem.getAsJsonArray("truth_table");
+
+       for (int j = 0; j < tops.size(); j++) {
+         JsonObject row = tops.get(j).getAsJsonObject();
+
+         int a_val = this.states.get(row.getAsJsonPrimitive("a").getAsString());
+         int b_val = this.states.get(row.getAsJsonPrimitive("b").getAsString());
+         int x_val = this.states.get(row.getAsJsonPrimitive("x").getAsString());
+         int op_val = this.ops.get(opname);
+
+         // Fill in truth table.
+         // If order sensitivity is off, insert two truth values:
+         // ...[a][b] and [b][a]
+         this.truthTable[op_val][a_val][b_val] = x_val;
+         if (!this.order) {
+           this.truthTable[op_val][b_val][a_val] = x_val;
+         }
+       }
+     }
+
+   }
+
+   /**
+    * Calls a JsonParser to read from a list of strings containing the operations
+    * profiles data, extracts the JsonElement and calls readJson() to read and
+    * store the operations profiles data
+    * @param opsJson a list of strings
+    * @throws JsonParseException
+    */
+   public void loadJsonString(List<String> opsJson) throws JsonParseException {
+     // Clear data
+     this.clear();
+
+     JsonParser json_parser = new JsonParser();
+     // Grab the first - and only - line of json from ops data
+     JsonElement j_element = json_parser.parse(opsJson.get(0));
+     readJson(j_element);
+   }
+
+   public int[][][] getTruthTable() {
+     return truthTable;
+   }
+
+   public void setTruthTable(int[][][] truthTable) {
+     this.truthTable = truthTable;
+   }
+
+   public HashMap<String, Integer> getStates() {
+     return states;
+   }
+
+   public void setStates(HashMap<String, Integer> states) {
+     this.states = states;
+   }
+
+   public HashMap<String, Integer> getOps() {
+     return ops;
+   }
+
+   public void setOps(HashMap<String, Integer> ops) {
+     this.ops = ops;
+   }
+
+   public ArrayList<String> getRevStates() {
+     return revStates;
+   }
+
+   public void setRevStates(ArrayList<String> revStates) {
+     this.revStates = revStates;
+   }
+
+   public ArrayList<String> getRevOps() {
+     return revOps;
+   }
+
+   public void setRevOps(ArrayList<String> revOps) {
+     this.revOps = revOps;
+   }
+
+   public String getDefaultDownState() {
+     return defaultDownState;
+   }
+
+   public void setDefaultDownState(String defaultDownState) {
+     this.defaultDownState = defaultDownState;
+   }
+
+   public String getDefaultMissingState() {
+     return defaultMissingState;
+   }
+
+   public void setDefaultMissingState(String defaultMissingState) {
+     this.defaultMissingState = defaultMissingState;
+   }
+
+   public String getDefaultUnknownState() {
+     return defaultUnknownState;
+   }
+
+   public void setDefaultUnknownState(String defaultUnknownState) {
+     this.defaultUnknownState = defaultUnknownState;
+   }
+
+   public boolean isOrder() {
+     return order;
+   }
+
+   public void setOrder(boolean order) {
+     this.order = order;
+   }
+
+   public static Logger getLOG() {
+     return LOG;
+   }
+
+}
diff --git a/flink_jobs/ProfilesManager/src/main/java/profilesmanager/ThresholdManager.java b/flink_jobs/ProfilesManager/src/main/java/profilesmanager/ThresholdManager.java
new file mode 100644
index 00000000..d9bf4103
--- /dev/null
+++ b/flink_jobs/ProfilesManager/src/main/java/profilesmanager/ThresholdManager.java
@@ -0,0 +1,758 @@
+package profilesmanager;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Map.Entry;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.log4j.Logger;
+
+import com.google.gson.JsonArray;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+import com.google.gson.JsonParseException;
+import com.google.gson.JsonParser;
+
+/**
+ * @author kaggis
+ */
+public class ThresholdManager {
+
+   private static final Logger LOG = Logger.getLogger(ThresholdManager.class.getName());
+
+   // Nested map that holds rule definitions:
+   // "groups/hosts/metrics" -> label -> threshold rules
+   private Map<String, Map<String, Threshold>> rules;
+
+   // Reverse index checks for group, host, metrics
+   private HashSet<String> metrics;
+   private HashSet<String> hosts;
+   private HashSet<String> groups;
+   private String aggregationOp = "AND";
+
+   public Map<String, Map<String, Threshold>> getRules() {
+     return this.rules;
+   }
+
+   /**
+    * The Threshold class implements objects that hold threshold values as they
+    * are parsed from a threshold expression such as the following one:
+    *
+    * label=30s;0:50,50:100,0,100
+    *
+    * A Threshold object can be directly constructed from a string including an
+    * expression as the above.
+    *
+    * Each threshold object stores the threshold expression and the individual
+    * parsed items such as value, uom, warning range, critical range and
+    * min, max values.
+    */
+   class Threshold {
+
+     private static final String defWarning = "WARNING";
+     private static final String defCritical = "CRITICAL";
+
+     private String expression;
+     private String label;
+     private Float value;
+     private String uom;
+     private Range warning;
+     private Range critical;
+     private Float min;
+     private Float max;
+
+     /**
+      * Constructs a threshold from a string containing a threshold expression
+      *
+      * @param expression A string containing a threshold expression such as the
+      *                   following one: label=30s;0:50,50:100,0,100
+      */
+     public Threshold(String expression) {
+       Threshold temp = parseAndSet(expression);
+       this.expression = temp.expression;
+       this.label = temp.label;
+       this.value = temp.value;
+       this.uom = temp.uom;
+       this.warning = temp.warning;
+       this.critical = temp.critical;
+       this.min = temp.min;
+       this.max = temp.max;
+
+     }
+
+     /**
+      * Creates a new threshold object by providing each parameter
+      *
+      * @param expression string containing the threshold expression
+      * @param label threshold label
+      * @param value threshold value
+      * @param uom unit of measurement - optional
+      * @param warning a range determining warning statuses
+      * @param critical a range determining critical statuses
+      * @param min minimum value available for this threshold
+      * @param max maximum value available for this threshold
+      */
+     public Threshold(String expression, String label, float value, String uom, Range warning, Range critical,
+         float min, float max) {
+
+       this.expression = expression;
+       this.label = label;
+       this.value = value;
+       this.uom = uom;
+       this.warning = warning;
+       this.critical = critical;
+       this.min = min;
+       this.max = max;
+     }
+
+     public String getExpression() {
+       return expression;
+     }
+
+     public String getLabel() {
+       return label;
+     }
+
+     public float getValue() {
+       return value;
+     }
+
+     public String getUom() {
+       return uom;
+     }
+
+     public Range getWarning() {
+       return warning;
+     }
+
+     public Range getCritical() {
+       return critical;
+     }
+
+     public float getMin() {
+       return min;
+     }
+
+     public float getMax() {
+       return max;
+     }
+
+     /**
+      * Parses a threshold expression string and returns a Threshold object
+      *
+      * @param threshold string containing the threshold expression
+      * @return Threshold object
+      */
+     public Threshold parseAndSet(String threshold) {
+
+       String pThresh = threshold;
+       String curLabel = "";
+       String curUom = "";
+       Float curValue = Float.NaN;
+       Range curWarning = new Range(); // empty range
+       Range curCritical = new Range(); // empty range
+       Float curMin = Float.NaN;
+       Float curMax = Float.NaN;
+       // find label by splitting at =
+       String[] tokens = pThresh.split("=");
+       // Must have two tokens to continue, label=something
+       if (tokens.length == 2) {
+         curLabel = tokens[0];
+
+         // Split right value by ; to find the array of arguments
+         String[] subtokens = tokens[1].split(";");
+         // Must have size > 0, at least a value
+         if (subtokens.length > 0) {
+           curUom = getUOM(subtokens[0]);
+           curValue = Float.parseFloat(subtokens[0].replaceAll(curUom, ""));
+           if (subtokens.length > 1) {
+             // iterate over rest of subtokens
+             for (int i = 1; i < subtokens.length; i++) {
+               if (i == 1) {
+                 // parse warning range
+                 curWarning = new Range(subtokens[i]);
+                 continue;
+               } else if (i == 2) {
+                 // parse critical range
+                 curCritical = new Range(subtokens[i]);
+                 continue;
+               } else if (i == 3) {
+                 // parse min
+                 curMin = Float.parseFloat(subtokens[i]);
+                 continue;
+               } else if (i == 4) {
+                 // parse max
+                 curMax = Float.parseFloat(subtokens[i]);
+               }
+             }
+           }
+
+         }
+
+       }
+
+       return new Threshold(threshold, curLabel, curValue, curUom, curWarning, curCritical, curMin, curMax);
+
+     }
+
+     /**
+      * Reads a threshold string value and extracts the unit of measurement,
+      * if present
+      *
+      * @param value String containing a representation of the value and uom
+      * @return String representing the uom.
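+      * For example (illustrative): getUOM("10s") returns "s", getUOM("95%")
+      * returns "%", and getUOM("42") returns the empty string.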
+ */ + public String getUOM(String value) { + // check if ends with digit + if (Character.isDigit(value.charAt(value.length() - 1))) { + return ""; + } + + // check if ends with seconds + if (value.endsWith("s")) { + return "s"; + } + if (value.endsWith("us")) { + return "us"; + } + if (value.endsWith("ms")) { + return "ms"; + } + if (value.endsWith("%")) { + return "%"; + } + if (value.endsWith("B")) { + return "B"; + } + if (value.endsWith("KB")) { + return "KB"; + } + if (value.endsWith("MB")) { + return "MB"; + } + if (value.endsWith("TB")) { + return "TB"; + } + if (value.endsWith("c")) { + return "c"; + } + + // Not valid range + throw new RuntimeException("Invalid Unit of measurement: " + value); + + } + + /** + * Checks an external value against a threshold's warning,critical + * ranges. If a range contains the value (warning or critical) the + * corresponding status is returned as string "WARNING" or "CRITICAL". + * If the threshold doesn't provide the needed data to decide on status + * an "" is returned back. + * + * @return string with the status result "WARNING", "CRITICAL" + */ + public String calcStatusWithValue(Float value) { + + if (!Float.isFinite(this.value)) { + return ""; + } + if (!this.warning.isUndef()) { + if (this.warning.contains(value)) { + return defWarning; + } + } + if (!this.critical.isUndef()) { + if (this.critical.contains(value)) { + return defCritical; + } + } + + return ""; + } + + /** + * Checks a threshold's value against warning,critical ranges. If a + * range contains the value (warning or critical) the corresponding + * status is returned as string "WARNING" or "CRITICAL". If the + * threshold doesn't provide the needed data to decide on status an "" + * is returned back. + * + * @return string with the status result "WARNING", "CRITICAL" + */ + public String calcStatus() { + + if (!Float.isFinite(this.value)) { + return ""; + } + if (!this.warning.isUndef()) { + if (this.warning.contains(this.value)) { + return defWarning; + } + } + if (!this.critical.isUndef()) { + if (this.critical.contains(this.value)) { + return defCritical; + } + } + + return ""; + } + + public String toString() { + String strWarn = ""; + String strCrit = ""; + String strMin = ""; + String strMax = ""; + + if (this.warning != null) { + strWarn = this.warning.toString(); + } + if (this.critical != null) { + strCrit = this.critical.toString(); + } + if (this.min != null) { + strMin = this.min.toString(); + } + if (this.max != null) { + strMax = this.max.toString(); + } + + return "[expression=" + this.expression + ", label=" + this.label + ", value=" + this.value + ", uom=" + + this.uom + ", warning=" + strWarn + ", critical=" + strCrit + ", min=" + strMin + ", max=" + + strMax + ")"; + } + + } + + /** + * Range implements a simple object that holds a threshold's critical or + * warning range. It includes a floor,ceil as floats and an exclude flag + * when a range is supposed to be used for exclusion and not inclusion. The + * threshold spec uses an '@' character in front of a range to define + * inversion(exclusion) + * + * Inclusion assumes that floor < value < ceil and not floor <= value <= + * ceil + * + */ + class Range { + + Float floor; + Float ceil; + Boolean exclude; + + /** + * Creates an empty range. 
Invert is false and limits are NaN
+      */
+     public Range() {
+       this.floor = Float.NaN;
+       this.ceil = Float.NaN;
+       this.exclude = false;
+     }
+
+     /**
+      * Creates a range by parameters
+      *
+      * @param floor Float that defines the lower limit of the range
+      * @param ceil Float that defines the upper limit of the range
+      * @param exclude boolean that defines if the range is used for
+      *                exclusion (true) or inclusion (false)
+      */
+     public Range(Float floor, Float ceil, Boolean exclude) {
+       this.floor = floor;
+       this.ceil = ceil;
+       this.exclude = exclude;
+     }
+
+     /**
+      * Creates a range by parsing a range expression string like the
+      * following one: '0:10'
+      *
+      * @param range string including a range expression
+      */
+     public Range(String range) {
+       Range tmp = parseAndSet(range);
+       this.floor = tmp.floor;
+       this.ceil = tmp.ceil;
+       this.exclude = tmp.exclude;
+     }
+
+     /**
+      * Checks if a Range is undefined (floor, ceiling are NaN)
+      *
+      * @return boolean
+      */
+     public boolean isUndef() {
+       return Float.isNaN(this.floor) || Float.isNaN(this.ceil);
+     }
+
+     /**
+      * Checks if a value is included in range (or truly excluded if range is
+      * an exclusion)
+      *
+      * @param value Float
+      * @return boolean
+      */
+     public boolean contains(Float value) {
+       boolean result = value > this.floor && value < this.ceil;
+       if (this.exclude) {
+         return !result;
+       }
+       return result;
+     }
+
+     /**
+      * Parses a range expression string and creates a Range object. Range
+      * expressions can be in the following forms:
+      * <ul>
+      * <li>10 - range starting from 0 to 10</li>
+      * <li>10: - range starting from 10 to infinity</li>
+      * <li>~:20 - range starting from negative inf. up to 20</li>
+      * <li>20:30 - range between two numbers</li>
+      * <li>@20:30 - inverted range, excludes between two numbers</li>
+      * </ul>
+      *
+      * @param expression String containing a range expression
+      * @return
+      */
+     public Range parseAndSet(String expression) {
+       String parsedRange = expression;
+       Float curFloor = 0F;
+       Float curCeil = 0F;
+       boolean curInv = false;
+       if (parsedRange.replaceAll(" ", "").equals("")) {
+         return new Range();
+       }
+       // check if inverted
+       if (parsedRange.startsWith("@")) {
+         curInv = true;
+         // after the check remove @ from range string
+         parsedRange = parsedRange.replaceAll("^@", "");
+       }
+
+       // check if range string doesn't have the separator :
+       if (!parsedRange.contains(":")) {
+         // then we are in the case of a single number like 10
+         // which defines the rule 0 --> 10, so
+         curFloor = 0F;
+         curCeil = Float.parseFloat(parsedRange);
+
+         return new Range(curFloor, curCeil, curInv);
+       }
+
+       // check if range ends with the separator :
+       if (parsedRange.endsWith(":")) {
+         parsedRange = parsedRange.replaceAll(":$", "");
+         // then we are in the case of a single number like 10:
+         // which defines the rule 10 --> positive infinity
+         curFloor = Float.parseFloat(parsedRange);
+         curCeil = Float.POSITIVE_INFINITY;
+         return new Range(curFloor, curCeil, curInv);
+       }
+
+       // tokenize string without prefixes
+       String[] tokens = parsedRange.split(":");
+       if (tokens.length == 2) {
+         // check if token[0] is negative infinity ~
+         if (tokens[0].equalsIgnoreCase("~")) {
+           curFloor = Float.NEGATIVE_INFINITY;
+         } else {
+           curFloor = Float.parseFloat(tokens[0]);
+         }
+
+         curCeil = Float.parseFloat(tokens[1]);
+         return new Range(curFloor, curCeil, curInv);
+       }
+
+       // Not a valid range
+       throw new RuntimeException("Invalid threshold: " + expression);
+
+     }
+
+     public String toString() {
+       return "(floor=" + this.floor + ",ceil=" + this.ceil + ",invert=" + this.exclude.toString() + ")";
+     }
+
+   }
+
+   /**
+    * Creates a Manager that parses rule files with thresholds and stores them
+    * internally as objects. A ThresholdManager can be used to automatically
+    * calculate statuses about a monitoring item (group,host,metric) based on
+    * the most relevant threshold rules stored in it.
+    */
+   public ThresholdManager() {
+
+     this.rules = new HashMap<String, Map<String, Threshold>>();
+     this.hosts = new HashSet<String>();
+     this.groups = new HashSet<String>();
+     this.metrics = new HashSet<String>();
+
+   }
+
+   /**
+    * Returns the default operation used when aggregating statuses generated
+    * from multiple threshold rules
+    *
+    * @return
+    */
+   public String getAggregationOp() {
+     return this.aggregationOp;
+   }
+
+   /**
+    * @param op string with the name of the operation to be used in the
+    *           aggregation (AND, OR, custom one)
+    */
+   public void setAggregationOp(String op) {
+     this.aggregationOp = op;
+   }
+
+   /**
+    * Returns a status calculation for a specific rule key. Each rule key is
+    * defined as follows: 'group/host/metric' and leads to a threshold rule.
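+    * (As an illustration, with the sample EGI-rules.json added later in this
+    * patch, a fully qualified rule key would look like
+    * "SITE-101/bdii.host1.example.foo/org.bdii.Freshness".)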
+    * Group and host parts are optional, as such: 'group//metric' or
+    * '/host/metric' or '//metric'
+    *
+    * @param rule string containing a rule key
+    * @param opsMgr an OpsManager object to handle status aggregations
+    * @param opType an OpsManager operation to be used (like 'OR', 'AND')
+    * @return string with status result
+    */
+   public String getStatusByRule(String rule, OperationsManager opsMgr, String opType) {
+
+     if (!rules.containsKey(rule)) {
+       return "";
+     }
+     String status = "";
+     Map<String, Threshold> tholds = rules.get(rule);
+     for (Entry<String, Threshold> thold : tholds.entrySet()) {
+       // first step
+       if (status == "") {
+         status = thold.getValue().calcStatus();
+         continue;
+       }
+       String statusNext = thold.getValue().calcStatus();
+       if (statusNext != "") {
+         status = opsMgr.op(opType, status, statusNext);
+       }
+     }
+     return status;
+   }
+
+   /**
+    * Returns a status calculation for a specific rule key. Each rule key is
+    * defined as follows: 'group/host/metric' and leads to a threshold rule.
+    * Group and host parts are optional, as such: 'group//metric' or
+    * '/host/metric' or '//metric'
+    *
+    * @param rule string containing a rule key
+    * @param opsMgr an OpsManager object to handle status aggregations
+    * @param opType an OpsManager operation to be used (like 'OR', 'AND')
+    * @return string array with two elements. First element is the status
+    *         result and the second one the rule applied
+    */
+   public String[] getStatusByRuleAndValues(String rule, OperationsManager opsMgr, String opType, Map<String, Float> values) {
+
+     if (!rules.containsKey(rule)) {
+       return new String[]{"", ""};
+     }
+     String status = "";
+     String explain = "";
+     Map<String, Threshold> tholds = rules.get(rule);
+
+     for (Entry<String, Float> value : values.entrySet()) {
+       String label = value.getKey();
+       if (tholds.containsKey(label)) {
+         Threshold th = tholds.get(label);
+         // first step
+         if (status == "") {
+           status = th.calcStatusWithValue(value.getValue());
+           explain = th.getExpression();
+           continue;
+         }
+
+         String statusNext = th.calcStatusWithValue(value.getValue());
+
+         if (statusNext != "") {
+           status = opsMgr.op(opType, status, statusNext);
+           explain = explain + " " + th.getExpression();
+         }
+       }
+     }
+
+     return new String[]{status, explain};
+
+   }
+
+   /**
+    * Gets the most relevant rule based on a monitoring item
+    * (group,host,metric), using the following precedence (most specific to
+    * least specific): (group, host, metric) #1, ( , host, metric) #2,
+    * (group, , metric) #3, ( , , metric) #4
+    *
+    * @param group string with name of the monitored endpoint group
+    * @param host string with name of the monitored host
+    * @param metric string with name of the monitored metric
+    * @return a string with the relevant rule key
+    */
+   public String getMostRelevantRule(String group, String host, String metric) {
+     if (!this.metrics.contains(metric)) {
+       return ""; // nothing found
+     } else {
+
+       // order of precedence: more specific first
+       // group,host,metric #1
+       //      ,host,metric #2
+       // group,     metric #3
+       //      ,     metric #4
+       if (this.hosts.contains(host)) {
+         if (this.groups.contains(group)) {
+           // check if combined entry indeed exists
+           String key = String.format("%s/%s/%s", group, host, metric);
+           if (this.rules.containsKey(key)) {
+             return key; // #1
+           }
+         } else {
+           return String.format("/%s/%s", host, metric); // #2
+         }
+       }
+
+       if (this.groups.contains(group)) {
+         // check if combined entry indeed exists
+         String key = String.format("%s//%s", group, metric); // #3
+         if (this.rules.containsKey(key)) {
+           return key;
+         }
+       }
+
+       return String.format("//%s", metric); // #4
+     }
+
+   }
+
+   /**
+    * Parses an expression that might contain multiple labels=thresholds
+    * separated by whitespace and creates a HashMap of labels to parsed
+    * threshold objects
+    *
+    * @param thresholds an expression that might contain multiple thresholds
+    * @return a HashMap of labels to Threshold objects
+    */
+   public Map<String, Threshold> parseThresholds(String thresholds) {
+     Map<String, Threshold> subMap = new HashMap<String, Threshold>();
+     // Tokenize with lookahead on the point where a new label starts
+     String[] tokens = thresholds.split("(;|[ ]+)(?=[a-zA-Z])");
+     for (String token : tokens) {
+       Threshold curTh = new Threshold(token);
+       if (curTh != null) {
+         subMap.put(curTh.getLabel(), curTh);
+       }
+     }
+     return subMap;
+   }
+
+   /**
+    * Parses an expression that might contain multiple labels=thresholds
+    * separated by whitespace and creates a HashMap of labels to parsed Float
+    * values
+    *
+    * @param thresholds an expression that might contain multiple thresholds
+    * @return a HashMap of labels to Floats
+    */
+   public Map<String, Float> getThresholdValues(String thresholds) {
+     Map<String, Float> subMap = new HashMap<String, Float>();
+     // Tokenize with lookahead on the point where a new label starts
+     String[] tokens = thresholds.split("(;|[ ]+)(?=[a-zA-Z])");
+     for (String token : tokens) {
+       Threshold curTh = new Threshold(token);
+       if (curTh != null) {
+         subMap.put(curTh.getLabel(), curTh.getValue());
+       }
+     }
+     return subMap;
+   }
+
+   /**
+    * Parses a JSON threshold rule file and populates the ThresholdManager
+    *
+    * @param jsonFile File to be parsed
+    * @return boolean signaling whether the operation succeeded or not
+    */
+   public boolean parseJSONFile(File jsonFile) {
+     BufferedReader br = null;
+     try {
+       br = new BufferedReader(new FileReader(jsonFile));
+       String jsonStr = IOUtils.toString(br);
+       if (!parseJSON(jsonStr)) {
+         return false;
+       }
+
+     } catch (IOException ex) {
+       LOG.error("Could not open file:" + jsonFile.getName());
+       return false;
+
+     } catch (JsonParseException ex) {
+       LOG.error("File is not valid json:" + jsonFile.getName());
+       return false;
+     } finally {
+       // Close quietly without exceptions the buffered reader
+       IOUtils.closeQuietly(br);
+     }
+
+     return true;
+
+   }
+
+   /**
+    * Parses a json string with the appropriate threshold rule schema and
+    * populates the ThresholdManager
+    *
+    * @param jsonString string containing threshold rules in json format
+    * @return boolean signaling whether the parsing succeeded or not
+    */
+   public boolean parseJSON(String jsonString) {
+
+     JsonParser json_parser = new JsonParser();
+     JsonObject jRoot = json_parser.parse(jsonString).getAsJsonObject();
+     JsonArray jRules = jRoot.getAsJsonArray("rules");
+     for (JsonElement jRule : jRules) {
+       JsonObject jRuleObj = jRule.getAsJsonObject();
+       String ruleMetric = jRuleObj.getAsJsonPrimitive("metric").getAsString();
+       String ruleHost = "";
+       String ruleEgroup = "";
+
+       if (jRuleObj.has("host")) {
+         ruleHost = jRuleObj.getAsJsonPrimitive("host").getAsString();
+       }
+       if (jRuleObj.has("endpoint_group")) {
+         ruleEgroup = jRuleObj.getAsJsonPrimitive("endpoint_group").getAsString();
+       }
+
+       String ruleThr = jRuleObj.getAsJsonPrimitive("thresholds").getAsString();
+       this.metrics.add(ruleMetric);
+       if (ruleHost != "") {
+         this.hosts.add(ruleHost);
+       }
+       if (ruleEgroup != "") {
+         this.groups.add(ruleEgroup);
+       }
+       String full = ruleEgroup + "/" + ruleHost + "/" + ruleMetric;
+       Map<String, Threshold> thrMap = parseThresholds(ruleThr);
+       this.rules.put(full, thrMap);
+     }
+
+     return true;
+   }
+}
diff --git a/flink_jobs/ProfilesManager/src/main/resources/profiles/EGI-algorithm.json b/flink_jobs/ProfilesManager/src/main/resources/profiles/EGI-algorithm.json
new file mode 100644
index 00000000..767113da --- /dev/null +++ b/flink_jobs/ProfilesManager/src/main/resources/profiles/EGI-algorithm.json @@ -0,0 +1 @@ +{"status":{"message":"Success","code":"200"},"data":[{"id":"1b0318f0-429d-44fc-8bba-07184354c73b","name":"egi_ops","available_states":["OK","WARNING","UNKNOWN","MISSING","CRITICAL","DOWNTIME"],"defaults":{"down":"DOWNTIME","missing":"MISSING","unknown":"UNKNOWN"},"operations":[{"name":"AND","truth_table":[{"a":"OK","b":"OK","x":"OK"},{"a":"OK","b":"WARNING","x":"WARNING"},{"a":"OK","b":"UNKNOWN","x":"UNKNOWN"},{"a":"OK","b":"MISSING","x":"MISSING"},{"a":"OK","b":"CRITICAL","x":"CRITICAL"},{"a":"OK","b":"DOWNTIME","x":"DOWNTIME"},{"a":"WARNING","b":"WARNING","x":"WARNING"},{"a":"WARNING","b":"UNKNOWN","x":"UNKNOWN"},{"a":"WARNING","b":"MISSING","x":"MISSING"},{"a":"WARNING","b":"CRITICAL","x":"CRITICAL"},{"a":"WARNING","b":"DOWNTIME","x":"DOWNTIME"},{"a":"UNKNOWN","b":"UNKNOWN","x":"UNKNOWN"},{"a":"UNKNOWN","b":"MISSING","x":"MISSING"},{"a":"UNKNOWN","b":"CRITICAL","x":"CRITICAL"},{"a":"UNKNOWN","b":"DOWNTIME","x":"DOWNTIME"},{"a":"MISSING","b":"MISSING","x":"MISSING"},{"a":"MISSING","b":"CRITICAL","x":"CRITICAL"},{"a":"MISSING","b":"DOWNTIME","x":"DOWNTIME"},{"a":"CRITICAL","b":"CRITICAL","x":"CRITICAL"},{"a":"CRITICAL","b":"DOWNTIME","x":"CRITICAL"},{"a":"DOWNTIME","b":"DOWNTIME","x":"DOWNTIME"}]},{"name":"OR","truth_table":[{"a":"OK","b":"OK","x":"OK"},{"a":"OK","b":"WARNING","x":"OK"},{"a":"OK","b":"UNKNOWN","x":"OK"},{"a":"OK","b":"MISSING","x":"OK"},{"a":"OK","b":"CRITICAL","x":"OK"},{"a":"OK","b":"DOWNTIME","x":"OK"},{"a":"WARNING","b":"WARNING","x":"WARNING"},{"a":"WARNING","b":"UNKNOWN","x":"WARNING"},{"a":"WARNING","b":"MISSING","x":"WARNING"},{"a":"WARNING","b":"CRITICAL","x":"WARNING"},{"a":"WARNING","b":"DOWNTIME","x":"WARNING"},{"a":"UNKNOWN","b":"UNKNOWN","x":"UNKNOWN"},{"a":"UNKNOWN","b":"MISSING","x":"UNKNOWN"},{"a":"UNKNOWN","b":"CRITICAL","x":"CRITICAL"},{"a":"UNKNOWN","b":"DOWNTIME","x":"UNKNOWN"},{"a":"MISSING","b":"MISSING","x":"MISSING"},{"a":"MISSING","b":"CRITICAL","x":"CRITICAL"},{"a":"MISSING","b":"DOWNTIME","x":"DOWNTIME"},{"a":"CRITICAL","b":"CRITICAL","x":"CRITICAL"},{"a":"CRITICAL","b":"DOWNTIME","x":"CRITICAL"},{"a":"DOWNTIME","b":"DOWNTIME","x":"DOWNTIME"}]}]}]} diff --git a/flink_jobs/ProfilesManager/src/main/resources/profiles/EGI-rules.json b/flink_jobs/ProfilesManager/src/main/resources/profiles/EGI-rules.json new file mode 100644 index 00000000..76fc5f5c --- /dev/null +++ b/flink_jobs/ProfilesManager/src/main/resources/profiles/EGI-rules.json @@ -0,0 +1 @@ +{"rules":[{"metric":"org.bdii.Freshness","thresholds":"freshness=10s;30;50:60;0;100 entries=5;0:10;20:30;50;30"},{"metric":"org.bdii.Entries","thresholds":"time=-35s;~:10;15:;-100;300 entries=55;20;50:60;50;30"},{"metric":"org.bdii.Freshness","thresholds":"freshness=10s; entries=29;;30:50","host":"bdii.host3.example.foo"},{"metric":"org.bdii.Freshness","thresholds":"freshness=10s;30;50:60;0;100 entries=29;0:10;20:30;0;30","host":"bdii.host1.example.foo"},{"metric":"org.bdii.Freshness","thresholds":"freshness=10s;30;50:60;0;100 entries=5;0:10;20:30;50;30","host":"bdii.host1.example.foo","endpoint_group":"SITE-101"},{"metric":"org.bdii.Freshness","thresholds":"freshness=10s;30;50:60;0;100 entries=5;0:10;20:30;50;30","endpoint_group":"SITE-101"}]} diff --git a/flink_jobs/ProfilesManager/src/main/resources/profiles/operations.json b/flink_jobs/ProfilesManager/src/main/resources/profiles/operations.json new file mode 100644 index 00000000..8486d509 --- /dev/null +++ 
b/flink_jobs/ProfilesManager/src/main/resources/profiles/operations.json @@ -0,0 +1 @@ +{"status":{"message":"Success","code":"200"},"data":[{"id":"8ce59c4d-3761-4f25-a364-f019e394bf8b","date":"2015-01-01","name":"egi_ops","available_states":["OK","WARNING","UNKNOWN","MISSING","CRITICAL","DOWNTIME"],"defaults":{"down":"DOWNTIME","missing":"MISSING","unknown":"UNKNOWN"},"operations":[{"name":"AND","truth_table":[{"a":"OK","b":"OK","x":"OK"},{"a":"OK","b":"WARNING","x":"WARNING"},{"a":"OK","b":"UNKNOWN","x":"UNKNOWN"},{"a":"OK","b":"MISSING","x":"MISSING"},{"a":"OK","b":"CRITICAL","x":"CRITICAL"},{"a":"OK","b":"DOWNTIME","x":"DOWNTIME"},{"a":"WARNING","b":"WARNING","x":"WARNING"},{"a":"WARNING","b":"UNKNOWN","x":"UNKNOWN"},{"a":"WARNING","b":"MISSING","x":"MISSING"},{"a":"WARNING","b":"CRITICAL","x":"CRITICAL"},{"a":"WARNING","b":"DOWNTIME","x":"DOWNTIME"},{"a":"UNKNOWN","b":"UNKNOWN","x":"UNKNOWN"},{"a":"UNKNOWN","b":"MISSING","x":"MISSING"},{"a":"UNKNOWN","b":"CRITICAL","x":"CRITICAL"},{"a":"UNKNOWN","b":"DOWNTIME","x":"DOWNTIME"},{"a":"MISSING","b":"MISSING","x":"MISSING"},{"a":"MISSING","b":"CRITICAL","x":"CRITICAL"},{"a":"MISSING","b":"DOWNTIME","x":"DOWNTIME"},{"a":"CRITICAL","b":"CRITICAL","x":"CRITICAL"},{"a":"CRITICAL","b":"DOWNTIME","x":"CRITICAL"},{"a":"DOWNTIME","b":"DOWNTIME","x":"DOWNTIME"}]},{"name":"OR","truth_table":[{"a":"OK","b":"OK","x":"OK"},{"a":"OK","b":"WARNING","x":"OK"},{"a":"OK","b":"UNKNOWN","x":"OK"},{"a":"OK","b":"MISSING","x":"OK"},{"a":"OK","b":"CRITICAL","x":"OK"},{"a":"OK","b":"DOWNTIME","x":"OK"},{"a":"WARNING","b":"WARNING","x":"WARNING"},{"a":"WARNING","b":"UNKNOWN","x":"WARNING"},{"a":"WARNING","b":"MISSING","x":"WARNING"},{"a":"WARNING","b":"CRITICAL","x":"WARNING"},{"a":"WARNING","b":"DOWNTIME","x":"WARNING"},{"a":"UNKNOWN","b":"UNKNOWN","x":"UNKNOWN"},{"a":"UNKNOWN","b":"MISSING","x":"UNKNOWN"},{"a":"UNKNOWN","b":"CRITICAL","x":"CRITICAL"},{"a":"UNKNOWN","b":"DOWNTIME","x":"UNKNOWN"},{"a":"MISSING","b":"MISSING","x":"MISSING"},{"a":"MISSING","b":"CRITICAL","x":"CRITICAL"},{"a":"MISSING","b":"DOWNTIME","x":"DOWNTIME"},{"a":"CRITICAL","b":"CRITICAL","x":"CRITICAL"},{"a":"CRITICAL","b":"DOWNTIME","x":"CRITICAL"},{"a":"DOWNTIME","b":"DOWNTIME","x":"DOWNTIME"}]}]}]} \ No newline at end of file diff --git a/flink_jobs/ProfilesManager/src/main/resources/profiles/truthtable.json b/flink_jobs/ProfilesManager/src/main/resources/profiles/truthtable.json new file mode 100644 index 00000000..705de7f4 --- /dev/null +++ b/flink_jobs/ProfilesManager/src/main/resources/profiles/truthtable.json @@ -0,0 +1,587 @@ +{ "data":[{ + "operations": ["AND","OR"], + "available_states": [ + "OK", + "WARNING", + "UNKNOWN", + "MISSING", + "CRITICAL", + "DOWNTIME" + ], + + "inputs": [ + { + "name":"AND", + "truth_table":[ + { + "a": "OK", + "b": "OK", + "x": "OK" + }, + { + "a": "OK", + "b": "WARNING", + "x": "WARNING" + }, + { + "a": "OK", + "b": "UNKNOWN", + "x": "UNKNOWN" + }, + { + "a": "OK", + "b": "MISSING", + "x": "MISSING" + }, + { + "a": "OK", + "b": "CRITICAL", + "x": "CRITICAL" + }, + { + "a": "OK", + "b": "DOWNTIME", + "x": "DOWNTIME" + }, + { + "a": "WARNING", + "b": "WARNING", + "x": "WARNING" + }, + { + "a": "WARNING", + "b": "UNKNOWN", + "x": "UNKNOWN" + }, + { + "a": "WARNING", + "b": "MISSING", + "x": "MISSING" + }, + { + "a": "WARNING", + "b": "CRITICAL", + "x": "CRITICAL" + }, + { + "a": "WARNING", + "b": "DOWNTIME", + "x": "DOWNTIME" + }, + { + "a": "UNKNOWN", + "b": "UNKNOWN", + "x": "UNKNOWN" + }, + { + "a": "UNKNOWN", + "b": "MISSING", + "x": "MISSING" + }, + 
{ + "a": "UNKNOWN", + "b": "CRITICAL", + "x": "CRITICAL" + }, + { + "a": "UNKNOWN", + "b": "DOWNTIME", + "x": "DOWNTIME" + }, + { + "a": "MISSING", + "b": "MISSING", + "x": "MISSING" + }, + { + "a": "MISSING", + "b": "CRITICAL", + "x": "CRITICAL" + }, + { + "a": "MISSING", + "b": "DOWNTIME", + "x": "DOWNTIME" + }, + { + "a": "CRITICAL", + "b": "CRITICAL", + "x": "CRITICAL" + }, + { + "a": "CRITICAL", + "b": "DOWNTIME", + "x": "CRITICAL" + }, + { + "a": "DOWNTIME", + "b": "DOWNTIME", + "x": "DOWNTIME" + } + ] + },{ + "name": "OR", + "truth_table": [ + { + "a": "OK", + "b": "OK", + "x": "OK" + }, + { + "a": "OK", + "b": "WARNING", + "x": "OK" + }, + { + "a": "OK", + "b": "UNKNOWN", + "x": "OK" + }, + { + "a": "OK", + "b": "MISSING", + "x": "OK" + }, + { + "a": "OK", + "b": "CRITICAL", + "x": "OK" + }, + { + "a": "OK", + "b": "DOWNTIME", + "x": "OK" + }, + { + "a": "WARNING", + "b": "WARNING", + "x": "WARNING" + }, + { + "a": "WARNING", + "b": "UNKNOWN", + "x": "WARNING" + }, + { + "a": "WARNING", + "b": "MISSING", + "x": "WARNING" + }, + { + "a": "WARNING", + "b": "CRITICAL", + "x": "WARNING" + }, + { + "a": "WARNING", + "b": "DOWNTIME", + "x": "WARNING" + }, + { + "a": "UNKNOWN", + "b": "UNKNOWN", + "x": "UNKNOWN" + }, + { + "a": "UNKNOWN", + "b": "MISSING", + "x": "UNKNOWN" + }, + { + "a": "UNKNOWN", + "b": "CRITICAL", + "x": "CRITICAL" + }, + { + "a": "UNKNOWN", + "b": "DOWNTIME", + "x": "UNKNOWN" + }, + { + "a": "MISSING", + "b": "MISSING", + "x": "MISSING" + }, + { + "a": "MISSING", + "b": "CRITICAL", + "x": "CRITICAL" + }, + { + "a": "MISSING", + "b": "DOWNTIME", + "x": "DOWNTIME" + }, + { + "a": "CRITICAL", + "b": "CRITICAL", + "x": "CRITICAL" + }, + { + "a": "CRITICAL", + "b": "DOWNTIME", + "x": "CRITICAL" + }, + { + "a": "DOWNTIME", + "b": "DOWNTIME", + "x": "DOWNTIME" + } + ] + } +], +"output":[ + {"op":"1", + "a":"0", + "b":"0", + "x":"0"}, + {"op":"1", + "a":"0", + "b":"2", + "x":"0"}, + {"op":"1", + "a":"0", + "b":"3", + "x":"0"}, + {"op":"1", + "a":"0", + "b":"4", + "x":"0"}, + {"op":"1", + "a":"0", + "b":"5", + "x":"0"}, + + {"op":"1", + "a":"1", + "b":"0", + "x":"0"}, + {"op":"1", + "a":"1", + "b":"1", + "x":"1"}, + + + {"op":"1", + "a":"1", + "b":"2", + "x":"1"}, + + {"op":"1", + "a":"1", + "b":"3", + "x":"1"}, + {"op":"1", + "a":"1", + "b":"4", + "x":"1"}, + {"op":"1", + "a":"1", + "b":"5", + "x":"1"}, + + {"op":"1", + "a":"2", + "b":"0", + "x":"0"}, + + {"op":"1", + "a":"2", + "b":"1", + "x":"1"}, + {"op":"1", + "a":"2", + "b":"2", + "x":"2"}, + + {"op":"1", + "a":"2", + "b":"3", + "x":"2"}, + {"op":"1", + "a":"2", + "b":"4", + "x":"4"}, + + {"op":"1", + "a":"2", + "b":"5", + "x":"2"}, + + {"op":"1", + "a":"3", + "b":"0", + "x":"0"}, + + {"op":"1", + "a":"3", + "b":"1", + "x":"1"}, + + {"op":"1", + "a":"3", + "b":"2", + "x":"2"}, + + {"op":"1", + "a":"3", + "b":"3", + "x":"3"}, + + {"op":"1", + "a":"3", + "b":"4", + "x":"4"}, + + {"op":"1", + "a":"3", + "b":"5", + "x":"5"}, + + + {"op":"1", + "a":"4", + "b":"0", + "x":"0"}, + + {"op":"1", + "a":"4", + "b":"1", + "x":"1"}, + + {"op":"1", + "a":"4", + "b":"2", + "x":"4"}, + + {"op":"1", + "a":"4", + "b":"3", + "x":"4"}, + + + {"op":"1", + "a":"4", + "b":"4", + "x":"4"}, + + {"op":"1", + "a":"4", + "b":"5", + "x":"4"}, + + + {"op":"1", + "a":"5", + "b":"0", + "x":"0"}, + + + {"op":"1", + "a":"5", + "b":"1", + "x":"1"}, + + {"op":"1", + "a":"5", + "b":"2", + "x":"2"}, + + {"op":"1", + "a":"5", + "b":"3", + "x":"5"}, + + {"op":"1", + "a":"5", + "b":"4", + "x":"4"}, + + {"op":"1", + "a":"5", + "b":"5", + "x":"5"}, + + + 
{"op":"0", + "a":"0", + "b":"0", + "x":"0"}, + + + {"op":"0", + "a":"0", + "b":"1", + "x":"1"}, + + {"op":"0", + "a":"0", + "b":"2", + "x":"2"}, + + {"op":"0", + "a":"0", + "b":"3", + "x":"3"}, + + {"op":"0", + "a":"0", + "b":"4", + "x":"4"}, + + {"op":"0", + "a":"0", + "b":"5", + "x":"5"}, + + {"op":"0", + "a":"1", + "b":"0", + "x":"1"}, + + {"op":"0", + "a":"1", + "b":"1", + "x":"1"}, + + {"op":"0", + "a":"1", + "b":"2", + "x":"2"}, + + {"op":"0", + "a":"1", + "b":"3", + "x":"3"}, + + {"op":"0", + "a":"1", + "b":"4", + "x":"4"}, + + {"op":"0", + "a":"1", + "b":"5", + "x":"5"}, + + + {"op":"0", + "a":"2", + "b":"0", + "x":"2"}, + + {"op":"0", + "a":"2", + "b":"1", + "x":"2"}, + + {"op":"0", + "a":"2", + "b":"2", + "x":"2"}, + + {"op":"0", + "a":"2", + "b":"3", + "x":"3"}, + + {"op":"0", + "a":"2", + "b":"4", + "x":"4"}, + + {"op":"0", + "a":"2", + "b":"5", + "x":"5"}, + + + {"op":"0", + "a":"3", + "b":"0", + "x":"3"}, + {"op":"0", + "a":"3", + "b":"1", + "x":"3"}, + {"op":"0", + "a":"3", + "b":"2", + "x":"3"}, + {"op":"0", + "a":"3", + "b":"3", + "x":"3"}, + + {"op":"0", + "a":"3", + "b":"4", + "x":"4"}, + {"op":"0", + "a":"3", + "b":"5", + "x":"5"}, + {"op":"0", + "a":"4", + "b":"0", + "x":"4"}, + + {"op":"0", + "a":"4", + "b":"1", + "x":"4"}, + + {"op":"0", + "a":"4", + "b":"2", + "x":"4"}, + + {"op":"0", + "a":"4", + "b":"3", + "x":"4"}, + + {"op":"0", + "a":"4", + "b":"4", + "x":"4"}, + + {"op":"0", + "a":"4", + "b":"5", + "x":"4"}, + + {"op":"0", + "a":"5", + "b":"0", + "x":"5"}, + + {"op":"0", + "a":"5", + "b":"1", + "x":"5"}, + + {"op":"0", + "a":"5", + "b":"2", + "x":"5"}, + + {"op":"0", + "a":"5", + "b":"3", + "x":"5"}, + + {"op":"0", + "a":"5", + "b":"4", + "x":"4"}, + + {"op":"0", + "a":"5", + "b":"5", + "x":"5"} + ] + +} +] +} \ No newline at end of file diff --git a/flink_jobs/ProfilesManager/src/test/java/profilesmanager/OperationsManagerTest.java b/flink_jobs/ProfilesManager/src/test/java/profilesmanager/OperationsManagerTest.java new file mode 100644 index 00000000..e77cd81c --- /dev/null +++ b/flink_jobs/ProfilesManager/src/test/java/profilesmanager/OperationsManagerTest.java @@ -0,0 +1,672 @@ +/*s + * To change this license header, choose License Headers in Project Properties. + * To change this template file, choose Tools | Templates + * and open the template in the editor. + */ +package profilesmanager; + +import java.io.File; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.text.ParseException; +import java.util.ArrayList; +import java.util.HashMap; +import static junit.framework.Assert.assertEquals; +import static junit.framework.Assert.assertNotNull; +import org.junit.After; +import org.junit.AfterClass; +import static org.junit.Assert.assertArrayEquals; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +/* + * A unit test class to test OperationsManager + */ +public class OperationsManagerTest { + + public OperationsManagerTest() { + } + + @BeforeClass + public static void setUpClass() { + assertNotNull("Test file missing", OperationsManagerTest.class.getResource("/profiles/operations.json")); + } + + @AfterClass + public static void tearDownClass() { + } + + @Before + public void setUp() { + } + + @After + public void tearDown() { + } + + /** + * Test of loadOperationProfile method, of class OperationsParser. + */ + /** + * Test of getDefaultDown method, of class OperationsManager. 
+ */ + @Test + public void testGetDefaultDown() throws IOException { + System.out.println("getDefaultDown"); + OperationsManager instance = new OperationsManager(); + instance.loadJson(new File(OperationsManagerTest.class.getResource("/profiles/operations.json").getFile())); + String expResult = "DOWNTIME"; + String result = instance.getDefaultDown(); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + //fail("The test case is a prototype."); + } + + /** + * Test of getDefaultUnknown method, of class OperationsManager. + */ + @Test + public void testGetDefaultUnknown() throws IOException { + System.out.println("getDefaultUnknown"); + OperationsManager instance = new OperationsManager(); + instance.loadJson(new File(OperationsManagerTest.class.getResource("/profiles/operations.json").getFile())); + + String expResult = "UNKNOWN"; + String result = instance.getDefaultUnknown(); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + //fail("The test case is a prototype."); + } + + /** + * Test of getDefaultUnknownInt method, of class OperationsManager. + */ + @Test + public void testGetDefaultUnknownInt() throws IOException { + System.out.println("getDefaultUnknownInt"); + OperationsManager instance = new OperationsManager(); + instance.loadJson(new File(OperationsManagerTest.class.getResource("/profiles/operations.json").getFile())); + + int expResult = 2; + int result = instance.getDefaultUnknownInt(); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + //fail("The test case is a prototype."); + } + + /** + * Test of getDefaultDownInt method, of class OperationsManager. + */ + @Test + public void testGetDefaultDownInt() throws IOException { + System.out.println("getDefaultDownInt"); + OperationsManager instance = new OperationsManager(); + instance.loadJson(new File(OperationsManagerTest.class.getResource("/profiles/operations.json").getFile())); + + int expResult = 5; + int result = instance.getDefaultDownInt(); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + //fail("The test case is a prototype."); + } + + /** + * Test of getDefaultMissing method, of class OperationsManager. + */ + @Test + public void testGetDefaultMissing() throws IOException { + System.out.println("getDefaultMissing"); + OperationsManager instance = new OperationsManager(); + instance.loadJson(new File(OperationsManagerTest.class.getResource("/profiles/operations.json").getFile())); + + String expResult = "MISSING"; + String result = instance.getDefaultMissing(); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + //fail("The test case is a prototype."); + } + + /** + * Test of getDefaultMissingInt method, of class OperationsManager. + */ + @Test + public void testGetDefaultMissingInt() throws IOException { + System.out.println("getDefaultMissingInt"); + OperationsManager instance = new OperationsManager(); + instance.loadJson(new File(OperationsManagerTest.class.getResource("/profiles/operations.json").getFile())); + + int expResult = 3; + int result = instance.getDefaultMissingInt(); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. +// fail("The test case is a prototype."); + } + + /** + * Test of clear method, of class OperationsManager. 
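+ * + * clear() is expected to drop any loaded profile data (states, operations, truth table), mirroring the clear() methods of the other managers in this package; a manager would then need another loadJson call before further lookups, e.g.: + * + * mgr.clear(); + * mgr.loadJson(opsFile); // reload before using op()/getIntStatus()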
+ */ + @Test + public void testClear() throws IOException { + System.out.println("clear"); + OperationsManager instance = new OperationsManager(); + instance.loadJson(new File(OperationsManagerTest.class.getResource("/profiles/operations.json").getFile())); + + instance.clear(); + // TODO review the generated test code and remove the default call to fail. + //fail("The test case is a prototype."); + } + + /** + * Test of opInt method, of class OperationsManager. + */ + @Test + public void testOpInt_3args_1() throws IOException { + System.out.println("opInt"); + int op = 0; + int a = 0; + int b = 0; + OperationsManager instance = new OperationsManager(); + instance.loadJson(new File(OperationsManagerTest.class.getResource("/profiles/operations.json").getFile())); + + int expResult = 0; + int result = instance.opInt(op, a, b); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + //fail("The test case is a prototype."); + } + + /** + * Test of opInt method, of class OperationsManager. + */ + @Test + public void testOpInt_3args_2() throws IOException { + System.out.println("opInt"); + String op = "AND"; + String a = "OK"; + String b = "OK"; + OperationsManager instance = new OperationsManager(); + instance.loadJson(new File(OperationsManagerTest.class.getResource("/profiles/operations.json").getFile())); + + int expResult = 0; + int result = instance.opInt(op, a, b); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of op method, of class OperationsManager. + */ + @Test + public void testOp_3args_1() throws IOException { + System.out.println("op"); + int op = 0; + int a = 0; + int b = 0; + OperationsManager instance = new OperationsManager(); + instance.loadJson(new File(OperationsManagerTest.class.getResource("/profiles/operations.json").getFile())); + + String expResult = "OK"; + String result = instance.op(op, a, b); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + //fail("The test case is a prototype."); + } + + /** + * Test of op method, of class OperationsManager. + */ + @Test + public void testOp_3args_2() throws IOException { + System.out.println("op"); + String op = "AND"; + String a = "OK"; + String b = "OK"; + OperationsManager instance = new OperationsManager(); + instance.loadJson(new File(OperationsManagerTest.class.getResource("/profiles/operations.json").getFile())); + + String expResult = "OK"; + String result = instance.op(op, a, b); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + //fail("The test case is a prototype."); + } + + /** + * Test of getStrStatus method, of class OperationsManager. + */ + @Test + public void testGetStrStatus() throws IOException { + System.out.println("getStrStatus"); + int status = 0; + OperationsManager instance = new OperationsManager(); + instance.loadJson(new File(OperationsManagerTest.class.getResource("/profiles/operations.json").getFile())); + + String expResult = "OK"; + String result = instance.getStrStatus(status); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + //fail("The test case is a prototype."); + } + + /** + * Test of getIntStatus method, of class OperationsManager. 
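+ * + * Statuses are indexed by their position in the profile's available_states list, so with the test profile OK=0, WARNING=1, UNKNOWN=2, MISSING=3, CRITICAL=4, DOWNTIME=5. Illustrative sketch: + * + * mgr.getIntStatus("WARNING"); // 1 + * mgr.getStrStatus(4); // "CRITICAL"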
+ */ + @Test + public void testGetIntStatus() throws IOException { + System.out.println("getIntStatus"); + String status = "WARNING"; + OperationsManager instance = new OperationsManager(); + instance.loadJson(new File(OperationsManagerTest.class.getResource("/profiles/operations.json").getFile())); + + int expResult = 1; + int result = instance.getIntStatus(status); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of getStrOperation method, of class OperationsManager. + */ + @Test + public void testGetStrOperation() throws IOException { + System.out.println("getStrOperation"); + int op = 1; + OperationsManager instance = new OperationsManager(); + instance.loadJson(new File(OperationsManagerTest.class.getResource("/profiles/operations.json").getFile())); + + String expResult = "OR"; + String result = instance.getStrOperation(op); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + //fail("The test case is a prototype."); + } + + /** + * Test of getIntOperation method, of class OperationsManager. + */ + @Test + public void testGetIntOperation() throws IOException { + System.out.println("getIntOperation"); + String op = "OR"; + OperationsManager instance = new OperationsManager(); + instance.loadJson(new File(OperationsManagerTest.class.getResource("/profiles/operations.json").getFile())); + + int expResult = 1; + int result = instance.getIntOperation(op); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of availableStates method, of class OperationsManager. + */ + @Test + public void testAvailableStates() throws IOException { + System.out.println("availableStates"); + OperationsManager instance = new OperationsManager(); + instance.loadJson(new File(OperationsManagerTest.class.getResource("/profiles/operations.json").getFile())); + + ArrayList expResult = new ArrayList(); + expResult.add("OK"); + expResult.add("WARNING"); + expResult.add("UNKNOWN"); + expResult.add("MISSING"); + expResult.add("CRITICAL"); + expResult.add("DOWNTIME"); + + ArrayList result = instance.availableStates(); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of availableOps method, of class OperationsManager. + */ + @Test + public void testAvailableOps() throws IOException { + System.out.println("availableOps"); + OperationsManager instance = new OperationsManager(); + instance.loadJson(new File(OperationsManagerTest.class.getResource("/profiles/operations.json").getFile())); + + ArrayList expResult = new ArrayList<>(); + expResult.add("AND"); + expResult.add("OR"); + ArrayList result = instance.availableOps(); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of loadJson method, of class OperationsManager. 
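+ * + * loadJson is the single entry point used by all of these tests; it appears to parse the same JSON layout that the ARGO web API returns for an operations profile, read here from a local resource instead: + * + * File ops = new File(OperationsManagerTest.class.getResource("/profiles/operations.json").getFile()); + * new OperationsManager().loadJson(ops);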
+ */ + @Test + public void testLoadJson() throws Exception { + System.out.println("loadJson"); + File jsonFile = new File(OperationsManagerTest.class.getResource("/profiles/operations.json").getFile()); + OperationsManager instance = new OperationsManager(); + + instance.loadJson(jsonFile); + // TODO review the generated test code and remove the default call to fail. + //fail("The test case is a prototype."); + } + + /** + * Test of loadJsonString method, of class OperationsManager. + */ +// @Test +// public void testLoadJsonString() { +// System.out.println("loadJsonString"); +// List opsJson = null; +// OperationsManager instance = new OperationsManager(); +// instance.loadJson(new File(OperationsManagerTest.class.getResource("/operations/operations.json").getFile())); +// +// instance.loadJsonString(opsJson); +// // TODO review the generated test code and remove the default call to fail. +// fail("The test case is a prototype."); +// } + /** + * Test of getTruthTable method, of class OperationsManager. + */ + @Test + public void testGetTruthTable() throws IOException, FileNotFoundException, ParseException { + System.out.println("getTruthTable"); + OperationsManager instance = new OperationsManager(); + instance.loadJson(new File(OperationsManagerTest.class.getResource("/profiles/operations.json").getFile())); + Utils utils=new Utils(); + + int[][][] expResult = utils.readTruthTable(); + int[][][] result = instance.getTruthTable(); + assertArrayEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + //fail("The test case is a prototype."); + } + + /** + * Test of setTruthTable method, of class OperationsManager. + */ + @Test + public void testSetTruthTable() throws IOException, FileNotFoundException, ParseException { + System.out.println("setTruthTable"); + Utils utils=new Utils(); + + + int[][][] truthTable = utils.readTruthTable(); + OperationsManager instance = new OperationsManager(); + instance.setTruthTable(truthTable); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of getStates method, of class OperationsManager. + */ + @Test + public void testGetStates() throws IOException { + System.out.println("getStates"); + OperationsManager instance = new OperationsManager(); + instance.loadJson(new File(OperationsManagerTest.class.getResource("/profiles/operations.json").getFile())); + + HashMap expResult = new HashMap<>(); + expResult.put("OK", 0); + expResult.put("WARNING", 1); + expResult.put("UNKNOWN", 2); + expResult.put("MISSING", 3); + expResult.put("CRITICAL", 4); + expResult.put("DOWNTIME", 5); + HashMap result = instance.getStates(); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of setStates method, of class OperationsManager. + */ + @Test + public void testSetStates() { + System.out.println("setStates"); + HashMap states = new HashMap<>(); + states.put("OK", 0); + states.put("WARNING", 1); + states.put("UNKNOWN", 2); + states.put("MISSING", 3); + states.put("CRITICAL", 4); + states.put("DOWNTIME", 5); + OperationsManager instance = new OperationsManager(); + instance.setStates(states); + // TODO review the generated test code and remove the default call to fail. + //fail("The test case is a prototype."); + } + + /** + * Test of getOps method, of class OperationsManager. 
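+ * + * Operations are indexed the same way (AND=0, OR=1 in the test profile) and the truth table is addressed as truthTable[op][a][b]; e.g. the "output" section of truthtable.json encodes "UNKNOWN OR CRITICAL = CRITICAL" as op=1, a=2, b=4, x=4. Illustrative sketch: + * + * mgr.getIntOperation("OR"); // 1 + * mgr.getTruthTable()[1][2][4]; // 4, i.e. CRITICAL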
+ */ + @Test + public void testGetOps() throws IOException { + System.out.println("getOps"); + OperationsManager instance = new OperationsManager(); + instance.loadJson(new File(OperationsManagerTest.class.getResource("/profiles/operations.json").getFile())); + + HashMap expResult = new HashMap<>(); + expResult.put("AND", 0); + expResult.put("OR", 1); + HashMap result = instance.getOps(); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of setOps method, of class OperationsManager. + */ + @Test + public void testSetOps() { + System.out.println("setOps"); + HashMap ops = new HashMap<>(); + ops.put("AND", 0); + ops.put("OR", 1); + + OperationsManager instance = new OperationsManager(); + instance.setOps(ops); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of getRevStates method, of class OperationsManager. + */ + @Test + public void testGetRevStates() throws IOException { + System.out.println("getRevStates"); + OperationsManager instance = new OperationsManager(); + instance.loadJson(new File(OperationsManagerTest.class.getResource("/profiles/operations.json").getFile())); + + ArrayList expResult = new ArrayList<>(); + expResult.add("OK"); + expResult.add("WARNING"); + expResult.add("UNKNOWN"); + expResult.add("MISSING"); + expResult.add("CRITICAL"); + expResult.add("DOWNTIME"); + ArrayList result = instance.getRevStates(); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + //fail("The test case is a prototype."); + } + + /** + * Test of setRevStates method, of class OperationsManager. + */ + @Test + public void testSetRevStates() { + System.out.println("setRevStates"); + ArrayList revStates = new ArrayList<>(); + revStates.add("OK"); + revStates.add("WARNING"); + revStates.add("UNKNOWN"); + revStates.add("MISSING"); + revStates.add("CRITICAL"); + revStates.add("DOWNTIME"); + + OperationsManager instance = new OperationsManager(); + instance.setRevStates(revStates); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of getRevOps method, of class OperationsManager. + */ + @Test + public void testGetRevOps() throws IOException { + System.out.println("getRevOps"); + OperationsManager instance = new OperationsManager(); + instance.loadJson(new File(OperationsManagerTest.class.getResource("/profiles/operations.json").getFile())); + + ArrayList expResult = new ArrayList<>(); + expResult.add("AND"); + expResult.add("OR"); + ArrayList result = instance.getRevOps(); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + //fail("The test case is a prototype."); + } + + /** + * Test of setRevOps method, of class OperationsManager. + */ + @Test + public void testSetRevOps() { + System.out.println("setRevOps"); + ArrayList revOps = new ArrayList<>(); + revOps.add("AND"); + revOps.add("OR"); + OperationsManager instance = new OperationsManager(); + instance.setRevOps(revOps); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of getDefaultDownState method, of class OperationsManager.
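+ * + * getDefaultDownState() is expected to agree with getDefaultDown() above (both "DOWNTIME" for the test profile); the *State getters and setters appear to expose the same defaults as mutable properties.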
+ */ + @Test + public void testGetDefaultDownState() throws IOException { + System.out.println("getDefaultDownState"); + OperationsManager instance = new OperationsManager(); + instance.loadJson(new File(OperationsManagerTest.class.getResource("/profiles/operations.json").getFile())); + + String expResult = "DOWNTIME"; + String result = instance.getDefaultDownState(); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + //fail("The test case is a prototype."); + } + + /** + * Test of setDefaultDownState method, of class OperationsManager. + */ + @Test + public void testSetDefaultDownState() { + System.out.println("setDefaultDownState"); + String defaultDownState = "DOWNTIME"; + OperationsManager instance = new OperationsManager(); + instance.setDefaultDownState(defaultDownState); + // TODO review the generated test code and remove the default call to fail. + //fail("The test case is a prototype."); + } + + /** + * Test of getDefaultMissingState method, of class OperationsManager. + */ + @Test + public void testGetDefaultMissingState() throws IOException { + System.out.println("getDefaultMissingState"); + OperationsManager instance = new OperationsManager(); + instance.loadJson(new File(OperationsManagerTest.class.getResource("/profiles/operations.json").getFile())); + + String expResult = "MISSING"; + String result = instance.getDefaultMissingState(); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + //fail("The test case is a prototype."); + } + + /** + * Test of setDefaultMissingState method, of class OperationsManager. + */ + @Test + public void testSetDefaultMissingState() { + System.out.println("setDefaultMissingState"); + String defaultMissingState = "MISSING"; + OperationsManager instance = new OperationsManager(); + instance.setDefaultMissingState(defaultMissingState); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of getDefaultUnknownState method, of class OperationsManager. + */ + @Test + public void testGetDefaultUnknownState() throws IOException { + System.out.println("getDefaultUnknownState"); + + OperationsManager instance = new OperationsManager(); + instance.loadJson(new File(OperationsManagerTest.class.getResource("/profiles/operations.json").getFile())); + String expResult = "UNKNOWN"; + String result = instance.getDefaultUnknownState(); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + //fail("The test case is a prototype."); + } + + /** + * Test of setDefaultUnknownState method, of class OperationsManager. + */ + @Test + public void testSetDefaultUnknownState() { + System.out.println("setDefaultUnknownState"); + String defaultUnknownState = "UNKNOWN"; + OperationsManager instance = new OperationsManager(); + instance.setDefaultUnknownState(defaultUnknownState); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + + /** + * Test of isOrder method, of class OperationsManager. 
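+ * + * order mirrors the profile's order-sensitivity flag; the test profile is order-insensitive (false), which is why the truth table is filled symmetrically in [a][b] and [b][a] (see Utils.readJson below). Illustrative sketch: + * + * mgr.op("AND", "OK", "CRITICAL").equals(mgr.op("AND", "CRITICAL", "OK")); // true for an order-insensitive profile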
+ */ + @Test + public void testIsOrder() throws IOException { + System.out.println("isOrder"); + OperationsManager instance = new OperationsManager(); + instance.loadJson(new File(OperationsManagerTest.class.getResource("/profiles/operations.json").getFile())); + + boolean expResult = false; + boolean result = instance.isOrder(); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. +// fail("The test case is a prototype."); + } + + /** + * Test of setOrder method, of class OperationsManager. + */ + @Test + public void testSetOrder() { + System.out.println("setOrder"); + boolean order = false; + OperationsManager instance = new OperationsManager(); + instance.setOrder(order); + // TODO review the generated test code and remove the default call to fail. + // fail("The test case is a prototype."); + } + +} diff --git a/flink_jobs/ProfilesManager/src/test/java/profilesmanager/ThresholdManagerTest.java b/flink_jobs/ProfilesManager/src/test/java/profilesmanager/ThresholdManagerTest.java new file mode 100644 index 00000000..b8b98e89 --- /dev/null +++ b/flink_jobs/ProfilesManager/src/test/java/profilesmanager/ThresholdManagerTest.java @@ -0,0 +1,91 @@ +package profilesmanager; + +import static org.junit.Assert.*; + +import java.io.File; +import java.io.IOException; +import java.net.URISyntaxException; +import java.net.URL; + +import org.junit.BeforeClass; +import org.junit.Test; + +public class ThresholdManagerTest { + + @BeforeClass + public static void setUpBeforeClass() throws Exception { + // Assert that files are present + assertNotNull("Test file missing", ThresholdManagerTest.class.getResource("/profiles/EGI-algorithm.json")); + assertNotNull("Test file missing", ThresholdManagerTest.class.getResource("/profiles/EGI-rules.json")); + } + + @Test + public void test() throws IOException, URISyntaxException { + + // Prepare Resource File + URL opsJsonFile = ThresholdManagerTest.class.getResource("/profiles/EGI-algorithm.json"); + File opsFile = new File(opsJsonFile.toURI()); + // Instantiate class + OperationsManager opsMgr = new OperationsManager(); + // Test loading file + opsMgr.loadJson(opsFile); + + // Prepare Resource File + URL thrJsonFile = ThresholdManagerTest.class.getResource("/profiles/EGI-rules.json"); + File thrFile = new File(thrJsonFile.toURI()); + // Instantiate class + ThresholdManager t = new ThresholdManager(); + t.parseJSONFile(thrFile); + + String[] expectedRules = new String[] { "//org.bdii.Freshness", "//org.bdii.Entries", + "/bdii.host1.example.foo/org.bdii.Freshness", "/bdii.host3.example.foo/org.bdii.Freshness", + "SITE-101/bdii.host1.example.foo/org.bdii.Freshness", "SITE-101//org.bdii.Freshness" }; + + assertEquals(expectedRules.length, t.getRules().entrySet().size()); + + for (String rule : expectedRules) { + assertEquals(true, t.getRules().keySet().contains(rule)); + } + + assertEquals("SITE-101/bdii.host1.example.foo/org.bdii.Freshness", + t.getMostRelevantRule("SITE-101", "bdii.host1.example.foo", "org.bdii.Freshness")); + + assertEquals("SITE-101//org.bdii.Freshness", + t.getMostRelevantRule("SITE-101", "bdii.host2.example.foo", "org.bdii.Freshness")); + + assertEquals("//org.bdii.Freshness", + t.getMostRelevantRule("SITE-202", "bdii.host2.example.foo", "org.bdii.Freshness")); + + assertEquals("//org.bdii.Freshness", + t.getMostRelevantRule("SITE-202", "bdii.host2.example.foo", "org.bdii.Freshness")); + + assertEquals("//org.bdii.Entries", + t.getMostRelevantRule("SITE-101", "bdii.host1.example.foo", 
"org.bdii.Entries")); + + assertEquals("", t.getMostRelevantRule("SITE-101", "bdii.host1.example.foo", "org.bdii.Foo")); + + assertEquals("WARNING", t.getStatusByRule("SITE-101/bdii.host1.example.foo/org.bdii.Freshness", opsMgr, "AND")); + assertEquals("CRITICAL", t.getStatusByRule("//org.bdii.Entries", opsMgr, "AND")); + assertEquals("WARNING", t.getStatusByRule("//org.bdii.Entries", opsMgr, "OR")); + assertEquals("CRITICAL", t.getStatusByRule("/bdii.host1.example.foo/org.bdii.Freshness", opsMgr, "AND")); + assertEquals("WARNING", t.getStatusByRule("/bdii.host1.example.foo/org.bdii.Freshness", opsMgr, "OR")); + assertEquals("",t.getStatusByRule("/bdii.host3.example.foo/org.bdii.Freshness", opsMgr, "AND")); //no critical or warning ranges defined + + // Test parsing of label=value lists including space separation or not + assertEquals("{size=6754.0, time=3.714648}",t.getThresholdValues("time=3.714648s;;;0.000000 size=6754B;;;0").toString()); + assertEquals("{time=0.037908}",t.getThresholdValues("time=0.037908s;;;0.000000;120.000000").toString()); + assertEquals("{time=0.041992}",t.getThresholdValues("time=0.041992s;;;0.000000;120.000000").toString()); + assertEquals("{entries=1.0, time=0.15}",t.getThresholdValues("time=0.15s;entries=1").toString()); + assertEquals("{entries=1.0, freshness=111.0}",t.getThresholdValues("freshness=111s;entries=1").toString()); + assertEquals("{entries=1.0, freshness=111.0}",t.getThresholdValues("freshness=111s; entries=1").toString()); + assertEquals("{entries=1.0, freshness=111.0}",t.getThresholdValues("freshness=111s;;;entries=1").toString()); + assertEquals("{entries=1.0, freshness=111.0}",t.getThresholdValues("freshness=111s;; entries=1").toString()); + assertEquals("{TSSInstances=1.0}",t.getThresholdValues("TSSInstances=1").toString()); + + String thBig = "tls_ciphers=105.47s dir_head=0.69s dir_get=0.89s file_put=0.82s file_get=0.45s file_options=0.39s file_move=0.42s file_head=0.40s file_head_on_non_existent=0.38s file_propfind=0.40s file_delete=0.72s file_delete_on_non_existent=0.37s"; + String expThBig = "{file_head_on_non_existent=0.38, file_put=0.82, file_delete_on_non_existent=0.37, file_delete=0.72, dir_head=0.69, file_head=0.4, file_propfind=0.4, dir_get=0.89, file_move=0.42, file_options=0.39, file_get=0.45, tls_ciphers=105.47}"; + + assertEquals(expThBig,t.getThresholdValues(thBig).toString()); + } + +} diff --git a/flink_jobs/ProfilesManager/src/test/java/profilesmanager/Utils.java b/flink_jobs/ProfilesManager/src/test/java/profilesmanager/Utils.java new file mode 100644 index 00000000..e097977b --- /dev/null +++ b/flink_jobs/ProfilesManager/src/test/java/profilesmanager/Utils.java @@ -0,0 +1,123 @@ +/* + * To change this license header, choose License Headers in Project Properties. + * To change this template file, choose Tools | Templates + * and open the template in the editor. 
+ */ +package profilesmanager; + +import com.google.gson.JsonArray; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import com.google.gson.JsonParser; +import java.io.BufferedReader; +import java.io.FileNotFoundException; +import java.io.FileReader; +import java.io.IOException; +import java.text.ParseException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; + +/** + * A utility class that reads test resource files and exposes the parsed + * information to the tests + */ +public class Utils { + + public int[][][] readTruthTable() throws IOException, FileNotFoundException, ParseException, java.text.ParseException { + + BufferedReader br = null; + try { + br = new BufferedReader(new FileReader(Utils.class.getResource("/profiles/truthtable.json").getFile())); + + JsonParser json_parser = new JsonParser(); + JsonElement j_element = json_parser.parse(br); + JsonObject jRoot = j_element.getAsJsonObject(); + JsonArray jData = jRoot.get("data").getAsJsonArray(); + JsonElement jItem = jData.get(0); + int[][][] truthTable = readJson(jItem); + return truthTable; + } catch (FileNotFoundException ex) { + + throw ex; + + } + + } + + private int[][][] readJson(JsonElement j_element) { + HashMap operations = new HashMap(); + ArrayList revOps = new ArrayList(); + HashMap states = new HashMap(); + ArrayList revStates = new ArrayList(); + + JsonObject j_obj = j_element.getAsJsonObject(); + JsonArray j_ops = j_obj.getAsJsonArray("operations"); + int i = 0; + for (JsonElement item : j_ops) { + String jObjItem = item.getAsString(); + operations.put(jObjItem, i); + revOps.add(jObjItem); + i++; + } + JsonArray j_states = j_obj.getAsJsonArray("available_states"); + i = 0; + for (JsonElement item : j_states) { + String jObjItem = item.getAsString(); + states.put(jObjItem, i); + revStates.add(jObjItem); + i++; + } + + int num_ops = revOps.size(); + int num_states = revStates.size(); + int[][][] truthTable = new int[num_ops][num_states][num_states]; + + for (int[][] surface : truthTable) { + for (int[] line : surface) { + Arrays.fill(line, -1); + } + } + JsonArray input = j_obj.getAsJsonArray("inputs"); + + // Fill the truth table from the "inputs" section + for (JsonElement item : input) { + JsonObject jObjItem = item.getAsJsonObject(); + String opname = jObjItem.getAsJsonPrimitive("name").getAsString(); + JsonArray tops = jObjItem.getAsJsonArray("truth_table"); + + for (int j = 0; j < tops.size(); j++) { + JsonObject row = tops.get(j).getAsJsonObject(); + + int a_val = revStates.indexOf(row.getAsJsonPrimitive("a").getAsString()); + int b_val = revStates.indexOf(row.getAsJsonPrimitive("b").getAsString()); + int x_val = revStates.indexOf(row.getAsJsonPrimitive("x").getAsString()); + int op_val = revOps.indexOf(opname); + + // The test profile is order-insensitive, so insert the truth + // value symmetrically in both [a][b] and [b][a] + truthTable[op_val][a_val][b_val] = x_val; + truthTable[op_val][b_val][a_val] = x_val; + + } + } + + int[][][] outputTruthTable = new int[num_ops][num_states][num_states]; + JsonArray output = j_obj.getAsJsonArray("output"); + + // Fill the expected output truth table from the precomputed (op, a, b, x) tuples + for (JsonElement item : output) { + JsonObject jObjItem = item.getAsJsonObject(); + int op = jObjItem.getAsJsonPrimitive("op").getAsInt(); + int a = jObjItem.getAsJsonPrimitive("a").getAsInt(); + int b = jObjItem.getAsJsonPrimitive("b").getAsInt(); + int x = jObjItem.getAsJsonPrimitive("x").getAsInt(); + outputTruthTable[op][a][b] = x; + + } + return
outputTruthTable; + } +} diff --git a/flink_jobs/status_trends/src/main/java/argo/batch/BatchFlipFlopCollection.java b/flink_jobs/status_trends/src/main/java/argo/batch/BatchFlipFlopCollection.java index e8333c41..405d0c53 100644 --- a/flink_jobs/status_trends/src/main/java/argo/batch/BatchFlipFlopCollection.java +++ b/flink_jobs/status_trends/src/main/java/argo/batch/BatchFlipFlopCollection.java @@ -32,7 +32,6 @@ import org.joda.time.DateTime; /** - * Implements an ARGO Status Trends Job in flink , to count the number of status * changes that occur to all levels of the topology hierarchy * diff --git a/flink_jobs/status_trends/src/main/java/argo/batch/BatchGroupFlipFlopTrends.java b/flink_jobs/status_trends/src/main/java/argo/batch/BatchGroupFlipFlopTrends.java index 9a50fe4c..1d31d459 100644 --- a/flink_jobs/status_trends/src/main/java/argo/batch/BatchGroupFlipFlopTrends.java +++ b/flink_jobs/status_trends/src/main/java/argo/batch/BatchGroupFlipFlopTrends.java @@ -30,7 +30,6 @@ * Implements an ARGO Status Trends Job in flink , to count the number of status * changes that occur to the level of group of the topology hierarchy * - * Submit job in flink cluster using the following parameters * --date:the date * for which the job runs and need to return results , yyyy-MM-dd * --yesterdayData: path to the metric profile data, of the previous day , for diff --git a/flink_jobs/status_trends/src/main/java/argo/batch/BatchMetricFlipFlopTrends.java b/flink_jobs/status_trends/src/main/java/argo/batch/BatchMetricFlipFlopTrends.java index ab7d007b..77ac39ff 100644 --- a/flink_jobs/status_trends/src/main/java/argo/batch/BatchMetricFlipFlopTrends.java +++ b/flink_jobs/status_trends/src/main/java/argo/batch/BatchMetricFlipFlopTrends.java @@ -105,7 +105,8 @@ private static void calcFlipFlops() { DataSet filteredYesterdayData = yesterdayData.filter(new TopologyMetricFilter(profilesLoader.getMetricProfileParser(), profilesLoader.getTopologyEndpointParser(), profilesLoader.getTopolGroupParser(), profilesLoader.getAggregationProfileParser())).groupBy("hostname", "service", "metric").reduceGroup(new CalcLastTimeStatus()); DataSet filteredTodayData = todayData.filter(new TopologyMetricFilter(profilesLoader.getMetricProfileParser(), profilesLoader.getTopologyEndpointParser(), profilesLoader.getTopolGroupParser(), profilesLoader.getAggregationProfileParser())); - DataSet metricData = filteredTodayData.union(filteredYesterdayData).groupBy("hostname", "service", "metric").reduceGroup(new CalcMetricFlipFlopTrends(profilesLoader.getOperationParser(), profilesLoader.getTopologyEndpointParser(), profilesLoader.getTopolGroupParser(), profilesLoader.getAggregationProfileParser(), profilesDate)); + + DataSet metricData = filteredTodayData.union(filteredYesterdayData).groupBy("hostname", "service", "metric").reduceGroup(new CalcMetricFlipFlopTrends(profilesLoader.getOperationParser(), profilesLoader.getTopologyEndpointParser(), profilesLoader.getTopolGroupParser(), profilesLoader.getAggregationProfileParser(), profilesDate)); if (rankNum != null) { metricData = metricData.sortPartition("flipflops", Order.DESCENDING).first(rankNum); diff --git a/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/ServiceFilter.java b/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/ServiceFilter.java index ede09526..e86934bb 100644 --- a/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/ServiceFilter.java +++ 
b/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/ServiceFilter.java @@ -17,7 +17,6 @@ public class ServiceFilter implements FilterFunction { static Logger LOG = LoggerFactory.getLogger(ServiceFilter.class); - private AggregationProfileManager aggregationProfileParser; public ServiceFilter(AggregationProfileManager aggregationProfileParser) { @@ -28,9 +27,7 @@ public ServiceFilter(AggregationProfileManager aggregationProfileParser) { //if the status field value in Tuple equals the given status returns true, else returns false @Override public boolean filter(EndpointTrends t) throws Exception { - return aggregationProfileParser.checkServiceExistance(t.getService()); - } } diff --git a/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcServiceFlipFlop.java b/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcServiceFlipFlop.java index d5cd5fac..da2e8860 100644 --- a/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcServiceFlipFlop.java +++ b/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcServiceFlipFlop.java @@ -33,6 +33,7 @@ public class CalcServiceFlipFlop implements GroupReduceFunction< EndpointTrends, public CalcServiceFlipFlop(OperationsParser operationsParser, AggregationProfileManager aggregationProfileParser) { this.operationsParser = operationsParser; this.serviceFunctionsMap = aggregationProfileParser.retrieveServiceOperations(); + } @Override @@ -48,7 +49,6 @@ public void reduce(Iterable in, Collector< ServiceTrends> out) t service = endpointTrend.getService(); timelineList.put(endpointTrend.getEndpoint(), endpointTrend.getTimeline()); } - String operation = serviceFunctionsMap.get(service); TimelineAggregator timelineAggregator = new TimelineAggregator(timelineList); timelineAggregator.aggregate(operationsParser.getTruthTable(), operationsParser.getIntOperation(operation)); diff --git a/flink_jobs/status_trends/src/main/java/argo/profiles/ConfigManager.java b/flink_jobs/status_trends/src/main/java/argo/profiles/ConfigManager.java new file mode 100644 index 00000000..9bde37f9 --- /dev/null +++ b/flink_jobs/status_trends/src/main/java/argo/profiles/ConfigManager.java @@ -0,0 +1,336 @@ +package argo.profiles; + +import java.io.BufferedReader; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileReader; +import java.io.IOException; +import java.util.List; +import java.util.TreeMap; + +import org.apache.commons.io.IOUtils; +import org.apache.log4j.Logger; + +import com.google.gson.JsonArray; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import com.google.gson.JsonParseException; +import com.google.gson.JsonParser; +import java.util.ArrayList; +import java.util.Iterator; + +/** + * + * + * ConfigManager class implements objects that store the information parsed from + * a json object containing the profile data of a tenant's report or loaded from + * a json file + */ + +public class ConfigManager { + + private static final Logger LOG = Logger.getLogger(ConfigManager.class.getName()); + + public String id; // report uuid reference + public String report; + public String tenant; + public String egroup; // endpoint group + public String ggroup; // group of groups + public String weight; // weight factor type + public TreeMap egroupTags; + public TreeMap ggroupTags; + public TreeMap mdataTags; + private Threshold threshold; + ArrayList profiles; + + /** + * Constructor to a ConfigManager object to store the tenant's report + * information 
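+ * + * A minimal usage sketch (illustrative; the file name is assumed): + * + * ConfigManager cfg = new ConfigManager(); + * cfg.loadJson(new File("report.json")); + * String aggregationId = cfg.getAggregationReportId(); + * String metricId = cfg.getMetricReportId(); + * String operationsId = cfg.getOperationReportId();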
+ */ + public ConfigManager() { + this.report = null; + this.id = null; + this.tenant = null; + this.egroup = null; + this.ggroup = null; + this.weight = null; + this.egroupTags = new TreeMap(); + this.ggroupTags = new TreeMap(); + this.mdataTags = new TreeMap(); + this.profiles = new ArrayList<>(); + + } + + /** + * Clears the stored data of a ConfigManager object + */ + public void clear() { + this.id = null; + this.report = null; + this.tenant = null; + this.egroup = null; + this.ggroup = null; + this.weight = null; + this.threshold = null; + this.egroupTags.clear(); + this.ggroupTags.clear(); + this.mdataTags.clear(); + this.profiles.clear(); + + } + + public String getReportID() { + return id; + } + + public String getReport() { + return report; + } + + public String getTenant() { + return tenant; + } + + public String getEgroup() { + return egroup; + } + + /** + * loads from a json file that contain the tenant's report , and stores the + * info to the corresponding fields + * + * @param jsonFile + * @throws IOException + */ + public void loadJson(File jsonFile) throws IOException { + // Clear data + this.clear(); + + BufferedReader br = null; + try { + br = new BufferedReader(new FileReader(jsonFile)); + + JsonParser jsonParser = new JsonParser(); + JsonElement jElement = jsonParser.parse(br); + readJson(jElement); + } catch (FileNotFoundException ex) { + LOG.error("Could not open file:" + jsonFile.getName()); + throw ex; + + } catch (JsonParseException ex) { + LOG.error("File is not valid json:" + jsonFile.getName()); + throw ex; + } finally { + // Close quietly without exceptions the buffered reader + IOUtils.closeQuietly(br); + } + + } + + /** + * Loads Report config information from a config json string + * + */ + public void loadJsonString(List confJson) throws JsonParseException { + // Clear data + this.clear(); + + try { + + JsonParser jsonParser = new JsonParser(); + // Grab the first - and only line of json from ops data + JsonElement jElement = jsonParser.parse(confJson.get(0)); + readJson(jElement); + + } catch (JsonParseException ex) { + LOG.error("Not valid json contents"); + throw ex; + } + + } + + /** + * reads from a JsonElement array and stores the necessary information to + * the ConfigManager objects and add them to the list + * + * @param jElement , a JsonElement containing the tenant's report data + * @return + */ + public void readJson(JsonElement jElement) { + + JsonObject jObj = jElement.getAsJsonObject(); + // Get the simple fields + this.id = jObj.get("id").getAsString(); + this.tenant = jObj.get("tenant").getAsString(); + this.report = jObj.get("info").getAsJsonObject().get("name").getAsString(); + + // get topology schema names + JsonObject topoGroup = jObj.get("topology_schema").getAsJsonObject().getAsJsonObject("group"); + this.ggroup = topoGroup.get("type").getAsString(); + this.egroup = topoGroup.get("group").getAsJsonObject().get("type").getAsString(); + + // optional weight filtering + this.weight = ""; + if (jObj.has("weight")) { + this.weight = jObj.get("weight").getAsString(); + } + // Get compound fields + JsonArray jTags = jObj.getAsJsonArray("filter_tags"); + + // Iterate tags + if (jTags != null) { + for (JsonElement tag : jTags) { + JsonObject jTag = tag.getAsJsonObject(); + String name = jTag.get("name").getAsString(); + String value = jTag.get("value").getAsString(); + String ctx = jTag.get("context").getAsString(); + if (ctx.equalsIgnoreCase("group_of_groups")) { + this.ggroupTags.put(name, value); + } else if 
(ctx.equalsIgnoreCase("endpoint_groups")) { + this.egroupTags.put(name, value); + } else if (ctx.equalsIgnoreCase("metric_data")) { + this.mdataTags.put(name, value); + } + + } + } + + JsonObject thresholdsObject = jObj.get("thresholds").getAsJsonObject(); + + this.threshold = new Threshold(thresholdsObject.get("availability").getAsLong(), thresholdsObject.get("reliability").getAsLong(), thresholdsObject.get("uptime").getAsDouble(), + thresholdsObject.get("unknown").getAsDouble(), thresholdsObject.get("downtime").getAsDouble()); + + JsonArray profilesArray = jObj.get("profiles").getAsJsonArray(); + + Iterator profileIter = profilesArray.iterator(); + while (profileIter.hasNext()) { + JsonObject profileObject = profileIter.next().getAsJsonObject(); + Profiles profile = new Profiles(profileObject.get("id").getAsString(), profileObject.get("name").getAsString(), profileObject.get("type").getAsString()); + profiles.add(profile); + } + + } + + /** + * Threshold class is an inner class that stores the info of the thresholds as described from the tenant's report + */ + public class Threshold { + + private Long availability; + private Long reliability; + private Double uptime; + private Double unknown; + private Double downtime; + + public Threshold(Long availability, Long reliability, Double uptime, Double unknown, Double downtime) { + this.availability = availability; + this.reliability = reliability; + this.uptime = uptime; + this.unknown = unknown; + this.downtime = downtime; + } + + public Long getAvailability() { + return availability; + } + + public Long getReliability() { + return reliability; + } + + public Double getUptime() { + return uptime; + } + + public Double getUnknown() { + return unknown; + } + + public Double getDowntime() { + return downtime; + } + + } +/** + * Profiles class is a class that stores the info of the profiles as described from the tenant's report + */ + private class Profiles { + + private String id; + private String name; + private String type; + + public Profiles(String id, String name, String type) { + this.id = id; + this.name = name; + this.type = type; + } + + public String getId() { + return id; + } + + public String getName() { + return name; + } + + public String getType() { + return type; + } + + } +/** + * Returns the aggregation's report id , as described in the profiles of tenant's report + * @return + */ + public String getAggregationReportId() { + if (profiles != null) { + for (Profiles profile : profiles) { + if (profile.getType().equalsIgnoreCase(ProfileType.AGGREGATION.name())) { + return profile.id; + } + } + } + return null; + } + + + /** + * Returns the metric's report id , as described in the profiles of tenant's report + * @return + */ + public String getMetricReportId() { + + if (profiles != null) { + for (Profiles profile : profiles) { + if (profile.getType().equalsIgnoreCase(ProfileType.METRIC.name())) { + return profile.id; + } + } + } + return null; + } + + /** + * Returns the operation's report id , as described in the profiles of tenant's report + * @return + */ + public String getOperationReportId() { + if (profiles != null) { + for (Profiles profile : profiles) { + if (profile.getType().equalsIgnoreCase(ProfileType.OPERATIONS.name())) { + return profile.id; + } + } + } + return null; + } + + public enum ProfileType { + + METRIC, + AGGREGATION, + OPERATIONS + + } +} diff --git a/flink_jobs/status_trends/src/main/java/argo/profiles/ProfilesLoader.java b/flink_jobs/status_trends/src/main/java/argo/profiles/ProfilesLoader.java index 
ccf538c4..faa3f63f 100644 --- a/flink_jobs/status_trends/src/main/java/argo/profiles/ProfilesLoader.java +++ b/flink_jobs/status_trends/src/main/java/argo/profiles/ProfilesLoader.java @@ -19,8 +19,10 @@ public class ProfilesLoader { + private ReportManager reportParser; + private EndpointGroupManager topologyEndpointParser; private MetricProfileManager metricProfileParser; @@ -48,7 +50,6 @@ public ProfilesLoader(ParameterTool params) throws IOException, ParseException { metricId = reportParser.getMetricReportId(); operationsId = reportParser.getOperationReportId(); - JsonElement opProfileJson = RequestManager.operationsProfileRequest(params.getRequired("apiUri"), operationsId, params.getRequired("key"), params.get("proxy"), params.get("date")); operationParser = new OperationsParser(); @@ -65,14 +66,11 @@ public ProfilesLoader(ParameterTool params) throws IOException, ParseException { aggregationProfileParser.readJson(aggregationProfileJson); JsonArray endpointGroupProfileJson = RequestManager.endpointGroupProfileRequest(params.getRequired("apiUri"), params.getRequired("key"), params.get("proxy"), reportName, params.get("date")); - - topologyEndpointParser = new EndpointGroupManager(); topologyEndpointParser.loadGroupEndpointProfile(endpointGroupProfileJson); JsonArray groupGroupProfileJson = RequestManager.groupGroupProfileRequest(params.getRequired("apiUri"), params.getRequired("key"), params.get("proxy"), reportName, params.get("date")); - topolGroupParser = new GroupGroupManager(); topolGroupParser.loadGroupGroupProfile(groupGroupProfileJson); } diff --git a/flink_jobs/status_trends/src/main/java/argo/utils/RequestManager.java b/flink_jobs/status_trends/src/main/java/argo/utils/RequestManager.java index 2df79a15..53b26a84 100644 --- a/flink_jobs/status_trends/src/main/java/argo/utils/RequestManager.java +++ b/flink_jobs/status_trends/src/main/java/argo/utils/RequestManager.java @@ -83,7 +83,6 @@ public static JsonElement operationsProfileRequest(String apiUri, String operati return loadProfile(uri, key, proxy).get(0); } - public static JsonElement metricProfileRequest(String apiUri, String metricId, String key, String proxy, String dateStr) throws IOException, ParseException { String uri = apiUri + "/metric_profiles" + "/" + metricId; @@ -101,8 +100,6 @@ public static JsonElement aggregationProfileRequest(String apiUri, String aggreg uri = uri + "?date=" + dateStr; } return loadProfile(uri, key, proxy).get(0); - - } public static JsonArray endpointGroupProfileRequest(String apiUri, String key, String proxy, String reportname, String dateStr) throws IOException, ParseException { @@ -114,30 +111,37 @@ public static JsonArray endpointGroupProfileRequest(String apiUri, String key, S return loadProfile(uri, key, proxy); } - public static JsonArray groupGroupProfileRequest(String apiUri, String key, String proxy, String reportname,String dateStr) throws IOException, ParseException { - - String uri = apiUri + "/topology/groups/by_report" + "/" + reportname; + + public static JsonArray groupGroupProfileRequest(String apiUri, String key, String proxy, String reportname, String dateStr) throws IOException, ParseException { + + String uri = apiUri + "/topology/groups/by_report" + "/" + reportname; if (dateStr != null) { uri = uri + "?date=" + dateStr; } return loadProfile(uri, key, proxy); - } public static JsonElement reportProfileRequest(String apiUri, String key, String proxy, String reportId) throws IOException, ParseException { + // String uri = apiUri + "/reports/"; +// if (dateStr != null) 
{ +// uri = uri + "?date=" + dateStr; +// } String uri = apiUri + "/reports/" + reportId; return loadProfile(uri, key, proxy).get(0); - } + public static JsonArray loadProfile(String uri, String key, String proxy) throws IOException, org.json.simple.parser.ParseException { JsonElement jsonElement = RequestManager.callRequest(uri, key, proxy); JsonObject jsonObj = jsonElement.getAsJsonObject(); JsonArray dataObj = jsonObj.getAsJsonArray("data"); - return dataObj; + + //JsonElement dataElement = dataObj.get(0); + + return dataObj; } } \ No newline at end of file From 8fd15c2d53bac33b88799e0a5f32a127b9315f54 Mon Sep 17 00:00:00 2001 From: cthermolia-grnet Date: Thu, 17 Jun 2021 09:03:31 +0300 Subject: [PATCH 005/112] ARGO-3209 Create Downtimes Profile Parser independent Component --- flink_jobs/ProfilesManager/.gitignore | 1 - .../java/profilesmanager/DowntimeManager.java | 15 ++-- .../main/resources/profiles/downtimes_03.avro | Bin 0 -> 2911 bytes .../main/resources/profiles/downtimes_v2.avro | Bin 0 -> 3612 bytes .../profilesmanager/DowntimeManagerTest.java | 68 ++++++++++++++++++ .../calctimelines/ServiceFilter.java | 1 - .../java/argo/profiles/ProfilesLoader.java | 3 - .../main/java/argo/utils/RequestManager.java | 14 +++- 8 files changed, 88 insertions(+), 14 deletions(-) create mode 100644 flink_jobs/ProfilesManager/src/main/resources/profiles/downtimes_03.avro create mode 100644 flink_jobs/ProfilesManager/src/main/resources/profiles/downtimes_v2.avro create mode 100644 flink_jobs/ProfilesManager/src/test/java/profilesmanager/DowntimeManagerTest.java diff --git a/flink_jobs/ProfilesManager/.gitignore b/flink_jobs/ProfilesManager/.gitignore index 6c4e323f..0b24544a 100644 --- a/flink_jobs/ProfilesManager/.gitignore +++ b/flink_jobs/ProfilesManager/.gitignore @@ -5,4 +5,3 @@ .classpath /nbproject nbactions.xml - diff --git a/flink_jobs/ProfilesManager/src/main/java/profilesmanager/DowntimeManager.java b/flink_jobs/ProfilesManager/src/main/java/profilesmanager/DowntimeManager.java index 72fbc7ef..29620ba0 100644 --- a/flink_jobs/ProfilesManager/src/main/java/profilesmanager/DowntimeManager.java +++ b/flink_jobs/ProfilesManager/src/main/java/profilesmanager/DowntimeManager.java @@ -20,7 +20,6 @@ import org.apache.avro.generic.GenericDatumReader; import org.apache.avro.generic.GenericRecord; import org.apache.avro.io.DatumReader; -import org.apache.avro.util.Utf8; import org.apache.commons.io.IOUtils; /** @@ -135,13 +134,13 @@ public void loadAvro(File avroFile) throws IOException { // Generate 2nd level generic record reader (tags) - HashMap tags = (HashMap) (avroRow.get("tags")); - - if (tags != null) { - for (Utf8 item : tags.keySet()) { - tagMap.put(item.toString(), String.valueOf(tags.get(item))); - } - } +// HashMap tags = (HashMap) (avroRow.get("tags")); +// +// if (tags != null) { +// for (Utf8 item : tags.keySet()) { +// tagMap.put(item.toString(), String.valueOf(tags.get(item))); +// } +// } // Grab 1st level mandatory fields String hostname = avroRow.get("hostname").toString(); diff --git a/flink_jobs/ProfilesManager/src/main/resources/profiles/downtimes_03.avro b/flink_jobs/ProfilesManager/src/main/resources/profiles/downtimes_03.avro new file mode 100644 index 0000000000000000000000000000000000000000..ff0cd90a693676fec2ce6cf4fd3946f4574491f6 GIT binary patch literal 2911 zcmb`JTWb?R6vq`yDH23V@u7&sASH(EVK2RC9)z?;DI}&OUZ5f}nVsxTyF0Va>~0gZ z-_l24`~ZFc-<(Zin{Gps-4yoe%-P?$?AiaEcJJiw$|DpD4idY>N5AfUFNMQ^lxoG2 zAealgR4tYm@`(%(9+{DFI3%z9KS_LTe#Szoyb@!d63>>({P_|lLqVA{{ff(Fn1D;? 
z)#6Qll8Bh9(DU3vB*L#r4)Z(-vnPIE&NoK2V{Q|?#v;$V^T&Grd-Lxm{D@?`(0*Ee z^RxUI3xa%1fSyQ@^x;9LQEzDnjk2j5dzx;k=e@4iM$hrI(X?tiFO~hc;*!8)15@9H zAOPV3!VsMn>YamIz-%~88WOF3)M!oz&y|tWYzzoq9J7Uq0gj+Hc%(;c5`&0VvUFw^ zU@coIddz!#fPI-xr0}t=sgvcZS??yz-G6;}xEv?P)NDy&rfXX5c3R6NQn-z8oNwmj zw@6J&YFsK@I50ZVFj5)>-NbA08h5eBBU|g)l!8($B%z0ZvOWVE)<^`zp)hhuj9mCA zq?|$GJ~xK3H~FcnV?Y}J>Iy#gH6JnL5TCFRIL6Bdc$V~r2ZYHG;ZtBdMvEuCKW+@d zm`6&RnRBf@a3%js$Hik5k-$AcrE0On60n+YjBj^LueImji5A zFH-FRMnVXP=y%)i>K)ie%s!)b=q`z^T*Wp~NEjYwfFdt&Q7YF5t>Ka2gAAA_XX>Vm zC`c45p_HeqpIR9jR>gR-4S7_zl%$%01${AlG?U#YS%0Q$G>i*Pog!`DOfsxlq@9Uz b);)P&-IK979eB<@G!lUHkCK&=ULOAe+|nG1 literal 0 HcmV?d00001 diff --git a/flink_jobs/ProfilesManager/src/main/resources/profiles/downtimes_v2.avro b/flink_jobs/ProfilesManager/src/main/resources/profiles/downtimes_v2.avro new file mode 100644 index 0000000000000000000000000000000000000000..b31d809c4e03cc89d88cc97cebad422e48e61e31 GIT binary patch literal 3612 zcmcJSO>Wab6vshSRTouNRRtkbp*zznHw1AZj@h5KsuspS&iiyH?78yfS?@2+9YrNq*s zM}{_YIaoTcma_a9reoqidYYRHsr!*@nC6Ks?b7eV`FapLvNepBEYC3Rbgh%kSHI4` zSN}fw_Vvfl`|p3(AB}C30ZY>dz^3}rn}beoOI2#yvf_W99@XkQ+v<*@ycLX%OQXuh zR8i|dH=v(l+tn%207SqWEuJJeiW29jKKwa1UU1YoaT+O{h4!uxLEV3Bww#)hf>~-F zw%P)SC#L{tb+6lMcI3U*JYdUuyLE=q?5Kx~npZVn>~%ZQqsp_CkZL?g9_NNf0D>++ zeP;mrbm4ekI2*b}jcRDDB8s6YBdn-VMk1-flM+b{DUVP6hK)#ww_5h;E!TGBzD9&7 zR*!t+?u-2#v6O+teVy|aHZpg{H-sV+`Uj^A& zk^If#33}GBNkFsv@>sZVvneHqiL(PN>;ebVLGR#ovkP{irJWMZ9Y&Kf9a)Pex*{Um z(8U(Y(Mj`(R6=8OAmpp!?u1Wra|6im6nYz}Mh;Y8N zTjV~GvD~b3@J>c32BRUQBYO-yxZLdS#a=zKo1q*mOc9c2RfSC@IcGqm%)Y2XMj1NT zh*5Ig$XTGAV_VVtvPWcVL;yAzV30y!88f{Tg(^4#;W(mzjwGZ=h0^5|m timePeriod = new ArrayList(); + timePeriod.add("2015-05-07T00:00:00Z"); + timePeriod.add("2015-05-07T23:59:00Z"); + assertEquals("Test timeperiod #1", dt.getPeriod("cream-ce01.gridpp.rl.ac.uk", "CREAM-CE"), timePeriod); + // test for px.ire.kharkov.ua, MyProxy + timePeriod.clear(); + timePeriod.add("2015-05-07T00:00:00Z"); + timePeriod.add("2015-05-07T23:59:00Z"); + assertEquals("Test timeperiod #2", dt.getPeriod("px.ire.kharkov.ua", "MyProxy"), timePeriod); + // test for gb-ui-nki.els.sara.nl, UI + timePeriod.clear(); + timePeriod.add("2015-05-07T00:00:00Z"); + timePeriod.add("2015-05-07T23:59:00Z"); + assertEquals("Test timeperiod #3", dt.getPeriod("gb-ui-nki.els.sara.nl", "UI"), timePeriod); + // test for cream-ce01.gridpp.rl.ac.uk, gLExec + timePeriod.clear(); + timePeriod.add("2015-05-07T00:00:00Z"); + timePeriod.add("2015-05-07T23:59:00Z"); + assertEquals("Test timeperiod #4", dt.getPeriod("cream-ce01.gridpp.rl.ac.uk", "gLExec"), timePeriod); + // test for gcvmfs.cat.cbpf.br, org.squid-cache.Squid + timePeriod.clear(); + timePeriod.add("2015-05-07T00:00:00Z"); + timePeriod.add("2015-05-07T20:00:00Z"); + assertEquals("Test timeperiod #5", dt.getPeriod("cvmfs.cat.cbpf.br", "org.squid-cache.Squid"), timePeriod); + // test for apel.ire.kharkov.ua, APEL + timePeriod.clear(); + timePeriod.add("2015-05-07T00:00:00Z"); + timePeriod.add("2015-05-07T23:59:00Z"); + assertEquals("Test timeperiod #6", dt.getPeriod("apel.ire.kharkov.ua", "APEL"), timePeriod); + + } + +} \ No newline at end of file diff --git a/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/ServiceFilter.java b/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/ServiceFilter.java index e86934bb..2c5a7e80 100644 --- a/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/ServiceFilter.java +++ b/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/ServiceFilter.java @@ -18,7 
+18,6 @@ public class ServiceFilter implements FilterFunction { static Logger LOG = LoggerFactory.getLogger(ServiceFilter.class); private AggregationProfileManager aggregationProfileParser; - public ServiceFilter(AggregationProfileManager aggregationProfileParser) { this.aggregationProfileParser = aggregationProfileParser; diff --git a/flink_jobs/status_trends/src/main/java/argo/profiles/ProfilesLoader.java b/flink_jobs/status_trends/src/main/java/argo/profiles/ProfilesLoader.java index faa3f63f..bb9f5173 100644 --- a/flink_jobs/status_trends/src/main/java/argo/profiles/ProfilesLoader.java +++ b/flink_jobs/status_trends/src/main/java/argo/profiles/ProfilesLoader.java @@ -18,8 +18,6 @@ */ public class ProfilesLoader { - - private ReportManager reportParser; @@ -28,7 +26,6 @@ public class ProfilesLoader { private OperationsParser operationParser; private AggregationProfileManager aggregationProfileParser; - private GroupGroupManager topolGroupParser; private String aggregationId; diff --git a/flink_jobs/status_trends/src/main/java/argo/utils/RequestManager.java b/flink_jobs/status_trends/src/main/java/argo/utils/RequestManager.java index 53b26a84..85b08f6f 100644 --- a/flink_jobs/status_trends/src/main/java/argo/utils/RequestManager.java +++ b/flink_jobs/status_trends/src/main/java/argo/utils/RequestManager.java @@ -79,10 +79,18 @@ public static JsonElement operationsProfileRequest(String apiUri, String operati } else { uri = uri + "?date=" + dateStr; } - return loadProfile(uri, key, proxy).get(0); } +// public static JsonElement loadOperationProfile(String uri, String key, String proxy) throws IOException, org.json.simple.parser.ParseException { +// JsonElement jsonElement = RequestManager.callRequest(uri, key, proxy); +// JsonObject jsonObj=jsonElement.getAsJsonObject(); +// JsonArray dataObj = jsonObj.getAsJsonArray("data"); +// JsonElement dataElement=dataObj.get(0); +// +// +// return dataElement; +// } public static JsonElement metricProfileRequest(String apiUri, String metricId, String key, String proxy, String dateStr) throws IOException, ParseException { String uri = apiUri + "/metric_profiles" + "/" + metricId; @@ -100,8 +108,12 @@ public static JsonElement aggregationProfileRequest(String apiUri, String aggreg uri = uri + "?date=" + dateStr; } return loadProfile(uri, key, proxy).get(0); + } + + + public static JsonArray endpointGroupProfileRequest(String apiUri, String key, String proxy, String reportname, String dateStr) throws IOException, ParseException { String uri = apiUri + "/topology/endpoints/by_report" + "/" + reportname; From 7dd35450f04b756de7e7a46d5148a9fd0e90c5cb Mon Sep 17 00:00:00 2001 From: cthermolia-grnet Date: Thu, 17 Jun 2021 09:03:31 +0300 Subject: [PATCH 006/112] fixes in profiles and zero filter for status trends --- flink_jobs/ProfilesManager/.gitignore | 1 + .../profilesmanager/EndpointGroupManager.java | 211 +++++++++++++++--- .../argo/batch/BatchFlipFlopCollection.java | 2 + .../java/argo/batch/BatchStatusTrends.java | 12 +- .../zero/flipflops/ZeroStatusTrends.java | 30 +++ .../calctimelines/ServiceFilter.java | 6 +- .../calctimelines/TopologyMetricFilter.java | 6 +- .../calctrends/CalcMetricFlipFlopTrends.java | 5 +- .../calctrends/CalcServiceFlipFlop.java | 4 +- .../calctrends/CalcStatusTrends.java | 4 +- .../argo/profiles/EndpointGroupManager.java | 208 ++++++++++++++--- .../java/argo/profiles/ProfilesLoader.java | 8 +- .../main/java/argo/utils/RequestManager.java | 12 - .../java/timelines/TimelineUtils.java | 2 - 14 files changed, 417 insertions(+), 
94 deletions(-) create mode 100644 flink_jobs/status_trends/src/main/java/argo/filter/zero/flipflops/ZeroStatusTrends.java rename flink_jobs/status_trends/src/{test => main}/java/timelines/TimelineUtils.java (99%)
diff --git a/flink_jobs/ProfilesManager/.gitignore b/flink_jobs/ProfilesManager/.gitignore index 0b24544a..6c4e323f 100644 --- a/flink_jobs/ProfilesManager/.gitignore +++ b/flink_jobs/ProfilesManager/.gitignore
@@ -5,3 +5,4 @@ .classpath /nbproject nbactions.xml +
diff --git a/flink_jobs/ProfilesManager/src/main/java/profilesmanager/EndpointGroupManager.java b/flink_jobs/ProfilesManager/src/main/java/profilesmanager/EndpointGroupManager.java index 606739bd..728e0f68 100644 --- a/flink_jobs/ProfilesManager/src/main/java/profilesmanager/EndpointGroupManager.java +++ b/flink_jobs/ProfilesManager/src/main/java/profilesmanager/EndpointGroupManager.java
@@ -24,9 +24,12 @@ import com.google.gson.JsonObject; import java.io.Serializable; import java.util.Arrays; + +import java.util.HashSet; +import java.util.Iterator; import java.util.Map; import java.util.Objects; - +import java.util.Set; /** * *
@@ -48,10 +51,14 @@ public class EndpointGroupManager implements Serializable { private ArrayList<EndpointItem> list; // the list of EndpointItems that are included in the profile data private ArrayList<EndpointItem> fList; // the list of the filtered EndpointItems that correspond to the criteria - private HashMap<String, HashMap<String, String>> topologyEndpoint = new HashMap<>(); + + private Map<String, Map<String, EndpointItem>> topologyGrouplist; + private Map<String, ArrayList<EndpointItem>> groupIndex; + private String defaultType = null; //* An EndpointItem class implements an object containing info of a group endpoint included in the topology - protected class EndpointItem implements Serializable { // the object + public class EndpointItem implements Serializable { // the object + String type; // type of group String group; // name of the group
@@ -118,12 +125,60 @@ public boolean equals(Object obj) { return true; } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public String getGroup() { + return group; + } + + public void setGroup(String group) { + this.group = group; + } + + public String getService() { + return service; + } + + public void setService(String service) { + this.service = service; + } + + public String getHostname() { + return hostname; + } + + public void setHostname(String hostname) { + this.hostname = hostname; + } + + public HashMap<String, String> getTags() { + return tags; + } + + public void setTags(HashMap<String, String> tags) { + this.tags = tags; + } + + @Override + public String toString() { + return type + "," + group + "," + service + "," + hostname; + } + } public EndpointGroupManager() { this.list = new ArrayList<>(); this.fList = new ArrayList<>(); + this.groupIndex = new HashMap<String, ArrayList<EndpointItem>>(); + this.topologyGrouplist = new HashMap<>(); }
@@ -261,7 +316,8 @@ public int count() { return this.fList.size(); } /** - * Clear the filtered fList and initialize with all the EndpointItems the list includes + * Clear the filtered fList and initialize with all the EndpointItems the + * list includes */ public void unfilter() { this.fList.clear();
@@ -269,12 +325,15 @@ public void unfilter() { this.fList.add(item); } } -/** - * Applies filter on the tags of the EndpointItems included in the list, based on a map of criteria - each criterion containing a pair of the tag field name and the value it searches for, e.g. ("monitored","1") searches tags - with the specific pair of criteria - @param fTags a map of criteria - */ + /** + * Applies filter on the tags of the EndpointItems included in the list, + * based on a map of criteria, each criterion containing a pair of the tag + * field name and the value it searches for, e.g. ("monitored","1") searches + * tags with the specific pair of criteria + * + * @param fTags a map of criteria + */ + public void filter(TreeMap<String, String> fTags) { this.fList.clear(); boolean trim;
@@ -368,6 +427,10 @@ public void loadAvro(File avroFile) throws IOException { // Insert data to list this.insert(type, group, service, hostname, tagMap); + + this.insertTopologyGroup(type, group, service, hostname, tagMap); + defaultType = type; + // populateTopologyEndpoint(hostname, service, type, group); } // end of avro rows
@@ -419,36 +482,29 @@ public void loadFromList(List<GroupEndpoint> egp) { // Insert data to list this.insert(type, group, service, hostname, tagMap); -// populateTopologyEndpoint(hostname, service, type, group); - } + + this.insertTopologyGroup(type, group, service, hostname, tagMap); + defaultType = type; + } this.unfilter(); } -// private void populateTopologyEndpoint(String hostname, String service, String type, String group) { -// -// String topologyEndpointKey = hostname + "-" + service; -// -// HashMap<String, String> endpMap = new HashMap<>(); -// if (topologyEndpoint.get(type) != null) { -// endpMap = topologyEndpoint.get(type); -// } -// -// endpMap.put(topologyEndpointKey, group); -// topologyEndpoint.put(type, endpMap); -// } public void loadGroupEndpointProfile(JsonArray element) throws IOException { GroupEndpoint[] groupEndpoints = readJson(element); loadFromList(Arrays.asList(groupEndpoints)); } + + /** + * Reads from a JsonElement array, stores the necessary information in + * GroupEndpoint objects and adds them to the list * + * @param jElement , a JsonElement containing the topology group endpoint + * profiles data + * @return */ public GroupEndpoint[] readJson(JsonArray jElement) { List<GroupEndpoint> results = new ArrayList<>();
@@ -473,4 +529,101 @@ return rArr; } + + /** + * *********************************** + */ + public Set<String> getEndpointSet() { + Set<String> curItems = new HashSet<String>(); + for (String groupKey : this.groupIndex.keySet()) { + ArrayList<EndpointItem> eList = this.groupIndex.get(groupKey); + for (EndpointItem item : eList) { + curItems.add(item.toString()); + } + } + return curItems; + } + + public ArrayList<String> getGroupList() { + ArrayList<String> results = new ArrayList<String>(); + results.addAll(this.groupIndex.keySet()); + return results; + } + + public ArrayList<String> compareToBeRemoved(EndpointGroupManager egp) { + + ArrayList<String> results = new ArrayList<String>(); + + Set<String> curItems = this.getEndpointSet(); + Set<String> futurItems = egp.getEndpointSet(); + + // lost items is cur items minus future set + curItems.removeAll(futurItems); + + results.addAll(curItems); + + return results; + } + + public int insertTopologyGroup(String type, String group, String service, String hostname, HashMap<String, String> tags) { + EndpointItem itemNew = new EndpointItem(type, group, service, hostname, tags); + String key = type + "|" + hostname + "|" + service; + if (!topologyGrouplist.containsKey(key)) { + Map<String, EndpointItem> subList = new HashMap<String, EndpointItem>(); + subList.put(group, itemNew); + topologyGrouplist.put(key, subList); + + } else { + Map<String, EndpointItem> subList = topologyGrouplist.get(key); + subList.put(group, itemNew); + } + // Add item to the secondary group index + if (!groupIndex.containsKey(group)) { + groupIndex.put(group, new ArrayList<EndpointItem>(Arrays.asList(itemNew))); + } else { + groupIndex.get(group).add(itemNew); + } + + return 0; // All good + } + + public boolean checkEndpointGroup(String hostname, String service) { + + String key = defaultType + "|" + hostname + "|" + service; + return topologyGrouplist.containsKey(key); + } + + public ArrayList<String> getGroupFull(String type, String hostname, String service) { + + String key = type + "|" + hostname + "|" + service; + Map<String, EndpointItem> sublist = topologyGrouplist.get(key); + if (sublist != null) { + return new ArrayList<String>(topologyGrouplist.get(key).keySet()); + } + + return new ArrayList<String>(); + + } + + public Iterator<EndpointItem> getGroupIter(String group) { + ArrayList<EndpointItem> list = groupIndex.get(group); + if (list != null) { + return list.iterator(); + } + + return null; + } + + public ArrayList<String> getGroup(String hostname, String service) { + + String key = defaultType + "|" + hostname + "|" + service; + Map<String, EndpointItem> sublist = topologyGrouplist.get(key); + if (sublist != null) { + return new ArrayList<String>(topologyGrouplist.get(key).keySet()); + } + + return new ArrayList<String>(); + + } + }
diff --git a/flink_jobs/status_trends/src/main/java/argo/batch/BatchFlipFlopCollection.java b/flink_jobs/status_trends/src/main/java/argo/batch/BatchFlipFlopCollection.java index 405d0c53..d6e2ef6d 100644 --- a/flink_jobs/status_trends/src/main/java/argo/batch/BatchFlipFlopCollection.java +++ b/flink_jobs/status_trends/src/main/java/argo/batch/BatchFlipFlopCollection.java
@@ -120,6 +120,8 @@ private static void calcFlipFlops() { //group data by service endpoint metric and return for each group the necessary info and a treemap containing timestamps and status DataSet<MetricTrends> serviceEndpointMetricGroupData = filteredTodayData.union(filteredYesterdayData).groupBy("hostname", "service", "metric").reduceGroup(new CalcMetricFlipFlopTrends(profilesLoader.getOperationParser(), profilesLoader.getTopologyEndpointParser(), profilesLoader.getTopolGroupParser(), profilesLoader.getAggregationProfileParser(), profilesDate)); + + DataSet<MetricTrends> noZeroServiceEndpointMetricGroupData = serviceEndpointMetricGroupData.filter(new ZeroMetricTrendsFilter()); if (rankNum != null) { //sort and rank data noZeroServiceEndpointMetricGroupData = noZeroServiceEndpointMetricGroupData.sortPartition("flipflops", Order.DESCENDING).setParallelism(1).first(rankNum);
diff --git a/flink_jobs/status_trends/src/main/java/argo/batch/BatchStatusTrends.java b/flink_jobs/status_trends/src/main/java/argo/batch/BatchStatusTrends.java index ccbb4c03..e6d0c328 100644 --- a/flink_jobs/status_trends/src/main/java/argo/batch/BatchStatusTrends.java +++ b/flink_jobs/status_trends/src/main/java/argo/batch/BatchStatusTrends.java
@@ -1,10 +1,12 @@ package argo.batch; import argo.avro.MetricData; +import argo.filter.zero.flipflops.ZeroStatusTrends; import argo.functions.calctrends.CalcStatusTrends; import argo.functions.calctimelines.TopologyMetricFilter; import argo.functions.calctimelines.CalcLastTimeStatus; import argo.functions.calctimelines.StatusFilter; +import argo.pojos.MetricTrends; import argo.utils.Utils; import org.apache.flink.api.common.functions.MapFunction; import org.apache.flink.api.common.operators.Order;
@@ -115,18 +117,18 @@ private static DataSet<Tuple6<String, String, String, String, String, Integer>> // filter the data based on status (CRITICAL,WARNING,UNKNOWN), rank and write top N in separate files
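A minimal sketch of how the topology index introduced above is meant to be used; the type, group, service and hostname values are hypothetical:

    EndpointGroupManager mgr = new EndpointGroupManager();
    HashMap<String, String> tags = new HashMap<>();
    tags.put("monitored", "1");
    // entries are keyed as "TYPE|hostname|service" in topologyGrouplist
    mgr.insertTopologyGroup("SERVICEGROUPS", "GROUP_A", "webdav", "host1.example.org", tags);
    // returns ["GROUP_A"]: every group the (type, hostname, service) triple belongs to
    ArrayList<String> groups = mgr.getGroupFull("SERVICEGROUPS", "host1.example.org", "webdav");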
for each status private static void filterByStatusAndWrite(String uri, DataSet<Tuple6<String, String, String, String, String, Integer>> data, String status) { - String collectionUri = mongoUri + "." + uri; DataSet<Tuple6<String, String, String, String, String, Integer>> filteredData = data.filter(new StatusFilter(status)); - + DataSet<Tuple6<String, String, String, String, String, Integer>> noZeroStatusTrends = filteredData.filter(new ZeroStatusTrends()); + if (rankNum != null) { - filteredData = filteredData.sortPartition(5, Order.DESCENDING).setParallelism(1).first(rankNum); + noZeroStatusTrends = noZeroStatusTrends.sortPartition(5, Order.DESCENDING).setParallelism(1).first(rankNum); } else { - filteredData = filteredData.sortPartition(5, Order.DESCENDING).setParallelism(1); + noZeroStatusTrends = noZeroStatusTrends.sortPartition(5, Order.DESCENDING).setParallelism(1); } MongoTrendsOutput metricMongoOut = new MongoTrendsOutput(mongoUri, uri, MongoTrendsOutput.TrendsType.TRENDS_STATUS, reportId, profilesDateStr, clearMongo); - DataSet<Trends> trends = filteredData.map(new MapFunction<Tuple6<String, String, String, String, String, Integer>, Trends>() { + DataSet<Trends> trends = noZeroStatusTrends.map(new MapFunction<Tuple6<String, String, String, String, String, Integer>, Trends>() { @Override public Trends map(Tuple6<String, String, String, String, String, Integer> in) throws Exception {
diff --git a/flink_jobs/status_trends/src/main/java/argo/filter/zero/flipflops/ZeroStatusTrends.java b/flink_jobs/status_trends/src/main/java/argo/filter/zero/flipflops/ZeroStatusTrends.java new file mode 100644 index 00000000..4a011304 --- /dev/null +++ b/flink_jobs/status_trends/src/main/java/argo/filter/zero/flipflops/ZeroStatusTrends.java
@@ -0,0 +1,30 @@ +/* + * To change this license header, choose License Headers in Project Properties. + * To change this template file, choose Tools | Templates + * and open the template in the editor. + */ +package argo.filter.zero.flipflops; + +import org.apache.flink.api.common.functions.FilterFunction; +import org.apache.flink.api.java.tuple.Tuple6; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * ZeroStatusTrends filters status trends data and rejects entries whose status-appearance counter is 0 + */ + public class ZeroStatusTrends implements FilterFunction<Tuple6<String, String, String, String, String, Integer>> { + + static Logger LOG = LoggerFactory.getLogger(ZeroStatusTrends.class); + + + //returns true if the status-appearance counter (f5) is greater than 0, else returns false + @Override + public boolean filter(Tuple6<String, String, String, String, String, Integer> t) throws Exception { + if (t.f5 > 0) { + return true; + } + return false; + } + +}
diff --git a/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/ServiceFilter.java b/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/ServiceFilter.java index 2c5a7e80..aabb5812 100644 --- a/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/ServiceFilter.java +++ b/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/ServiceFilter.java
@@ -17,7 +17,9 @@ public class ServiceFilter implements FilterFunction<EndpointTrends> { static Logger LOG = LoggerFactory.getLogger(ServiceFilter.class); + private AggregationProfileManager aggregationProfileParser; + public ServiceFilter(AggregationProfileManager aggregationProfileParser) { this.aggregationProfileParser = aggregationProfileParser;
@@ -26,7 +28,9 @@ public ServiceFilter(AggregationProfileManager aggregationProfileParser) { //returns true if the service of the EndpointTrends entry exists in the aggregation profile, else returns false @Override public boolean filter(EndpointTrends t) throws Exception { - return aggregationProfileParser.checkServiceExistance(t.getService()); + + return aggregationProfileParser.checkServiceExistance(t.getService()); + } }
diff --git a/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/TopologyMetricFilter.java
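Since Flink's FilterFunction is a plain interface, the zero filter above can be exercised directly in a unit test; the tuple values here are hypothetical:

    ZeroStatusTrends zeroFilter = new ZeroStatusTrends();
    Tuple6<String, String, String, String, String, Integer> row =
            new Tuple6<>("GROUP_A", "webdav", "host1.example.org", "check_http", "CRITICAL", 3);
    boolean kept = zeroFilter.filter(row); // true, because the counter f5 is greater than 0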
b/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/TopologyMetricFilter.java index ba545ab6..1285902c 100644 --- a/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/TopologyMetricFilter.java +++ b/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/TopologyMetricFilter.java
@@ -38,10 +38,8 @@ public TopologyMetricFilter(MetricProfileManager metricProfileParser, EndpointGr public boolean filter(MetricData t) throws Exception { String avProfileName = this.aggregationProfileParser.getAvProfileItem().getName(); - ArrayList<String> groups = topologyEndpointParser.getGroup(aggregationProfileParser.getProfileGroupType(avProfileName).toUpperCase(), t.getHostname().toString(), t.getService().toString()); - //boolean hasGroup = false; - - for (String group : groups) { + ArrayList<String> groups = topologyEndpointParser.getGroupFull(aggregationProfileParser.getProfileGroupType(avProfileName).toUpperCase(), t.getHostname().toString(), t.getService().toString()); + for (String group : groups) { if (topologyGroupParser.checkSubGroup(group) && metricProfileParser.containsMetric(t.getService().toString(), t.getMetric().toString())) { return true; }
diff --git a/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcMetricFlipFlopTrends.java b/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcMetricFlipFlopTrends.java index 740226f6..029798b0 100644 --- a/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcMetricFlipFlopTrends.java +++ b/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcMetricFlipFlopTrends.java
@@ -6,13 +6,11 @@ * and open the template in the editor. */ import argo.avro.MetricData; -//import argo.pojos.Timeline; import argo.pojos.MetricTrends; import argo.profiles.AggregationProfileManager; import argo.profiles.EndpointGroupManager; import argo.profiles.GroupGroupManager; import argo.profiles.OperationsParser; -import argo.profiles.TopologyGroupParser; import argo.utils.Utils; import java.util.ArrayList; import java.util.TreeMap;
@@ -68,7 +66,8 @@ public void reduce(Iterable<MetricData> in, Collector<MetricTrends> out) throws String avProfileName = this.aggregationProfileParser.getAvProfileItem().getName(); // group = topologyEndpointParser.retrieveGroup(aggregationProfileParser.getProfileGroupType(avProfileName).toUpperCase(), md.getHostname().toString() + "-" + md.getService().toString()); - groups = topologyEndpointParser.getGroup(aggregationProfileParser.getProfileGroupType(avProfileName).toUpperCase(), md.getHostname().toString(), md.getService().toString()); + + groups = topologyEndpointParser.getGroupFull(aggregationProfileParser.getProfileGroupType(avProfileName).toUpperCase(), md.getHostname().toString(), md.getService().toString()); int st = operationsParser.getIntStatus(md.getStatus().toString()); timeStatusMap.put(Utils.convertStringtoDate(format, md.getTimestamp().toString()), st);
diff --git a/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcServiceFlipFlop.java b/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcServiceFlipFlop.java index da2e8860..aaa7f718 100644 --- a/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcServiceFlipFlop.java +++ b/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcServiceFlipFlop.java
@@ -31,6 +31,7 @@ public class CalcServiceFlipFlop implements GroupReduceFunction<EndpointTrends, private OperationsParser operationsParser; public CalcServiceFlipFlop(OperationsParser operationsParser,
AggregationProfileManager aggregationProfileParser) { + this.operationsParser = operationsParser; this.serviceFunctionsMap = aggregationProfileParser.retrieveServiceOperations(); @@ -40,7 +41,7 @@ public CalcServiceFlipFlop(OperationsParser operationsParser, AggregationProfile public void reduce(Iterable in, Collector< ServiceTrends> out) throws Exception { String group = null; String service = null; - //construct a timeline containing all the timestamps of each metric timeline + //construct a timeline containing all the timestamps of each metric timeline HashMap timelineList = new HashMap<>(); @@ -57,7 +58,6 @@ public void reduce(Iterable in, Collector< ServiceTrends> out) t int flipflops = timeline.calcStatusChanges(); if (group != null && service != null) { ServiceTrends serviceTrends = new ServiceTrends(group, service, timeline, flipflops); - System.out.println("group: " + group + " service: " + service + " flipflops: " + flipflops); out.collect(serviceTrends); } } diff --git a/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcStatusTrends.java b/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcStatusTrends.java index a040ff44..d5e02b48 100644 --- a/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcStatusTrends.java +++ b/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcStatusTrends.java @@ -56,8 +56,8 @@ public void reduce(Iterable in, Collector list; // the list of EndpointItems that are included in the profile data private ArrayList fList; // the list of the filter EndpointItems that correspond to the criteria - private HashMap> topologyEndpoint = new HashMap<>(); + + private Map> topologyGrouplist; + private Map> groupIndex; + private String defaultType = null; //* A EndpointItem class implements an object containing info of an group endpoint included in the topology - protected class EndpointItem implements Serializable { // the object + public class EndpointItem implements Serializable { // the object + String type; // type of group String group; // name of the group @@ -119,12 +125,60 @@ public boolean equals(Object obj) { return true; } + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public String getGroup() { + return group; + } + + public void setGroup(String group) { + this.group = group; + } + + public String getService() { + return service; + } + + public void setService(String service) { + this.service = service; + } + + public String getHostname() { + return hostname; + } + + public void setHostname(String hostname) { + this.hostname = hostname; + } + + public HashMap getTags() { + return tags; + } + + public void setTags(HashMap tags) { + this.tags = tags; + } + + @Override + public String toString() { + return type + "," + group + "," + service + "," + hostname; + } + } public EndpointGroupManager() { this.list = new ArrayList<>(); this.fList = new ArrayList<>(); + this.groupIndex = new HashMap>(); + this.topologyGrouplist = new HashMap<>(); + } /** @@ -262,7 +316,8 @@ public int count() { return this.fList.size(); } /** - * Clear the filtered fList and initialize with all the EndpointItems the list includes + * Clear the filtered fList and initialize with all the EndpointItems the + * list includes */ public void unfilter() { this.fList.clear(); @@ -270,12 +325,15 @@ public void unfilter() { this.fList.add(item); } } -/** - * Applies filter on the tags of the EndpointItems included in the list, based on a map of criteria - * 
each criteria containing a pair of the tags field name and the value it searches . e.g ("monitored","1") searches tags - * with the specific pair of criteria - * @param fTags a map of criteria - */ + + /** + * Applies filter on the tags of the EndpointItems included in the list, + * based on a map of criteria each criteria containing a pair of the tags + * field name and the value it searches . e.g ("monitored","1") searches + * tags with the specific pair of criteria + * + * @param fTags a map of criteria + */ public void filter(TreeMap fTags) { this.fList.clear(); boolean trim; @@ -369,7 +427,8 @@ public void loadAvro(File avroFile) throws IOException { // Insert data to list this.insert(type, group, service, hostname, tagMap); -// populateTopologyEndpoint(hostname, service, type, group); + this.insertTopologyGroup(type, group, service, hostname, tagMap); + defaultType = type; } // end of avro rows this.unfilter(); @@ -420,36 +479,27 @@ public void loadFromList(List egp) { // Insert data to list this.insert(type, group, service, hostname, tagMap); + this.insertTopologyGroup(type, group, service, hostname, tagMap); + defaultType = type; + // populateTopologyEndpoint(hostname, service, type, group); } this.unfilter(); } - -// private void populateTopologyEndpoint(String hostname, String service, String type, String group) { -// -// String topologyEndpointKey = hostname + "-" + service; -// -// HashMap endpMap = new HashMap(); -// if (topologyEndpoint.get(type) != null) { -// endpMap = topologyEndpoint.get(type); -// } -// -// endpMap.put(topologyEndpointKey, group); -// topologyEndpoint.put(type, endpMap); -// } - public void loadGroupEndpointProfile(JsonArray element) throws IOException { GroupEndpoint[] groupEndpoints = readJson(element); loadFromList(Arrays.asList(groupEndpoints)); } -/** - * reads from a JsonElement array and stores the necessary information to the - * GroupEndpoint objects and add them to the list + + /** + * reads from a JsonElement array and stores the necessary information to + * the GroupEndpoint objects and add them to the list * - * @param jElement , a JsonElement containing the topology group endpoint profiles data - * @return + * @param jElement , a JsonElement containing the topology group endpoint + * profiles data + * @return */ public GroupEndpoint[] readJson(JsonArray jElement) { List results = new ArrayList<>(); @@ -474,4 +524,100 @@ public GroupEndpoint[] readJson(JsonArray jElement) { return rArr; } + /** + * *********************************** + */ + public Set getEndpointSet() { + Set curItems = new HashSet(); + for (String groupKey : this.groupIndex.keySet()) { + ArrayList eList = this.groupIndex.get(groupKey); + for (EndpointItem item : eList) { + curItems.add(item.toString()); + } + } + return curItems; + } + + public ArrayList getGroupList() { + ArrayList results = new ArrayList(); + results.addAll(this.groupIndex.keySet()); + return results; + } + + public ArrayList compareToBeRemoved(EndpointGroupManager egp) { + + ArrayList results = new ArrayList(); + + Set curItems = this.getEndpointSet(); + Set futurItems = egp.getEndpointSet(); + + // lost items is cur items minus future set + curItems.removeAll(futurItems); + + results.addAll(curItems); + + return results; + } + + public int insertTopologyGroup(String type, String group, String service, String hostname, HashMap tags) { + EndpointItem itemNew = new EndpointItem(type, group, service, hostname, tags); + String key = type + "|" + hostname + "|" + service; + if 
(!topologyGrouplist.containsKey(key)) { + Map subList = new HashMap(); + subList.put(group, itemNew); + topologyGrouplist.put(key, subList); + + } else { + Map subList = topologyGrouplist.get(key); + subList.put(group, itemNew); + } + // Add item to the secondary group index + if (!groupIndex.containsKey(group)) { + groupIndex.put(group, new ArrayList(Arrays.asList(itemNew))); + } else { + groupIndex.get(group).add(itemNew); + } + + return 0; // All good + } + + public boolean checkEndpointGroup(String hostname, String service) { + + String key = defaultType + "|" + hostname + "|" + service; + return topologyGrouplist.containsKey(key); + } + + public ArrayList getGroupFull(String type, String hostname, String service) { + + String key = type + "|" + hostname + "|" + service; + Map sublist = topologyGrouplist.get(key); + if (sublist != null) { + return new ArrayList(topologyGrouplist.get(key).keySet()); + } + + return new ArrayList(); + + } + + public Iterator getGroupIter(String group) { + ArrayList list = groupIndex.get(group); + if (list != null) { + return list.iterator(); + } + + return null; + } + + public ArrayList getGroup(String hostname, String service) { + + String key = defaultType + "|" + hostname + "|" + service; + Map sublist = topologyGrouplist.get(key); + if (sublist != null) { + return new ArrayList(topologyGrouplist.get(key).keySet()); + } + + return new ArrayList(); + + } + } diff --git a/flink_jobs/status_trends/src/main/java/argo/profiles/ProfilesLoader.java b/flink_jobs/status_trends/src/main/java/argo/profiles/ProfilesLoader.java index bb9f5173..17e2ec4b 100644 --- a/flink_jobs/status_trends/src/main/java/argo/profiles/ProfilesLoader.java +++ b/flink_jobs/status_trends/src/main/java/argo/profiles/ProfilesLoader.java @@ -18,14 +18,15 @@ */ public class ProfilesLoader { - private ReportManager reportParser; + private ReportManager reportParser; private EndpointGroupManager topologyEndpointParser; private MetricProfileManager metricProfileParser; private OperationsParser operationParser; private AggregationProfileManager aggregationProfileParser; + private GroupGroupManager topolGroupParser; private String aggregationId; @@ -36,8 +37,7 @@ public ProfilesLoader() { } public ProfilesLoader(ParameterTool params) throws IOException, ParseException { - - JsonElement reportProfileJson = RequestManager.reportProfileRequest(params.getRequired("apiUri"), params.getRequired("key"), params.get("proxy"), params.getRequired("reportId")); + JsonElement reportProfileJson = RequestManager.reportProfileRequest(params.getRequired("apiUri"), params.getRequired("key"), params.get("proxy"), params.getRequired("reportId")); reportParser = new ReportManager(); reportParser.readJson(reportProfileJson); @@ -63,6 +63,7 @@ public ProfilesLoader(ParameterTool params) throws IOException, ParseException { aggregationProfileParser.readJson(aggregationProfileJson); JsonArray endpointGroupProfileJson = RequestManager.endpointGroupProfileRequest(params.getRequired("apiUri"), params.getRequired("key"), params.get("proxy"), reportName, params.get("date")); + topologyEndpointParser = new EndpointGroupManager(); topologyEndpointParser.loadGroupEndpointProfile(endpointGroupProfileJson); @@ -72,6 +73,7 @@ public ProfilesLoader(ParameterTool params) throws IOException, ParseException { topolGroupParser.loadGroupGroupProfile(groupGroupProfileJson); } + public ReportManager getReportParser() { return reportParser; } diff --git a/flink_jobs/status_trends/src/main/java/argo/utils/RequestManager.java 
b/flink_jobs/status_trends/src/main/java/argo/utils/RequestManager.java index 85b08f6f..774221f6 100644 --- a/flink_jobs/status_trends/src/main/java/argo/utils/RequestManager.java +++ b/flink_jobs/status_trends/src/main/java/argo/utils/RequestManager.java @@ -110,10 +110,6 @@ public static JsonElement aggregationProfileRequest(String apiUri, String aggreg return loadProfile(uri, key, proxy).get(0); } - - - - public static JsonArray endpointGroupProfileRequest(String apiUri, String key, String proxy, String reportname, String dateStr) throws IOException, ParseException { String uri = apiUri + "/topology/endpoints/by_report" + "/" + reportname; @@ -136,23 +132,15 @@ public static JsonArray groupGroupProfileRequest(String apiUri, String key, Stri public static JsonElement reportProfileRequest(String apiUri, String key, String proxy, String reportId) throws IOException, ParseException { - // String uri = apiUri + "/reports/"; -// if (dateStr != null) { -// uri = uri + "?date=" + dateStr; -// } String uri = apiUri + "/reports/" + reportId; return loadProfile(uri, key, proxy).get(0); } - public static JsonArray loadProfile(String uri, String key, String proxy) throws IOException, org.json.simple.parser.ParseException { JsonElement jsonElement = RequestManager.callRequest(uri, key, proxy); JsonObject jsonObj = jsonElement.getAsJsonObject(); JsonArray dataObj = jsonObj.getAsJsonArray("data"); - - //JsonElement dataElement = dataObj.get(0); - return dataObj; } diff --git a/flink_jobs/status_trends/src/test/java/timelines/TimelineUtils.java b/flink_jobs/status_trends/src/main/java/timelines/TimelineUtils.java similarity index 99% rename from flink_jobs/status_trends/src/test/java/timelines/TimelineUtils.java rename to flink_jobs/status_trends/src/main/java/timelines/TimelineUtils.java index a281c684..f0a6ee24 100644 --- a/flink_jobs/status_trends/src/test/java/timelines/TimelineUtils.java +++ b/flink_jobs/status_trends/src/main/java/timelines/TimelineUtils.java @@ -22,8 +22,6 @@ import org.json.simple.parser.ParseException; /** - * - * @author cthermolia */ public class TimelineUtils { From 3ec1d6175da0a3e8087b5bef3a4d0e5fa0774025 Mon Sep 17 00:00:00 2001 From: cthermolia-grnet Date: Tue, 3 Aug 2021 13:15:47 +0300 Subject: [PATCH 007/112] ARGO-3211 Compute Status Trends for upper levels --- .../src/main/java/timelines/Timeline.java | 24 ++ .../src/test/java/timelines/TimelineTest.java | 16 + .../java/argo/batch/BatchStatusTrends.java | 2 +- .../argo/batch/BatchTrendsCalculations.java | 309 ++++++++++++++++++ .../java/argo/batch/MongoTrendsOutput.java | 22 +- .../functions/calctimelines/StatusFilter.java | 7 +- .../CalcEndpointFlipFlopTrends.java | 2 + .../calctrends/CalcMetricFlipFlopTrends.java | 22 +- .../calctrends/CalcStatusTrends.java | 4 +- .../main/java/argo/pojos/EndpointTrends.java | 5 +- .../main/java/argo/pojos/MetricTrends.java | 43 ++- .../status/trends/EndpointTrendsCounter.java | 65 ++++ .../status/trends/GroupTrendsCounter.java | 61 ++++ .../status/trends/MetricTrendsCounter.java | 53 +++ .../status/trends/ServiceTrendsCounter.java | 65 ++++ .../src/main/java/timelines/Timeline.java | 25 ++ 16 files changed, 708 insertions(+), 17 deletions(-) create mode 100644 flink_jobs/status_trends/src/main/java/argo/batch/BatchTrendsCalculations.java create mode 100644 flink_jobs/status_trends/src/main/java/argo/status/trends/EndpointTrendsCounter.java create mode 100644 flink_jobs/status_trends/src/main/java/argo/status/trends/GroupTrendsCounter.java create mode 100644 
flink_jobs/status_trends/src/main/java/argo/status/trends/MetricTrendsCounter.java create mode 100644 flink_jobs/status_trends/src/main/java/argo/status/trends/ServiceTrendsCounter.java
diff --git a/flink_jobs/Timelines/src/main/java/timelines/Timeline.java b/flink_jobs/Timelines/src/main/java/timelines/Timeline.java index 576a5b48..527809b3 100644 --- a/flink_jobs/Timelines/src/main/java/timelines/Timeline.java +++ b/flink_jobs/Timelines/src/main/java/timelines/Timeline.java
@@ -482,5 +482,29 @@ public int opInt(int[][][] truthTable, int op, int a, int b) { return result; } + + /** + * Calculates the number of times a specific status appears on the timeline + * @param status the status whose appearances are counted + * @return the number of times the specific status appears on the timeline + */ + public int countStatusAppearances(int status) { + int count = 0; + for (Map.Entry<DateTime, Integer> entry : this.samples.entrySet()) { + if (status == entry.getValue()) { + count++; + } + } + return count; + } + }
diff --git a/flink_jobs/Timelines/src/test/java/timelines/TimelineTest.java b/flink_jobs/Timelines/src/test/java/timelines/TimelineTest.java index 747b9aa9..8d3620f5 100644 --- a/flink_jobs/Timelines/src/test/java/timelines/TimelineTest.java +++ b/flink_jobs/Timelines/src/test/java/timelines/TimelineTest.java
@@ -496,4 +496,20 @@ private TreeMap<DateTime, Integer> createMerged() throws ParseException { return map; } + /** + * Test of countStatusAppearances method, of class Timeline. + */ + @Test + public void testCountStatusAppearances() throws ParseException { + System.out.println("countStatusAppearances"); + int status = 0; + Timeline instance = new Timeline(); + instance.insertDateTimeStamps(createTimestampList()); + int expResult = 1; + int result = instance.countStatusAppearances(status); + assertEquals(expResult, result);
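The counting added above is a single pass over the sample map; an equivalent standalone sketch, assuming the samples live in a TreeMap<DateTime, Integer> as in Timeline:

    int count = 0;
    for (Map.Entry<DateTime, Integer> entry : samples.entrySet()) {
        if (entry.getValue() == status) { // status: the integer status code being counted
            count++;
        }
    }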
+ } + }
diff --git a/flink_jobs/status_trends/src/main/java/argo/batch/BatchStatusTrends.java b/flink_jobs/status_trends/src/main/java/argo/batch/BatchStatusTrends.java index e6d0c328..b90fcdc3 100644 --- a/flink_jobs/status_trends/src/main/java/argo/batch/BatchStatusTrends.java +++ b/flink_jobs/status_trends/src/main/java/argo/batch/BatchStatusTrends.java
@@ -126,7 +126,7 @@ private static void filterByStatusAndWrite(String uri, DataSet<Tuple6<String, String, String, String, String, Integer>> data, String status) { - MongoTrendsOutput metricMongoOut = new MongoTrendsOutput(mongoUri, uri, MongoTrendsOutput.TrendsType.TRENDS_STATUS, reportId, profilesDateStr, clearMongo); + MongoTrendsOutput metricMongoOut = new MongoTrendsOutput(mongoUri, uri, MongoTrendsOutput.TrendsType.TRENDS_STATUS_METRIC, reportId, profilesDateStr, clearMongo); DataSet<Trends> trends = noZeroStatusTrends.map(new MapFunction<Tuple6<String, String, String, String, String, Integer>, Trends>() {
diff --git a/flink_jobs/status_trends/src/main/java/argo/batch/BatchTrendsCalculations.java b/flink_jobs/status_trends/src/main/java/argo/batch/BatchTrendsCalculations.java new file mode 100644 index 00000000..3b727c00 --- /dev/null +++ b/flink_jobs/status_trends/src/main/java/argo/batch/BatchTrendsCalculations.java
@@ -0,0 +1,309 @@ +package argo.batch; + +import argo.avro.MetricData; +import argo.filter.zero.flipflops.ZeroEndpointTrendsFilter; +import argo.filter.zero.flipflops.ZeroGroupTrendsFilter; +import argo.filter.zero.flipflops.ZeroMetricTrendsFilter; +import argo.filter.zero.flipflops.ZeroServiceTrendsFilter; +import argo.functions.calctimelines.CalcLastTimeStatus; +import argo.functions.calctimelines.MapServices; +import argo.functions.calctimelines.ServiceFilter; +import argo.functions.calctimelines.StatusFilter; +import argo.functions.calctimelines.TopologyMetricFilter; +import argo.functions.calctrends.CalcEndpointFlipFlopTrends; +import argo.functions.calctrends.CalcGroupFlipFlop; +import argo.functions.calctrends.CalcGroupFunctionFlipFlop; +import argo.functions.calctrends.CalcMetricFlipFlopTrends; +import argo.functions.calctrends.CalcServiceFlipFlop; +import argo.pojos.EndpointTrends; +import argo.pojos.GroupFunctionTrends; +import argo.pojos.GroupTrends; +import argo.pojos.MetricTrends; +import argo.pojos.ServiceTrends; +import argo.profiles.ProfilesLoader; +import argo.status.trends.EndpointTrendsCounter; +import argo.status.trends.GroupTrendsCounter; +import argo.status.trends.MetricTrendsCounter; +import argo.status.trends.ServiceTrendsCounter; +import argo.utils.Utils; +import de.javakaffee.kryoserializers.jodatime.JodaDateTimeSerializer; +import org.apache.flink.api.common.functions.MapFunction; +import org.apache.flink.api.common.operators.Order; +import org.apache.flink.api.java.DataSet; +import org.apache.flink.api.java.ExecutionEnvironment; +import org.apache.flink.api.java.io.AvroInputFormat; +import org.apache.flink.api.java.tuple.Tuple6; +import org.apache.flink.api.java.utils.ParameterTool; +import org.apache.flink.core.fs.Path; +import org.joda.time.DateTime; + +/** + * Implements an ARGO Trends Job in flink, to count the number of status + * changes and the number of appearances of the statuses CRITICAL, WARNING and UNKNOWN + * that occur on all levels of the topology hierarchy + * + * Submit the job in a flink cluster using the following parameters: + * --date: the date for which the job runs and needs to return results, e.g. yyyy-MM-dd + * --yesterdayData: path to the metric data of the previous day (for hdfs use: hdfs://namenode:port/path/to/file) + * --todayData: path to the metric data of the current day (for hdfs use: hdfs://namenode:port/path/to/file) + * --mongoUri: URI of the MongoDB destination, e.g. mongodb://localhost:27017/database + * --apiUri: ARGO web api to connect to, e.g. msg.example.com + * --key: ARGO web api token + * --reportId: the id of the report the job will need to process + * Optional: + * --clearMongo: option to clear the mongo db before saving the new result or not, e.g. true + * --N: the number of results the job will provide, if the parameter exists, e.g. 10 + * + */ +public class BatchTrendsCalculations { + + private static DataSet<MetricData> yesterdayData; + private static DataSet<MetricData> todayData; + private static Integer rankNum; + private static final String groupTrends = "flipflop_trends_endpoint_groups"; + private static final String metricTrends = "flipflop_trends_metrics"; + private static final String endpointTrends = "flipflop_trends_endpoints"; + private static final String serviceTrends = "flipflop_trends_services"; + private static String mongoUri; + private static ProfilesLoader profilesLoader; + private static DateTime profilesDate; + private static String format = "yyyy-MM-dd"; + private static String reportId; + private static boolean clearMongo = false; + private static String profilesDateStr; + private static final String statusTrendsMetricCol = "status_trends_metrics"; + private static final String statusTrendsEndpointCol = "status_trends_endpoints"; + private static final String statusTrendsServiceCol = "status_trends_services"; + private static final String statusTrendsGroupCol = "status_trends_groups"; + + public static void main(String[] args) throws Exception { + // set up the batch execution environment + final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment(); + env.addDefaultKryoSerializer(DateTime.class, JodaDateTimeSerializer.class); + ParameterTool params = ParameterTool.fromArgs(args); + //check if all required parameters exist and if not exit program + if (!Utils.checkParameters(params, "yesterdayData", "todayData", "mongoUri", "apiUri", "key", "date", "reportId")) { + System.exit(0); + } + + if (params.get("clearMongo") != null && params.getBoolean("clearMongo") == true) { + clearMongo = true; + } + reportId = params.getRequired("reportId"); + profilesDate = Utils.convertStringtoDate(format, params.getRequired("date")); + profilesDateStr = Utils.convertDateToString(format, profilesDate); + + if (params.get("N") != null) { + rankNum = params.getInt("N"); + } + mongoUri = params.get("mongoUri"); + profilesLoader = new ProfilesLoader(params); + yesterdayData = readInputData(env, params, "yesterdayData"); + todayData = readInputData(env, params, "todayData"); + + // calculate on data + calcFlipFlops(); + +// execute program + StringBuilder jobTitleSB = new StringBuilder(); + jobTitleSB.append("Calculation of Flip Flops & Status Trends for: "); + jobTitleSB.append(profilesLoader.getReportParser().getTenant()); +
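For reference, a sketch of an argument list that satisfies the parameter checks in main(); every path, id and token below is a placeholder:

    String[] args = {
        "--date", "2021-08-03",
        "--yesterdayData", "hdfs://namenode:9000/argo/mdata_2021-08-02.avro",
        "--todayData", "hdfs://namenode:9000/argo/mdata_2021-08-03.avro",
        "--mongoUri", "mongodb://localhost:27017/argo",
        "--apiUri", "https://api.example.com/api/v2",
        "--key", "API_KEY",
        "--reportId", "report-uuid",
        "--clearMongo", "true",   // optional
        "--N", "10"               // optional
    };
    ParameterTool params = ParameterTool.fromArgs(args);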
jobTitleSB.append("/"); + jobTitleSB.append(profilesLoader.getReportParser().getReport()); + jobTitleSB.append("/"); + jobTitleSB.append(profilesDate); + env.execute(jobTitleSB.toString()); + + } + +// filter yesterday's data, exclude the entries not contained in the topology and metric profile data and get the last timestamp data for each service endpoint metric +// filter today's data, exclude the entries not contained in the topology and metric profile data, union with yesterday's data and calculate status changes for each service endpoint metric +// rank results +// private static DataSet calcFlipFlops() { + private static void calcFlipFlops() { + + DataSet<MetricData> filteredYesterdayData = yesterdayData.filter(new TopologyMetricFilter(profilesLoader.getMetricProfileParser(), profilesLoader.getTopologyEndpointParser(), profilesLoader.getTopolGroupParser(), profilesLoader.getAggregationProfileParser())).groupBy("hostname", "service", "metric").reduceGroup(new CalcLastTimeStatus()); + DataSet<MetricData> filteredTodayData = todayData.filter(new TopologyMetricFilter(profilesLoader.getMetricProfileParser(), profilesLoader.getTopologyEndpointParser(), profilesLoader.getTopolGroupParser(), profilesLoader.getAggregationProfileParser())); + + //group data by service endpoint metric and return for each group the necessary info and a treemap containing timestamps and status + DataSet<MetricTrends> serviceEndpointMetricGroupData = filteredTodayData.union(filteredYesterdayData).groupBy("hostname", "service", "metric").reduceGroup(new CalcMetricFlipFlopTrends(profilesLoader.getOperationParser(), profilesLoader.getTopologyEndpointParser(), profilesLoader.getTopolGroupParser(), profilesLoader.getAggregationProfileParser(), profilesDate)); + + DataSet<MetricTrends> noZeroServiceEndpointMetricGroupData = serviceEndpointMetricGroupData.filter(new ZeroMetricTrendsFilter()); + if (rankNum != null) { //sort and rank data + noZeroServiceEndpointMetricGroupData = noZeroServiceEndpointMetricGroupData.sortPartition("flipflops", Order.DESCENDING).setParallelism(1).first(rankNum); + } else { + noZeroServiceEndpointMetricGroupData = noZeroServiceEndpointMetricGroupData.sortPartition("flipflops", Order.DESCENDING).setParallelism(1); + } + + MongoTrendsOutput metricMongoOut = new MongoTrendsOutput(mongoUri, metricTrends, MongoTrendsOutput.TrendsType.TRENDS_METRIC, reportId, profilesDateStr, clearMongo); + + DataSet<Trends> trends = noZeroServiceEndpointMetricGroupData.map(new MapFunction<MetricTrends, Trends>() { + + @Override + public Trends map(MetricTrends in) throws Exception { + return new Trends(in.getGroup(), in.getService(), in.getEndpoint(), in.getMetric(), in.getFlipflops()); + } + }); + trends.output(metricMongoOut); + //flatMap dataset to tuples and count the appearances of each status type on the timeline + DataSet<Tuple6<String, String, String, String, String, Integer>> metricStatusTrendsData = serviceEndpointMetricGroupData.flatMap(new MetricTrendsCounter(profilesLoader.getOperationParser())); + //filter dataset for each status type and write to mongo db + filterByStatusAndWriteMongo(MongoTrendsOutput.TrendsType.TRENDS_STATUS_METRIC, statusTrendsMetricCol, metricStatusTrendsData, "critical"); + filterByStatusAndWriteMongo(MongoTrendsOutput.TrendsType.TRENDS_STATUS_METRIC, statusTrendsMetricCol, metricStatusTrendsData, "warning"); + filterByStatusAndWriteMongo(MongoTrendsOutput.TrendsType.TRENDS_STATUS_METRIC, statusTrendsMetricCol, metricStatusTrendsData, "unknown"); + + /** + * ***************************************************************************************************************** + */ + //group data by service endpoint and count flip flops +
DataSet<EndpointTrends> serviceEndpointGroupData = serviceEndpointMetricGroupData.groupBy("group", "endpoint", "service").reduceGroup(new CalcEndpointFlipFlopTrends(profilesLoader.getAggregationProfileParser().getMetricOpByProfile(), profilesLoader.getOperationParser())); + DataSet<EndpointTrends> noZeroserviceEndpointGroupData = serviceEndpointGroupData.filter(new ZeroEndpointTrendsFilter()); + + if (rankNum != null) { //sort and rank data + noZeroserviceEndpointGroupData = noZeroserviceEndpointGroupData.sortPartition("flipflops", Order.DESCENDING).setParallelism(1).first(rankNum); + } else { + noZeroserviceEndpointGroupData = noZeroserviceEndpointGroupData.sortPartition("flipflops", Order.DESCENDING).setParallelism(1); + } + metricMongoOut = new MongoTrendsOutput(mongoUri, endpointTrends, MongoTrendsOutput.TrendsType.TRENDS_ENDPOINT, reportId, profilesDateStr, clearMongo); + + trends = noZeroserviceEndpointGroupData.map(new MapFunction<EndpointTrends, Trends>() { + + @Override + public Trends map(EndpointTrends in) throws Exception { + return new Trends(in.getGroup(), in.getService(), in.getEndpoint(), in.getFlipflops()); + } + }); + trends.output(metricMongoOut); + //flatMap dataset to tuples and count the appearances of each status type on the timeline + DataSet<Tuple6<String, String, String, String, String, Integer>> endpointStatusTrendsData = serviceEndpointGroupData.flatMap(new EndpointTrendsCounter(profilesLoader.getOperationParser())); + //filter dataset for each status type and write to mongo db + filterByStatusAndWriteMongo(MongoTrendsOutput.TrendsType.TRENDS_STATUS_ENDPOINT, statusTrendsEndpointCol, endpointStatusTrendsData, "critical"); + filterByStatusAndWriteMongo(MongoTrendsOutput.TrendsType.TRENDS_STATUS_ENDPOINT, statusTrendsEndpointCol, endpointStatusTrendsData, "warning"); + filterByStatusAndWriteMongo(MongoTrendsOutput.TrendsType.TRENDS_STATUS_ENDPOINT, statusTrendsEndpointCol, endpointStatusTrendsData, "unknown"); + + /** + * ************************************************************************************************************************** + */ + //group data by service and count flip flops + DataSet<ServiceTrends> serviceGroupData = serviceEndpointGroupData.filter(new ServiceFilter(profilesLoader.getAggregationProfileParser())).groupBy("group", "service").reduceGroup(new CalcServiceFlipFlop(profilesLoader.getOperationParser(), profilesLoader.getAggregationProfileParser())); + DataSet<ServiceTrends> noZeroserviceGroupData = serviceGroupData.filter(new ZeroServiceTrendsFilter()); + + if (rankNum != null) { //sort and rank data + noZeroserviceGroupData = noZeroserviceGroupData.sortPartition("flipflops", Order.DESCENDING).setParallelism(1).first(rankNum); + } else { + noZeroserviceGroupData = noZeroserviceGroupData.sortPartition("flipflops", Order.DESCENDING).setParallelism(1); + } + metricMongoOut = new MongoTrendsOutput(mongoUri, serviceTrends, MongoTrendsOutput.TrendsType.TRENDS_SERVICE, reportId, profilesDateStr, clearMongo); + + trends = noZeroserviceGroupData.map(new MapFunction<ServiceTrends, Trends>() { + + @Override + public Trends map(ServiceTrends in) throws Exception { + return new Trends(in.getGroup(), in.getService(), in.getFlipflops()); + } + }); + trends.output(metricMongoOut); + //flatMap dataset to tuples and count the appearances of each status type on the timeline + DataSet<Tuple6<String, String, String, String, String, Integer>> serviceStatusTrendsData = serviceGroupData.flatMap(new ServiceTrendsCounter(profilesLoader.getOperationParser())); + //filter dataset for each status type and write to mongo db + filterByStatusAndWriteMongo(MongoTrendsOutput.TrendsType.TRENDS_STATUS_SERVICE, statusTrendsServiceCol, serviceStatusTrendsData, "critical"); +
filterByStatusAndWriteMongo(MongoTrendsOutput.TrendsType.TRENDS_STATUS_SERVICE, statusTrendsServiceCol, serviceStatusTrendsData, "warning"); + filterByStatusAndWriteMongo(MongoTrendsOutput.TrendsType.TRENDS_STATUS_SERVICE, statusTrendsServiceCol, serviceStatusTrendsData, "unknown"); + + /** + * **************************************************************************************************** + */ +//flat map data to add the function as described in the aggregation profile groups + serviceGroupData = serviceGroupData.flatMap(new MapServices(profilesLoader.getAggregationProfileParser())); + + //group data by group, function and count flip flops + DataSet<GroupFunctionTrends> groupFunction = serviceGroupData.groupBy("group", "function").reduceGroup(new CalcGroupFunctionFlipFlop(profilesLoader.getOperationParser(), profilesLoader.getAggregationProfileParser())); + // DataSet<GroupFunctionTrends> noZerogroupFunction = groupFunction.filter(new ZeroGroupFunctionFilter()); + + //group data by group and count flip flops + DataSet<GroupTrends> groupData = groupFunction.groupBy("group").reduceGroup(new CalcGroupFlipFlop(profilesLoader.getOperationParser(), profilesLoader.getAggregationProfileParser())); + DataSet<GroupTrends> noZerogroupData = groupData.filter(new ZeroGroupTrendsFilter()); + + if (rankNum != null) { //sort and rank data + noZerogroupData = noZerogroupData.sortPartition("flipflops", Order.DESCENDING).setParallelism(1).first(rankNum); + } else { + noZerogroupData = noZerogroupData.sortPartition("flipflops", Order.DESCENDING).setParallelism(1); + } + + metricMongoOut = new MongoTrendsOutput(mongoUri, groupTrends, MongoTrendsOutput.TrendsType.TRENDS_GROUP, reportId, profilesDateStr, clearMongo); + + trends = noZerogroupData.map(new MapFunction<GroupTrends, Trends>() { + + @Override + public Trends map(GroupTrends in) throws Exception { + return new Trends(in.getGroup(), in.getFlipflops()); + } + }); + trends.output(metricMongoOut); + //flatMap dataset to tuples and count the appearances of each status type on the timeline + DataSet<Tuple6<String, String, String, String, String, Integer>> groupStatusTrendsData = groupData.flatMap(new GroupTrendsCounter(profilesLoader.getOperationParser())); + //filter dataset for each status type and write to mongo db + filterByStatusAndWriteMongo(MongoTrendsOutput.TrendsType.TRENDS_STATUS_GROUP, statusTrendsGroupCol, groupStatusTrendsData, "critical"); + filterByStatusAndWriteMongo(MongoTrendsOutput.TrendsType.TRENDS_STATUS_GROUP, statusTrendsGroupCol, groupStatusTrendsData, "warning"); + filterByStatusAndWriteMongo(MongoTrendsOutput.TrendsType.TRENDS_STATUS_GROUP, statusTrendsGroupCol, groupStatusTrendsData, "unknown"); + + } + + //read input from file + private static DataSet<MetricData> readInputData(ExecutionEnvironment env, ParameterTool params, String path) { + DataSet<MetricData> inputData; + Path input = new Path(params.getRequired(path)); + + AvroInputFormat<MetricData> inputAvroFormat = new AvroInputFormat<>(input, MetricData.class); + inputData = env.createInput(inputAvroFormat); + return inputData; + } + + //filters the dataset by status type, keeps rows whose status appearances counter is > 0 and writes the result to mongo db + private static void filterByStatusAndWriteMongo(MongoTrendsOutput.TrendsType mongoTrendsType, String uri, DataSet<Tuple6<String, String, String, String, String, Integer>> data, String status) { + + DataSet<Tuple6<String, String, String, String, String, Integer>> filteredData = data.filter(new StatusFilter(status)); //filter dataset by status type and status appearances > 0 + + if (rankNum != null) { + filteredData = filteredData.sortPartition(5, Order.DESCENDING).setParallelism(1).first(rankNum); + } else { + filteredData = filteredData.sortPartition(5, Order.DESCENDING).setParallelism(1); + } + writeStatusTrends(filteredData, uri,
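The sort-then-take-top-N sequence above repeats for every topology level; it could be folded into one helper, sketched here under the assumption that the ranked POJOs expose a sortable field such as "flipflops" (for tuples a position like "f5" would be passed instead):

    private static <T> DataSet<T> rank(DataSet<T> data, String field, Integer topN) {
        DataSet<T> sorted = data.sortPartition(field, Order.DESCENDING).setParallelism(1);
        return (topN != null) ? sorted.first(topN) : sorted;
    }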
mongoTrendsType, data, status); //write to mongo db + } + + // write status trends to mongo db + private static void writeStatusTrends(DataSet<Tuple6<String, String, String, String, String, Integer>> outputData, String uri, final MongoTrendsOutput.TrendsType mongoCase, DataSet<Tuple6<String, String, String, String, String, Integer>> data, String status) { + + //MongoTrendsOutput.TrendsType.TRENDS_STATUS_ENDPOINT + MongoTrendsOutput metricMongoOut = new MongoTrendsOutput(mongoUri, uri, mongoCase, reportId, profilesDateStr, clearMongo); + + DataSet<Trends> trends = outputData.map(new MapFunction<Tuple6<String, String, String, String, String, Integer>, Trends>() { + + @Override + public Trends map(Tuple6<String, String, String, String, String, Integer> in) throws Exception { + switch (mongoCase) { + case TRENDS_STATUS_METRIC: + return new Trends(in.f0.toString(), in.f1.toString(), in.f2.toString(), in.f3.toString(), in.f4.toString(), in.f5); + case TRENDS_STATUS_ENDPOINT: + return new Trends(in.f0.toString(), in.f1.toString(), in.f2.toString(), null, in.f4.toString(), in.f5); + case TRENDS_STATUS_SERVICE: + return new Trends(in.f0.toString(), in.f1.toString(), null, null, in.f4.toString(), in.f5); + case TRENDS_STATUS_GROUP: + return new Trends(in.f0.toString(), null, null, null, in.f4.toString(), in.f5); + default: + return null; + } + } + }); + trends.output(metricMongoOut); + } + +}
diff --git a/flink_jobs/status_trends/src/main/java/argo/batch/MongoTrendsOutput.java b/flink_jobs/status_trends/src/main/java/argo/batch/MongoTrendsOutput.java index 9ba6761c..8bf427b7 100644 --- a/flink_jobs/status_trends/src/main/java/argo/batch/MongoTrendsOutput.java +++ b/flink_jobs/status_trends/src/main/java/argo/batch/MongoTrendsOutput.java
@@ -20,7 +20,7 @@ public class MongoTrendsOutput implements OutputFormat<Trends> { // Select the type of status input public enum TrendsType { - TRENDS_STATUS, TRENDS_METRIC, TRENDS_ENDPOINT, TRENDS_SERVICE, TRENDS_GROUP + TRENDS_STATUS_METRIC, TRENDS_STATUS_ENDPOINT, TRENDS_STATUS_SERVICE, TRENDS_STATUS_GROUP, TRENDS_METRIC, TRENDS_ENDPOINT, TRENDS_SERVICE, TRENDS_GROUP }
@@ -117,7 +117,7 @@ private Document prepDoc(Trends record) { doc.append("metric", record.getMetric()); doc.append("flipflop", record.getFlipflop()); break; - case TRENDS_STATUS: + case TRENDS_STATUS_METRIC: doc.append("group", record.getGroup()); doc.append("service", record.getService()); doc.append("endpoint", record.getEndpoint());
@@ -125,6 +125,24 @@ doc.append("metric", record.getMetric()); doc.append("status", record.getStatus()); doc.append("trends", record.getTrends()); break; + case TRENDS_STATUS_ENDPOINT: + doc.append("group", record.getGroup()); + doc.append("service", record.getService()); + doc.append("endpoint", record.getEndpoint()); + doc.append("status", record.getStatus()); + doc.append("trends", record.getTrends()); + break; + case TRENDS_STATUS_SERVICE: + doc.append("group", record.getGroup()); + doc.append("service", record.getService()); + doc.append("status", record.getStatus()); + doc.append("trends", record.getTrends()); + break; + case TRENDS_STATUS_GROUP: + doc.append("group", record.getGroup()); + doc.append("status", record.getStatus()); + doc.append("trends", record.getTrends()); + break; default: break; }
diff --git a/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/StatusFilter.java b/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/StatusFilter.java index 921d78c7..0dd20987 100644 --- a/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/StatusFilter.java +++
b/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/StatusFilter.java
@@ -11,7 +11,8 @@ import org.slf4j.LoggerFactory; /** - * StatusFilter, filters data by status + + * StatusFilter, filters data by status and status appearances */ public class StatusFilter implements FilterFunction<Tuple6<String, String, String, String, String, Integer>> {
@@ -21,12 +22,12 @@ public class StatusFilter implements FilterFunction<Tuple6<String, String, String, String, String, Integer>> { //if the status field value in the Tuple equals the given status and the status appearances counter is > 0 returns true, else returns false @Override public boolean filter(Tuple6<String, String, String, String, String, Integer> t) throws Exception { - if (t.f4.toString().equalsIgnoreCase(status)) { + if (t.f4.toString().equalsIgnoreCase(status) && t.f5 > 0) { return true; } return false;
diff --git a/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcEndpointFlipFlopTrends.java b/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcEndpointFlipFlopTrends.java index ddb200da..1b419ca6 100644 --- a/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcEndpointFlipFlopTrends.java +++ b/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcEndpointFlipFlopTrends.java
@@ -58,6 +58,7 @@ public void reduce(Iterable<MetricTrends> in, Collector<EndpointTrends> out) th Timeline timeline = time.getTimeline(); timelinelist.put(time.getMetric(), timeline); + }
// merge the timelines into one timeline , @@ -68,6 +69,7 @@ public void reduce(Iterable<MetricTrends> in, Collector<EndpointTrends> out) th Timeline mergedTimeline = timelineAggregator.getOutput(); //collect all timelines that correspond to the group service endpoint, merge them in order to create one timeline Integer flipflops = mergedTimeline.calcStatusChanges(); //calculate flip flops on the concluded merged timeline + if (group != null && service != null && hostname != null) { EndpointTrends endpointTrends = new EndpointTrends(group, service, hostname, mergedTimeline, flipflops); out.collect(endpointTrends);
diff --git a/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcMetricFlipFlopTrends.java b/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcMetricFlipFlopTrends.java index 029798b0..bdb73a5d 100644 --- a/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcMetricFlipFlopTrends.java +++ b/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcMetricFlipFlopTrends.java
@@ -29,7 +29,6 @@ public class CalcMetricFlipFlopTrends implements GroupReduceFunction<MetricData, MetricTrends> public void reduce(Iterable<MetricData> in, Collector<MetricTrends> out) throws String hostname = null; String service = null; String metric = null; + String status = null; + int criticalSum = 0, warningSum = 0, unknownSum = 0; +
if (status.equalsIgnoreCase("warning")) { + warningSum++; + } else if (status.equalsIgnoreCase("unknown")) { + unknownSum++; + } + } Timeline timeline = new Timeline(); @@ -78,14 +92,16 @@ public void reduce(Iterable in, Collector out) throws timeline.replacePreviousDateStatus(date, new ArrayList<>(operationsParser.getStates().keySet()));//handle the first timestamp to contain the previous days timestamp status if necessary and the last timestamp to contain the status of the last timelines's entry Integer flipflop = timeline.calcStatusChanges(); + if (service != null && hostname != null && metric != null) { for (String group : groups) { if (topologyGroupParser.checkSubGroup(group)) { - MetricTrends metricTrends = new MetricTrends(group, service, hostname, metric, timeline, flipflop); + MetricTrends metricTrends = new MetricTrends(group, service, hostname, metric, timeline, flipflop,criticalSum, warningSum,unknownSum ); out.collect(metricTrends); } } + } } diff --git a/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcStatusTrends.java b/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcStatusTrends.java index d5e02b48..a9ff730d 100644 --- a/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcStatusTrends.java +++ b/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcStatusTrends.java @@ -53,12 +53,10 @@ public void reduce(Iterable in, Collector> { + + private OperationsParser operationsParser; + + public EndpointTrendsCounter(OperationsParser operationsParser) { + this.operationsParser = operationsParser; + } + + /** + * if the service exist in one or more function groups , timeline trends are + * produced for each function that the service belongs and the function info + * is added to the timelinetrend + * + * @param t + * @param out + * @throws Exception + */ + @Override + public void flatMap(EndpointTrends t, Collector< Tuple6> out) throws Exception { + + int criticalstatus = operationsParser.getIntStatus("CRITICAL"); + int warningstatus = operationsParser.getIntStatus("WARNING"); + int unknownstatus = operationsParser.getIntStatus("UNKNOWN"); + + Timeline timeline = t.getTimeline(); + int criticalSum = timeline.countStatusAppearances(criticalstatus); + int warningSum = timeline.countStatusAppearances(warningstatus); + int unknownSum = timeline.countStatusAppearances(unknownstatus); + + Tuple6< String,String, String, String, String, Integer> tupleCritical = new Tuple6< String,String, String, String, String, Integer>( + t.getGroup(), t.getService(),t.getEndpoint(),null, "CRITICAL", criticalSum); + out.collect(tupleCritical); + + Tuple6< String,String, String, String, String, Integer> tupleWarning = new Tuple6< String, String, String, String, String, Integer>( + t.getGroup(), t.getService(),t.getEndpoint(), null,"WARNING", warningSum); + + out.collect(tupleWarning); + + Tuple6< String,String, String, String, String, Integer> tupleUnknown = new Tuple6< String,String, String, String, String, Integer>( + t.getGroup(), t.getService(),t.getEndpoint(),null, "UNKNWON", unknownSum); + out.collect(tupleUnknown); + + } +} diff --git a/flink_jobs/status_trends/src/main/java/argo/status/trends/GroupTrendsCounter.java b/flink_jobs/status_trends/src/main/java/argo/status/trends/GroupTrendsCounter.java new file mode 100644 index 00000000..9b823497 --- /dev/null +++ b/flink_jobs/status_trends/src/main/java/argo/status/trends/GroupTrendsCounter.java @@ -0,0 +1,61 @@ +package argo.status.trends; +/* + * To change this license header, 
choose License Headers in Project Properties. + * To change this template file, choose Tools | Templates + * and open the template in the editor. + */ +import argo.pojos.GroupTrends; +import argo.profiles.OperationsParser; +import org.apache.flink.api.common.functions.FlatMapFunction; +import org.apache.flink.api.java.tuple.Tuple6; +import org.apache.flink.util.Collector; +import timelines.Timeline; + +/** + * GroupTrendsCounter calculates on the dataset's timeline the num of appearances of the status + * CRITICAL, WARNING,UNKNOWN and produces a dataset of tuples that contain these calculations + */ +public class GroupTrendsCounter implements FlatMapFunction> { + + private OperationsParser operationsParser; + + public GroupTrendsCounter(OperationsParser operationsParser) { + this.operationsParser = operationsParser; + } + + /** + * if the service exist in one or more function groups , timeline trends are + * produced for each function that the service belongs and the function info + * is added to the timeline trend + * + * @param t + * @param out + * @throws Exception + */ + @Override + public void flatMap(GroupTrends t, Collector< Tuple6> out) throws Exception { + + int criticalstatus = operationsParser.getIntStatus("CRITICAL"); + int warningstatus = operationsParser.getIntStatus("WARNING"); + int unknownstatus = operationsParser.getIntStatus("UNKNOWN"); + + Timeline timeline = t.getTimeline(); + int criticalSum = timeline.countStatusAppearances(criticalstatus); + int warningSum = timeline.countStatusAppearances(warningstatus); + int unknownSum = timeline.countStatusAppearances(unknownstatus); + + Tuple6< String,String, String, String, String, Integer> tupleCritical = new Tuple6< String,String, String, String, String, Integer>( + t.getGroup(), null,null,null, "CRITICAL", criticalSum); + out.collect(tupleCritical); + + Tuple6< String,String, String, String, String, Integer> tupleWarning = new Tuple6< String, String, String, String, String, Integer>( + t.getGroup(), null,null, null,"WARNING", warningSum); + + out.collect(tupleWarning); + + Tuple6< String,String, String, String, String, Integer> tupleUnknown = new Tuple6< String,String, String, String, String, Integer>( + t.getGroup(), null,null,null, "UNKNOWN", unknownSum); + out.collect(tupleUnknown); + + } +} diff --git a/flink_jobs/status_trends/src/main/java/argo/status/trends/MetricTrendsCounter.java b/flink_jobs/status_trends/src/main/java/argo/status/trends/MetricTrendsCounter.java new file mode 100644 index 00000000..c512f4eb --- /dev/null +++ b/flink_jobs/status_trends/src/main/java/argo/status/trends/MetricTrendsCounter.java @@ -0,0 +1,53 @@ +package argo.status.trends; + +/* + * To change this license header, choose License Headers in Project Properties. + * To change this template file, choose Tools | Templates + * and open the template in the editor. 
+ */ +import argo.pojos.MetricTrends; +import argo.profiles.OperationsParser; +import org.apache.flink.api.common.functions.FlatMapFunction; +import org.apache.flink.api.java.tuple.Tuple6; +import org.apache.flink.util.Collector; + +/** + * MetricTrendsCounter calculates on the dataset's timeline the num of + * appearances of the status CRITICAL, WARNING,UNKNOWN and produces a dataset of + * tuples that contain these calculations + */ +public class MetricTrendsCounter implements FlatMapFunction> { + + private OperationsParser operationsParser; + + public MetricTrendsCounter(OperationsParser operationsParser) { + this.operationsParser = operationsParser; + } + + /** + * if the service exist in one or more function groups , timeline trends are + * produced for each function that the service belongs and the function info + * is added to the timelinetrend + * + * @param t + * @param out + * @throws Exception + */ + @Override + public void flatMap(MetricTrends t, Collector< Tuple6> out) throws Exception { + + Tuple6 tupleCritical = new Tuple6( + t.getGroup(), t.getService(), t.getEndpoint(), t.getMetric(), "CRITICAL", t.getCriticalNum()); + out.collect(tupleCritical); + + Tuple6 tupleWarning = new Tuple6( + t.getGroup(), t.getService(), t.getEndpoint(), t.getMetric(), "WARNING", t.getWarningNum()); + + out.collect(tupleWarning); + + Tuple6 tupleUnknown = new Tuple6( + t.getGroup(), t.getService(), t.getEndpoint(), t.getMetric(), "UNKNWON", t.getUnknownNum()); + out.collect(tupleUnknown); + + } +} diff --git a/flink_jobs/status_trends/src/main/java/argo/status/trends/ServiceTrendsCounter.java b/flink_jobs/status_trends/src/main/java/argo/status/trends/ServiceTrendsCounter.java new file mode 100644 index 00000000..b5e74f3c --- /dev/null +++ b/flink_jobs/status_trends/src/main/java/argo/status/trends/ServiceTrendsCounter.java @@ -0,0 +1,65 @@ +package argo.status.trends; + + + +/* + * To change this license header, choose License Headers in Project Properties. + * To change this template file, choose Tools | Templates + * and open the template in the editor. 
+ */ +import argo.pojos.ServiceTrends; +import argo.profiles.OperationsParser; +import org.apache.flink.api.common.functions.FlatMapFunction; +import org.apache.flink.api.java.tuple.Tuple6; +import org.apache.flink.util.Collector; +import timelines.Timeline; + + +/** + * ServiceTrendsCounter calculates on the dataset's timeline the num of appearances of the status + * CRITICAL, WARNING,UNKNOWN and produces a dataset of tuples that contain these calculations + */ +public class ServiceTrendsCounter implements FlatMapFunction> { + + private OperationsParser operationsParser; + + public ServiceTrendsCounter(OperationsParser operationsParser) { + this.operationsParser = operationsParser; + } + + /** + * if the service exist in one or more function groups , timeline trends are + * produced for each function that the service belongs and the function info + * is added to the timeline trend + * + * @param t + * @param out + * @throws Exception + */ + @Override + public void flatMap(ServiceTrends t, Collector< Tuple6> out) throws Exception { + + int criticalstatus = operationsParser.getIntStatus("CRITICAL"); + int warningstatus = operationsParser.getIntStatus("WARNING"); + int unknownstatus = operationsParser.getIntStatus("UNKNOWN"); + + Timeline timeline = t.getTimeline(); + int criticalSum = timeline.countStatusAppearances(criticalstatus); + int warningSum = timeline.countStatusAppearances(warningstatus); + int unknownSum = timeline.countStatusAppearances(unknownstatus); + + Tuple6< String,String, String, String, String, Integer> tupleCritical = new Tuple6< String,String, String, String, String, Integer>( + t.getGroup(), t.getService(),null,null, "CRITICAL", criticalSum); + out.collect(tupleCritical); + + Tuple6< String,String, String, String, String, Integer> tupleWarning = new Tuple6< String, String, String, String, String, Integer>( + t.getGroup(), t.getService(),null, null,"WARNING", warningSum); + + out.collect(tupleWarning); + + Tuple6< String,String, String, String, String, Integer> tupleUnknown = new Tuple6< String,String, String, String, String, Integer>( + t.getGroup(), t.getService(),null,null, "UNKNOWN", unknownSum); + out.collect(tupleUnknown); + + } +} diff --git a/flink_jobs/status_trends/src/main/java/timelines/Timeline.java b/flink_jobs/status_trends/src/main/java/timelines/Timeline.java index 9616aac8..e39a1f9b 100644 --- a/flink_jobs/status_trends/src/main/java/timelines/Timeline.java +++ b/flink_jobs/status_trends/src/main/java/timelines/Timeline.java @@ -8,6 +8,7 @@ import java.util.ArrayList; import java.util.Iterator; import java.util.Map; +import java.util.Map.Entry; import java.util.Objects; import java.util.Set; import java.util.TreeMap; @@ -311,4 +312,28 @@ public int opInt(int[][][] truthTable, int op, int a, int b) { return result; } + /** + * Calculates the times a specific status appears on the timeline + * + * @param status , the status to calculate the appearances + * @return , the num of the times the specific status appears on the + * timeline + */ + + public int countStatusAppearances(int status) { + + System.out.println("status is : " + status); + int count = 0; + for (Entry entry : this.samples.entrySet()) { + System.out.println("value status is : " + entry.getValue()); + if (status == entry.getValue()) { + + count++; + } + + } + return count; + + } + } From 27c162ee8a28c9a1f0359a317232d6c8ec3815af Mon Sep 17 00:00:00 2001 From: cthermolia-grnet Date: Tue, 3 Aug 2021 13:15:47 +0300 Subject: [PATCH 008/112] ARGO-3239 Implement status trends calculations based 
on the duration of each status appearance to the checks --- .../src/main/java/timelines/Timeline.java | 147 ++++++++++++------ .../src/main/java/timelines/Utils.java | 16 ++ .../src/test/java/timelines/TimelineTest.java | 4 +- .../argo/batch/BatchTrendsCalculations.java | 36 ++--- .../java/argo/batch/MongoTrendsOutput.java | 12 +- .../src/main/java/argo/batch/Trends.java | 21 +++ .../StatusAndDurationFilter.java | 35 +++++ .../functions/calctimelines/StatusFilter.java | 1 - .../CalcEndpointFlipFlopTrends.java | 5 +- .../calctrends/CalcMetricFlipFlopTrends.java | 25 ++- .../main/java/argo/pojos/MetricTrends.java | 7 +- .../status/trends/EndpointTrendsCounter.java | 37 +++-- .../status/trends/GroupTrendsCounter.java | 36 +++-- .../status/trends/MetricTrendsCounter.java | 31 ++-- .../status/trends/ServiceTrendsCounter.java | 37 ++--- .../src/main/java/argo/utils/Utils.java | 15 ++ .../src/main/java/timelines/Timeline.java | 52 ++++++- 17 files changed, 347 insertions(+), 170 deletions(-) create mode 100644 flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/StatusAndDurationFilter.java diff --git a/flink_jobs/Timelines/src/main/java/timelines/Timeline.java b/flink_jobs/Timelines/src/main/java/timelines/Timeline.java index 527809b3..56955383 100644 --- a/flink_jobs/Timelines/src/main/java/timelines/Timeline.java +++ b/flink_jobs/Timelines/src/main/java/timelines/Timeline.java @@ -5,6 +5,7 @@ * To change this template file, choose Tools | Templates * and open the template in the editor. */ +import java.text.ParseException; import java.util.ArrayList; import java.util.Iterator; import java.util.Map; @@ -14,6 +15,7 @@ import org.joda.time.DateTime; import org.joda.time.LocalDate; +import org.joda.time.Minutes; import org.joda.time.format.DateTimeFormat; import org.joda.time.format.DateTimeFormatter; import org.slf4j.Logger; @@ -100,8 +102,8 @@ public int get(String timestamp) { DateTimeFormatter fmt = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss'Z'"); tmp_date = fmt.parseDateTime(timestamp); if (this.samples.floorEntry(tmp_date) != null) { - return -1; - // throw new NullPointerException("no item found in timeline, size of timeline:" + this.samples.size() + "," + tmp_date.toString()); + return -1; + // throw new NullPointerException("no item found in timeline, size of timeline:" + this.samples.size() + "," + tmp_date.toString()); } return this.samples.floorEntry(tmp_date).getValue(); @@ -250,7 +252,6 @@ public boolean isEmpty() { * stored when the status of the next timestamp is different from the * previous timestamp's status then both timestamp, status pairs are stored */ - public void optimize() { TreeMap optimal = new TreeMap(); int prevstate = -1; @@ -287,7 +288,6 @@ public Set getPoints() { * @param op aggregate a set of timestamp,status pairs that are stored in a * timeline with a set of timestamp,status pairs of a different timeline, */ - public void aggregate(Timeline second, int[][][] truthTable, int op) { if (this.isEmpty()) { this.bulkInsert(second.getSamples()); @@ -311,14 +311,14 @@ public void aggregate(Timeline second, int[][][] truthTable, int op) { for (DateTime point : result.getPoints()) { int a = this.get(point); int b = second.get(point); - if(a!=-1 && b!=-1){ - int x = -1; - x = truthTable[op][a][b]; - if (x == -1) { - x = truthTable[op][b][a]; - } + if (a != -1 && b != -1) { + int x = -1; + x = truthTable[op][a][b]; + if (x == -1) { + x = truthTable[op][b][a]; + } - result.insert(point, x); + result.insert(point, x); } } @@ -331,12 +331,15 @@ public 
void aggregate(Timeline second, int[][][] truthTable, int op) { /** * - * @param timestampList, a list of pairs of timestamp, status where status is in the form of string + * @param timestampList, a list of pairs of timestamp, status where status + * is in the form of string * @param states, a list of the existing states * @return a sorted map of timestamp, status pairs in an ascending order - * receives pairs of timestamp , status where status is a string (e.g "OK", "WARNING") and converts the string to an integer - * based on the position of the status in the existing list of the states. Next this pair is stored in the map - * + * receives pairs of timestamp , status where status is a string (e.g "OK", + * "WARNING") and converts the string to an integer based on the position of + * the status in the existing list of the states. Next this pair is stored + * in the map + * */ public TreeMap buildStringTimeStampMap(ArrayList timestampList, ArrayList states) { @@ -351,17 +354,19 @@ public TreeMap buildStringTimeStampMap(ArrayList time return timestampMap; } - + /** * - * @param timestampList, a list of pairs of timestamp, status where status is in the form of string and timestamp is in the form of a datetime + * @param timestampList, a list of pairs of timestamp, status where status + * is in the form of string and timestamp is in the form of a datetime * @param states, a list of the existing states * @return a sorted map of timestamp, status pairs in an ascending order - * receives pairs of timestamp , status where status is a string (e.g "OK", "WARNING") and converts the string to an integer - * based on the position of the status in the existing list of the states. Next this pair is stored in the map - * + * receives pairs of timestamp , status where status is a string (e.g "OK", + * "WARNING") and converts the string to an integer based on the position of + * the status in the existing list of the states. 
Next this pair is stored + * in the map + * */ - public TreeMap buildDateTimeStampMap(ArrayList timestampList, ArrayList states) { TreeMap timestampMap = new TreeMap(); @@ -376,13 +381,12 @@ public TreeMap buildDateTimeStampMap(ArrayList time return timestampMap; } - + /** - * - * @param timestamp, a timestamp - * removes a pair of timestamp , status from the map + * + * @param timestamp, a timestamp removes a pair of timestamp , status from + * the map */ - public void removeTimeStamp(DateTime timestamp) { if (this.samples.containsKey(timestamp)) { @@ -397,9 +401,11 @@ public void removeTimeStamp(DateTime timestamp) { } } + /** - * - * @return the number of the times a status changes between the timestamps of the timeline , after the map is optimized + * + * @return the number of the times a status changes between the timestamps + * of the timeline , after the map is optimized */ public int calcStatusChanges() { @@ -408,13 +414,13 @@ public int calcStatusChanges() { } /** - * - * @param date, a timestamp + * + * @param date, a timestamp * @param availStates , the list of the available states - * - * checks if in the map the midnight exists and if not it is added with status "MISSING" + * + * checks if in the map the midnight exists and if not it is added with + * status "MISSING" */ - public void replacePreviousDateStatus(DateTime timestamp, ArrayList availStates) { DateTimeFormatter fmt = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss'Z'"); @@ -462,13 +468,16 @@ public boolean equals(Object obj) { } return true; } + /** - * - * @param truthTable the truthtable of the combination of various statuses with each other - * @param op , the operation + * + * @param truthTable the truth table of the combination of various statuses + * with each other + * @param op , the operation * @param a, the status a * @param b, the status b - * @return , the result of the combination as defined from the truth table of the defined operation + * @return , the result of the combination as defined from the truth table + * of the defined operation */ public int opInt(int[][][] truthTable, int op, int a, int b) { @@ -483,28 +492,66 @@ public int opInt(int[][][] truthTable, int op, int a, int b) { return result; } + /** - * Calculates the times a specific status appears on the timeline - * @param status , the status to calculate the appearances - * @return , the num of the times the specific status appears on the timeline - */ - - public int countStatusAppearances(int status) { - - System.out.println("status is : "+status); - int count=0; + * Calculates the times a specific status appears on the timeline + * + * @param status , the status to calculate the appearances + * @return , the num of the times the specific status appears on the + * timeline + */ + public int[] countStatusAppearances(int status) throws ParseException { + int[] statusInfo = new int[2]; + int count = 0; + ArrayList durationTimes = new ArrayList<>(); for (Map.Entry entry : this.samples.entrySet()) { - System.out.println("value status is : "+entry.getValue()); if (status == entry.getValue()) { - - + durationTimes.add(entry.getKey()); count++; } } - return count; + statusInfo[0] = count; + statusInfo[1] = countStatusDuration(durationTimes); + return statusInfo; } + /** + * Calculates the total duration of a status appearance + * + * @param durationTimes + * @return + */ + public int countStatusDuration(ArrayList durationTimes) throws ParseException { + + DateTime firstDt = null; + int minutesInt = 0; + for (DateTime dt : durationTimes) { + if 
(durationTimes.indexOf(dt) == 0) { + + firstDt = dt; + } else { + Minutes minutes = Minutes.minutesBetween(firstDt, dt); + minutesInt = minutesInt + minutes.getMinutes(); + firstDt = dt; + } + + } + + if (durationTimes.size() == 1 && minutesInt == 0) { + DateTime endDay = durationTimes.get(0); + DateTime startDay = durationTimes.get(0); + + startDay = Utils.setTime("yyyy-MM-dd'T'HH:mm:ss'Z'", startDay, 0, 0, 0, 0); + endDay = Utils.setTime("yyyy-MM-dd'T'HH:mm:ss'Z'", endDay, 23, 59, 59, 59); + + Minutes minutes = Minutes.minutesBetween(startDay, endDay); + minutesInt = minutes.getMinutes(); + + } + return minutesInt; + + } } diff --git a/flink_jobs/Timelines/src/main/java/timelines/Utils.java b/flink_jobs/Timelines/src/main/java/timelines/Utils.java index cbddd3b7..ded499cc 100644 --- a/flink_jobs/Timelines/src/main/java/timelines/Utils.java +++ b/flink_jobs/Timelines/src/main/java/timelines/Utils.java @@ -90,4 +90,20 @@ public static DateTime createDate(String format, int year, int month, int day, i newCalendar.set(Calendar.MILLISECOND, 0); return new DateTime(newCalendar.getTime()); } + + public static DateTime setTime(String format, DateTime dateStr, int hour, int min, int sec, int mill) throws ParseException { + + //String format = "yyyy-MM-dd'T'HH:mm:ss'Z'"; + SimpleDateFormat sdf = new SimpleDateFormat(format); + sdf.setTimeZone(TimeZone.getDefault()); + Calendar newCalendar = Calendar.getInstance(); + newCalendar.setTime(dateStr.toDate()); + + newCalendar.set(Calendar.HOUR_OF_DAY, hour); + newCalendar.set(Calendar.MINUTE, min); + newCalendar.set(Calendar.SECOND, sec); + newCalendar.set(Calendar.MILLISECOND, mill); + return new DateTime( newCalendar.getTime()); + } + } diff --git a/flink_jobs/Timelines/src/test/java/timelines/TimelineTest.java b/flink_jobs/Timelines/src/test/java/timelines/TimelineTest.java index 8d3620f5..cae8c88f 100644 --- a/flink_jobs/Timelines/src/test/java/timelines/TimelineTest.java +++ b/flink_jobs/Timelines/src/test/java/timelines/TimelineTest.java @@ -506,8 +506,8 @@ public void testCountStatusAppearances() throws ParseException { Timeline instance = new Timeline(); instance.insertDateTimeStamps(createTimestampList()); int expResult =1; - int result = instance.countStatusAppearances(status); - assertEquals(expResult, result); + int result[] = instance.countStatusAppearances(status); + assertEquals(expResult, result[0]); // TODO review the generated test code and remove the default call to fail. 
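// A minimal usage sketch of the countStatusAppearances/countStatusDuration pair
// introduced above; the status-to-integer mapping used here (CRITICAL = 2,
// WARNING = 1) is an assumption, since the real mapping comes from the
// operations profile at runtime:
//
//   TreeMap<DateTime, Integer> samples = new TreeMap<DateTime, Integer>();
//   DateTimeFormatter fmt = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss'Z'");
//   samples.put(fmt.parseDateTime("2021-01-15T09:00:00Z"), 2); // CRITICAL
//   samples.put(fmt.parseDateTime("2021-01-15T09:05:00Z"), 1); // WARNING
//   samples.put(fmt.parseDateTime("2021-01-15T09:10:00Z"), 2); // CRITICAL
//   Timeline timeline = new Timeline();
//   timeline.insertDateTimeStamps(samples);
//   int[] info = timeline.countStatusAppearances(2);
//   // info[0] == 2, the number of CRITICAL samples on the timeline;
//   // info[1] == 10, the minutes summed between consecutive CRITICAL samples
//   // (a single CRITICAL sample would instead yield the whole-day fallback of 1439)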
//fail("The test case is a prototype."); } diff --git a/flink_jobs/status_trends/src/main/java/argo/batch/BatchTrendsCalculations.java b/flink_jobs/status_trends/src/main/java/argo/batch/BatchTrendsCalculations.java index 3b727c00..bdf1c858 100644 --- a/flink_jobs/status_trends/src/main/java/argo/batch/BatchTrendsCalculations.java +++ b/flink_jobs/status_trends/src/main/java/argo/batch/BatchTrendsCalculations.java @@ -8,7 +8,7 @@ import argo.functions.calctimelines.CalcLastTimeStatus; import argo.functions.calctimelines.MapServices; import argo.functions.calctimelines.ServiceFilter; -import argo.functions.calctimelines.StatusFilter; +import argo.functions.calctimelines.StatusAndDurationFilter; import argo.functions.calctimelines.TopologyMetricFilter; import argo.functions.calctrends.CalcEndpointFlipFlopTrends; import argo.functions.calctrends.CalcGroupFlipFlop; @@ -32,7 +32,7 @@ import org.apache.flink.api.java.DataSet; import org.apache.flink.api.java.ExecutionEnvironment; import org.apache.flink.api.java.io.AvroInputFormat; -import org.apache.flink.api.java.tuple.Tuple6; +import org.apache.flink.api.java.tuple.Tuple7; import org.apache.flink.api.java.utils.ParameterTool; import org.apache.flink.core.fs.Path; import org.joda.time.DateTime; @@ -103,7 +103,7 @@ public static void main(String[] args) throws Exception { mongoUri = params.get("mongoUri"); profilesLoader = new ProfilesLoader(params); yesterdayData = readInputData(env, params, "yesterdayData"); - todayData = readInputData(env, params, "todayDatas"); + todayData = readInputData(env, params, "todayData"); // calculate on data calcFlipFlops(); @@ -150,7 +150,7 @@ public Trends map(MetricTrends in) throws Exception { }); trends.output(metricMongoOut); //flatMap dataset to tuples and count the apperances of each status type to the timeline - DataSet> metricStatusTrendsData = serviceEndpointMetricGroupData.flatMap(new MetricTrendsCounter(profilesLoader.getOperationParser())); + DataSet< Tuple7< String,String, String, String, String, Integer,Integer>> metricStatusTrendsData = serviceEndpointMetricGroupData.flatMap(new MetricTrendsCounter(profilesLoader.getOperationParser())); //filter dataset for each status type and write to mongo db filterByStatusAndWriteMongo(MongoTrendsOutput.TrendsType.TRENDS_STATUS_METRIC, statusTrendsMetricCol, metricStatusTrendsData, "critical"); filterByStatusAndWriteMongo(MongoTrendsOutput.TrendsType.TRENDS_STATUS_METRIC, statusTrendsMetricCol, metricStatusTrendsData, "warning"); @@ -179,7 +179,7 @@ public Trends map(EndpointTrends in) throws Exception { }); trends.output(metricMongoOut); //flatMap dataset to tuples and count the apperances of each status type to the timeline - DataSet> endpointStatusTrendsData = serviceEndpointGroupData.flatMap(new EndpointTrendsCounter(profilesLoader.getOperationParser())); + DataSet< Tuple7< String,String, String, String, String, Integer,Integer>> endpointStatusTrendsData = serviceEndpointGroupData.flatMap(new EndpointTrendsCounter(profilesLoader.getOperationParser())); //filter dataset for each status type and write to mongo db filterByStatusAndWriteMongo(MongoTrendsOutput.TrendsType.TRENDS_STATUS_ENDPOINT, statusTrendsEndpointCol, endpointStatusTrendsData, "critical"); filterByStatusAndWriteMongo(MongoTrendsOutput.TrendsType.TRENDS_STATUS_ENDPOINT, statusTrendsEndpointCol, endpointStatusTrendsData, "warning"); @@ -208,7 +208,7 @@ public Trends map(ServiceTrends in) throws Exception { }); trends.output(metricMongoOut); //flatMap dataset to tuples and count the apperances 
of each status type to the timeline - DataSet> serviceStatusTrendsData = serviceGroupData.flatMap(new ServiceTrendsCounter(profilesLoader.getOperationParser())); + DataSet< Tuple7< String,String, String, String, String, Integer,Integer>> serviceStatusTrendsData = serviceGroupData.flatMap(new ServiceTrendsCounter(profilesLoader.getOperationParser())); //filter dataset for each status type and write to mongo db filterByStatusAndWriteMongo(MongoTrendsOutput.TrendsType.TRENDS_STATUS_SERVICE, statusTrendsServiceCol, serviceStatusTrendsData, "critical"); filterByStatusAndWriteMongo(MongoTrendsOutput.TrendsType.TRENDS_STATUS_SERVICE, statusTrendsServiceCol, serviceStatusTrendsData, "warning"); @@ -245,7 +245,7 @@ public Trends map(GroupTrends in) throws Exception { }); trends.output(metricMongoOut); //flatMap dataset to tuples and count the apperances of each status type to the timeline - DataSet> groupStatusTrendsData = groupData.flatMap(new GroupTrendsCounter(profilesLoader.getOperationParser())); + DataSet< Tuple7< String,String, String, String, String, Integer,Integer>> groupStatusTrendsData = groupData.flatMap(new GroupTrendsCounter(profilesLoader.getOperationParser())); //filter dataset for each status type and write to mongo db filterByStatusAndWriteMongo(MongoTrendsOutput.TrendsType.TRENDS_STATUS_GROUP, statusTrendsGroupCol, groupStatusTrendsData, "critical"); filterByStatusAndWriteMongo(MongoTrendsOutput.TrendsType.TRENDS_STATUS_GROUP, statusTrendsGroupCol, groupStatusTrendsData, "warning"); @@ -265,37 +265,37 @@ private static DataSet readInputData(ExecutionEnvironment env, Param } //filters the dataset to appear result by status type and status appearances>0 and write to mongo db - private static void filterByStatusAndWriteMongo(MongoTrendsOutput.TrendsType mongoTrendsType, String uri, DataSet> data, String status) { + private static void filterByStatusAndWriteMongo(MongoTrendsOutput.TrendsType mongoTrendsType, String uri, DataSet< Tuple7< String,String, String, String, String, Integer,Integer>> data, String status) { - DataSet> filteredData = data.filter(new StatusFilter(status)); //filter dataset by status type and status appearances>0 + DataSet< Tuple7< String,String, String, String, String, Integer,Integer>> filteredData = data.filter(new StatusAndDurationFilter(status)); //filter dataset by status type and status appearances>0 if (rankNum != null) { - filteredData = filteredData.sortPartition(5, Order.DESCENDING).setParallelism(1).first(rankNum); + filteredData = filteredData.sortPartition(6, Order.DESCENDING).setParallelism(1).first(rankNum); } else { - filteredData = filteredData.sortPartition(5, Order.DESCENDING).setParallelism(1); + filteredData = filteredData.sortPartition(6, Order.DESCENDING).setParallelism(1); } writeStatusTrends(filteredData, uri, mongoTrendsType, data, status); //write to mongo db } // write status trends to mongo db - private static void writeStatusTrends(DataSet> outputData, String uri, final MongoTrendsOutput.TrendsType mongoCase, DataSet> data, String status) { + private static void writeStatusTrends(DataSet< Tuple7< String,String, String, String, String, Integer,Integer>> outputData, String uri, final MongoTrendsOutput.TrendsType mongoCase, DataSet< Tuple7< String,String, String, String, String, Integer,Integer>> data, String status) { //MongoTrendsOutput.TrendsType.TRENDS_STATUS_ENDPOINT MongoTrendsOutput metricMongoOut = new MongoTrendsOutput(mongoUri, uri, mongoCase, reportId, profilesDateStr, clearMongo); - DataSet trends = outputData.map(new 
MapFunction, Trends>() { + DataSet trends = outputData.map(new MapFunction< Tuple7< String,String, String, String, String, Integer,Integer>, Trends>() { @Override - public Trends map(Tuple6 in) throws Exception { + public Trends map( Tuple7< String,String, String, String, String, Integer,Integer> in) throws Exception { switch (mongoCase) { case TRENDS_STATUS_METRIC: - return new Trends(in.f0.toString(), in.f1.toString(), in.f2.toString(), in.f3.toString(), in.f4.toString(), in.f5); + return new Trends(in.f0, in.f1, in.f2, in.f3, in.f4, in.f5, in.f6); case TRENDS_STATUS_ENDPOINT: - return new Trends(in.f0.toString(), in.f1.toString(), in.f2.toString(), null, in.f4.toString(), in.f5); + return new Trends(in.f0, in.f1, in.f2, null, in.f4, in.f5, in.f6); case TRENDS_STATUS_SERVICE: - return new Trends(in.f0.toString(), in.f1.toString(), null, null, in.f4.toString(), in.f5); + return new Trends(in.f0, in.f1, null, null, in.f4, in.f5, in.f6); case TRENDS_STATUS_GROUP: - return new Trends(in.f0.toString(), null, null, null, in.f4.toString(), in.f5); + return new Trends(in.f0, null, null, null, in.f4, in.f5, in.f6); default: return null; } diff --git a/flink_jobs/status_trends/src/main/java/argo/batch/MongoTrendsOutput.java b/flink_jobs/status_trends/src/main/java/argo/batch/MongoTrendsOutput.java index 8bf427b7..031b062a 100644 --- a/flink_jobs/status_trends/src/main/java/argo/batch/MongoTrendsOutput.java +++ b/flink_jobs/status_trends/src/main/java/argo/batch/MongoTrendsOutput.java @@ -20,7 +20,7 @@ public class MongoTrendsOutput implements OutputFormat { // Select the type of status input public enum TrendsType { - TRENDS_STATUS_METRIC, TRENDS_STATUS_ENDPOINT,TRENDS_STATUS_SERVICE, TRENDS_STATUS_GROUP,TRENDS_METRIC, TRENDS_ENDPOINT, TRENDS_SERVICE, TRENDS_GROUP + TRENDS_STATUS_METRIC, TRENDS_STATUS_ENDPOINT, TRENDS_STATUS_SERVICE, TRENDS_STATUS_GROUP, TRENDS_METRIC, TRENDS_ENDPOINT, TRENDS_SERVICE, TRENDS_GROUP } private static final long serialVersionUID = 1L; @@ -123,24 +123,28 @@ private Document prepDoc(Trends record) { doc.append("endpoint", record.getEndpoint()); doc.append("metric", record.getMetric()); doc.append("status", record.getStatus()); + doc.append("duration", record.getDuration()); doc.append("trends", record.getTrends()); break; - case TRENDS_STATUS_ENDPOINT: + case TRENDS_STATUS_ENDPOINT: doc.append("group", record.getGroup()); doc.append("service", record.getService()); doc.append("endpoint", record.getEndpoint()); doc.append("status", record.getStatus()); + doc.append("duration", record.getDuration()); doc.append("trends", record.getTrends()); break; - case TRENDS_STATUS_SERVICE: + case TRENDS_STATUS_SERVICE: doc.append("group", record.getGroup()); doc.append("service", record.getService()); doc.append("status", record.getStatus()); + doc.append("duration", record.getDuration()); doc.append("trends", record.getTrends()); break; - case TRENDS_STATUS_GROUP: + case TRENDS_STATUS_GROUP: doc.append("group", record.getGroup()); doc.append("status", record.getStatus()); + doc.append("duration", record.getDuration()); doc.append("trends", record.getTrends()); break; default: diff --git a/flink_jobs/status_trends/src/main/java/argo/batch/Trends.java b/flink_jobs/status_trends/src/main/java/argo/batch/Trends.java index 80497d40..2250beb1 100644 --- a/flink_jobs/status_trends/src/main/java/argo/batch/Trends.java +++ b/flink_jobs/status_trends/src/main/java/argo/batch/Trends.java @@ -23,12 +23,24 @@ public class Trends { private String status; private int trends; + private int 
duration; + + public Trends(String group, String service, String endpoint, String metric, String status, int trends, int duration) { + this.group = group; + this.service = service; + this.endpoint = endpoint; + this.metric = metric; + this.status = status; + this.trends = trends; + this.duration=duration; + } public Trends(String group, String service, String endpoint, String metric, String status, int trends) { this.group = group; this.service = service; this.endpoint = endpoint; this.metric = metric; + this.flipflop = flipflop; this.status = status; this.trends = trends; } @@ -117,6 +129,15 @@ public int getTrends() { public void setTrends(int trends) { this.trends = trends; } + + public int getDuration() { + return duration; + } + + public void setDuration(int duration) { + this.duration = duration; + } + } diff --git a/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/StatusAndDurationFilter.java b/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/StatusAndDurationFilter.java new file mode 100644 index 00000000..d1f54ebc --- /dev/null +++ b/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/StatusAndDurationFilter.java @@ -0,0 +1,35 @@ +/* + * To change this license header, choose License Headers in Project Properties. + * To change this template file, choose Tools | Templates + * and open the template in the editor. + */ +package argo.functions.calctimelines; + +import org.apache.flink.api.common.functions.FilterFunction; +import org.apache.flink.api.java.tuple.Tuple7; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * StatusFilter, filters data by status and status appearances */ +public class StatusAndDurationFilter implements FilterFunction<Tuple7<String, String, String, String, String, Integer, Integer>> { + + static Logger LOG = LoggerFactory.getLogger(StatusFilter.class); + private String status; + + public StatusAndDurationFilter(String status) { + this.status = status; + } + //if the status field value in Tuple equals the given status and status appearances>0 returns true, else returns false + + @Override + public boolean filter(Tuple7<String, String, String, String, String, Integer, Integer> t) throws Exception { + + if (t.f4.toString().equalsIgnoreCase(status) && t.f6 > 0) { + return true; + } + return false; + } + +} diff --git a/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/StatusFilter.java b/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/StatusFilter.java index 0dd20987..23ceb943 100644 --- a/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/StatusFilter.java +++ b/flink_jobs/status_trends/src/main/java/argo/functions/calctimelines/StatusFilter.java @@ -11,7 +11,6 @@ import org.slf4j.LoggerFactory; /** - * StatusFilter, filters data by status and status appearances */ public class StatusFilter implements FilterFunction<Tuple6<String, String, String, String, String, Integer>> { diff --git a/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcEndpointFlipFlopTrends.java b/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcEndpointFlipFlopTrends.java index 1b419ca6..c5febf50 100644 --- a/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcEndpointFlipFlopTrends.java +++ b/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcEndpointFlipFlopTrends.java @@ -56,9 +56,8 @@ public void reduce(Iterable in, Collector< EndpointTrends> out) th service = time.getService(); hostname = time.getEndpoint(); Timeline timeline = time.getTimeline(); - timelinelist.put(time.getMetric(), timeline); - - + + timelinelist.put(time.getMetric(),timeline); } // merge the
timelines into one timeline , diff --git a/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcMetricFlipFlopTrends.java b/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcMetricFlipFlopTrends.java index bdb73a5d..af9c6287 100644 --- a/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcMetricFlipFlopTrends.java +++ b/flink_jobs/status_trends/src/main/java/argo/functions/calctrends/CalcMetricFlipFlopTrends.java @@ -29,7 +29,7 @@ public class CalcMetricFlipFlopTrends implements GroupReduceFunction in, Collector out) throws String hostname = null; String service = null; String metric = null; - String status = null; - int criticalSum = 0, warningSum = 0, unknownSum = 0; - ArrayList critical=new ArrayList<>(); - ArrayList warning=new ArrayList<>(); - ArrayList unknown=new ArrayList<>(); - + String status = null; + int criticalSum = 0, warningSum = 0, unknownSum = 0; + for (MetricData md : in) { hostname = md.getHostname().toString(); service = md.getService().toString(); - + metric = md.getMetric().toString(); status = md.getStatus().toString(); // group = groupEndpoints.get(md.getHostname().toString() + "-" + md.getService()); //retrieve the group for the service, as contained in file String avProfileName = this.aggregationProfileParser.getAvProfileItem().getName(); // group = topologyEndpointParser.retrieveGroup(aggregationProfileParser.getProfileGroupType(avProfileName).toUpperCase(), md.getHostname().toString() + "-" + md.getService().toString()); - groups = topologyEndpointParser.getGroupFull(aggregationProfileParser.getProfileGroupType(avProfileName).toUpperCase(), md.getHostname().toString(), md.getService().toString()); int st = operationsParser.getIntStatus(md.getStatus().toString()); timeStatusMap.put(Utils.convertStringtoDate(format, md.getTimestamp().toString()), st); @@ -86,7 +82,7 @@ public void reduce(Iterable in, Collector out) throws } } - + Timeline timeline = new Timeline(); timeline.insertDateTimeStamps(timeStatusMap); @@ -94,15 +90,16 @@ public void reduce(Iterable in, Collector out) throws Integer flipflop = timeline.calcStatusChanges(); if (service != null && hostname != null && metric != null) { + for (String group : groups) { - + if (topologyGroupParser.checkSubGroup(group)) { - MetricTrends metricTrends = new MetricTrends(group, service, hostname, metric, timeline, flipflop,criticalSum, warningSum,unknownSum ); + MetricTrends metricTrends = new MetricTrends(group, service, hostname, metric, timeline, flipflop, criticalSum, warningSum, unknownSum); out.collect(metricTrends); } } } - } - + } + } diff --git a/flink_jobs/status_trends/src/main/java/argo/pojos/MetricTrends.java b/flink_jobs/status_trends/src/main/java/argo/pojos/MetricTrends.java index 163e55b0..3d6a77f8 100644 --- a/flink_jobs/status_trends/src/main/java/argo/pojos/MetricTrends.java +++ b/flink_jobs/status_trends/src/main/java/argo/pojos/MetricTrends.java @@ -22,7 +22,7 @@ public class MetricTrends { Integer criticalNum; Integer warningNum; Integer unknownNum; - + public MetricTrends() { } @@ -37,8 +37,6 @@ public MetricTrends(String group, String service, String endpoint, String metric this.warningNum = warningNum; this.unknownNum = unknownNum; } - - public Integer getUnknownNum() { return unknownNum; @@ -48,7 +46,6 @@ public void setUnknownNum(Integer unknownNum) { this.unknownNum = unknownNum; } - public String getGroup() { return group; } @@ -113,6 +110,4 @@ public void setWarningNum(Integer warningNum) { this.warningNum = warningNum; } - - } diff 
--git a/flink_jobs/status_trends/src/main/java/argo/status/trends/EndpointTrendsCounter.java b/flink_jobs/status_trends/src/main/java/argo/status/trends/EndpointTrendsCounter.java index fc8ae443..4319eade 100644 --- a/flink_jobs/status_trends/src/main/java/argo/status/trends/EndpointTrendsCounter.java +++ b/flink_jobs/status_trends/src/main/java/argo/status/trends/EndpointTrendsCounter.java @@ -1,7 +1,5 @@ package argo.status.trends; - - /* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates @@ -11,15 +9,16 @@ import argo.profiles.OperationsParser; import org.apache.flink.api.common.functions.FlatMapFunction; import org.apache.flink.api.java.tuple.Tuple6; +import org.apache.flink.api.java.tuple.Tuple7; import org.apache.flink.util.Collector; import timelines.Timeline; - /** - * EndpointTrendsCounter calculates on the dataset's timeline the num of appearances of the status - * CRITICAL, WARNING,UNKNOWN and produces a dataset of tuples that contain these calculations + * EndpointTrendsCounter calculates on the dataset's timeline the num of + * appearances of the status CRITICAL, WARNING,UNKNOWN and produces a dataset of + * tuples that contain these calculations */ -public class EndpointTrendsCounter implements FlatMapFunction> { +public class EndpointTrendsCounter implements FlatMapFunction> { private OperationsParser operationsParser; @@ -37,29 +36,29 @@ public EndpointTrendsCounter(OperationsParser operationsParser) { * @throws Exception */ @Override - public void flatMap(EndpointTrends t, Collector< Tuple6> out) throws Exception { + public void flatMap(EndpointTrends t, Collector< Tuple7< String, String, String, String, String, Integer, Integer>> out) throws Exception { int criticalstatus = operationsParser.getIntStatus("CRITICAL"); int warningstatus = operationsParser.getIntStatus("WARNING"); int unknownstatus = operationsParser.getIntStatus("UNKNOWN"); Timeline timeline = t.getTimeline(); - int criticalSum = timeline.countStatusAppearances(criticalstatus); - int warningSum = timeline.countStatusAppearances(warningstatus); - int unknownSum = timeline.countStatusAppearances(unknownstatus); + int[] criticalstatusInfo = timeline.countStatusAppearances(criticalstatus); + int[] warningstatusInfo = timeline.countStatusAppearances(warningstatus); + int[] unknownstatusInfo = timeline.countStatusAppearances(unknownstatus); - Tuple6< String,String, String, String, String, Integer> tupleCritical = new Tuple6< String,String, String, String, String, Integer>( - t.getGroup(), t.getService(),t.getEndpoint(),null, "CRITICAL", criticalSum); + Tuple7< String, String, String, String, String, Integer, Integer> tupleCritical = new Tuple7< String, String, String, String, String, Integer, Integer>( + t.getGroup(), t.getService(), t.getEndpoint(), null, "CRITICAL", criticalstatusInfo[0], criticalstatusInfo[1]); out.collect(tupleCritical); - Tuple6< String,String, String, String, String, Integer> tupleWarning = new Tuple6< String, String, String, String, String, Integer>( - t.getGroup(), t.getService(),t.getEndpoint(), null,"WARNING", warningSum); - + Tuple7< String, String, String, String, String, Integer, Integer> tupleWarning = new Tuple7< String, String, String, String, String, Integer, Integer>( + t.getGroup(), t.getService(), t.getEndpoint(), null, "WARNING", warningstatusInfo[0], warningstatusInfo[1]); + out.collect(tupleWarning); - Tuple6< String,String, String, String, String, Integer> tupleUnknown = new Tuple6< 
String,String, String, String, String, Integer>( - t.getGroup(), t.getService(),t.getEndpoint(),null, "UNKNWON", unknownSum); - out.collect(tupleUnknown); - + Tuple7< String, String, String, String, String, Integer, Integer> tupleUnknown = new Tuple7< String, String, String, String, String, Integer, Integer>( + t.getGroup(), t.getService(), t.getEndpoint(), null, "UNKNOWN", unknownstatusInfo[0], unknownstatusInfo[1]); + out.collect(tupleUnknown); + } } diff --git a/flink_jobs/status_trends/src/main/java/argo/status/trends/GroupTrendsCounter.java b/flink_jobs/status_trends/src/main/java/argo/status/trends/GroupTrendsCounter.java index 9b823497..b0ec0f9b 100644 --- a/flink_jobs/status_trends/src/main/java/argo/status/trends/GroupTrendsCounter.java +++ b/flink_jobs/status_trends/src/main/java/argo/status/trends/GroupTrendsCounter.java @@ -1,4 +1,5 @@ package argo.status.trends; + /* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates @@ -7,15 +8,16 @@ import argo.pojos.GroupTrends; import argo.profiles.OperationsParser; import org.apache.flink.api.common.functions.FlatMapFunction; -import org.apache.flink.api.java.tuple.Tuple6; +import org.apache.flink.api.java.tuple.Tuple7; import org.apache.flink.util.Collector; import timelines.Timeline; /** - * GroupTrendsCounter calculates on the dataset's timeline the num of appearances of the status - * CRITICAL, WARNING,UNKNOWN and produces a dataset of tuples that contain these calculations + * GroupTrendsCounter calculates on the dataset's timeline the num of + * appearances of the status CRITICAL, WARNING,UNKNOWN and produces a dataset of + * tuples that contain these calculations */ -public class GroupTrendsCounter implements FlatMapFunction> { +public class GroupTrendsCounter implements FlatMapFunction> { private OperationsParser operationsParser; @@ -33,29 +35,29 @@ public GroupTrendsCounter(OperationsParser operationsParser) { * @throws Exception */ @Override - public void flatMap(GroupTrends t, Collector< Tuple6> out) throws Exception { + public void flatMap(GroupTrends t, Collector< Tuple7> out) throws Exception { int criticalstatus = operationsParser.getIntStatus("CRITICAL"); int warningstatus = operationsParser.getIntStatus("WARNING"); int unknownstatus = operationsParser.getIntStatus("UNKNOWN"); Timeline timeline = t.getTimeline(); - int criticalSum = timeline.countStatusAppearances(criticalstatus); - int warningSum = timeline.countStatusAppearances(warningstatus); - int unknownSum = timeline.countStatusAppearances(unknownstatus); + int[] criticalstatusInfo = timeline.countStatusAppearances(criticalstatus); + int[] warningstatusInfo = timeline.countStatusAppearances(warningstatus); + int[] unknownstatusInfo = timeline.countStatusAppearances(unknownstatus); - Tuple6< String,String, String, String, String, Integer> tupleCritical = new Tuple6< String,String, String, String, String, Integer>( - t.getGroup(), null,null,null, "CRITICAL", criticalSum); + Tuple7< String, String, String, String, String, Integer, Integer> tupleCritical = new Tuple7< String, String, String, String, String, Integer, Integer>( + t.getGroup(), null, null, null, "CRITICAL", criticalstatusInfo[0], criticalstatusInfo[1]); out.collect(tupleCritical); - Tuple6< String,String, String, String, String, Integer> tupleWarning = new Tuple6< String, String, String, String, String, Integer>( - t.getGroup(), null,null, null,"WARNING", warningSum); - + Tuple7< String, String, String, String, String, 
Integer, Integer> tupleWarning = new Tuple7< String, String, String, String, String, Integer, Integer>( + t.getGroup(), null, null, null, "WARNING", warningstatusInfo[0], warningstatusInfo[1]); + out.collect(tupleWarning); - Tuple6< String,String, String, String, String, Integer> tupleUnknown = new Tuple6< String,String, String, String, String, Integer>( - t.getGroup(), null,null,null, "UNKNOWN", unknownSum); - out.collect(tupleUnknown); - + Tuple7< String, String, String, String, String, Integer, Integer> tupleUnknown = new Tuple7< String, String, String, String, String, Integer, Integer>( + t.getGroup(), null, null, null, "UNKNOWN", unknownstatusInfo[0], unknownstatusInfo[1]); + out.collect(tupleUnknown); + } } diff --git a/flink_jobs/status_trends/src/main/java/argo/status/trends/MetricTrendsCounter.java b/flink_jobs/status_trends/src/main/java/argo/status/trends/MetricTrendsCounter.java index c512f4eb..c909ba8a 100644 --- a/flink_jobs/status_trends/src/main/java/argo/status/trends/MetricTrendsCounter.java +++ b/flink_jobs/status_trends/src/main/java/argo/status/trends/MetricTrendsCounter.java @@ -8,15 +8,17 @@ import argo.pojos.MetricTrends; import argo.profiles.OperationsParser; import org.apache.flink.api.common.functions.FlatMapFunction; -import org.apache.flink.api.java.tuple.Tuple6; +import org.apache.flink.api.java.tuple.Tuple7; import org.apache.flink.util.Collector; +import timelines.Timeline; +import timelines.TimelineAggregator; /** * MetricTrendsCounter calculates on the dataset's timeline the num of * appearances of the status CRITICAL, WARNING,UNKNOWN and produces a dataset of * tuples that contain these calculations */ -public class MetricTrendsCounter implements FlatMapFunction> { +public class MetricTrendsCounter implements FlatMapFunction> { private OperationsParser operationsParser; @@ -34,20 +36,29 @@ public MetricTrendsCounter(OperationsParser operationsParser) { * @throws Exception */ @Override - public void flatMap(MetricTrends t, Collector< Tuple6> out) throws Exception { + public void flatMap(MetricTrends t, Collector> out) throws Exception { + int criticalstatus = operationsParser.getIntStatus("CRITICAL"); + int warningstatus = operationsParser.getIntStatus("WARNING"); + int unknownstatus = operationsParser.getIntStatus("UNKNOWN"); - Tuple6 tupleCritical = new Tuple6( - t.getGroup(), t.getService(), t.getEndpoint(), t.getMetric(), "CRITICAL", t.getCriticalNum()); + Timeline timeline = t.getTimeline(); + TimelineAggregator timelineAggregator = new TimelineAggregator(); + + int[] criticalstatusInfo = timeline.countStatusAppearances(criticalstatus); + int[] warningstatusInfo = timeline.countStatusAppearances(warningstatus); + int[] unknownstatusInfo = timeline.countStatusAppearances(unknownstatus); + + Tuple7< String, String, String, String, String, Integer, Integer> tupleCritical = new Tuple7< String, String, String, String, String, Integer, Integer>( + t.getGroup(), t.getService(), t.getEndpoint(), t.getMetric(), "CRITICAL", t.getCriticalNum(), criticalstatusInfo[1]); out.collect(tupleCritical); - Tuple6 tupleWarning = new Tuple6( - t.getGroup(), t.getService(), t.getEndpoint(), t.getMetric(), "WARNING", t.getWarningNum()); + Tuple7< String, String, String, String, String, Integer, Integer> tupleWarning = new Tuple7< String, String, String, String, String, Integer, Integer>( + t.getGroup(), t.getService(), t.getEndpoint(), t.getMetric(), "WARNING", t.getWarningNum(), warningstatusInfo[1]); out.collect(tupleWarning); - Tuple6 tupleUnknown = new Tuple6( - 
t.getGroup(), t.getService(), t.getEndpoint(), t.getMetric(), "UNKNWON", t.getUnknownNum()); + Tuple7< String, String, String, String, String, Integer, Integer> tupleUnknown = new Tuple7< String, String, String, String, String, Integer, Integer>( + t.getGroup(), t.getService(), t.getEndpoint(), t.getMetric(), "UNKNOWN", t.getUnknownNum(), unknownstatusInfo[1]); out.collect(tupleUnknown); - } } diff --git a/flink_jobs/status_trends/src/main/java/argo/status/trends/ServiceTrendsCounter.java b/flink_jobs/status_trends/src/main/java/argo/status/trends/ServiceTrendsCounter.java index b5e74f3c..c2455d74 100644 --- a/flink_jobs/status_trends/src/main/java/argo/status/trends/ServiceTrendsCounter.java +++ b/flink_jobs/status_trends/src/main/java/argo/status/trends/ServiceTrendsCounter.java @@ -1,7 +1,4 @@ package argo.status.trends; - - - /* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates @@ -11,15 +8,16 @@ import argo.profiles.OperationsParser; import org.apache.flink.api.common.functions.FlatMapFunction; import org.apache.flink.api.java.tuple.Tuple6; +import org.apache.flink.api.java.tuple.Tuple7; import org.apache.flink.util.Collector; import timelines.Timeline; - /** - * ServiceTrendsCounter calculates on the dataset's timeline the num of appearances of the status - * CRITICAL, WARNING,UNKNOWN and produces a dataset of tuples that contain these calculations + * ServiceTrendsCounter calculates on the dataset's timeline the num of + * appearances of the status CRITICAL, WARNING,UNKNOWN and produces a dataset of + * tuples that contain these calculations */ -public class ServiceTrendsCounter implements FlatMapFunction> { +public class ServiceTrendsCounter implements FlatMapFunction> { private OperationsParser operationsParser; @@ -37,29 +35,28 @@ public ServiceTrendsCounter(OperationsParser operationsParser) { * @throws Exception */ @Override - public void flatMap(ServiceTrends t, Collector< Tuple6> out) throws Exception { + public void flatMap(ServiceTrends t, Collector< Tuple7< String, String, String, String, String, Integer, Integer>> out) throws Exception { int criticalstatus = operationsParser.getIntStatus("CRITICAL"); int warningstatus = operationsParser.getIntStatus("WARNING"); int unknownstatus = operationsParser.getIntStatus("UNKNOWN"); Timeline timeline = t.getTimeline(); - int criticalSum = timeline.countStatusAppearances(criticalstatus); - int warningSum = timeline.countStatusAppearances(warningstatus); - int unknownSum = timeline.countStatusAppearances(unknownstatus); + int[] criticalstatusInfo = timeline.countStatusAppearances(criticalstatus); + int[] warningstatusInfo = timeline.countStatusAppearances(warningstatus); + int[] unknownstatusInfo = timeline.countStatusAppearances(unknownstatus); - Tuple6< String,String, String, String, String, Integer> tupleCritical = new Tuple6< String,String, String, String, String, Integer>( - t.getGroup(), t.getService(),null,null, "CRITICAL", criticalSum); + Tuple7< String, String, String, String, String, Integer, Integer> tupleCritical = new Tuple7< String, String, String, String, String, Integer, Integer>( + t.getGroup(), t.getService(), null, null, "CRITICAL", criticalstatusInfo[0], criticalstatusInfo[1]); out.collect(tupleCritical); - Tuple6< String,String, String, String, String, Integer> tupleWarning = new Tuple6< String, String, String, String, String, Integer>( - t.getGroup(), t.getService(),null, null,"WARNING", warningSum); - + Tuple7< String, String, 
String, String, String, Integer, Integer> tupleWarning = new Tuple7< String, String, String, String, String, Integer, Integer>( + t.getGroup(), t.getService(), null, null, "WARNING", warningstatusInfo[0], warningstatusInfo[1]); + out.collect(tupleWarning); - Tuple6< String,String, String, String, String, Integer> tupleUnknown = new Tuple6< String,String, String, String, String, Integer>( - t.getGroup(), t.getService(),null,null, "UNKNOWN", unknownSum); - out.collect(tupleUnknown); - + Tuple7< String, String, String, String, String, Integer, Integer> tupleUnknown = new Tuple7< String, String, String, String, String, Integer, Integer>( + t.getGroup(), t.getService(), null, null, "UNKNOWN", unknownstatusInfo[0], unknownstatusInfo[1]); + out.collect(tupleUnknown); } } diff --git a/flink_jobs/status_trends/src/main/java/argo/utils/Utils.java b/flink_jobs/status_trends/src/main/java/argo/utils/Utils.java index a8aa853c..3dc42e89 100644 --- a/flink_jobs/status_trends/src/main/java/argo/utils/Utils.java +++ b/flink_jobs/status_trends/src/main/java/argo/utils/Utils.java @@ -123,5 +123,20 @@ public static HashMap getEndpoints(ArrayList durationTimes = new ArrayList<>(); for (Entry entry : this.samples.entrySet()) { - System.out.println("value status is : " + entry.getValue()); if (status == entry.getValue()) { - + durationTimes.add(entry.getKey()); count++; } } - return count; + statusInfo[0] = count; + statusInfo[1] = countStatusDuration(durationTimes); + return statusInfo; + + } + + /** + * Calculates the total duration of a status appearance + * + * @param durationTimes + * @return + */ + public int countStatusDuration(ArrayList durationTimes) throws ParseException { + + DateTime firstDt = null; + int minutesInt = 0; + for (DateTime dt : durationTimes) { + if (durationTimes.indexOf(dt) == 0) { + + firstDt = dt; + } else { + Minutes minutes = Minutes.minutesBetween(firstDt, dt); + minutesInt = minutesInt + minutes.getMinutes(); + firstDt = dt; + } + + } + if (durationTimes.size() == 1 && minutesInt == 0) { + DateTime endDay = durationTimes.get(0); + DateTime startDay = durationTimes.get(0); + + startDay = Utils.setTime("yyyy-MM-dd'T'HH:mm:ss'Z'", startDay, 0, 0, 0,0); + endDay = Utils.setTime("yyyy-MM-dd'T'HH:mm:ss'Z'", endDay, 23, 59, 59,59); + + Minutes minutes = Minutes.minutesBetween(startDay, endDay); + minutesInt = minutes.getMinutes(); + + } + return minutesInt; } } From 9bd261725912a4bdc4ff69468897c867bab47e95 Mon Sep 17 00:00:00 2001 From: Konstantinos Kagkelidis Date: Tue, 24 Aug 2021 22:05:11 +0300 Subject: [PATCH 009/112] ARGO-3257 Use run.date argument as proper date query parameter when requesting resources from argo-web-api --- flink_jobs/batch_ar/src/main/java/argo/batch/ArgoArBatch.java | 1 + .../batch_status/src/main/java/argo/batch/ArgoStatusBatch.java | 2 ++ 2 files changed, 3 insertions(+) diff --git a/flink_jobs/batch_ar/src/main/java/argo/batch/ArgoArBatch.java b/flink_jobs/batch_ar/src/main/java/argo/batch/ArgoArBatch.java index e6bf2b85..635ef74c 100644 --- a/flink_jobs/batch_ar/src/main/java/argo/batch/ArgoArBatch.java +++ b/flink_jobs/batch_ar/src/main/java/argo/batch/ArgoArBatch.java @@ -82,6 +82,7 @@ public static void main(String[] args) throws Exception { } amr.setReportID(reportID); + amr.setDate(params.getRequired("run.date")); amr.getRemoteAll(); diff --git a/flink_jobs/batch_status/src/main/java/argo/batch/ArgoStatusBatch.java b/flink_jobs/batch_status/src/main/java/argo/batch/ArgoStatusBatch.java index f96123a1..8683cc2f 100644 --- 
a/flink_jobs/batch_status/src/main/java/argo/batch/ArgoStatusBatch.java +++ b/flink_jobs/batch_status/src/main/java/argo/batch/ArgoStatusBatch.java @@ -71,8 +71,10 @@ public static void main(String[] args) throws Exception { } amr.setReportID(reportID); + amr.setDate(params.getRequired("run.date")); amr.getRemoteAll(); + DataSourcecfgDS = env.fromElements(amr.getResourceJSON(ApiResource.CONFIG)); DataSourceopsDS = env.fromElements(amr.getResourceJSON(ApiResource.OPS)); From 4d47049c85c856e8f4ab7385cc4718363cf02697 Mon Sep 17 00:00:00 2001 From: Konstantinos Kagkelidis Date: Fri, 27 Aug 2021 00:31:54 +0300 Subject: [PATCH 010/112] ARGO-3240 Add timeout configuration when connecting to argo-web-api --- .../java/argo/amr/ApiResourceManager.java | 16 ++++++-- .../src/main/java/argo/batch/ArgoArBatch.java | 41 ++++++++----------- .../java/argo/amr/ApiResourceManager.java | 16 ++++++-- .../main/java/argo/batch/ArgoStatusBatch.java | 18 ++++---- 4 files changed, 52 insertions(+), 39 deletions(-) diff --git a/flink_jobs/batch_ar/src/main/java/argo/amr/ApiResourceManager.java b/flink_jobs/batch_ar/src/main/java/argo/amr/ApiResourceManager.java index c4375701..8698b492 100644 --- a/flink_jobs/batch_ar/src/main/java/argo/amr/ApiResourceManager.java +++ b/flink_jobs/batch_ar/src/main/java/argo/amr/ApiResourceManager.java @@ -4,7 +4,6 @@ import java.security.KeyManagementException; import java.security.KeyStoreException; import java.security.NoSuchAlgorithmException; -import java.security.SecureRandom; import java.util.ArrayList; import java.util.EnumMap; import java.util.HashMap; @@ -12,7 +11,6 @@ import java.util.Map; import java.util.Map.Entry; -import javax.net.ssl.SSLContext; import org.apache.http.client.ClientProtocolException; import org.apache.http.client.fluent.Executor; @@ -59,6 +57,7 @@ public class ApiResourceManager { private String reportName; private String weightsID; private boolean verify; + private int timeoutSec; public ApiResourceManager(String endpoint, String token) { @@ -74,9 +73,19 @@ public ApiResourceManager(String endpoint, String token) { this.proxy = ""; this.weightsID = ""; this.verify = true; + this.timeoutSec = 5; } + public int getTimeoutSec() { + return timeoutSec; + } + + public void setTimeoutSec(int timeoutSec) { + this.timeoutSec = timeoutSec; + } + + public boolean getVerify() { return verify; } @@ -186,7 +195,7 @@ private String getResource(String fullURL) { r = r.viaProxy(proxy); } - r = r.connectTimeout(1000).socketTimeout(1000); + r = r.connectTimeout(this.timeoutSec * 1000).socketTimeout(this.timeoutSec * 1000); String content = "{}"; @@ -195,6 +204,7 @@ private String getResource(String fullURL) { CloseableHttpClient httpClient = HttpClients.custom().setSSLSocketFactory(selfSignedSSLF()).build(); Executor executor = Executor.newInstance(httpClient); content = executor.execute(r).returnContent().asString(); + httpClient.close(); } else { content = r.execute().returnContent().asString(); diff --git a/flink_jobs/batch_ar/src/main/java/argo/batch/ArgoArBatch.java b/flink_jobs/batch_ar/src/main/java/argo/batch/ArgoArBatch.java index 635ef74c..1d1e767a 100644 --- a/flink_jobs/batch_ar/src/main/java/argo/batch/ArgoArBatch.java +++ b/flink_jobs/batch_ar/src/main/java/argo/batch/ArgoArBatch.java @@ -25,31 +25,18 @@ /** * Represents an ARGO A/R Batch Job in flink - *

- * The specific batch job calculates the availability and reliability results based on the input metric data - * and sync files - *

- * Required arguments: - *