Bladeren bron

Removed 'common' library

Lukas Cerny 4 jaren geleden
bovenliggende
commit
9cf1c802c9
56 gewijzigde bestanden met toevoegingen van 3857 en 2273 verwijderingen
  1. 54 50
      build.gradle
  2. 113 113
      src/main/java/cz/senslog/analyzer/analysis/module/CollectorHandler.java
  3. 113 113
      src/main/java/cz/senslog/analyzer/app/Application.java
  4. 112 113
      src/main/java/cz/senslog/analyzer/app/Configuration.java
  5. 78 78
      src/main/java/cz/senslog/analyzer/app/Parameters.java
  6. 155 155
      src/main/java/cz/senslog/analyzer/domain/DoubleStatistics.java
  7. 70 70
      src/main/java/cz/senslog/analyzer/domain/Group.java
  8. 24 24
      src/main/java/cz/senslog/analyzer/domain/Observation.java
  9. 52 52
      src/main/java/cz/senslog/analyzer/domain/Threshold.java
  10. 32 32
      src/main/java/cz/senslog/analyzer/provider/AnalyzerTask.java
  11. 18 17
      src/main/java/cz/senslog/analyzer/provider/DataProvider.java
  12. 17 18
      src/main/java/cz/senslog/analyzer/provider/DataProviderComponent.java
  13. 9 8
      src/main/java/cz/senslog/analyzer/provider/DataProviderDeployment.java
  14. 16 16
      src/main/java/cz/senslog/analyzer/provider/HttpMiddlewareProvider.java
  15. 18 18
      src/main/java/cz/senslog/analyzer/provider/HttpMiddlewareProviderModule.java
  16. 4 4
      src/main/java/cz/senslog/analyzer/provider/MiddlewareDataProviderConfig.java
  17. 6 6
      src/main/java/cz/senslog/analyzer/provider/ScheduledDataProviderConfig.java
  18. 27 27
      src/main/java/cz/senslog/analyzer/provider/ScheduledDataProviderConfigImpl.java
  19. 43 43
      src/main/java/cz/senslog/analyzer/provider/ScheduledDatabaseProvider.java
  20. 98 92
      src/main/java/cz/senslog/analyzer/provider/task/ObservationAnalyzerTask.java
  21. 180 180
      src/main/java/cz/senslog/analyzer/storage/permanent/repository/StatisticsConfigRepository.java
  22. 174 176
      src/main/java/cz/senslog/analyzer/storage/permanent/repository/StatisticsRepository.java
  23. 136 0
      src/main/java/cz/senslog/analyzer/util/DateTrunc.java
  24. 20 0
      src/main/java/cz/senslog/analyzer/util/LongUtils.java
  25. 22 0
      src/main/java/cz/senslog/analyzer/util/StringUtils.java
  26. 141 142
      src/main/java/cz/senslog/analyzer/util/TimestampUtil.java
  27. 46 0
      src/main/java/cz/senslog/analyzer/util/Triple.java
  28. 40 0
      src/main/java/cz/senslog/analyzer/util/Tuple.java
  29. 211 0
      src/main/java/cz/senslog/analyzer/util/http/HttpClient.java
  30. 13 0
      src/main/java/cz/senslog/analyzer/util/http/HttpCode.java
  31. 16 0
      src/main/java/cz/senslog/analyzer/util/http/HttpContentType.java
  32. 39 0
      src/main/java/cz/senslog/analyzer/util/http/HttpCookie.java
  33. 10 0
      src/main/java/cz/senslog/analyzer/util/http/HttpHeader.java
  34. 5 0
      src/main/java/cz/senslog/analyzer/util/http/HttpMethod.java
  35. 111 0
      src/main/java/cz/senslog/analyzer/util/http/HttpRequest.java
  36. 80 0
      src/main/java/cz/senslog/analyzer/util/http/HttpRequestBuilder.java
  37. 77 0
      src/main/java/cz/senslog/analyzer/util/http/HttpResponse.java
  38. 42 0
      src/main/java/cz/senslog/analyzer/util/http/HttpResponseBuilder.java
  39. 113 0
      src/main/java/cz/senslog/analyzer/util/http/URLBuilder.java
  40. 213 0
      src/main/java/cz/senslog/analyzer/util/json/BasicJson.java
  41. 22 0
      src/main/java/cz/senslog/analyzer/util/json/BasicJsonDeserializer.java
  42. 6 0
      src/main/java/cz/senslog/analyzer/util/json/FormatFunction.java
  43. 139 0
      src/main/java/cz/senslog/analyzer/util/json/JsonSchema.java
  44. 59 0
      src/main/java/cz/senslog/analyzer/util/schedule/ScheduleTask.java
  45. 24 0
      src/main/java/cz/senslog/analyzer/util/schedule/Scheduler.java
  46. 30 0
      src/main/java/cz/senslog/analyzer/util/schedule/SchedulerBuilderImpl.java
  47. 67 0
      src/main/java/cz/senslog/analyzer/util/schedule/SchedulerImpl.java
  48. 5 0
      src/main/java/cz/senslog/analyzer/util/schedule/Status.java
  49. 29 0
      src/main/java/cz/senslog/analyzer/util/schedule/TaskDescription.java
  50. 36 36
      src/main/java/cz/senslog/analyzer/ws/handler/InfoHandler.java
  51. 120 120
      src/main/java/cz/senslog/analyzer/ws/handler/StatisticsHandler.java
  52. 110 110
      src/main/java/cz/senslog/analyzer/ws/manager/WSStatisticsManager.java
  53. 101 95
      src/main/java/cz/senslog/analyzer/ws/vertx/VertxServer.java
  54. 142 142
      src/test/java/cz/senslog/analyzer/provider/task/ObservationAnalyzerTaskTest.java
  55. 121 122
      src/test/java/cz/senslog/analyzer/util/TimestampUtilTest.java
  56. 98 101
      src/test/java/cz/senslog/analyzer/ws/manager/WSStatisticsManagerTest.java

+ 54 - 50
build.gradle

@@ -1,50 +1,54 @@
-plugins {
-    id 'java'
-}
-
-group 'cz.senslog'
-version '1.2'
-//version '1.3-SNAPSHOT'
-
-sourceCompatibility = 1.8
-
-repositories {
-    mavenCentral()
-    mavenLocal()
-}
-
-test {
-    useJUnitPlatform()
-}
-
-jar {
-    manifest {
-        attributes(
-                'Main-Class': 'cz.senslog.analyzer.app.Main'
-        )
-    }
-    from {
-        configurations.runtimeClasspath.collect { it.isDirectory() ? it : zipTree(it) }
-    }
-}
-
-dependencies {
-    testCompile group: 'org.junit.jupiter', name: 'junit-jupiter', version: '5.6.0'
-    testCompile group: 'org.mockito', name: 'mockito-core', version: '3.6.28'
-
-    compile group: 'cz.senslog', name: 'common', version: '1.0.0'
-    compile group: 'com.beust', name: 'jcommander', version: '1.78'
-
-    compile group: 'io.vertx', name: 'vertx-core', version: '3.8.5'
-    compile group: 'io.vertx', name: 'vertx-web', version: '3.8.5'
-
-    compile group: 'org.apache.logging.log4j', name: 'log4j-slf4j-impl', version: '2.13.1'
-    compile group: 'org.jdbi', name: 'jdbi3-postgres', version: '3.12.2'
-    compile group: 'org.jdbi', name: 'jdbi3-jodatime2', version: '3.12.2'
-    compile group: 'com.zaxxer', name: 'HikariCP', version: '3.4.2'
-    compile group: 'org.postgresql', name: 'postgresql', version: '42.2.10'
-    compile group: 'com.h2database', name: 'h2', version: '1.4.200'
-
-    implementation 'com.google.dagger:dagger:2.26'
-    annotationProcessor 'com.google.dagger:dagger-compiler:2.26'
-}
+plugins {
+    id 'java'
+}
+
+group 'cz.senslog'
+version '1.2'
+//version '1.3-SNAPSHOT'
+
+sourceCompatibility = 1.8
+
+repositories {
+    mavenCentral()
+    mavenLocal()
+}
+
+test {
+    useJUnitPlatform()
+}
+
+jar {
+    manifest {
+        attributes(
+                'Main-Class': 'cz.senslog.analyzer.app.Main'
+        )
+    }
+    from {
+        configurations.runtimeClasspath.collect { it.isDirectory() ? it : zipTree(it) }
+    }
+}
+
+dependencies {
+    testCompile group: 'org.junit.jupiter', name: 'junit-jupiter', version: '5.6.0'
+    testCompile group: 'org.mockito', name: 'mockito-core', version: '3.6.28'
+
+    compile group: 'com.beust', name: 'jcommander', version: '1.78'
+    compile group: 'com.google.code.gson', name: 'gson', version: '2.8.9'
+    implementation group: 'org.yaml', name: 'snakeyaml', version: '1.29'
+    implementation group: 'org.apache.httpcomponents', name: 'httpclient', version: '4.5.13'
+    implementation group: 'org.everit.json', name: 'org.everit.json.schema', version: '1.5.1'
+
+    compile group: 'io.vertx', name: 'vertx-core', version: '4.2.1'
+    compile group: 'io.vertx', name: 'vertx-web', version: '4.2.1'
+
+    compile group: 'org.apache.logging.log4j', name: 'log4j-slf4j-impl', version: '2.13.1'
+    compile group: 'org.jdbi', name: 'jdbi3-postgres', version: '3.12.2'
+    compile group: 'org.jdbi', name: 'jdbi3-jodatime2', version: '3.12.2'
+    compile group: 'com.zaxxer', name: 'HikariCP', version: '3.4.2'
+    compile group: 'org.postgresql', name: 'postgresql', version: '42.2.10'
+    compile group: 'com.h2database', name: 'h2', version: '1.4.200'
+
+    implementation 'com.google.dagger:dagger:2.40.1'
+    annotationProcessor 'com.google.dagger:dagger-compiler:2.40.1'
+}

+ 113 - 113
src/main/java/cz/senslog/analyzer/analysis/module/CollectorHandler.java

@@ -1,114 +1,114 @@
-package cz.senslog.analyzer.analysis.module;
-
-import cz.senslog.analyzer.core.api.BlockingHandler;
-import cz.senslog.analyzer.core.api.DataFinisher;
-import cz.senslog.analyzer.core.api.HandlerContext;
-import cz.senslog.analyzer.domain.*;
-import cz.senslog.analyzer.storage.inmemory.CollectedStatisticsStorage;
-import cz.senslog.analyzer.storage.inmemory.TimestampStorage;
-import cz.senslog.common.util.DateTrunc;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-
-import java.util.*;
-import java.util.function.Function;
-
-import static cz.senslog.analyzer.domain.TimestampType.LAST_COMMITTED_INCLUSIVE;
-
-
-public abstract class CollectorHandler<I extends Data<?, ?>> extends BlockingHandler<I, DoubleStatistics> {
-
-    private static final Logger logger = LogManager.getLogger(CollectorHandler.class);
-
-    /** Map of saved groups (Map<group_id, Group>). */
-    private Map<Long, Group> groupsGroupById;
-
-    private Map<Group, List<CollectedStatistics>> collectedStatistics;
-
-    private final CollectedStatisticsStorage statisticsStorage;
-    private final TimestampStorage timestampStorage;
-
-    public CollectorHandler(CollectedStatisticsStorage statisticsStorage, TimestampStorage timestampStorage) {
-        this.statisticsStorage = statisticsStorage;
-        this.timestampStorage = timestampStorage;
-    }
-
-    protected abstract List<Group> loadGroups();
-    protected abstract Function<I, Boolean> collectData(DoubleStatistics statistics);
-    protected abstract long getGroupId(I data);
-
-    @Override
-    public void init() {
-        List<Group> groups = loadGroups();
-        groupsGroupById = new HashMap<>(groups.size());
-        collectedStatistics = new HashMap<>(groups.size());
-        for (Group group : groups) {
-            groupsGroupById.put(group.getId(), group);
-            collectedStatistics.put(group, statisticsStorage.restore(group));
-        }
-    }
-
-    @Override
-    public void finish(DataFinisher<DoubleStatistics> finisher, Timestamp edgeDateTime) {
-        logger.info("Finishing collecting data at the time {}.", edgeDateTime);
-        List<DoubleStatistics> finishedData = new ArrayList<>();
-        for (Group group : groupsGroupById.values()) {
-            List<CollectedStatistics> statistics = getCollectedStatisticsByGroup(group);
-            Iterator<CollectedStatistics> statisticsIterator = statistics.iterator();
-            while (statisticsIterator.hasNext()) {
-                CollectedStatistics st = statisticsIterator.next();
-                if (st.getEndTime().isBefore(edgeDateTime)) {
-                    finishedData.add(st.getStatistics());
-                    statisticsIterator.remove();
-                    statisticsStorage.remove(st);
-                }
-            }
-        }
-        if (statisticsStorage.commit()) {
-            timestampStorage.update(edgeDateTime, LAST_COMMITTED_INCLUSIVE);
-        }
-        finisher.finish(finishedData);
-    }
-
-    @Override
-    public void handle(HandlerContext<I, DoubleStatistics> context) {
-        I data = context.data();
-        long groupId = getGroupId(context.data());
-        Timestamp timestamp = data.getTimestamp();
-        Group group = getGroupByGroupId(groupId);
-        logger.trace("Handling data for group: {} at {}.", groupId, timestamp);
-
-        if (group.getInterval() <= 0) { return; }
-
-        List<CollectedStatistics> groupStatistics = getCollectedStatisticsByGroup(group);
-
-        boolean newDataAccepted = false;
-        for (CollectedStatistics st : groupStatistics) { // startInterval <= timestamp < endInterval
-            if (timestamp.isEqual(st.getStartTime()) ||
-                    (timestamp.isAfter(st.getStartTime()) && timestamp.isBefore(st.getEndTime()))
-            ) {
-                collectData(st.getStatistics()).apply(data);
-                newDataAccepted = true;
-            }
-        }
-
-        if (!newDataAccepted) { // register a new statistics
-            Timestamp startOfInterval = createStartOfInterval(timestamp, group);
-            CollectedStatistics newSt = new CollectedStatistics(group, startOfInterval);
-            collectData(newSt.getStatistics()).apply(data);
-            groupStatistics.add(statisticsStorage.watch(newSt));
-        }
-    }
-
-    private static Timestamp createStartOfInterval(Timestamp timestamp, Group group) {
-        return Timestamp.of(DateTrunc.trunc(timestamp.get(), (int)group.getInterval()));
-    }
-
-    private Group getGroupByGroupId(long groupId) {
-        return groupsGroupById.getOrDefault(groupId, Group.empty());
-    }
-
-    private List<CollectedStatistics> getCollectedStatisticsByGroup(Group group) {
-        return collectedStatistics.computeIfAbsent(group, g -> new ArrayList<>());
-    }
+package cz.senslog.analyzer.analysis.module;
+
+import cz.senslog.analyzer.core.api.BlockingHandler;
+import cz.senslog.analyzer.core.api.DataFinisher;
+import cz.senslog.analyzer.core.api.HandlerContext;
+import cz.senslog.analyzer.domain.*;
+import cz.senslog.analyzer.storage.inmemory.CollectedStatisticsStorage;
+import cz.senslog.analyzer.storage.inmemory.TimestampStorage;
+import cz.senslog.analyzer.util.DateTrunc;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import java.util.*;
+import java.util.function.Function;
+
+import static cz.senslog.analyzer.domain.TimestampType.LAST_COMMITTED_INCLUSIVE;
+
+
+public abstract class CollectorHandler<I extends Data<?, ?>> extends BlockingHandler<I, DoubleStatistics> {
+
+    private static final Logger logger = LogManager.getLogger(CollectorHandler.class);
+
+    /** Map of saved groups (Map<group_id, Group>). */
+    private Map<Long, Group> groupsGroupById;
+
+    private Map<Group, List<CollectedStatistics>> collectedStatistics;
+
+    private final CollectedStatisticsStorage statisticsStorage;
+    private final TimestampStorage timestampStorage;
+
+    public CollectorHandler(CollectedStatisticsStorage statisticsStorage, TimestampStorage timestampStorage) {
+        this.statisticsStorage = statisticsStorage;
+        this.timestampStorage = timestampStorage;
+    }
+
+    protected abstract List<Group> loadGroups();
+    protected abstract Function<I, Boolean> collectData(DoubleStatistics statistics);
+    protected abstract long getGroupId(I data);
+
+    @Override
+    public void init() {
+        List<Group> groups = loadGroups();
+        groupsGroupById = new HashMap<>(groups.size());
+        collectedStatistics = new HashMap<>(groups.size());
+        for (Group group : groups) {
+            groupsGroupById.put(group.getId(), group);
+            collectedStatistics.put(group, statisticsStorage.restore(group));
+        }
+    }
+
+    @Override
+    public void finish(DataFinisher<DoubleStatistics> finisher, Timestamp edgeDateTime) {
+        logger.info("Finishing collecting data at the time {}.", edgeDateTime);
+        List<DoubleStatistics> finishedData = new ArrayList<>();
+        for (Group group : groupsGroupById.values()) {
+            List<CollectedStatistics> statistics = getCollectedStatisticsByGroup(group);
+            Iterator<CollectedStatistics> statisticsIterator = statistics.iterator();
+            while (statisticsIterator.hasNext()) {
+                CollectedStatistics st = statisticsIterator.next();
+                if (st.getEndTime().isBefore(edgeDateTime)) {
+                    finishedData.add(st.getStatistics());
+                    statisticsIterator.remove();
+                    statisticsStorage.remove(st);
+                }
+            }
+        }
+        if (statisticsStorage.commit()) {
+            timestampStorage.update(edgeDateTime, LAST_COMMITTED_INCLUSIVE);
+        }
+        finisher.finish(finishedData);
+    }
+
+    @Override
+    public void handle(HandlerContext<I, DoubleStatistics> context) {
+        I data = context.data();
+        long groupId = getGroupId(context.data());
+        Timestamp timestamp = data.getTimestamp();
+        Group group = getGroupByGroupId(groupId);
+        logger.trace("Handling data for group: {} at {}.", groupId, timestamp);
+
+        if (group.getInterval() <= 0) { return; }
+
+        List<CollectedStatistics> groupStatistics = getCollectedStatisticsByGroup(group);
+
+        boolean newDataAccepted = false;
+        for (CollectedStatistics st : groupStatistics) { // startInterval <= timestamp < endInterval
+            if (timestamp.isEqual(st.getStartTime()) ||
+                    (timestamp.isAfter(st.getStartTime()) && timestamp.isBefore(st.getEndTime()))
+            ) {
+                collectData(st.getStatistics()).apply(data);
+                newDataAccepted = true;
+            }
+        }
+
+        if (!newDataAccepted) { // register a new statistics
+            Timestamp startOfInterval = createStartOfInterval(timestamp, group);
+            CollectedStatistics newSt = new CollectedStatistics(group, startOfInterval);
+            collectData(newSt.getStatistics()).apply(data);
+            groupStatistics.add(statisticsStorage.watch(newSt));
+        }
+    }
+
+    private static Timestamp createStartOfInterval(Timestamp timestamp, Group group) {
+        return Timestamp.of(DateTrunc.trunc(timestamp.get(), (int)group.getInterval()));
+    }
+
+    private Group getGroupByGroupId(long groupId) {
+        return groupsGroupById.getOrDefault(groupId, Group.empty());
+    }
+
+    private List<CollectedStatistics> getCollectedStatisticsByGroup(Group group) {
+        return collectedStatistics.computeIfAbsent(group, g -> new ArrayList<>());
+    }
 }

+ 113 - 113
src/main/java/cz/senslog/analyzer/app/Application.java

@@ -1,114 +1,114 @@
-package cz.senslog.analyzer.app;
-
-import cz.senslog.analyzer.analysis.Analyzer;
-import cz.senslog.analyzer.analysis.DaggerAnalyzerComponent;
-import cz.senslog.analyzer.domain.Observation;
-import cz.senslog.analyzer.provider.DataProviderComponent;
-import cz.senslog.analyzer.storage.ConnectionModule;
-import cz.senslog.analyzer.storage.StorageConfig;
-import cz.senslog.analyzer.provider.ProviderConfig;
-import cz.senslog.analyzer.provider.DaggerDataProviderComponent;
-import cz.senslog.analyzer.provider.DataProvider;
-import cz.senslog.analyzer.ws.DaggerServerComponent;
-import cz.senslog.analyzer.ws.Server;
-import cz.senslog.common.util.Triple;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-
-import java.io.IOException;
-import java.lang.management.ManagementFactory;
-import java.lang.management.RuntimeMXBean;
-
-/**
- * The class {@code Application} represents a trigger for entire application.
- *
- * @author Lukas Cerny
- * @version 1.0
- * @since 1.0
- */
-public class Application extends Thread {
-
-    private static final long START_TIMESTAMP;
-    private static final RuntimeMXBean RUNTIME_MX_BEAN;
-
-    private static final Logger logger = LogManager.getLogger(Application.class);
-
-    private final Parameters params;
-
-    static {
-        START_TIMESTAMP = System.currentTimeMillis();
-        RUNTIME_MX_BEAN = ManagementFactory.getRuntimeMXBean();
-    }
-
-    static Thread init(String... args) throws IOException {
-        Parameters parameters = Parameters.parse(args);
-
-        if (parameters.isHelp()) {
-            return new Thread(parameters::printHelp);
-        }
-
-        Application app = new Application(parameters);
-        Runtime.getRuntime().addShutdownHook(new Thread(app::interrupt, "clean-app"));
-
-        return app;
-    }
-
-    public static long uptime() {
-        return System.currentTimeMillis() - START_TIMESTAMP;
-    }
-
-    public static long uptimeJVM() {
-        return RUNTIME_MX_BEAN.getUptime();
-    }
-
-    private Application(Parameters parameters) {
-        super("app");
-
-        this.params = parameters;
-    }
-
-    @Override
-    public void interrupt() {}
-
-    @Override
-    public void run() {
-
-        String configFile = params.getConfigFileName();
-        Triple<StorageConfig, ProviderConfig, Integer> configs = null;
-        try {
-            configs = Configuration.load(configFile);
-        } catch (IOException e) {
-            logger.error(e.getMessage());
-            System.exit(1);
-        }
-
-        StorageConfig storageConfig = configs.getItem1();
-        ProviderConfig config = configs.getItem2();
-        int port = configs.getItem3();
-
-
-        ConnectionModule connectionModule = ConnectionModule.create(storageConfig);
-        logger.info("Module {} was created successfully.", ConnectionModule.class.getSimpleName());
-
-        Analyzer<Observation> analyzer = DaggerAnalyzerComponent.builder()
-                .connectionModule(connectionModule)
-                .build().createNewObservationAnalyzer();
-        logger.info("Component {} was created successfully.", Analyzer.class.getSimpleName());
-
-
-        DataProviderComponent dataProviderComponent = DaggerDataProviderComponent.builder()
-                .connectionModule(connectionModule).build();
-
-        DataProvider<Observation> dataProvider = dataProviderComponent.scheduledDatabaseProvider()
-                .config(config).deployAnalyzer(analyzer);
-        logger.info("Component {} was created successfully.", DataProvider.class.getSimpleName());
-
-        Server server = DaggerServerComponent.builder()
-                .connectionModule(connectionModule).build()
-                .createServer();
-        logger.info("Component {} was created successfully.", Server.class.getSimpleName());
-
-        server.start(port);
-        dataProvider.start();
-    }
+package cz.senslog.analyzer.app;
+
+import cz.senslog.analyzer.analysis.Analyzer;
+import cz.senslog.analyzer.analysis.DaggerAnalyzerComponent;
+import cz.senslog.analyzer.domain.Observation;
+import cz.senslog.analyzer.provider.DataProviderComponent;
+import cz.senslog.analyzer.storage.ConnectionModule;
+import cz.senslog.analyzer.storage.StorageConfig;
+import cz.senslog.analyzer.provider.ProviderConfig;
+import cz.senslog.analyzer.provider.DaggerDataProviderComponent;
+import cz.senslog.analyzer.provider.DataProvider;
+import cz.senslog.analyzer.ws.DaggerServerComponent;
+import cz.senslog.analyzer.ws.Server;
+import cz.senslog.analyzer.util.Triple;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import java.io.IOException;
+import java.lang.management.ManagementFactory;
+import java.lang.management.RuntimeMXBean;
+
+/**
+ * The class {@code Application} represents a trigger for entire application.
+ *
+ * @author Lukas Cerny
+ * @version 1.0
+ * @since 1.0
+ */
+public class Application extends Thread {
+
+    private static final long START_TIMESTAMP;
+    private static final RuntimeMXBean RUNTIME_MX_BEAN;
+
+    private static final Logger logger = LogManager.getLogger(Application.class);
+
+    private final Parameters params;
+
+    static {
+        START_TIMESTAMP = System.currentTimeMillis();
+        RUNTIME_MX_BEAN = ManagementFactory.getRuntimeMXBean();
+    }
+
+    static Thread init(String... args) throws IOException {
+        Parameters parameters = Parameters.parse(args);
+
+        if (parameters.isHelp()) {
+            return new Thread(parameters::printHelp);
+        }
+
+        Application app = new Application(parameters);
+        Runtime.getRuntime().addShutdownHook(new Thread(app::interrupt, "clean-app"));
+
+        return app;
+    }
+
+    public static long uptime() {
+        return System.currentTimeMillis() - START_TIMESTAMP;
+    }
+
+    public static long uptimeJVM() {
+        return RUNTIME_MX_BEAN.getUptime();
+    }
+
+    private Application(Parameters parameters) {
+        super("app");
+
+        this.params = parameters;
+    }
+
+    @Override
+    public void interrupt() {}
+
+    @Override
+    public void run() {
+
+        String configFile = params.getConfigFileName();
+        Triple<StorageConfig, ProviderConfig, Integer> configs = null;
+        try {
+            configs = Configuration.load(configFile);
+        } catch (IOException e) {
+            logger.error(e.getMessage());
+            System.exit(1);
+        }
+
+        StorageConfig storageConfig = configs.getItem1();
+        ProviderConfig config = configs.getItem2();
+        int port = configs.getItem3();
+
+
+        ConnectionModule connectionModule = ConnectionModule.create(storageConfig);
+        logger.info("Module {} was created successfully.", ConnectionModule.class.getSimpleName());
+
+        Analyzer<Observation> analyzer = DaggerAnalyzerComponent.builder()
+                .connectionModule(connectionModule)
+                .build().createNewObservationAnalyzer();
+        logger.info("Component {} was created successfully.", Analyzer.class.getSimpleName());
+
+
+        DataProviderComponent dataProviderComponent = DaggerDataProviderComponent.builder()
+                .connectionModule(connectionModule).build();
+
+        DataProvider<Observation> dataProvider = dataProviderComponent.scheduledDatabaseProvider()
+                .config(config).deployAnalyzer(analyzer);
+        logger.info("Component {} was created successfully.", DataProvider.class.getSimpleName());
+
+        Server server = DaggerServerComponent.builder()
+                .connectionModule(connectionModule).build()
+                .createServer();
+        logger.info("Component {} was created successfully.", Server.class.getSimpleName());
+
+        server.start(port);
+        dataProvider.start();
+    }
 }

+ 112 - 113
src/main/java/cz/senslog/analyzer/app/Configuration.java

@@ -1,113 +1,112 @@
-package cz.senslog.analyzer.app;
-
-import cz.senslog.analyzer.storage.StorageConfig;
-import cz.senslog.analyzer.storage.inmemory.InMemoryStorageConfig;
-import cz.senslog.analyzer.storage.permanent.PermanentStorageConfig;
-import cz.senslog.analyzer.provider.ProviderConfig;
-import cz.senslog.common.exception.UnsupportedFileException;
-import cz.senslog.common.util.Triple;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-import org.yaml.snakeyaml.Yaml;
-
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.InputStream;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.time.OffsetDateTime;
-import java.util.Map;
-
-public class Configuration {
-    private static final Logger logger = LogManager.getLogger(Configuration.class);
-
-    public static Triple<StorageConfig, ProviderConfig, Integer> load(String fileName) throws IOException {
-
-        logger.info("Loading '{}' configuration file.", fileName);
-
-        if (!fileName.toLowerCase().endsWith(".yaml")) {
-            throw new UnsupportedFileException(fileName + "does not contain .yaml extension.");
-        }
-
-        Path filePath = Paths.get(fileName);
-        if (Files.notExists(filePath)) {
-            throw new FileNotFoundException(fileName + " does not exist");
-        }
-
-        Map<Object, Object> properties;
-
-        logger.debug("Opening the file '{}'.", fileName);
-        try (InputStream fileStream = Files.newInputStream(filePath)) {
-            logger.debug("Parsing the yaml file '{}'.", fileName);
-            properties = new Yaml().load(fileStream);
-            logger.debug("The configuration yaml file '{}' was parsed successfully.", fileName);
-        }
-
-        if (properties == null || properties.isEmpty()) {
-            throw new IOException(String.format(
-                    "The configuration yaml file %s is empty or was not loaded successfully. ", fileName
-            ));
-        }
-
-        Object permanentStMap = properties.get("permanentStorage");
-        if (!(permanentStMap instanceof Map)) {
-            throw new IOException(String.format(
-                    "Configuration file '%s' contains an error at 'permanentStorage' attribute.", fileName
-            ));
-        }
-
-        Object inMemoryStMap = properties.get("inMemoryStorage");
-        if (!(inMemoryStMap instanceof Map)) {
-            throw new IOException(String.format(
-                    "Configuration file '%s' contains an error at 'inMemoryStorage' attribute.", fileName
-            ));
-        }
-
-        Object schedulerMap = properties.get("scheduler");
-        if (!(schedulerMap instanceof Map)) {
-            throw new IOException(String.format(
-                    "Configuration file '%s' contains an error at 'scheduler' attribute.", fileName
-            ));
-        }
-
-        Object serverMap = properties.get("server");
-        if (!(serverMap instanceof Map)) {
-            throw new IOException(String.format(
-                    "Configuration file '%s' contains an error at 'server' attribute.", fileName
-            ));
-        }
-
-        Map<String, Object> permanentStConfigMap = (Map<String, Object>)permanentStMap;
-        PermanentStorageConfig permanentStConfig = new PermanentStorageConfig(
-                (String)permanentStConfigMap.get("url"),
-                (String)permanentStConfigMap.get("username"),
-                (String)permanentStConfigMap.get("password"),
-                (Integer) permanentStConfigMap.get("connectionPoolSize")
-        );
-
-        Map<String, Object> inMemoryStConfigMap = (Map<String, Object>)inMemoryStMap;
-        InMemoryStorageConfig inMemoryStConfig = new InMemoryStorageConfig(
-                (String)inMemoryStConfigMap.get("path"),
-                (Boolean)inMemoryStConfigMap.get("persistence"),
-                (String)inMemoryStConfigMap.get("parameters")
-        );
-
-        Map<String, Object> schedulerConfigMap = (Map<String, Object>)schedulerMap;
-        String initDateString = (String) schedulerConfigMap.get("initDate");
-        OffsetDateTime initDate = initDateString != null ? OffsetDateTime.parse(initDateString) : OffsetDateTime.now();
-        ProviderConfig providerConfig = ProviderConfig.config()
-                .startDateTime(initDate)
-                .period((Integer)schedulerConfigMap.get("period"))
-                .get();
-
-
-        Map<String, Object> serverConfigMap = (Map<String, Object>)serverMap;
-        Integer port = (Integer)serverConfigMap.get("port");
-
-        StorageConfig storageConfig = new StorageConfig(permanentStConfig, inMemoryStConfig);
-
-        logger.info("Configuration file '{}' was parsed successfully.", fileName);
-        return Triple.of(storageConfig, providerConfig, port);
-    }
-}
+package cz.senslog.analyzer.app;
+
+import cz.senslog.analyzer.storage.StorageConfig;
+import cz.senslog.analyzer.storage.inmemory.InMemoryStorageConfig;
+import cz.senslog.analyzer.storage.permanent.PermanentStorageConfig;
+import cz.senslog.analyzer.provider.ProviderConfig;
+import cz.senslog.analyzer.util.Triple;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.yaml.snakeyaml.Yaml;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.time.OffsetDateTime;
+import java.util.Map;
+
+public class Configuration {
+    private static final Logger logger = LogManager.getLogger(Configuration.class);
+
+    public static Triple<StorageConfig, ProviderConfig, Integer> load(String fileName) throws IOException {
+
+        logger.info("Loading '{}' configuration file.", fileName);
+
+        if (!fileName.toLowerCase().endsWith(".yaml")) {
+            throw new IOException(fileName + " does not contain .yaml extension.");
+        }
+
+        Path filePath = Paths.get(fileName);
+        if (Files.notExists(filePath)) {
+            throw new FileNotFoundException(fileName + " does not exist");
+        }
+
+        Map<Object, Object> properties;
+
+        logger.debug("Opening the file '{}'.", fileName);
+        try (InputStream fileStream = Files.newInputStream(filePath)) {
+            logger.debug("Parsing the yaml file '{}'.", fileName);
+            properties = new Yaml().load(fileStream);
+            logger.debug("The configuration yaml file '{}' was parsed successfully.", fileName);
+        }
+
+        if (properties == null || properties.isEmpty()) {
+            throw new IOException(String.format(
+                    "The configuration yaml file %s is empty or was not loaded successfully.", fileName
+            ));
+        }
+
+        Object permanentStMap = properties.get("permanentStorage");
+        if (!(permanentStMap instanceof Map)) {
+            throw new IOException(String.format(
+                    "Configuration file '%s' contains an error at 'permanentStorage' attribute.", fileName
+            ));
+        }
+
+        Object inMemoryStMap = properties.get("inMemoryStorage");
+        if (!(inMemoryStMap instanceof Map)) {
+            throw new IOException(String.format(
+                    "Configuration file '%s' contains an error at 'inMemoryStorage' attribute.", fileName
+            ));
+        }
+
+        Object schedulerMap = properties.get("scheduler");
+        if (!(schedulerMap instanceof Map)) {
+            throw new IOException(String.format(
+                    "Configuration file '%s' contains an error at 'scheduler' attribute.", fileName
+            ));
+        }
+
+        Object serverMap = properties.get("server");
+        if (!(serverMap instanceof Map)) {
+            throw new IOException(String.format(
+                    "Configuration file '%s' contains an error at 'server' attribute.", fileName
+            ));
+        }
+
+        Map<String, Object> permanentStConfigMap = (Map<String, Object>)permanentStMap;
+        PermanentStorageConfig permanentStConfig = new PermanentStorageConfig(
+                (String)permanentStConfigMap.get("url"),
+                (String)permanentStConfigMap.get("username"),
+                (String)permanentStConfigMap.get("password"),
+                (Integer) permanentStConfigMap.get("connectionPoolSize")
+        );
+
+        Map<String, Object> inMemoryStConfigMap = (Map<String, Object>)inMemoryStMap;
+        InMemoryStorageConfig inMemoryStConfig = new InMemoryStorageConfig(
+                (String)inMemoryStConfigMap.get("path"),
+                (Boolean)inMemoryStConfigMap.get("persistence"),
+                (String)inMemoryStConfigMap.get("parameters")
+        );
+
+        Map<String, Object> schedulerConfigMap = (Map<String, Object>)schedulerMap;
+        String initDateString = (String) schedulerConfigMap.get("initDate");
+        OffsetDateTime initDate = initDateString != null ? OffsetDateTime.parse(initDateString) : OffsetDateTime.now();
+        ProviderConfig providerConfig = ProviderConfig.config()
+                .startDateTime(initDate)
+                .period((Integer)schedulerConfigMap.get("period"))
+                .get();
+
+
+        Map<String, Object> serverConfigMap = (Map<String, Object>)serverMap;
+        Integer port = (Integer)serverConfigMap.get("port");
+
+        StorageConfig storageConfig = new StorageConfig(permanentStConfig, inMemoryStConfig);
+
+        logger.info("Configuration file '{}' was parsed successfully.", fileName);
+        return Triple.of(storageConfig, providerConfig, port);
+    }
+}

+ 78 - 78
src/main/java/cz/senslog/analyzer/app/Parameters.java

@@ -1,78 +1,78 @@
-package cz.senslog.analyzer.app;
-
-import com.beust.jcommander.JCommander;
-import com.beust.jcommander.Parameter;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.util.Arrays;
-
-import static cz.senslog.common.util.StringUtils.isNotBlank;
-import static java.lang.String.format;
-import static java.nio.file.Files.notExists;
-import static java.nio.file.Paths.get;
-
-/**
- * The class {@code Parameters} represents input parameters from
- * the applications. For parsing is used {@see JCommander} library.
- *
- * @author Lukas Cerny
- * @version 1.0
- * @since 1.0
- */
-public final class Parameters {
-
-    private static final Logger logger = LogManager.getLogger(Parameters.class);
-
-    private JCommander jCommander;
-
-    /**
-     * Static method to parse input parameters.
-     * @param args - array of parameters in format e.g. ["-cf", "fileName"].
-     * @return instance of {@code Parameters}.
-     * @throws IOException throws if is chosen "-cf" or "-config-file" parameter and the file does not exist.
-     */
-    public static Parameters parse(String... args) throws IOException {
-        logger.debug("Parsing input parameters {}", Arrays.toString(args));
-
-        Parameters parameters = new Parameters();
-        JCommander jCommander = JCommander.newBuilder()
-                .addObject(parameters).build();
-        parameters.jCommander = jCommander;
-
-        jCommander.parse(args);
-
-        String configFileName = parameters.getConfigFileName();
-        logger.debug("Checking existence of configuration file {}", configFileName);
-        if (isNotBlank(configFileName) && notExists(get(configFileName))) {
-            throw new FileNotFoundException(format("Config file %s does not exist.", configFileName));
-        }
-
-        logger.info("Parsing input parameters {} were parsed successfully.", Arrays.toString(args));
-        return parameters;
-    }
-
-    @Parameter(names = {"-h", "-help"}, help = true)
-    private boolean help = false;
-
-    @Parameter(names = {"-cf", "-config-file"}, description = "Configuration file in .yaml format.")
-    private String configFileName;
-
-    /**
-     * Returns name of the configuration file.
-     * @return string name.
-     */
-    public String getConfigFileName() {
-        return configFileName;
-    }
-
-    public boolean isHelp() {
-        return help;
-    }
-
-    public void printHelp() {
-        jCommander.usage();
-    }
-}
+package cz.senslog.analyzer.app;
+
+import com.beust.jcommander.JCommander;
+import com.beust.jcommander.Parameter;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.util.Arrays;
+
+import static cz.senslog.analyzer.util.StringUtils.isNotBlank;
+import static java.lang.String.format;
+import static java.nio.file.Files.notExists;
+import static java.nio.file.Paths.get;
+
+/**
+ * The class {@code Parameters} represents input parameters from
+ * the applications. For parsing is used {@see JCommander} library.
+ *
+ * @author Lukas Cerny
+ * @version 1.0
+ * @since 1.0
+ */
+public final class Parameters {
+
+    private static final Logger logger = LogManager.getLogger(Parameters.class);
+
+    private JCommander jCommander;
+
+    /**
+     * Static method to parse input parameters.
+     * @param args - array of parameters in format e.g. ["-cf", "fileName"].
+     * @return instance of {@code Parameters}.
+     * @throws IOException throws if is chosen "-cf" or "-config-file" parameter and the file does not exist.
+     */
+    public static Parameters parse(String... args) throws IOException {
+        logger.debug("Parsing input parameters {}", Arrays.toString(args));
+
+        Parameters parameters = new Parameters();
+        JCommander jCommander = JCommander.newBuilder()
+                .addObject(parameters).build();
+        parameters.jCommander = jCommander;
+
+        jCommander.parse(args);
+
+        String configFileName = parameters.getConfigFileName();
+        logger.debug("Checking existence of configuration file {}", configFileName);
+        if (isNotBlank(configFileName) && notExists(get(configFileName))) {
+            throw new FileNotFoundException(format("Config file %s does not exist.", configFileName));
+        }
+
+        logger.info("Parsing input parameters {} were parsed successfully.", Arrays.toString(args));
+        return parameters;
+    }
+
+    @Parameter(names = {"-h", "-help"}, help = true)
+    private boolean help = false;
+
+    @Parameter(names = {"-cf", "-config-file"}, description = "Configuration file in .yaml format.")
+    private String configFileName;
+
+    /**
+     * Returns name of the configuration file.
+     * @return string name.
+     */
+    public String getConfigFileName() {
+        return configFileName;
+    }
+
+    public boolean isHelp() {
+        return help;
+    }
+
+    public void printHelp() {
+        jCommander.usage();
+    }
+}

+ 155 - 155
src/main/java/cz/senslog/analyzer/domain/DoubleStatistics.java

@@ -1,155 +1,155 @@
-package cz.senslog.analyzer.domain;
-
-import java.util.*;
-import java.util.stream.DoubleStream;
-
-import static cz.senslog.common.json.BasicJson.objectToJson;
-
-
-public class DoubleStatistics extends Data<Group, DoubleStatistics> {
-
-    private long count;
-    private double sum;
-    private double sumCompensation;
-    private double simpleSum;
-    private double min = 1.0D / 0.0;
-    private double max = -1.0D / 0.0;
-
-    public static DoubleStatistics init(Group group, Timestamp timestamp) {
-        return new DoubleStatistics(group, timestamp);
-    }
-
-    private DoubleStatistics(Group group, Timestamp timestamp) {
-        super(group, timestamp);
-        initMapping();
-    }
-
-    public DoubleStatistics(DoubleStatistics statistics) {
-        this(statistics.getSource(), statistics, statistics.getTimestamp());
-    }
-
-    public DoubleStatistics(Group group, DoubleStatistics statistics, Timestamp timestamp) {
-        this(group, statistics.count, statistics.min, statistics.max, statistics.sum, timestamp);
-    }
-
-    public DoubleStatistics(Group group, long count, double min, double max, double sum, Timestamp timestamp) {
-        super(group, timestamp);
-        if (count < 0L) {
-            throw new IllegalArgumentException("Negative count value");
-        } else {
-            if (count > 0L) {
-                if (min > max) {
-                    throw new IllegalArgumentException("Minimum greater than maximum");
-                }
-
-                long ncount = DoubleStream.of(min, max, sum).filter(Double::isNaN).count();
-                if (ncount > 0L && ncount < 3L) {
-                    throw new IllegalArgumentException("Some, not all, of the minimum, maximum, or sum is NaN");
-                }
-
-                this.count = count;
-                this.sum = sum;
-                this.simpleSum = sum;
-                this.sumCompensation = 0.0D;
-                this.min = min;
-                this.max = max;
-            }
-        }
-        initMapping();
-    }
-
-    private void initMapping() {
-        addMapping(AttributeValue.MIN, this::getMin);
-        addMapping(AttributeValue.MAX, this::getMax);
-        addMapping(AttributeValue.AVG, this::getAverage);
-    }
-
-    private void sumWithCompensation(double value) {
-        double tmp = value - this.sumCompensation;
-        double velvel = this.sum + tmp;
-        this.sumCompensation = velvel - this.sum - tmp;
-        this.sum = velvel;
-    }
-
-    public final long getCount() {
-        return this.count;
-    }
-
-    public final double getSum() {
-        double tmp = this.sum + this.sumCompensation;
-        return Double.isNaN(tmp) && Double.isInfinite(this.simpleSum) ? this.simpleSum : tmp;
-    }
-
-    public final double getMin() {
-        return this.min;
-    }
-
-    public final double getMax() {
-        return this.max;
-    }
-
-    public final double getAverage() {
-        return this.getCount() > 0L ? this.getSum() / (double)this.getCount() : 0.0D;
-    }
-
-    @Override
-    public DoubleStatistics getValue() {
-        return this;
-    }
-
-    private boolean acceptSensor(Sensor sensor) {
-        return getSource().getSensors().contains(sensor);
-    }
-
-    private boolean acceptGroup(Group group) {
-        return getSource().getId() == group.getId();
-    }
-
-    public boolean accept(DoubleStatistics other) {
-        if (acceptGroup(other.getSource())) {
-            this.count += other.count;
-            this.simpleSum += other.simpleSum;
-            this.sumWithCompensation(other.sum);
-            this.sumWithCompensation(other.sumCompensation);
-            this.min = Math.min(this.min, other.min);
-            this.max = Math.max(this.max, other.max);
-            return true;
-        }
-        return false;
-    }
-
-    public boolean accept(Sensor sensor, double value) {
-        if (acceptSensor(sensor)) {
-            ++this.count;
-            this.simpleSum += value;
-            this.sumWithCompensation(value);
-            this.min = Math.min(this.min, value);
-            this.max = Math.max(this.max, value);
-            return true;
-        }
-        return false;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-        DoubleStatistics that = (DoubleStatistics) o;
-        return getSource().equals(that.getSource()) &&
-                getMin() == that.getMin() &&
-                getMax() == that.getMax() &&
-                getSum() == that.getSum() &&
-                getAverage() == that.getAverage() &&
-                getCount() == that.getCount();
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(getSource(), getMin(), getMax(), getAverage(), getSum(),getCount());
-    }
-
-    @Override
-    public String toString() {
-        return objectToJson(this);
-    }
-}
+package cz.senslog.analyzer.domain;
+
+import java.util.*;
+import java.util.stream.DoubleStream;
+
+import static cz.senslog.analyzer.util.json.BasicJson.objectToJson;
+
+
+public class DoubleStatistics extends Data<Group, DoubleStatistics> {
+
+    private long count;
+    private double sum;
+    private double sumCompensation;
+    private double simpleSum;
+    private double min = 1.0D / 0.0;
+    private double max = -1.0D / 0.0;
+
+    public static DoubleStatistics init(Group group, Timestamp timestamp) {
+        return new DoubleStatistics(group, timestamp);
+    }
+
+    private DoubleStatistics(Group group, Timestamp timestamp) {
+        super(group, timestamp);
+        initMapping();
+    }
+
+    public DoubleStatistics(DoubleStatistics statistics) {
+        this(statistics.getSource(), statistics, statistics.getTimestamp());
+    }
+
+    public DoubleStatistics(Group group, DoubleStatistics statistics, Timestamp timestamp) {
+        this(group, statistics.count, statistics.min, statistics.max, statistics.sum, timestamp);
+    }
+
+    public DoubleStatistics(Group group, long count, double min, double max, double sum, Timestamp timestamp) {
+        super(group, timestamp);
+        if (count < 0L) {
+            throw new IllegalArgumentException("Negative count value");
+        } else {
+            if (count > 0L) {
+                if (min > max) {
+                    throw new IllegalArgumentException("Minimum greater than maximum");
+                }
+
+                long ncount = DoubleStream.of(min, max, sum).filter(Double::isNaN).count();
+                if (ncount > 0L && ncount < 3L) {
+                    throw new IllegalArgumentException("Some, not all, of the minimum, maximum, or sum is NaN");
+                }
+
+                this.count = count;
+                this.sum = sum;
+                this.simpleSum = sum;
+                this.sumCompensation = 0.0D;
+                this.min = min;
+                this.max = max;
+            }
+        }
+        initMapping();
+    }
+
+    private void initMapping() {
+        addMapping(AttributeValue.MIN, this::getMin);
+        addMapping(AttributeValue.MAX, this::getMax);
+        addMapping(AttributeValue.AVG, this::getAverage);
+    }
+
+    private void sumWithCompensation(double value) {
+        double tmp = value - this.sumCompensation;
+        double velvel = this.sum + tmp;
+        this.sumCompensation = velvel - this.sum - tmp;
+        this.sum = velvel;
+    }
+
+    public final long getCount() {
+        return this.count;
+    }
+
+    public final double getSum() {
+        double tmp = this.sum + this.sumCompensation;
+        return Double.isNaN(tmp) && Double.isInfinite(this.simpleSum) ? this.simpleSum : tmp;
+    }
+
+    public final double getMin() {
+        return this.min;
+    }
+
+    public final double getMax() {
+        return this.max;
+    }
+
+    public final double getAverage() {
+        return this.getCount() > 0L ? this.getSum() / (double)this.getCount() : 0.0D;
+    }
+
+    @Override
+    public DoubleStatistics getValue() {
+        return this;
+    }
+
+    private boolean acceptSensor(Sensor sensor) {
+        return getSource().getSensors().contains(sensor);
+    }
+
+    private boolean acceptGroup(Group group) {
+        return getSource().getId() == group.getId();
+    }
+
+    public boolean accept(DoubleStatistics other) {
+        if (acceptGroup(other.getSource())) {
+            this.count += other.count;
+            this.simpleSum += other.simpleSum;
+            this.sumWithCompensation(other.sum);
+            this.sumWithCompensation(other.sumCompensation);
+            this.min = Math.min(this.min, other.min);
+            this.max = Math.max(this.max, other.max);
+            return true;
+        }
+        return false;
+    }
+
+    public boolean accept(Sensor sensor, double value) {
+        if (acceptSensor(sensor)) {
+            ++this.count;
+            this.simpleSum += value;
+            this.sumWithCompensation(value);
+            this.min = Math.min(this.min, value);
+            this.max = Math.max(this.max, value);
+            return true;
+        }
+        return false;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+        DoubleStatistics that = (DoubleStatistics) o;
+        return getSource().equals(that.getSource()) &&
+                getMin() == that.getMin() &&
+                getMax() == that.getMax() &&
+                getSum() == that.getSum() &&
+                getAverage() == that.getAverage() &&
+                getCount() == that.getCount();
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(getSource(), getMin(), getMax(), getAverage(), getSum(),getCount());
+    }
+
+    @Override
+    public String toString() {
+        return objectToJson(this);
+    }
+}

+ 70 - 70
src/main/java/cz/senslog/analyzer/domain/Group.java

@@ -1,70 +1,70 @@
-package cz.senslog.analyzer.domain;
-
-import java.util.Objects;
-import java.util.Set;
-
-import static cz.senslog.common.json.BasicJson.objectToJson;
-import static java.util.Collections.emptySet;
-
-public class Group {
-
-    private final long id;
-    private final long interval;
-    private final boolean persistence;
-    private final AggregationType aggregationType;
-    private final Set<Sensor> sensors;
-
-    public static Group empty() {
-        return new Group(-1, 0, false, null, emptySet());
-    }
-
-    public Group(long id, long interval, boolean persistence, AggregationType aggregationType, Set<Sensor> sensors) {
-        this.id = id;
-        this.interval = interval;
-        this.persistence = persistence;
-        this.aggregationType = aggregationType;
-        this.sensors = sensors;
-    }
-
-    public Group(Group group, Set<Sensor> sensors) {
-        this(group.id, group.interval, group.persistence, group.aggregationType, sensors);
-    }
-
-    public long getId() {
-        return id;
-    }
-
-    public long getInterval() {
-        return interval;
-    }
-
-    public boolean isPersistence() {
-        return persistence;
-    }
-
-    public AggregationType getAggregationType() {
-        return aggregationType;
-    }
-
-    public Set<Sensor> getSensors() {
-        return sensors;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-        Group group = (Group) o;
-        return id == group.id;
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(id);
-    }
-
-    @Override
-    public String toString() {
-        return objectToJson(this);
-    }
-}
+package cz.senslog.analyzer.domain;
+
+import java.util.Objects;
+import java.util.Set;
+
+import static cz.senslog.analyzer.util.json.BasicJson.objectToJson;
+import static java.util.Collections.emptySet;
+
+public class Group {
+
+    private final long id;
+    private final long interval;
+    private final boolean persistence;
+    private final AggregationType aggregationType;
+    private final Set<Sensor> sensors;
+
+    public static Group empty() {
+        return new Group(-1, 0, false, null, emptySet());
+    }
+
+    public Group(long id, long interval, boolean persistence, AggregationType aggregationType, Set<Sensor> sensors) {
+        this.id = id;
+        this.interval = interval;
+        this.persistence = persistence;
+        this.aggregationType = aggregationType;
+        this.sensors = sensors;
+    }
+
+    public Group(Group group, Set<Sensor> sensors) {
+        this(group.id, group.interval, group.persistence, group.aggregationType, sensors);
+    }
+
+    public long getId() {
+        return id;
+    }
+
+    public long getInterval() {
+        return interval;
+    }
+
+    public boolean isPersistence() {
+        return persistence;
+    }
+
+    public AggregationType getAggregationType() {
+        return aggregationType;
+    }
+
+    public Set<Sensor> getSensors() {
+        return sensors;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+        Group group = (Group) o;
+        return id == group.id;
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(id);
+    }
+
+    @Override
+    public String toString() {
+        return objectToJson(this);
+    }
+}

+ 24 - 24
src/main/java/cz/senslog/analyzer/domain/Observation.java

@@ -1,24 +1,24 @@
-package cz.senslog.analyzer.domain;
-
-import static cz.senslog.common.json.BasicJson.objectToJson;
-
-public class Observation extends Data<Sensor, Double> {
-
-    private final double value;
-
-    public Observation(Sensor sensor, double value, Timestamp timestamp) {
-        super(sensor, timestamp);
-        this.value = value;
-
-        addMapping(AttributeValue.VAL, this::getValue);
-    }
-
-    public Double getValue() {
-        return value;
-    }
-
-    @Override
-    public String toString() {
-        return objectToJson(this);
-    }
-}
+package cz.senslog.analyzer.domain;
+
+import static cz.senslog.analyzer.util.json.BasicJson.objectToJson;
+
+public class Observation extends Data<Sensor, Double> {
+
+    private final double value;
+
+    public Observation(Sensor sensor, double value, Timestamp timestamp) {
+        super(sensor, timestamp);
+        this.value = value;
+
+        addMapping(AttributeValue.VAL, this::getValue);
+    }
+
+    public Double getValue() {
+        return value;
+    }
+
+    @Override
+    public String toString() {
+        return objectToJson(this);
+    }
+}

+ 52 - 52
src/main/java/cz/senslog/analyzer/domain/Threshold.java

@@ -1,52 +1,52 @@
-package cz.senslog.analyzer.domain;
-
-import static cz.senslog.common.json.BasicJson.objectToJson;
-
-public class Threshold {
-
-    public static class Rule {
-
-        private final String mode;
-        private final String property;
-        private final Double value;
-
-        public Rule(String mode, String property, Double value) {
-            this.mode = mode;
-            this.property = property;
-            this.value = value;
-        }
-
-        public String getMode() {
-            return mode;
-        }
-
-        public String getProperty() {
-            return property;
-        }
-
-        public Double getValue() {
-            return value;
-        }
-
-        @Override
-        public String toString() {
-            return objectToJson(this);
-        }
-    }
-
-    private final long groupId;
-    private final Rule rule;
-
-    public Threshold(long groupId, String property, String mode, Double value) {
-        this.groupId = groupId;
-        this.rule = new Rule(mode,property, value);
-    }
-
-    public Long getGroupId() {
-        return groupId;
-    }
-
-    public Rule getRule() {
-        return rule;
-    }
-}
+package cz.senslog.analyzer.domain;
+
+import static cz.senslog.analyzer.util.json.BasicJson.objectToJson;
+
+public class Threshold {
+
+    public static class Rule {
+
+        private final String mode;
+        private final String property;
+        private final Double value;
+
+        public Rule(String mode, String property, Double value) {
+            this.mode = mode;
+            this.property = property;
+            this.value = value;
+        }
+
+        public String getMode() {
+            return mode;
+        }
+
+        public String getProperty() {
+            return property;
+        }
+
+        public Double getValue() {
+            return value;
+        }
+
+        @Override
+        public String toString() {
+            return objectToJson(this);
+        }
+    }
+
+    private final long groupId;
+    private final Rule rule;
+
+    public Threshold(long groupId, String property, String mode, Double value) {
+        this.groupId = groupId;
+        this.rule = new Rule(mode,property, value);
+    }
+
+    public Long getGroupId() {
+        return groupId;
+    }
+
+    public Rule getRule() {
+        return rule;
+    }
+}

+ 32 - 32
src/main/java/cz/senslog/analyzer/provider/AnalyzerTask.java

@@ -1,32 +1,32 @@
-package cz.senslog.analyzer.provider;
-
-
-import cz.senslog.analyzer.analysis.Analyzer;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-
-import java.util.List;
-import java.util.Objects;
-
-public abstract class AnalyzerTask<T> implements Runnable {
-
-    private static final Logger logger = LogManager.getLogger(AnalyzerTask.class);
-
-    private final Analyzer<T> analyzer;
-
-    protected AnalyzerTask(Analyzer<T> analyzer) {
-        Objects.requireNonNull(analyzer);
-        this.analyzer = analyzer;
-    }
-
-    protected abstract List<T> loadData();
-
-    @Override
-    public final void run() {
-        long startTime = System.currentTimeMillis();
-        analyzer.accept(loadData());
-        long stopTime = System.currentTimeMillis();
-        long durationMilliSec = stopTime - startTime;
-        logger.info("Duration of the task '{}' was {} ms.", getClass().getSimpleName(), durationMilliSec);
-    }
-}
+package cz.senslog.analyzer.provider;
+
+
+import cz.senslog.analyzer.analysis.Analyzer;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import java.util.List;
+import java.util.Objects;
+
+public abstract class AnalyzerTask<T> implements Runnable {
+
+    private static final Logger logger = LogManager.getLogger(AnalyzerTask.class);
+
+    private final Analyzer<T> analyzer;
+
+    protected AnalyzerTask(Analyzer<T> analyzer) {
+        Objects.requireNonNull(analyzer);
+        this.analyzer = analyzer;
+    }
+
+    protected abstract List<T> loadData();
+
+    @Override
+    public final void run() {
+        long startTime = System.currentTimeMillis();
+        analyzer.accept(loadData());
+        long stopTime = System.currentTimeMillis();
+        long durationMilliSec = stopTime - startTime;
+        logger.info("Duration of the task '{}' was {} ms.", getClass().getSimpleName(), durationMilliSec);
+    }
+}

+ 18 - 17
src/main/java/cz/senslog/analyzer/provider/DataProvider.java

@@ -1,17 +1,18 @@
-package cz.senslog.analyzer.provider;
-
-import cz.senslog.analyzer.analysis.Analyzer;
-
-public abstract class DataProvider<T> {
-
-    protected Analyzer<T> analyzer;
-
-    protected ProviderConfig config;
-
-    public void init(Analyzer<T> analyzer, ProviderConfig providerConfiguration) {
-        this.analyzer = analyzer;
-        this.config = providerConfiguration;
-    }
-
-    public abstract void start();
-}
+package cz.senslog.analyzer.provider;
+
+import cz.senslog.analyzer.analysis.Analyzer;
+import cz.senslog.analyzer.provider.ProviderConfig;
+
+public abstract class DataProvider<T> {
+
+    protected Analyzer<T> analyzer;
+
+    protected ProviderConfig config;
+
+    public void init(Analyzer<T> analyzer, ProviderConfig providerConfiguration) {
+        this.analyzer = analyzer;
+        this.config = providerConfiguration;
+    }
+
+    public abstract void start();
+}

+ 17 - 18
src/main/java/cz/senslog/analyzer/provider/DataProviderComponent.java

@@ -1,19 +1,18 @@
-package cz.senslog.analyzer.provider;
-
-import cz.senslog.analyzer.domain.Observation;
-import dagger.Component;
-
-import javax.inject.Singleton;
-
-@Singleton
-@Component(modules = {
-        ScheduleDatabaseProviderModule.class,
-        HttpMiddlewareProviderModule.class
-})
-public interface DataProviderComponent {
-
-    ScheduledDataProviderConfig scheduledDatabaseProvider(); // TODO refactor
-
-    MiddlewareDataProviderConfig httpMiddlewareProvider();
-
+package cz.senslog.analyzer.provider;
+
+import dagger.Component;
+
+import javax.inject.Singleton;
+
+@Singleton
+@Component(modules = {
+        ScheduleDatabaseProviderModule.class,
+        HttpMiddlewareProviderModule.class
+})
+public interface DataProviderComponent {
+
+    ScheduledDataProviderConfig scheduledDatabaseProvider(); // TODO refactor
+
+    MiddlewareDataProviderConfig httpMiddlewareProvider();
+
 }

+ 9 - 8
src/main/java/cz/senslog/analyzer/provider/DataProviderDeployment.java

@@ -1,8 +1,9 @@
-package cz.senslog.analyzer.provider;
-
-import cz.senslog.analyzer.analysis.Analyzer;
-
-public interface DataProviderDeployment<T> {
-
-    DataProvider<T> deployAnalyzer(Analyzer<T> analyzer);
-}
+package cz.senslog.analyzer.provider;
+
+import cz.senslog.analyzer.analysis.Analyzer;
+import cz.senslog.analyzer.provider.DataProvider;
+
+public interface DataProviderDeployment<T> {
+
+    DataProvider<T> deployAnalyzer(Analyzer<T> analyzer);
+}

+ 16 - 16
src/main/java/cz/senslog/analyzer/provider/HttpMiddlewareProvider.java

@@ -1,16 +1,16 @@
-package cz.senslog.analyzer.provider;
-
-import javax.inject.Inject;
-
-public class HttpMiddlewareProvider extends DataProvider {
-
-    @Inject
-    public HttpMiddlewareProvider() {
-
-    }
-
-    @Override
-    public void start() {
-
-    }
-}
+package cz.senslog.analyzer.provider;
+
+import javax.inject.Inject;
+
+public class HttpMiddlewareProvider extends DataProvider {
+
+    @Inject
+    public HttpMiddlewareProvider() {
+
+    }
+
+    @Override
+    public void start() {
+
+    }
+}

+ 18 - 18
src/main/java/cz/senslog/analyzer/provider/HttpMiddlewareProviderModule.java

@@ -1,18 +1,18 @@
-package cz.senslog.analyzer.provider;
-
-import dagger.Module;
-import dagger.Provides;
-
-@Module
-public class HttpMiddlewareProviderModule {
-
-    @Provides
-    public MiddlewareDataProviderConfig provideMiddlewareProvider(HttpMiddlewareProvider provider) {
-        return new MiddlewareDataProviderConfig() {};
-    }
-
-    @Provides
-    public HttpMiddlewareProvider provideHttpMiddlewareProvider() {
-        return new HttpMiddlewareProvider();
-    }
-}
+package cz.senslog.analyzer.provider;
+
+import dagger.Module;
+import dagger.Provides;
+
+@Module
+public class HttpMiddlewareProviderModule {
+
+    @Provides
+    public MiddlewareDataProviderConfig provideMiddlewareProvider(HttpMiddlewareProvider provider) {
+        return new MiddlewareDataProviderConfig() {};
+    }
+
+    @Provides
+    public HttpMiddlewareProvider provideHttpMiddlewareProvider() {
+        return new HttpMiddlewareProvider();
+    }
+}

+ 4 - 4
src/main/java/cz/senslog/analyzer/provider/MiddlewareDataProviderConfig.java

@@ -1,4 +1,4 @@
-package cz.senslog.analyzer.provider;
-
-public interface MiddlewareDataProviderConfig {
-}
+package cz.senslog.analyzer.provider;
+
+public interface MiddlewareDataProviderConfig {
+}

+ 6 - 6
src/main/java/cz/senslog/analyzer/provider/ScheduledDataProviderConfig.java

@@ -1,6 +1,6 @@
-package cz.senslog.analyzer.provider;
-
-public interface ScheduledDataProviderConfig<T> {
-
-    DataProviderDeployment<T> config(ProviderConfig providerConfiguration);
-}
+package cz.senslog.analyzer.provider;
+
+public interface ScheduledDataProviderConfig<T> {
+
+    DataProviderDeployment<T> config(ProviderConfig providerConfiguration);
+}

+ 27 - 27
src/main/java/cz/senslog/analyzer/provider/ScheduledDataProviderConfigImpl.java

@@ -1,27 +1,27 @@
-package cz.senslog.analyzer.provider;
-
-import cz.senslog.analyzer.analysis.Analyzer;
-import cz.senslog.analyzer.domain.Observation;
-
-public class ScheduledDataProviderConfigImpl implements ScheduledDataProviderConfig<Observation>, DataProviderDeployment<Observation> {
-
-    private final ScheduledDatabaseProvider provider;
-
-    private ProviderConfig providerConfiguration;
-
-    public ScheduledDataProviderConfigImpl(ScheduledDatabaseProvider provider) {
-        this.provider = provider;
-    }
-
-    @Override
-    public DataProvider<Observation> deployAnalyzer(Analyzer<Observation> analyzer) {
-        provider.init(analyzer, providerConfiguration);
-        return provider;
-    }
-
-    @Override
-    public DataProviderDeployment<Observation> config(ProviderConfig providerConfiguration) {
-        this.providerConfiguration = providerConfiguration;
-        return this;
-    }
-}
+package cz.senslog.analyzer.provider;
+
+import cz.senslog.analyzer.analysis.Analyzer;
+import cz.senslog.analyzer.domain.Observation;
+
+public class ScheduledDataProviderConfigImpl implements ScheduledDataProviderConfig<Observation>, DataProviderDeployment<Observation> {
+
+    private final ScheduledDatabaseProvider provider;
+
+    private ProviderConfig providerConfiguration;
+
+    public ScheduledDataProviderConfigImpl(ScheduledDatabaseProvider provider) {
+        this.provider = provider;
+    }
+
+    @Override
+    public DataProvider<Observation> deployAnalyzer(Analyzer<Observation> analyzer) {
+        provider.init(analyzer, providerConfiguration);
+        return provider;
+    }
+
+    @Override
+    public DataProviderDeployment<Observation> config(ProviderConfig providerConfiguration) {
+        this.providerConfiguration = providerConfiguration;
+        return this;
+    }
+}

+ 43 - 43
src/main/java/cz/senslog/analyzer/provider/ScheduledDatabaseProvider.java

@@ -1,43 +1,43 @@
-package cz.senslog.analyzer.provider;
-
-import cz.senslog.analyzer.domain.Observation;
-import cz.senslog.analyzer.provider.task.ObservationAnalyzerTask;
-import cz.senslog.analyzer.storage.inmemory.TimestampStorage;
-import cz.senslog.analyzer.storage.inmemory.repository.TimestampRepository;
-import cz.senslog.analyzer.storage.permanent.repository.SensLogRepository;
-import cz.senslog.common.util.schedule.Scheduler;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-
-import javax.inject.Inject;
-
-public class ScheduledDatabaseProvider extends DataProvider<Observation> {
-
-    private static final Logger logger = LogManager.getLogger(ScheduledDatabaseProvider.class);
-
-    private final TimestampStorage storage;
-    private final SensLogRepository repository;
-
-    private Scheduler scheduler;
-
-    @Inject
-    public ScheduledDatabaseProvider(TimestampRepository configRepository, SensLogRepository sensLogRepository) {
-        this.storage = TimestampStorage.createContext(configRepository);
-        this.repository = sensLogRepository;
-    }
-
-    @Override
-    public void start() {
-
-        scheduler = Scheduler.createBuilder()
-                .addTask(new ObservationAnalyzerTask(analyzer, storage, repository, config.getStartDateTime()), config.getPeriod())
-        .build();
-
-        scheduler.start();
-
-    }
-
-    private Scheduler registerTask(AnalyzerTask<Observation> mainTask, AnalyzerTask<Observation>... tasks) {
-        return null;
-    }
-}
+package cz.senslog.analyzer.provider;
+
+import cz.senslog.analyzer.domain.Observation;
+import cz.senslog.analyzer.provider.task.ObservationAnalyzerTask;
+import cz.senslog.analyzer.storage.inmemory.TimestampStorage;
+import cz.senslog.analyzer.storage.inmemory.repository.TimestampRepository;
+import cz.senslog.analyzer.storage.permanent.repository.SensLogRepository;
+import cz.senslog.analyzer.util.schedule.Scheduler;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import javax.inject.Inject;
+
+public class ScheduledDatabaseProvider extends DataProvider<Observation> {
+
+    private static final Logger logger = LogManager.getLogger(ScheduledDatabaseProvider.class);
+
+    private final TimestampStorage storage;
+    private final SensLogRepository repository;
+
+    private Scheduler scheduler;
+
+    @Inject
+    public ScheduledDatabaseProvider(TimestampRepository configRepository, SensLogRepository sensLogRepository) {
+        this.storage = TimestampStorage.createContext(configRepository);
+        this.repository = sensLogRepository;
+    }
+
+    @Override
+    public void start() {
+
+        scheduler = Scheduler.createBuilder()
+                .addTask(new ObservationAnalyzerTask(analyzer, storage, repository, config.getStartDateTime()), config.getPeriod())
+        .build();
+
+        scheduler.start();
+
+    }
+
+    private Scheduler registerTask(AnalyzerTask<Observation> mainTask, AnalyzerTask<Observation>... tasks) {
+        return null;
+    }
+}

+ 98 - 92
src/main/java/cz/senslog/analyzer/provider/task/ObservationAnalyzerTask.java

@@ -1,93 +1,99 @@
-package cz.senslog.analyzer.provider.task;
-
-import cz.senslog.analyzer.analysis.Analyzer;
-import cz.senslog.analyzer.domain.Data;
-import cz.senslog.analyzer.domain.Observation;
-import cz.senslog.analyzer.domain.Timestamp;
-import cz.senslog.analyzer.provider.AnalyzerTask;
-import cz.senslog.analyzer.storage.inmemory.TimestampStorage;
-import cz.senslog.analyzer.storage.permanent.repository.SensLogRepository;
-import org.apache.logging.log4j.Level;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-
-import java.time.OffsetDateTime;
-import java.util.*;
-
-import static cz.senslog.analyzer.domain.TimestampType.*;
-import static cz.senslog.analyzer.domain.TimestampType.LAST_PROVIDED_EXCLUSIVE;
-import static cz.senslog.analyzer.util.ListUtils.sublistToEnd;
-
-public class ObservationAnalyzerTask extends AnalyzerTask<Observation> {
-
-    private static final Logger logger = LogManager.getLogger(ObservationAnalyzerTask.class);
-
-    private static final int MAX_OBSERVATIONS = 200;
-
-    private final TimestampStorage storage;
-    private final SensLogRepository repository;
-    private final Timestamp startDateTime;
-
-    public ObservationAnalyzerTask(Analyzer<Observation> analyzer, TimestampStorage storage,
-                                   SensLogRepository repository, OffsetDateTime startDateTime
-    ) {
-        super(analyzer);
-        Objects.requireNonNull(storage);
-        Objects.requireNonNull(repository);
-        Objects.requireNonNull(startDateTime);
-        this.storage = storage;
-        this.repository = repository;
-        this.startDateTime = Timestamp.of(startDateTime);
-    }
-
-    @Override
-    protected List<Observation> loadData() {
-
-        Timestamp firstProvidedIn = storage.get(FIRST_PROVIDED_INCLUSIVE, startDateTime);
-        Timestamp lastProvidedEx = storage.get(LAST_PROVIDED_EXCLUSIVE, Timestamp.MIN);
-        Timestamp lastProvidedIn = storage.get(LAST_PROVIDED_INCLUSIVE, Timestamp.MIN);
-        Timestamp committedIn = storage.get(LAST_COMMITTED_INCLUSIVE, Timestamp.MIN);
-        logger.info("firstProvidedIn: {}, lastProvidedEx: {}, lastProvidedIn: {}, committedIn: {}.",
-                firstProvidedIn, lastProvidedEx, lastProvidedIn, committedIn);
-
-        boolean previousItrWasFinished = firstProvidedIn.isBefore(committedIn) || firstProvidedIn.isEqual(committedIn);
-        Level logLevel = previousItrWasFinished ? Level.INFO : Level.WARN;
-        logger.log(logLevel, "Previous iteration finished: {}.", previousItrWasFinished);
-
-        List<Observation> newObservations;
-        if (previousItrWasFinished) {
-            newObservations = repository.getObservationsFromTime(lastProvidedIn, false, MAX_OBSERVATIONS);
-        } else {
-            newObservations = repository.getObservationsFromTime(firstProvidedIn, true, MAX_OBSERVATIONS);
-        }
-
-        if (newObservations.isEmpty()) {
-            return Collections.emptyList();
-        }
-
-        Timestamp start = newObservations.get(0).getTimestamp();
-        Timestamp end = newObservations.get(newObservations.size() - 1).getTimestamp();
-
-        List<Observation> observations = sublistToEnd(newObservations, end, false, Observation::getTimestamp);
-
-        if (observations.isEmpty()) {
-            if (newObservations.size() < MAX_OBSERVATIONS) {
-                logger.info("No observations loaded.");
-                return Collections.emptyList();
-            } else {
-                // TODO find a better solution -> a problem of more than MAX_OBSERVATIONS observations at the same timestamp
-                observations = newObservations;
-            }
-        }
-
-        observations.sort(Comparator.comparing(Data::getTimestamp));
-        Timestamp lastInclusive = observations.get(observations.size() - 1).getTimestamp();
-
-        logger.info("Loaded {} observations from {} to {}.", observations.size(), start, lastInclusive);
-        storage.update(start, FIRST_PROVIDED_INCLUSIVE);
-        storage.update(end, LAST_PROVIDED_EXCLUSIVE);
-        storage.update(lastInclusive, LAST_PROVIDED_INCLUSIVE);
-
-        return observations;
-    }
package cz.senslog.analyzer.provider.task;

import cz.senslog.analyzer.analysis.Analyzer;
import cz.senslog.analyzer.domain.Data;
import cz.senslog.analyzer.domain.Observation;
import cz.senslog.analyzer.domain.Timestamp;
import cz.senslog.analyzer.provider.AnalyzerTask;
import cz.senslog.analyzer.storage.inmemory.TimestampStorage;
import cz.senslog.analyzer.storage.permanent.repository.SensLogRepository;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

import java.time.OffsetDateTime;
import java.util.*;

import static cz.senslog.analyzer.domain.TimestampType.*;
import static cz.senslog.analyzer.util.ListUtils.sublistToEnd;

/**
 * Analyzer task that incrementally loads observations from the SensLog
 * repository. It keeps a set of watermark timestamps in {@link TimestampStorage}
 * so that a crashed/unfinished iteration is re-read (inclusively) on the next
 * run, while a finished iteration continues exclusively after the last
 * provided timestamp.
 */
public class ObservationAnalyzerTask extends AnalyzerTask<Observation> {

    private static final Logger logger = LogManager.getLogger(ObservationAnalyzerTask.class);

    /** Repository reads slower than this (ms) are logged at WARN level. */
    private static final long MAX_REPOSITORY_DURATION = 1_000; // 1s
    /** Upper bound on observations fetched per iteration. */
    private static final int MAX_OBSERVATIONS = 200;

    private final TimestampStorage storage;
    private final SensLogRepository repository;
    /** Lower bound used for the very first iteration (no stored watermark yet). */
    private final Timestamp startDateTime;

    /**
     * @param analyzer      analyzer the loaded observations are handed to
     * @param storage       watermark storage, must not be null
     * @param repository    observation source, must not be null
     * @param startDateTime initial lower bound for the first load, must not be null
     */
    public ObservationAnalyzerTask(Analyzer<Observation> analyzer, TimestampStorage storage,
                                   SensLogRepository repository, OffsetDateTime startDateTime
    ) {
        super(analyzer);
        Objects.requireNonNull(storage);
        Objects.requireNonNull(repository);
        Objects.requireNonNull(startDateTime);
        this.storage = storage;
        this.repository = repository;
        this.startDateTime = Timestamp.of(startDateTime);
    }

    /**
     * Loads the next batch of observations and advances the watermarks.
     *
     * @return the sorted batch, or an empty list when nothing new is available
     */
    @Override
    protected List<Observation> loadData() {

        Timestamp firstProvidedIn = storage.get(FIRST_PROVIDED_INCLUSIVE, startDateTime);
        Timestamp lastProvidedEx = storage.get(LAST_PROVIDED_EXCLUSIVE, Timestamp.MIN);
        Timestamp lastProvidedIn = storage.get(LAST_PROVIDED_INCLUSIVE, Timestamp.MIN);
        Timestamp committedIn = storage.get(LAST_COMMITTED_INCLUSIVE, Timestamp.MIN);
        logger.info("firstProvidedIn: {}, lastProvidedEx: {}, lastProvidedIn: {}, committedIn: {}.",
                firstProvidedIn, lastProvidedEx, lastProvidedIn, committedIn);

        // The previous iteration is considered finished once everything that was
        // provided has also been committed.
        boolean previousItrWasFinished = firstProvidedIn.isBefore(committedIn) || firstProvidedIn.isEqual(committedIn);
        Level logLevel = previousItrWasFinished ? Level.INFO : Level.WARN;
        logger.log(logLevel, "Previous iteration finished: {}.", previousItrWasFinished);

        long repStart = System.currentTimeMillis();
        List<Observation> newObservations;
        if (previousItrWasFinished) {
            newObservations = repository.getObservationsFromTime(lastProvidedIn, false, MAX_OBSERVATIONS);
        } else {
            // Unfinished iteration: re-read inclusively from its first timestamp.
            newObservations = repository.getObservationsFromTime(firstProvidedIn, true, MAX_OBSERVATIONS);
        }
        // FIX: elapsed time is "now - start"; the original computed
        // repStart - System.currentTimeMillis(), which is always negative, so the
        // slow-repository WARN could never trigger and the log showed negative ms.
        long repDuration = System.currentTimeMillis() - repStart;
        logLevel = repDuration < MAX_REPOSITORY_DURATION ? Level.INFO : Level.WARN;
        logger.log(logLevel, "New observation was loaded in {} ms.", repDuration);

        if (newObservations.isEmpty()) {
            return Collections.emptyList();
        }

        Timestamp start = newObservations.get(0).getTimestamp();
        Timestamp end = newObservations.get(newObservations.size() - 1).getTimestamp();

        // Drop the trailing timestamp group: it may be incomplete because the
        // fetch was cut off at MAX_OBSERVATIONS.
        List<Observation> observations = sublistToEnd(newObservations, end, false, Observation::getTimestamp);

        if (observations.isEmpty()) {
            if (newObservations.size() < MAX_OBSERVATIONS) {
                logger.info("No observations loaded.");
                return Collections.emptyList();
            } else {
                // TODO find a better solution -> a problem of more than MAX_OBSERVATIONS observations at the same timestamp
                observations = newObservations;
            }
        }

        observations.sort(Comparator.comparing(Data::getTimestamp));
        Timestamp lastInclusive = observations.get(observations.size() - 1).getTimestamp();

        logger.info("Loaded {} observations from {} to {}.", observations.size(), start, lastInclusive);
        storage.update(start, FIRST_PROVIDED_INCLUSIVE);
        storage.update(end, LAST_PROVIDED_EXCLUSIVE);
        storage.update(lastInclusive, LAST_PROVIDED_INCLUSIVE);

        return observations;
    }
}

+ 180 - 180
src/main/java/cz/senslog/analyzer/storage/permanent/repository/StatisticsConfigRepository.java

@@ -1,180 +1,180 @@
-package cz.senslog.analyzer.storage.permanent.repository;
-
-import cz.senslog.analyzer.domain.*;
-import cz.senslog.analyzer.provider.ScheduledDatabaseProvider;
-import cz.senslog.analyzer.storage.Connection;
-import cz.senslog.common.util.Tuple;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-import org.jdbi.v3.core.Jdbi;
-
-import javax.inject.Inject;
-import java.time.Instant;
-import java.time.format.DateTimeFormatter;
-import java.util.AbstractMap;
-import java.util.List;
-import java.util.Map;
-
-import static java.util.Collections.emptyList;
-import static java.util.Collections.emptySet;
-import static java.util.stream.Collectors.*;
-
-public class StatisticsConfigRepository {
-
-    private static final Logger logger = LogManager.getLogger(ScheduledDatabaseProvider.class);
-
-    private final Jdbi jdbi;
-
-    private final Special specialRepository;
-
-    @Inject
-    public StatisticsConfigRepository(Connection<Jdbi> connection) {
-        this.jdbi = connection.get();
-        this.specialRepository = new Special(connection.get());
-    }
-
-    public Special special() {
-        return specialRepository;
-    }
-
-    public List<Sensor> getAllAvailableSensors() {
-        return jdbi.withHandle(h -> h.createQuery(
-                "SELECT s.unit_id AS unit_id, s.sensor_id AS sensor_id, g.id as group_id " +
-                        "FROM statistics.sensors AS s " +
-                        "JOIN statistics.sensor_to_group AS sg ON sg.sensor_id = s.id " +
-                        "JOIN statistics.groups_interval AS g ON sg.group_id = g.id"
-                )
-                        .map((rs, ctx) -> new Sensor(
-                                rs.getLong("unit_id"),
-                                rs.getLong("sensor_id"),
-                                rs.getLong("group_id")
-                        )).list()
-        );
-    }
-
-    public List<Group> getGroupInfos() {
-        return jdbi.withHandle(h -> h.createQuery(
-                "SELECT " +
-                        "g.id AS group_id, " +
-                        "s.sensor_id AS sensor_id, " +
-                        "s.unit_id AS unit_id, " +
-                        "g.time_interval AS time_interval, " +
-                        "g.persistence AS persistence, " +
-                        "g.aggregation_type AS aggregation_type " +
-                        "FROM statistics.groups_interval AS g " +
-                        "JOIN statistics.sensor_to_group AS sg ON sg.group_id = g.id " +
-                        "JOIN statistics.sensors AS s ON s.id = sg.sensor_id " +
-                        "WHERE g.time_interval != 0"
-        )
-                .map((rs, ctx) -> {
-                    long groupId = rs.getLong("group_id");
-                    Sensor sensor = new Sensor(
-                            rs.getLong("unit_id"),
-                            rs.getLong("sensor_id"),
-                            groupId
-                    );
-                    Group group = new Group(
-                            groupId,
-                            rs.getLong("time_interval"),
-                            rs.getBoolean("persistence"),
-                            AggregationType.valueOf(rs.getString("aggregation_type")),
-                            emptySet()
-                    );
-                    return new AbstractMap.SimpleEntry<>(group, sensor);
-                }).collect(groupingBy(Map.Entry::getKey, mapping(Map.Entry::getValue, toSet())))
-                .entrySet().stream().map(entry -> new Group(entry.getKey(), entry.getValue())).collect(toList())
-        );
-    }
-
-    public List<Threshold> getCurrentThresholdsValue() {
-        return jdbi.withHandle(h -> h.createQuery(
-                "SELECT t.group_id AS group_id, t.mode AS mode, t.property AS property, t.threshold_value AS threshold_value " +
-                        "FROM statistics.thresholds AS t " +
-                        "JOIN statistics.groups_interval AS g ON g.id = t.group_id " +
-                        "WHERE g.time_interval = 0"
-                )
-                        .map((rs, ctx) -> new Threshold(
-                                rs.getLong("group_id"),
-                                rs.getString("property"),
-                                rs.getString("mode"),
-                                rs.getDouble("threshold_value")
-                        )).list()
-        );
-    }
-
-    public List<Threshold> getIntervalThresholdsValue() {
-        return jdbi.withHandle(h -> h.createQuery(
-                "SELECT t.group_id AS group_id, t.mode AS mode, t.property AS property, t.threshold_value AS threshold_value " +
-                        "FROM statistics.thresholds AS t " +
-                        "JOIN statistics.groups_interval AS g ON g.id = t.group_id " +
-                        "WHERE g.time_interval != 0"
-                )
-                        .map((rs, ctx) -> new Threshold(
-                                rs.getLong("group_id"),
-                                rs.getString("property"),
-                                rs.getString("mode"),
-                                rs.getDouble("threshold_value")
-                        )).list()
-        );
-    }
-
-    public static class Special {
-
-        private final Jdbi jdbi;
-
-        private Special(Jdbi jdbi) {
-            this.jdbi = jdbi;
-        }
-
-        public List<Tuple<Long, Long>> getGroupsByUnit(long unitId, long maxInterval) {
-            try {
-                return jdbi.<List<Tuple<Long, Long>>, Exception>withHandle(h -> h.createQuery(
-                        "SELECT g.id AS group_id, s.sensor_id AS sensor_id " +
-                                "FROM statistics.sensors AS s " +
-                                "JOIN statistics.sensor_to_group AS sg ON sg.sensor_id = s.id " +
-                                "JOIN statistics.groups_interval AS g ON g.id = sg.group_id " +
-                                "WHERE s.unit_id = :unit_id " +
-                                "AND g.time_interval > 0 " +
-                                "AND g.time_interval <= :aggr_interval_s " +
-                                "AND g.persistence = TRUE " +
-                                "AND g.aggregation_type = 'DOUBLE' " +
-                                "ORDER BY g.time_interval"
-                        )
-                                .bind("unit_id", unitId)
-                                .bind("aggr_interval_s", maxInterval)
-                                .bind("aggr_type", AggregationType.DOUBLE)
-                                .map((rs, ctx) ->
-                                        Tuple.of(rs.getLong("group_id"), rs.getLong("sensor_id"))
-                                ).list()
-                );
-            } catch (Exception e) {
-                logger.catching(e);
-                return emptyList();
-            }
-        }
-
-        public long getGroupIdByUnitSensor(long unitId, long sensorId, long maxInterval) {
-            try {
-                return jdbi.<Long, IllegalStateException>withHandle(h -> h.createQuery(
-                        "SELECT g.id AS group_id, g.time_interval AS time_interval FROM statistics.sensors AS s " +
-                                "JOIN statistics.sensor_to_group AS sg ON sg.sensor_id = s.id " +
-                                "JOIN statistics.groups_interval AS g ON g.id = sg.group_id " +
-                                "WHERE s.sensor_id = :sensor_id AND s.unit_id = :unit_id " +
-                                "  AND g.time_interval > 0 AND g.time_interval <= :aggr_interval_s " +
-                                "  AND g.persistence = TRUE AND g.aggregation_type = :aggr_type " +
-                                "ORDER BY g.time_interval DESC LIMIT 1"
-                        )
-                                .bind("unit_id", unitId)
-                                .bind("sensor_id", sensorId)
-                                .bind("aggr_interval_s", maxInterval)
-                                .bind("aggr_type", AggregationType.DOUBLE)
-                                .map((rs, ctx) -> rs.getLong("group_id")).first()
-                );
-            } catch (IllegalStateException e) {
-                logger.catching(e);
-                return -1;
-            }
-        }
-
-    }
-}
package cz.senslog.analyzer.storage.permanent.repository;

import cz.senslog.analyzer.domain.*;
import cz.senslog.analyzer.provider.ScheduledDatabaseProvider;
import cz.senslog.analyzer.storage.Connection;
import cz.senslog.analyzer.util.Tuple;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.jdbi.v3.core.Jdbi;

import javax.inject.Inject;
import java.time.Instant;
import java.time.format.DateTimeFormatter;
import java.util.AbstractMap;
import java.util.List;
import java.util.Map;

import static java.util.Collections.emptyList;
import static java.util.Collections.emptySet;
import static java.util.stream.Collectors.*;

/**
 * Read-only access to the statistics configuration schema: sensors, sensor
 * groups and their thresholds. A nested {@link Special} sub-repository offers
 * unit/sensor-scoped lookups with defensive error handling.
 */
public class StatisticsConfigRepository {

    // FIX: logger was created with ScheduledDatabaseProvider.class, so log
    // entries from this repository were attributed to the wrong class.
    private static final Logger logger = LogManager.getLogger(StatisticsConfigRepository.class);

    private final Jdbi jdbi;

    private final Special specialRepository;

    @Inject
    public StatisticsConfigRepository(Connection<Jdbi> connection) {
        this.jdbi = connection.get();
        this.specialRepository = new Special(connection.get());
    }

    /** @return the specialized unit/sensor-scoped sub-repository */
    public Special special() {
        return specialRepository;
    }

    /** @return every sensor that is assigned to some interval group */
    public List<Sensor> getAllAvailableSensors() {
        return jdbi.withHandle(h -> h.createQuery(
                "SELECT s.unit_id AS unit_id, s.sensor_id AS sensor_id, g.id as group_id " +
                        "FROM statistics.sensors AS s " +
                        "JOIN statistics.sensor_to_group AS sg ON sg.sensor_id = s.id " +
                        "JOIN statistics.groups_interval AS g ON sg.group_id = g.id"
                )
                        .map((rs, ctx) -> new Sensor(
                                rs.getLong("unit_id"),
                                rs.getLong("sensor_id"),
                                rs.getLong("group_id")
                        )).list()
        );
    }

    /**
     * Loads all interval groups (time_interval != 0) together with the set of
     * sensors assigned to each. Rows are first mapped to (group, sensor)
     * pairs, then grouped so every group carries its full sensor set.
     */
    public List<Group> getGroupInfos() {
        return jdbi.withHandle(h -> h.createQuery(
                "SELECT " +
                        "g.id AS group_id, " +
                        "s.sensor_id AS sensor_id, " +
                        "s.unit_id AS unit_id, " +
                        "g.time_interval AS time_interval, " +
                        "g.persistence AS persistence, " +
                        "g.aggregation_type AS aggregation_type " +
                        "FROM statistics.groups_interval AS g " +
                        "JOIN statistics.sensor_to_group AS sg ON sg.group_id = g.id " +
                        "JOIN statistics.sensors AS s ON s.id = sg.sensor_id " +
                        "WHERE g.time_interval != 0"
        )
                .map((rs, ctx) -> {
                    long groupId = rs.getLong("group_id");
                    Sensor sensor = new Sensor(
                            rs.getLong("unit_id"),
                            rs.getLong("sensor_id"),
                            groupId
                    );
                    Group group = new Group(
                            groupId,
                            rs.getLong("time_interval"),
                            rs.getBoolean("persistence"),
                            AggregationType.valueOf(rs.getString("aggregation_type")),
                            emptySet()
                    );
                    return new AbstractMap.SimpleEntry<>(group, sensor);
                }).collect(groupingBy(Map.Entry::getKey, mapping(Map.Entry::getValue, toSet())))
                .entrySet().stream().map(entry -> new Group(entry.getKey(), entry.getValue())).collect(toList())
        );
    }

    /** @return thresholds of the "current value" groups (time_interval = 0) */
    public List<Threshold> getCurrentThresholdsValue() {
        return jdbi.withHandle(h -> h.createQuery(
                "SELECT t.group_id AS group_id, t.mode AS mode, t.property AS property, t.threshold_value AS threshold_value " +
                        "FROM statistics.thresholds AS t " +
                        "JOIN statistics.groups_interval AS g ON g.id = t.group_id " +
                        "WHERE g.time_interval = 0"
                )
                        .map((rs, ctx) -> new Threshold(
                                rs.getLong("group_id"),
                                rs.getString("property"),
                                rs.getString("mode"),
                                rs.getDouble("threshold_value")
                        )).list()
        );
    }

    /** @return thresholds of the interval groups (time_interval != 0) */
    public List<Threshold> getIntervalThresholdsValue() {
        return jdbi.withHandle(h -> h.createQuery(
                "SELECT t.group_id AS group_id, t.mode AS mode, t.property AS property, t.threshold_value AS threshold_value " +
                        "FROM statistics.thresholds AS t " +
                        "JOIN statistics.groups_interval AS g ON g.id = t.group_id " +
                        "WHERE g.time_interval != 0"
                )
                        .map((rs, ctx) -> new Threshold(
                                rs.getLong("group_id"),
                                rs.getString("property"),
                                rs.getString("mode"),
                                rs.getDouble("threshold_value")
                        )).list()
        );
    }

    /**
     * Unit/sensor-scoped lookups. Exceptions are logged and turned into
     * neutral results (empty list / -1) instead of propagating.
     */
    public static class Special {

        private final Jdbi jdbi;

        private Special(Jdbi jdbi) {
            this.jdbi = jdbi;
        }

        /**
         * @return (group_id, sensor_id) pairs of persistent DOUBLE groups of
         *         the given unit whose interval is in (0, maxInterval];
         *         empty list on any error
         */
        public List<Tuple<Long, Long>> getGroupsByUnit(long unitId, long maxInterval) {
            try {
                return jdbi.<List<Tuple<Long, Long>>, Exception>withHandle(h -> h.createQuery(
                        // FIX: aggregation_type was hard-coded as 'DOUBLE' while
                        // :aggr_type was still bound, leaving a superfluous named
                        // binding; use the parameter, consistent with
                        // getGroupIdByUnitSensor below.
                        "SELECT g.id AS group_id, s.sensor_id AS sensor_id " +
                                "FROM statistics.sensors AS s " +
                                "JOIN statistics.sensor_to_group AS sg ON sg.sensor_id = s.id " +
                                "JOIN statistics.groups_interval AS g ON g.id = sg.group_id " +
                                "WHERE s.unit_id = :unit_id " +
                                "AND g.time_interval > 0 " +
                                "AND g.time_interval <= :aggr_interval_s " +
                                "AND g.persistence = TRUE " +
                                "AND g.aggregation_type = :aggr_type " +
                                "ORDER BY g.time_interval"
                        )
                                .bind("unit_id", unitId)
                                .bind("aggr_interval_s", maxInterval)
                                .bind("aggr_type", AggregationType.DOUBLE)
                                .map((rs, ctx) ->
                                        Tuple.of(rs.getLong("group_id"), rs.getLong("sensor_id"))
                                ).list()
                );
            } catch (Exception e) {
                logger.catching(e);
                return emptyList();
            }
        }

        /**
         * @return id of the widest-interval matching group for the given
         *         unit/sensor, or -1 when no row matches
         */
        public long getGroupIdByUnitSensor(long unitId, long sensorId, long maxInterval) {
            try {
                return jdbi.<Long, IllegalStateException>withHandle(h -> h.createQuery(
                        "SELECT g.id AS group_id, g.time_interval AS time_interval FROM statistics.sensors AS s " +
                                "JOIN statistics.sensor_to_group AS sg ON sg.sensor_id = s.id " +
                                "JOIN statistics.groups_interval AS g ON g.id = sg.group_id " +
                                "WHERE s.sensor_id = :sensor_id AND s.unit_id = :unit_id " +
                                "  AND g.time_interval > 0 AND g.time_interval <= :aggr_interval_s " +
                                "  AND g.persistence = TRUE AND g.aggregation_type = :aggr_type " +
                                "ORDER BY g.time_interval DESC LIMIT 1"
                        )
                                .bind("unit_id", unitId)
                                .bind("sensor_id", sensorId)
                                .bind("aggr_interval_s", maxInterval)
                                .bind("aggr_type", AggregationType.DOUBLE)
                                .map((rs, ctx) -> rs.getLong("group_id")).first()
                );
            } catch (IllegalStateException e) {
                logger.catching(e);
                return -1;
            }
        }

    }
}

+ 174 - 176
src/main/java/cz/senslog/analyzer/storage/permanent/repository/StatisticsRepository.java

@@ -1,176 +1,174 @@
-package cz.senslog.analyzer.storage.permanent.repository;
-
-import cz.senslog.analyzer.domain.*;
-import cz.senslog.analyzer.storage.Connection;
-import cz.senslog.common.util.TimeRange;
-import cz.senslog.common.util.Tuple;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-import org.jdbi.v3.core.Jdbi;
-import org.jdbi.v3.core.statement.PreparedBatch;
-
-import javax.inject.Inject;
-import java.time.Instant;
-import java.util.*;
-
-import static cz.senslog.analyzer.domain.AggregationType.DOUBLE;
-import static java.util.Collections.singletonList;
-import static java.util.stream.Collectors.*;
-
-public class StatisticsRepository {
-
-    private static final Logger logger = LogManager.getLogger(StatisticsRepository.class);
-
-    private final Jdbi jdbi;
-
-    @Inject
-    public StatisticsRepository(Connection<Jdbi> connection) {
-        this.jdbi = connection.get();
-    }
-
-    public void save(DoubleStatistics statistics) {
-        try {
-            saveStatisticsBatch(singletonList(statistics));
-        } catch (Exception e) {
-            logger.error("Can not persist this data: {}.", statistics);
-            logger.error(e.getMessage());
-        }
-    }
-
-    public void save(List<DoubleStatistics> statistics) {
-        try {
-            saveStatisticsBatch(statistics);
-        } catch (Exception e) {
-            logger.warn(e.getMessage());
-            statistics.forEach(this::save);
-        }
-    }
-
-    private void saveStatisticsBatch(List<DoubleStatistics> statistics) throws Exception {
-            jdbi.<int[], Exception>withHandle(h -> h.inTransaction(t -> {
-                PreparedBatch batch = t.prepareBatch(
-                        "INSERT INTO statistics.records(group_id, value_attribute, record_value, time_interval, time_stamp) " +
-                                "VALUES(:group_id, :value_attribute, :recorded_value, :time_interval, :time_stamp)"
-                );
-
-                statistics.stream().filter(st -> st.getSource().isPersistence()).forEach(st -> {
-                    long groupId = st.getSource().getId();
-                    long interval = st.getSource().getInterval();
-                    batch
-                            .bind("group_id", groupId)
-                            .bind("value_attribute", AttributeValue.MIN)
-                            .bind("recorded_value", st.getMin())
-                            .bind("time_interval", interval)
-                            .bind("time_stamp", st.getTimestamp().get())
-                            .add()
-                            .bind("group_id", groupId)
-                            .bind("value_attribute", AttributeValue.MAX)
-                            .bind("recorded_value", st.getMax())
-                            .bind("time_interval", interval)
-                            .bind("time_stamp", st.getTimestamp().get())
-                            .add()
-                            .bind("group_id", groupId)
-                            .bind("value_attribute", AttributeValue.SUM)
-                            .bind("recorded_value", st.getSum())
-                            .bind("time_interval", interval)
-                            .bind("time_stamp", st.getTimestamp().get())
-                            .add()
-                            .bind("group_id", groupId)
-                            .bind("value_attribute", AttributeValue.COUNT)
-                            .bind("recorded_value", Long.valueOf(st.getCount()).doubleValue())
-                            .bind("time_interval", interval)
-                            .bind("time_stamp", st.getTimestamp().get())
-                            .add();
-                });
-                return batch.execute();
-            }));
-    }
-
-    public List<DoubleStatistics> getByTimeRange(long groupId, Tuple<Timestamp, Timestamp> timeRange) {
-
-        class RawRecord {
-            long recordId, groupId, sensorId, unitId;
-            double value; int interval;
-            AttributeValue attribute;
-            Timestamp timestamp;
-            AggregationType aggregationType;
-        }
-
-        List<RawRecord> rawRecords = jdbi.withHandle(h -> h.createQuery(
-                "SELECT r.id AS record_id," +
-                        "g.id AS group_id," +
-                        "s.sensor_id AS sensor_id," +
-                        "s.unit_id AS unit_id," +
-                        "r.value_attribute AS attribute," +
-                        "r.record_value AS value," +
-                        "r.time_interval AS interval," +
-                        "r.time_stamp AS time_stamp," +
-                        "g.aggregation_type AS aggregation_type " +
-                        "FROM statistics.records AS r " +
-                        "JOIN statistics.groups_interval AS g ON g.id = r.group_id " +
-                        "JOIN statistics.sensor_to_group AS sg ON sg.group_id = r.group_id " +
-                        "JOIN statistics.sensors AS s ON s.id = sg.sensor_id " +
-                        "WHERE g.id = :group_id AND r.time_stamp >= :time_from " +
-                        "AND (r.time_stamp + r.time_interval * interval '1 second') < :time_to " +
-                        "AND r.created >= sg.created " +
-                        "ORDER BY r.time_stamp ASC"
-                )
-                    .bind("group_id", groupId)
-                    .bind("time_from", timeRange.getItem1().toInstant())
-                    .bind("time_to", timeRange.getItem2().toInstant())
-                .map((rs, ctx) -> {
-                    RawRecord r = new RawRecord();
-                    r.recordId = rs.getLong("record_id");
-                    r.groupId = rs.getLong("group_id");
-                    r.sensorId = rs.getLong("sensor_id");
-                    r.unitId = rs.getLong("unit_id");
-                    r.attribute = AttributeValue.valueOf(rs.getString("attribute"));
-                    r.value = rs.getDouble("value");
-                    r.interval = rs.getInt("interval");
-                    r.timestamp = Timestamp.parse(rs.getString("time_stamp"));
-                    r.aggregationType = AggregationType.valueOf(rs.getString("aggregation_type"));
-                    return r;
-                }).stream().filter(r -> r.aggregationType.equals(DOUBLE))
-                    .collect(toList())
-        );
-
-        Set<Sensor> sensors = new HashSet<>();
-
-        Map<Long, RawRecord> districtRecords = new HashMap<>();
-        for (RawRecord rawRecord : rawRecords) {
-            sensors.add(new Sensor(rawRecord.sensorId, rawRecord.unitId, rawRecord.groupId));
-            districtRecords.put(rawRecord.recordId, rawRecord);
-        }
-
-        Map<Tuple<Timestamp, Integer>, List<RawRecord>> groupedRecords = new HashMap<>();
-        for (RawRecord record : districtRecords.values()) {
-            groupedRecords.computeIfAbsent(Tuple.of(record.timestamp, record.interval), k -> new ArrayList<>())
-                    .add(record);
-        }
-
-        List<DoubleStatistics> statistics = new ArrayList<>(groupedRecords.size());
-        for (Map.Entry<Tuple<Timestamp, Integer>, List<RawRecord>> stEntry : groupedRecords.entrySet()) {
-            Timestamp timestamp = stEntry.getKey().getItem1();
-            int interval = stEntry.getKey().getItem2();
-            List<RawRecord> records = stEntry.getValue();
-
-            double min=0, max=0, sum=0; long count=0;
-            boolean unknown = false;
-            for (RawRecord r : records) {
-                switch (r.attribute) {
-                    case MAX: max = r.value; break;
-                    case MIN: min = r.value; break;
-                    case SUM: sum = r.value; break;
-                    case COUNT: count = (long) r.value; break;
-                    default: unknown = true;
-                }
-            }
-            if (!unknown) {
-                Group group = new Group(groupId, interval, true, DOUBLE, sensors);
-                statistics.add(new DoubleStatistics(group, count, min, max, sum, timestamp));
-            }
-        }
-
-        return statistics;
-    }
-}
+package cz.senslog.analyzer.storage.permanent.repository;
+
+import cz.senslog.analyzer.domain.*;
+import cz.senslog.analyzer.storage.Connection;
+import cz.senslog.analyzer.util.Tuple;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.jdbi.v3.core.Jdbi;
+import org.jdbi.v3.core.statement.PreparedBatch;
+
+import javax.inject.Inject;
+import java.util.*;
+
+import static cz.senslog.analyzer.domain.AggregationType.DOUBLE;
+import static java.util.Collections.singletonList;
+import static java.util.stream.Collectors.*;
+
+/**
+ * JDBI-backed repository for aggregated {@link DoubleStatistics} stored in
+ * {@code statistics.records}. Each statistics entry is persisted as four rows,
+ * one per {@link AttributeValue}: MIN, MAX, SUM and COUNT.
+ */
+public class StatisticsRepository {
+
+    private static final Logger logger = LogManager.getLogger(StatisticsRepository.class);
+
+    private final Jdbi jdbi;
+
+    @Inject
+    public StatisticsRepository(Connection<Jdbi> connection) {
+        this.jdbi = connection.get();
+    }
+
+    /**
+     * Persists a single statistics entry. Best-effort: any failure is logged
+     * and swallowed, the caller is not notified.
+     */
+    public void save(DoubleStatistics statistics) {
+        try {
+            saveStatisticsBatch(singletonList(statistics));
+        } catch (Exception e) {
+            logger.error("Can not persist this data: {}.", statistics);
+            logger.error(e.getMessage());
+        }
+    }
+
+    /**
+     * Persists a list of statistics in one batch; when the batch fails, falls
+     * back to saving each entry individually so one bad row does not lose the
+     * whole list.
+     */
+    public void save(List<DoubleStatistics> statistics) {
+        try {
+            saveStatisticsBatch(statistics);
+        } catch (Exception e) {
+            logger.warn(e.getMessage());
+            statistics.forEach(this::save);
+        }
+    }
+
+    // Writes MIN/MAX/SUM/COUNT rows (four per entry) in a single transaction.
+    // Entries whose source group is not flagged persistent are silently skipped.
+    private void saveStatisticsBatch(List<DoubleStatistics> statistics) throws Exception {
+            jdbi.<int[], Exception>withHandle(h -> h.inTransaction(t -> {
+                PreparedBatch batch = t.prepareBatch(
+                        "INSERT INTO statistics.records(group_id, value_attribute, record_value, time_interval, time_stamp) " +
+                                "VALUES(:group_id, :value_attribute, :recorded_value, :time_interval, :time_stamp)"
+                );
+
+                statistics.stream().filter(st -> st.getSource().isPersistence()).forEach(st -> {
+                    long groupId = st.getSource().getId();
+                    long interval = st.getSource().getInterval();
+                    batch
+                            .bind("group_id", groupId)
+                            .bind("value_attribute", AttributeValue.MIN)
+                            .bind("recorded_value", st.getMin())
+                            .bind("time_interval", interval)
+                            .bind("time_stamp", st.getTimestamp().get())
+                            .add()
+                            .bind("group_id", groupId)
+                            .bind("value_attribute", AttributeValue.MAX)
+                            .bind("recorded_value", st.getMax())
+                            .bind("time_interval", interval)
+                            .bind("time_stamp", st.getTimestamp().get())
+                            .add()
+                            .bind("group_id", groupId)
+                            .bind("value_attribute", AttributeValue.SUM)
+                            .bind("recorded_value", st.getSum())
+                            .bind("time_interval", interval)
+                            .bind("time_stamp", st.getTimestamp().get())
+                            .add()
+                            .bind("group_id", groupId)
+                            .bind("value_attribute", AttributeValue.COUNT)
+                            // COUNT is stored in the same double-typed value column as the others
+                            .bind("recorded_value", Long.valueOf(st.getCount()).doubleValue())
+                            .bind("time_interval", interval)
+                            .bind("time_stamp", st.getTimestamp().get())
+                            .add();
+                });
+                return batch.execute();
+            }));
+    }
+
+    /**
+     * Loads persisted DOUBLE statistics of a group whose aggregation windows
+     * lie fully inside {@code timeRange} (start inclusive, end exclusive of the
+     * window end) and reassembles the per-attribute rows back into
+     * {@link DoubleStatistics} objects.
+     */
+    public List<DoubleStatistics> getByTimeRange(long groupId, Tuple<Timestamp, Timestamp> timeRange) {
+
+        // One row of the flattened result set (record x assigned sensor).
+        class RawRecord {
+            long recordId, groupId, sensorId, unitId;
+            double value; int interval;
+            AttributeValue attribute;
+            Timestamp timestamp;
+            AggregationType aggregationType;
+        }
+
+        // "r.created >= sg.created" drops records written before the sensor was
+        // assigned to the group.
+        List<RawRecord> rawRecords = jdbi.withHandle(h -> h.createQuery(
+                "SELECT r.id AS record_id," +
+                        "g.id AS group_id," +
+                        "s.sensor_id AS sensor_id," +
+                        "s.unit_id AS unit_id," +
+                        "r.value_attribute AS attribute," +
+                        "r.record_value AS value," +
+                        "r.time_interval AS interval," +
+                        "r.time_stamp AS time_stamp," +
+                        "g.aggregation_type AS aggregation_type " +
+                        "FROM statistics.records AS r " +
+                        "JOIN statistics.groups_interval AS g ON g.id = r.group_id " +
+                        "JOIN statistics.sensor_to_group AS sg ON sg.group_id = r.group_id " +
+                        "JOIN statistics.sensors AS s ON s.id = sg.sensor_id " +
+                        "WHERE g.id = :group_id AND r.time_stamp >= :time_from " +
+                        "AND (r.time_stamp + r.time_interval * interval '1 second') < :time_to " +
+                        "AND r.created >= sg.created " +
+                        "ORDER BY r.time_stamp ASC"
+                )
+                    .bind("group_id", groupId)
+                    .bind("time_from", timeRange.getItem1().toInstant())
+                    .bind("time_to", timeRange.getItem2().toInstant())
+                .map((rs, ctx) -> {
+                    RawRecord r = new RawRecord();
+                    r.recordId = rs.getLong("record_id");
+                    r.groupId = rs.getLong("group_id");
+                    r.sensorId = rs.getLong("sensor_id");
+                    r.unitId = rs.getLong("unit_id");
+                    r.attribute = AttributeValue.valueOf(rs.getString("attribute"));
+                    r.value = rs.getDouble("value");
+                    r.interval = rs.getInt("interval");
+                    r.timestamp = Timestamp.parse(rs.getString("time_stamp"));
+                    r.aggregationType = AggregationType.valueOf(rs.getString("aggregation_type"));
+                    return r;
+                }).stream().filter(r -> r.aggregationType.equals(DOUBLE))
+                    .collect(toList())
+        );
+
+        Set<Sensor> sensors = new HashSet<>();
+
+        // De-duplicates rows by record id (the sensor join can repeat a record).
+        // NOTE(review): "district" is presumably a typo for "distinct".
+        Map<Long, RawRecord> districtRecords = new HashMap<>();
+        for (RawRecord rawRecord : rawRecords) {
+            sensors.add(new Sensor(rawRecord.sensorId, rawRecord.unitId, rawRecord.groupId));
+            districtRecords.put(rawRecord.recordId, rawRecord);
+        }
+
+        // Regroup the four attribute rows of each window by (timestamp, interval).
+        Map<Tuple<Timestamp, Integer>, List<RawRecord>> groupedRecords = new HashMap<>();
+        for (RawRecord record : districtRecords.values()) {
+            groupedRecords.computeIfAbsent(Tuple.of(record.timestamp, record.interval), k -> new ArrayList<>())
+                    .add(record);
+        }
+
+        List<DoubleStatistics> statistics = new ArrayList<>(groupedRecords.size());
+        for (Map.Entry<Tuple<Timestamp, Integer>, List<RawRecord>> stEntry : groupedRecords.entrySet()) {
+            Timestamp timestamp = stEntry.getKey().getItem1();
+            int interval = stEntry.getKey().getItem2();
+            List<RawRecord> records = stEntry.getValue();
+
+            // NOTE(review): an attribute missing from the result set leaves its
+            // field at 0 — assumes all four rows are always persisted together;
+            // TODO confirm against saveStatisticsBatch.
+            double min=0, max=0, sum=0; long count=0;
+            boolean unknown = false;
+            for (RawRecord r : records) {
+                switch (r.attribute) {
+                    case MAX: max = r.value; break;
+                    case MIN: min = r.value; break;
+                    case SUM: sum = r.value; break;
+                    case COUNT: count = (long) r.value; break;
+                    default: unknown = true;
+                }
+            }
+            if (!unknown) {
+                Group group = new Group(groupId, interval, true, DOUBLE, sensors);
+                statistics.add(new DoubleStatistics(group, count, min, max, sum, timestamp));
+            }
+        }
+
+        return statistics;
+    }
+}

+ 136 - 0
src/main/java/cz/senslog/analyzer/util/DateTrunc.java

@@ -0,0 +1,136 @@
import java.time.LocalDateTime;
import java.time.OffsetDateTime;
import java.util.Arrays;
import java.util.function.BiConsumer;

/**
 * Truncation of date-time values to period boundaries, similar to SQL's
 * {@code date_trunc}. A period is given either as a calendar unit
 * ({@link Option}) or as a length in seconds, which is first decomposed into
 * per-unit components by {@code PeriodUtils.disassemble(int)}.
 */
public final class DateTrunc {

    /** Calendar units with their length in seconds (MONTH and YEAR are averages). */
    public enum Option {
        SECOND  (1),
        MINUTE  (60),
        HOUR    (3_600),
        DAY     (86_400),
        WEEK    (604_800),
        MONTH   (2_629_743), // average month: 2 629 743.83 s
        YEAR    (31_556_926) // average year

        ;
        private final int sec;
        Option(int sec) {
            this.sec = sec;
        }
        public final int getSeconds() { return sec; }
    }

    private DateTrunc() {}

    /**
     * Truncates to a period given in seconds, preserving the original offset.
     *
     * @return the truncated value, or {@code null} when {@code dateTime} is {@code null}
     */
    public static OffsetDateTime trunc(OffsetDateTime dateTime, int period) {
        if (dateTime == null) { return null; }
        return OffsetDateTime.of(trunc(dateTime.toLocalDateTime(), period), dateTime.getOffset());
    }

    /**
     * Truncates to a single calendar unit, preserving the original offset.
     *
     * @return the truncated value, or {@code null} when {@code dateTime} is {@code null}
     */
    public static OffsetDateTime trunc(OffsetDateTime dateTime, Option option) {
        if (dateTime == null) { return null; }
        return OffsetDateTime.of(trunc(dateTime.toLocalDateTime(), option), dateTime.getOffset());
    }

    /** Truncates to a period given in seconds (capped at one year). */
    public static LocalDateTime trunc(LocalDateTime dateTime, int period) {
        return trunc(dateTime, PeriodUtils.disassemble(period));
    }

    /** Truncates to a single calendar unit (e.g. {@code Option.DAY} zeroes the time part). */
    public static LocalDateTime trunc(LocalDateTime dateTime, Option option) {
        return trunc(dateTime, PeriodUtils.disassemble(option));
    }

    /**
     * Applies the per-unit truncation described by {@code optionComponents},
     * indexed by {@link Option#ordinal()}: -1 leaves the field untouched,
     * 0 resets it to its minimum, and a positive N snaps the field down to a
     * multiple of N.
     */
    private static LocalDateTime trunc(LocalDateTime dateTime, int[] optionComponents) {
        if (optionComponents == null || optionComponents.length != Option.values().length) {
            return dateTime;
        }

        LocalDateTime result = dateTime.withNano(0);
        for (Option option : Option.values()) {
            int value = optionComponents[option.ordinal()];
            if (value == -1) { continue; }
            switch (option) {
                case YEAR: {
                    result = result.withYear(value == 0 ? 0 : result.getYear() - (result.getYear() % value));
                } break;
                case MONTH: {
                    // Month is 1-based, so the modulo snap can yield 0 (e.g. month 2 with
                    // value 3), which withMonth() rejects with DateTimeException — clamp to 1.
                    result = result.withMonth(value == 0 ? 1 : Math.max(1, result.getMonthValue() - (result.getMonthValue() % value)));
                } break;
                case WEEK: {
                    // TODO: implement (disassemble() currently never emits a week component)
                } break;
                case DAY: {
                    // Day-of-month is 1-based as well — clamp to the 1st for the same reason.
                    result = result.withDayOfMonth(value == 0 ? 1 : Math.max(1, result.getDayOfMonth() - (result.getDayOfMonth() % value)));
                } break;
                case HOUR: {
                    result = result.withHour(value == 0 ? 0 : result.getHour() - (result.getHour() % value));
                } break;
                case MINUTE: {
                    result = result.withMinute(value == 0 ? 0 : result.getMinute() - (result.getMinute() % value));
                } break;
                case SECOND: {
                    result = result.withSecond(value == 0 ? 0 : result.getSecond() - (result.getSecond() % value));
                } break;
            }
        }
        return result;
    }

    /** Decomposes periods into per-unit truncation components. */
    private static class PeriodUtils {

        // Single calendar unit: everything finer is reset (0), the unit itself
        // snaps to multiples of 1, everything coarser is left untouched (-1).
        private static int[] disassemble(Option option) {
            int [] cmp = new int[Option.values().length];
            Arrays.fill(cmp, -1);
            if (option == null) {
                return cmp;
            }
            Arrays.fill(cmp, 0, option.ordinal(), 0);
            cmp[option.ordinal()] = 1;
            return cmp;
        }

        // A period in seconds is split into year/month/day/hour/minute/second
        // parts; components that come out as 0 are marked -1 (untouched) and
        // then everything below the finest positive component is reset to 0.
        private static int[] disassemble(int period) {
            int [] cmp = new int[Option.values().length];
            BiConsumer<Option, Double> setValue = (o, v) -> cmp[o.ordinal()] = v.intValue() == 0 ? -1 : v.intValue();
            double periodSec = Math.min(period, Option.YEAR.sec); // cap at one year

            double years = periodSec / Option.YEAR.sec;
            setValue.accept(Option.YEAR, years);
            periodSec %= Option.YEAR.sec;

            double months =  round((years - (int)years) * 12.0, 5);
            setValue.accept(Option.MONTH, months);

            setValue.accept(Option.WEEK, 0.0); // TODO weeks intentionally disabled for now

            double days = (periodSec % Option.WEEK.sec) / Option.DAY.sec;
            setValue.accept(Option.DAY, days);

            double hours = (days - (int)days) * 24.0;
            setValue.accept(Option.HOUR, hours);

            double minutes = (hours - (int)hours) * 60.0;
            setValue.accept(Option.MINUTE, minutes);

            double seconds = (minutes - (int)minutes) * 60.0;
            setValue.accept(Option.SECOND, seconds);

            // Units finer than the finest non-zero component must be reset (0),
            // not ignored (-1), so the truncation lands on a clean boundary.
            for (int i = 0; i < cmp.length; i++) {
                if (cmp[i] > 0) { break; }
                cmp[i] = 0;
            }

            return cmp;
        }
    }

    /** Rounds {@code value} to {@code scale} decimal places (half-up). */
    public static double round(double value, int scale) {
        return Math.round(value * Math.pow(10, scale)) / Math.pow(10, scale);
    }
}

+ 20 - 0
src/main/java/cz/senslog/analyzer/util/LongUtils.java

@@ -0,0 +1,20 @@
import java.util.Optional;

/**
 * Helpers for parsing {@code long} values without propagating exceptions.
 */
public final class LongUtils {

    private LongUtils() {
    }

    /**
     * Parses {@code value} as a signed decimal long.
     *
     * @param value the text to parse; may be {@code null}
     * @return the parsed number, or {@link Optional#empty()} when the input is
     *         {@code null}, empty, or not a valid long
     */
    public static Optional<Long> parseLong(String value) {
        if (value == null || value.isEmpty()) {
            return Optional.empty();
        }
        try {
            return Optional.of(Long.parseLong(value));
        } catch (NumberFormatException ignored) {
            // malformed input is reported as "absent", never thrown
            return Optional.empty();
        }
    }
}

+ 22 - 0
src/main/java/cz/senslog/analyzer/util/StringUtils.java

@@ -0,0 +1,22 @@
/**
 * Null-safe string predicates.
 */
public final class StringUtils {

    private StringUtils() {}

    /** Returns {@code true} when the string is {@code null} or has zero length. */
    public static boolean isEmpty(String string) {
        return string == null || string.isEmpty();
    }

    /**
     * Returns {@code true} when the string is {@code null} or consists only of
     * regex {@code \s} whitespace characters.
     */
    public static boolean isBlank(String string) {
        // matching the whole string against \s* is equivalent to stripping all
        // whitespace and checking for emptiness
        return string == null || string.matches("\\s*");
    }

    /** Negation of {@link #isEmpty(String)}. */
    public static boolean isNotEmpty(String string) {
        return !isEmpty(string);
    }

    /** Negation of {@link #isBlank(String)}. */
    public static boolean isNotBlank(String string) {
        return !isBlank(string);
    }
}

+ 141 - 142
src/main/java/cz/senslog/analyzer/util/TimestampUtil.java

@@ -1,142 +1,141 @@
-package cz.senslog.analyzer.util;
-
-import cz.senslog.analyzer.domain.IntervalGroup;
-import cz.senslog.analyzer.domain.Timestamp;
-import cz.senslog.common.util.Tuple;
-
-import java.time.temporal.ChronoUnit;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Optional;
-import java.util.function.Function;
-
-import static cz.senslog.analyzer.domain.Timestamp.of;
-import static cz.senslog.analyzer.util.DateUtils.isLeapYear;
-import static cz.senslog.common.util.DateTrunc.Option.*;
-import static cz.senslog.common.util.DateTrunc.trunc;
-
-public final class TimestampUtil {
-    // 28 = 2_419_200
-    // 29 = 2_505_600
-    // 30 = 2_592_000
-    // 31 = 2_678_400
-
-    private static final int [] MONTHS_BY_SECONDS = new int[] {
-            2_678_400,  /* january  */  2_505_600, /* february  */
-            2_678_400,  /* march    */  2_592_000, /* april     */
-            2_678_400,  /* may      */  2_592_000, /* june      */
-            2_678_400,  /* july     */  2_678_400, /* august    */
-            2_592_000,  /* september*/  2_678_400, /* october   */
-            2_592_000,  /* november */  2_678_400, /* december  */
-    };
-
-    private static final Map<IntervalGroup, Function<Tuple<Timestamp, Timestamp>, Integer>> _GROUP_TO_INTERVAL;
-    private static final Function<Tuple<Timestamp, Timestamp>, Integer> DEFAULT_FNC = tr -> -1;
-
-    static {
-        _GROUP_TO_INTERVAL = new HashMap<>(IntervalGroup.values().length);
-        _GROUP_TO_INTERVAL.put(IntervalGroup.HOUR, tr -> 3_600);
-        _GROUP_TO_INTERVAL.put(IntervalGroup.DAY, tr -> 86_400);
-        _GROUP_TO_INTERVAL.put(IntervalGroup.MONTH, tr -> {
-            int yearFrom = tr.getItem1().get().getYear();
-            int yearTo = tr.getItem2().get().getYear();
-            int monthFrom = tr.getItem1().get().getMonthValue();
-            int monthTo = tr.getItem2().get().getMonthValue();
-
-            int yearDiff = yearTo - yearFrom;
-
-            // over two years
-            if (yearDiff > 1) {
-                int februarySec = MONTHS_BY_SECONDS[1];
-                for (int year = yearFrom+1; year < yearTo; year++) {
-                    if (isLeapYear(year)) {
-                        return februarySec - 86_400; // 29 - 1 day
-                    }
-                }
-                if (monthFrom <= 2 && isLeapYear(yearFrom)) {
-                    return februarySec - 86_400; // 29 - 1 day
-                }
-
-                if (monthTo >= 2 && isLeapYear(yearTo)) {
-                    return februarySec - 86_400; // 29 - 1 day
-                }
-
-                return februarySec;
-            }
-
-            // over a year
-            if (yearDiff == 1) {
-                int minMonthIdx = monthFrom - 1;
-                for (int monthIdx = monthFrom; monthIdx < 12; monthIdx++) {
-                    if (MONTHS_BY_SECONDS[monthIdx] < MONTHS_BY_SECONDS[minMonthIdx]) {
-                        minMonthIdx = monthIdx;
-                    }
-                }
-                int minIntervalFirst = MONTHS_BY_SECONDS[minMonthIdx];
-                minIntervalFirst = minMonthIdx == 1 && isLeapYear(yearFrom) ? minIntervalFirst-86_400 : minIntervalFirst;
-                minMonthIdx = 0;
-
-                for (int month = 1; month < monthTo; month++) {
-                    if (MONTHS_BY_SECONDS[month] < MONTHS_BY_SECONDS[minMonthIdx]) {
-                        minMonthIdx = month;
-                    }
-                }
-                int minIntervalSecond = MONTHS_BY_SECONDS[minMonthIdx];
-                minIntervalSecond = minMonthIdx == 1 && isLeapYear(yearTo) ? minIntervalSecond-86_400 : minIntervalSecond;
-                return Math.min(minIntervalFirst, minIntervalSecond);
-            }
-
-            // the same year
-            boolean leapYear = isLeapYear(yearFrom);
-            if (2 >= monthFrom && 2 <= monthTo) {
-                int februarySec = MONTHS_BY_SECONDS[1];
-                return leapYear ? februarySec - 86_400 : februarySec;
-            }
-
-            int minInterval = MONTHS_BY_SECONDS[monthFrom - 1];
-            for (int i = monthFrom; i < monthTo; i++) {
-                if (MONTHS_BY_SECONDS[i] < minInterval) {
-                    minInterval = MONTHS_BY_SECONDS[i];
-                }
-            }
-            return minInterval;
-        });
-        _GROUP_TO_INTERVAL.put(IntervalGroup.YEAR, tr -> 31_556_926);
-    }
-
-    private TimestampUtil() {}
-
-    public static Optional<Timestamp> parseTimestamp(String value) {
-        if (value == null) { return Optional.empty(); }
-        try {
-            return Optional.of(Timestamp.parse(value));
-        } catch (NumberFormatException e) {
-            return Optional.empty();
-        }
-    }
-
-    public static int differenceByIntervalGroup(Tuple<Timestamp, Timestamp> timeRange, IntervalGroup intervalGroup) {
-        if (timeRange == null || intervalGroup == null) { return -1; }
-        return _GROUP_TO_INTERVAL.getOrDefault(intervalGroup, DEFAULT_FNC).apply(timeRange);
-    }
-
-    private static Timestamp truncByGroup(Timestamp timestamp, IntervalGroup intervalGroup, int addition) {
-        switch (intervalGroup) {
-            case HOUR:  return of(trunc(timestamp.get(), HOUR).plusHours(addition));
-            case DAY:   return of(trunc(timestamp.get(), DAY).plusDays(addition));
-            case MONTH: return of(trunc(timestamp.get(), MONTH).plusMonths(addition));
-            case YEAR:  return of(trunc(timestamp.get(), YEAR).plusYears(addition));
-            default:    return timestamp;
-        }
-    }
-
-    public static Timestamp truncByGroup(Timestamp timestamp, IntervalGroup intervalGroup) {
-        return truncByGroup(timestamp, intervalGroup, 0);
-    }
-
-    public static Tuple<Timestamp, Timestamp> truncToIntervalByGroup(Timestamp from, Timestamp to, IntervalGroup intervalGroup) {
-        Timestamp fromTrunc = truncByGroup(from, intervalGroup, 1);
-        Timestamp toTrunc = truncByGroup(to, intervalGroup).minus(1, ChronoUnit.SECONDS);
-        return Tuple.of(fromTrunc, toTrunc);
-    }
-}
+package cz.senslog.analyzer.util;
+
+import cz.senslog.analyzer.domain.IntervalGroup;
+import cz.senslog.analyzer.domain.Timestamp;
+
+import java.time.temporal.ChronoUnit;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Optional;
+import java.util.function.Function;
+
+import static cz.senslog.analyzer.domain.Timestamp.of;
+import static cz.senslog.analyzer.util.DateUtils.isLeapYear;
+import static cz.senslog.analyzer.util.DateTrunc.Option.*;
+import static cz.senslog.analyzer.util.DateTrunc.trunc;
+
+/**
+ * Utilities for parsing {@link Timestamp}s and truncating them to
+ * {@link IntervalGroup} (hour/day/month/year) boundaries.
+ */
+public final class TimestampUtil {
+    // month lengths in seconds:
+    // 28 = 2_419_200
+    // 29 = 2_505_600
+    // 30 = 2_592_000
+    // 31 = 2_678_400
+
+    // Seconds per calendar month, January..December. NOTE(review): February is
+    // stored as the 29-day (leap) length and a day is subtracted below whenever
+    // a leap year is detected — this looks inverted (non-leap February is the
+    // shorter month); confirm the intended semantics.
+    private static final int [] MONTHS_BY_SECONDS = new int[] {
+            2_678_400,  /* january  */  2_505_600, /* february  */
+            2_678_400,  /* march    */  2_592_000, /* april     */
+            2_678_400,  /* may      */  2_592_000, /* june      */
+            2_678_400,  /* july     */  2_678_400, /* august    */
+            2_592_000,  /* september*/  2_678_400, /* october   */
+            2_592_000,  /* november */  2_678_400, /* december  */
+    };
+
+    // Maps an interval group to a function that computes the length (seconds)
+    // of the shortest unit of that group occurring within a time range.
+    private static final Map<IntervalGroup, Function<Tuple<Timestamp, Timestamp>, Integer>> _GROUP_TO_INTERVAL;
+    // Fallback for unknown groups: -1 signals "no interval".
+    private static final Function<Tuple<Timestamp, Timestamp>, Integer> DEFAULT_FNC = tr -> -1;
+
+    static {
+        _GROUP_TO_INTERVAL = new HashMap<>(IntervalGroup.values().length);
+        _GROUP_TO_INTERVAL.put(IntervalGroup.HOUR, tr -> 3_600);
+        _GROUP_TO_INTERVAL.put(IntervalGroup.DAY, tr -> 86_400);
+        // MONTH: the shortest month length in the range, leap-adjusted.
+        _GROUP_TO_INTERVAL.put(IntervalGroup.MONTH, tr -> {
+            int yearFrom = tr.getItem1().get().getYear();
+            int yearTo = tr.getItem2().get().getYear();
+            int monthFrom = tr.getItem1().get().getMonthValue();
+            int monthTo = tr.getItem2().get().getMonthValue();
+
+            int yearDiff = yearTo - yearFrom;
+
+            // over two years
+            if (yearDiff > 1) {
+                int februarySec = MONTHS_BY_SECONDS[1];
+                // at least one whole year lies in the middle of the range
+                for (int year = yearFrom+1; year < yearTo; year++) {
+                    if (isLeapYear(year)) {
+                        return februarySec - 86_400; // 29 - 1 day
+                    }
+                }
+                if (monthFrom <= 2 && isLeapYear(yearFrom)) {
+                    return februarySec - 86_400; // 29 - 1 day
+                }
+
+                if (monthTo >= 2 && isLeapYear(yearTo)) {
+                    return februarySec - 86_400; // 29 - 1 day
+                }
+
+                return februarySec;
+            }
+
+            // over a year
+            if (yearDiff == 1) {
+                // shortest month from monthFrom to December of the first year
+                int minMonthIdx = monthFrom - 1;
+                for (int monthIdx = monthFrom; monthIdx < 12; monthIdx++) {
+                    if (MONTHS_BY_SECONDS[monthIdx] < MONTHS_BY_SECONDS[minMonthIdx]) {
+                        minMonthIdx = monthIdx;
+                    }
+                }
+                int minIntervalFirst = MONTHS_BY_SECONDS[minMonthIdx];
+                minIntervalFirst = minMonthIdx == 1 && isLeapYear(yearFrom) ? minIntervalFirst-86_400 : minIntervalFirst;
+                minMonthIdx = 0;
+
+                // shortest month from January to monthTo of the second year
+                for (int month = 1; month < monthTo; month++) {
+                    if (MONTHS_BY_SECONDS[month] < MONTHS_BY_SECONDS[minMonthIdx]) {
+                        minMonthIdx = month;
+                    }
+                }
+                int minIntervalSecond = MONTHS_BY_SECONDS[minMonthIdx];
+                minIntervalSecond = minMonthIdx == 1 && isLeapYear(yearTo) ? minIntervalSecond-86_400 : minIntervalSecond;
+                return Math.min(minIntervalFirst, minIntervalSecond);
+            }
+
+            // the same year
+            boolean leapYear = isLeapYear(yearFrom);
+            // February inside the range dominates as the shortest month
+            if (2 >= monthFrom && 2 <= monthTo) {
+                int februarySec = MONTHS_BY_SECONDS[1];
+                return leapYear ? februarySec - 86_400 : februarySec;
+            }
+
+            int minInterval = MONTHS_BY_SECONDS[monthFrom - 1];
+            for (int i = monthFrom; i < monthTo; i++) {
+                if (MONTHS_BY_SECONDS[i] < minInterval) {
+                    minInterval = MONTHS_BY_SECONDS[i];
+                }
+            }
+            return minInterval;
+        });
+        _GROUP_TO_INTERVAL.put(IntervalGroup.YEAR, tr -> 31_556_926);
+    }
+
+    private TimestampUtil() {}
+
+    /**
+     * Parses a timestamp, returning {@link Optional#empty()} instead of
+     * throwing on {@code null} or malformed input.
+     */
+    public static Optional<Timestamp> parseTimestamp(String value) {
+        if (value == null) { return Optional.empty(); }
+        try {
+            return Optional.of(Timestamp.parse(value));
+        } catch (NumberFormatException e) {
+            return Optional.empty();
+        }
+    }
+
+    /**
+     * Returns the interval (seconds) associated with {@code intervalGroup}
+     * within {@code timeRange}, or -1 for null arguments or unknown groups.
+     */
+    public static int differenceByIntervalGroup(Tuple<Timestamp, Timestamp> timeRange, IntervalGroup intervalGroup) {
+        if (timeRange == null || intervalGroup == null) { return -1; }
+        return _GROUP_TO_INTERVAL.getOrDefault(intervalGroup, DEFAULT_FNC).apply(timeRange);
+    }
+
+    // Truncates to the group's boundary, then shifts forward by `addition`
+    // units of that group (0 = same boundary, 1 = next boundary).
+    private static Timestamp truncByGroup(Timestamp timestamp, IntervalGroup intervalGroup, int addition) {
+        switch (intervalGroup) {
+            case HOUR:  return of(trunc(timestamp.get(), HOUR).plusHours(addition));
+            case DAY:   return of(trunc(timestamp.get(), DAY).plusDays(addition));
+            case MONTH: return of(trunc(timestamp.get(), MONTH).plusMonths(addition));
+            case YEAR:  return of(trunc(timestamp.get(), YEAR).plusYears(addition));
+            default:    return timestamp;
+        }
+    }
+
+    /** Truncates {@code timestamp} down to the boundary of {@code intervalGroup}. */
+    public static Timestamp truncByGroup(Timestamp timestamp, IntervalGroup intervalGroup) {
+        return truncByGroup(timestamp, intervalGroup, 0);
+    }
+
+    /**
+     * Narrows [from, to] to whole units of {@code intervalGroup}: {@code from}
+     * is rounded up to the next boundary and {@code to} down to the last
+     * boundary minus one second.
+     */
+    public static Tuple<Timestamp, Timestamp> truncToIntervalByGroup(Timestamp from, Timestamp to, IntervalGroup intervalGroup) {
+        Timestamp fromTrunc = truncByGroup(from, intervalGroup, 1);
+        Timestamp toTrunc = truncByGroup(to, intervalGroup).minus(1, ChronoUnit.SECONDS);
+        return Tuple.of(fromTrunc, toTrunc);
+    }
+}

+ 46 - 0
src/main/java/cz/senslog/analyzer/util/Triple.java

@@ -0,0 +1,46 @@
+package cz.senslog.analyzer.util;
+
+import java.util.Objects;
+
/**
 * An immutable, generic 3-tuple. Instances are created via the {@link #of}
 * factory method; equality and hash code are based on all three items.
 * Items may be {@code null}.
 */
public final class Triple<A, B, C> {

    private final A item1;
    private final B item2;
    private final C item3;

    /**
     * Creates a new triple of the given items.
     * @return new immutable {@code Triple}.
     */
    public static <A, B, C> Triple<A, B, C> of(A item1, B item2, C item3) {
        return new Triple<>(item1, item2, item3);
    }

    private Triple(A item1, B item2, C item3) {
        this.item1 = item1;
        this.item2 = item2;
        this.item3 = item3;
    }

    public A getItem1() {
        return this.item1;
    }

    public B getItem2() {
        return this.item2;
    }

    public C getItem3() {
        return this.item3;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || this.getClass() != o.getClass()) {
            return false;
        }
        Triple<?, ?, ?> triple = (Triple<?, ?, ?>) o;
        // Objects.equals is null-safe; the original threw NPE for null items,
        // which was inconsistent with the null-safe Objects.hash below.
        return Objects.equals(this.item1, triple.item1)
                && Objects.equals(this.item2, triple.item2)
                && Objects.equals(this.item3, triple.item3);
    }

    @Override
    public int hashCode() {
        return Objects.hash(this.item1, this.item2, this.item3);
    }
}

+ 40 - 0
src/main/java/cz/senslog/analyzer/util/Tuple.java

@@ -0,0 +1,40 @@
+package cz.senslog.analyzer.util;
+
+import java.util.Objects;
+
/**
 * An immutable, generic 2-tuple (pair). Instances are created via the
 * {@link #of} factory method; equality and hash code are based on both items.
 * Items may be {@code null}.
 */
public final class Tuple<A, B> {

    private final A item1;
    private final B item2;

    /**
     * Creates a new tuple of the given items.
     * @return new immutable {@code Tuple}.
     */
    public static <A, B> Tuple<A, B> of(A item1, B item2) {
        return new Tuple<>(item1, item2);
    }

    private Tuple(A item1, B item2) {
        this.item1 = item1;
        this.item2 = item2;
    }

    public A getItem1() {
        return this.item1;
    }

    public B getItem2() {
        return this.item2;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || this.getClass() != o.getClass()) {
            return false;
        }
        Tuple<?, ?> tuple = (Tuple<?, ?>) o;
        // Objects.equals is null-safe; the original threw NPE for null items,
        // which was inconsistent with the null-safe Objects.hash below.
        return Objects.equals(this.item1, tuple.item1)
                && Objects.equals(this.item2, tuple.item2);
    }

    @Override
    public int hashCode() {
        return Objects.hash(this.item1, this.item2);
    }
}

+ 211 - 0
src/main/java/cz/senslog/analyzer/util/http/HttpClient.java

@@ -0,0 +1,211 @@
+package cz.senslog.analyzer.util.http;
+
+import cz.senslog.analyzer.util.StringUtils;
+import org.apache.http.Header;
+import org.apache.http.HttpEntity;
+import org.apache.http.HttpMessage;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.client.methods.HttpPost;
+import org.apache.http.client.methods.HttpRequestBase;
+import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
+import org.apache.http.conn.ssl.TrustStrategy;
+import org.apache.http.entity.StringEntity;
+import org.apache.http.impl.client.BasicCookieStore;
+import org.apache.http.impl.client.HttpClientBuilder;
+import org.apache.http.ssl.SSLContextBuilder;
+import org.apache.http.util.EntityUtils;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.security.KeyManagementException;
+import java.security.KeyStoreException;
+import java.security.NoSuchAlgorithmException;
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.apache.http.HttpHeaders.*;
+
+/**
+ * The class {@code HttpClient} represents a wrapper for {@link org.apache.http.client.HttpClient}.
+ * Provides functionality of sending GET and POST request. Otherwise is returned response with {@see #BAD_REQUEST}.
+ *
+ * @author Lukas Cerny
+ * @version 1.0
+ * @since 1.0
+ */
+public class HttpClient {
+
+    /** Instance of http client. */
+    private final org.apache.http.client.HttpClient client;
+    private final BasicCookieStore cookieStore;
+
+
+    /**
+     * Factory method to create a new instance of client.
+     * @return new instance of {@code HttpClient}.
+     */
+    public static HttpClient newHttpClient() {
+        return new HttpClient(HttpClientBuilder.create(), new BasicCookieStore());
+    }
+
+    public static HttpClient newHttpSSLClient() {
+        try {
+            SSLContextBuilder builder = new SSLContextBuilder();
+            builder.loadTrustMaterial(null, (TrustStrategy) (chain, authType) -> true);
+
+            SSLConnectionSocketFactory sslSF = new SSLConnectionSocketFactory(builder.build(),
+                    SSLConnectionSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER);
+            return new HttpClient(HttpClientBuilder.create().setSSLSocketFactory(sslSF), new BasicCookieStore());
+
+        } catch (NoSuchAlgorithmException | KeyStoreException | KeyManagementException e) {
+            return null;
+        }
+    }
+
+    /**
+     * Private constructors sets http client.
+     */
+    private HttpClient(HttpClientBuilder httpClientBuilder, BasicCookieStore cookieStore) {
+        this.cookieStore = cookieStore;
+        this.client = httpClientBuilder.setDefaultCookieStore(cookieStore).build();
+    }
+
+    /**
+     * Sends http request.
+     * @param request - virtual request.
+     * @return virtual response.
+     */
+    public HttpResponse send(HttpRequest request) {
+        try {
+            switch (request.getMethod()) {
+                case GET:  return sendGet(request);
+                case POST: return sendPost(request);
+                default: return HttpResponse.newBuilder()
+                            .body("Request does not contain method definition.")
+                            .status(HttpCode.METHOD_NOT_ALLOWED).build();
+            }
+        } catch (URISyntaxException e) {
+            return HttpResponse.newBuilder()
+                    .body(e.getMessage()).status(HttpCode.BAD_REQUEST)
+                    .build();
+        } catch (IOException e) {
+            return  HttpResponse.newBuilder()
+                    .body(e.getMessage()).status(HttpCode.SERVER_ERROR)
+                    .build();
+        }
+    }
+
+    /**
+     * Sends GET request.
+     * @param request - virtual request.
+     * @return virtual response of the request.
+     * @throws URISyntaxException throws if host url is not valid.
+     * @throws IOException throws if anything happen during sending.
+     */
+    private HttpResponse sendGet(HttpRequest request) throws IOException, URISyntaxException {
+
+        URI uri = request.getUrl().toURI();
+        HttpGet requestGet = new HttpGet(uri);
+        setBasicHeaders(request, requestGet);
+
+        cookieStore.clear();
+        for (HttpCookie cookie : request.getCookies()) {
+            cookieStore.addCookie(cookie);
+        }
+
+        org.apache.http.HttpResponse responseGet = client.execute(requestGet);
+
+        HttpResponse response = HttpResponse.newBuilder()
+                .status(responseGet.getStatusLine().getStatusCode())
+                .headers(getHeaders(responseGet))
+                .body(getBody(responseGet.getEntity()))
+                .build();
+
+        EntityUtils.consume(responseGet.getEntity());
+
+        return response;
+    }
+
+    /**
+     * Sends POST request.
+     * @param request - virtual request.
+     * @return virtual response of the request.
+     * @throws URISyntaxException throws if host url is not valid.
+     * @throws IOException throws if anything happen during sending.
+     */
+    private HttpResponse sendPost(HttpRequest request) throws URISyntaxException, IOException {
+
+        URI uri = request.getUrl().toURI();
+        HttpPost requestPost = new HttpPost(uri);
+        setBasicHeaders(request, requestPost);
+
+        if (StringUtils.isNotBlank(request.getContentType())) {
+            requestPost.setHeader(CONTENT_TYPE, request.getContentType());
+        }
+
+        requestPost.setEntity(new StringEntity(request.getBody()));
+
+        org.apache.http.HttpResponse responsePost = client.execute(requestPost);
+
+        HttpResponse response = HttpResponse.newBuilder()
+                .headers(getHeaders(requestPost))
+                .status(responsePost.getStatusLine().getStatusCode())
+                .body(getBody(responsePost.getEntity()))
+                .build();
+
+        EntityUtils.consume(responsePost.getEntity());
+
+        return response;
+    }
+
+    /**
+     * Sets basic headers to each request.
+     * @param userRequest - virtual request.
+     * @param httpRequest - real request prepared to send.
+     */
+    private void setBasicHeaders(HttpRequest userRequest, HttpRequestBase httpRequest) {
+
+        httpRequest.setHeader(USER_AGENT, "SenslogConnector/1.0");
+        httpRequest.setHeader(CACHE_CONTROL, "no-cache");
+
+        for (Map.Entry<String, String> headerEntry : userRequest.getHeaders().entrySet()) {
+            httpRequest.setHeader(headerEntry.getKey(), headerEntry.getValue());
+        }
+    }
+
+    /**
+     * Returns map of headers from the response.
+     * @param response - response message.
+     * @return map of headers.
+     */
+    private Map<String, String> getHeaders(HttpMessage response) {
+        Map<String, String> headers = new HashMap<>();
+        for (Header header : response.getAllHeaders()) {
+            headers.put(header.getName(), header.getValue());
+        }
+        return headers;
+    }
+
+    /**
+     * Returns body from the response.
+     * @param entity - response entity.
+     * @return string body of the response.
+     * @throws IOException can not get body from the response.
+     */
+    private String getBody(HttpEntity entity) throws IOException {
+        if (entity == null) return "";
+        InputStream contentStream = entity.getContent();
+        InputStreamReader bodyStream = new InputStreamReader(contentStream);
+        BufferedReader rd = new BufferedReader(bodyStream);
+        StringBuilder bodyBuffer = new StringBuilder();
+        String line;
+        while ((line = rd.readLine()) != null) {
+            bodyBuffer.append(line);
+        }
+        return bodyBuffer.toString();
+    }
+}

+ 13 - 0
src/main/java/cz/senslog/analyzer/util/http/HttpCode.java

@@ -0,0 +1,13 @@
+package cz.senslog.analyzer.util.http;
+
/**
 * HTTP status code constants used by {@code HttpClient} / {@code HttpResponse}.
 * Non-instantiable constant holder.
 */
public final class HttpCode {
    public static final int OK = 200;
    /** Alias of {@link #NO_CONTENT}; kept for backward compatibility. */
    public static final int NO_RESULT = 204;
    public static final int NO_CONTENT = 204;
    public static final int BAD_REQUEST = 400;
    public static final int UNAUTHORIZED = 401;
    public static final int FORBIDDEN = 403;
    public static final int NOT_FOUND = 404;
    public static final int METHOD_NOT_ALLOWED = 405;
    public static final int SERVER_ERROR = 500;

    /** Utility class; prevent instantiation. */
    private HttpCode() {}
}

+ 16 - 0
src/main/java/cz/senslog/analyzer/util/http/HttpContentType.java

@@ -0,0 +1,16 @@
+package cz.senslog.analyzer.util.http;
+
/**
 * MIME content-type string constants for HTTP requests.
 * Non-instantiable constant holder.
 */
public final class HttpContentType {
    public final static String APPLICATION_ATOM_XML         = "application/atom+xml";
    public final static String APPLICATION_FORM_URLENCODED  = "application/x-www-form-urlencoded";
    public final static String APPLICATION_JSON             = "application/json";
    public final static String APPLICATION_OCTET_STREAM     = "application/octet-stream";
    public final static String APPLICATION_SVG_XML          = "application/svg+xml";
    public final static String APPLICATION_XHTML_XML        = "application/xhtml+xml";
    public final static String APPLICATION_XML              = "application/xml";
    public final static String MULTIPART_FORM_DATA          = "multipart/form-data";
    public final static String TEXT_HTML                    = "text/html";
    public final static String TEXT_PLAIN                   = "text/plain";
    public final static String TEXT_XML                     = "text/xml";
    public final static String WILDCARD                     = "*/*";

    /** Utility class; prevent instantiation. */
    private HttpContentType() {}
}

+ 39 - 0
src/main/java/cz/senslog/analyzer/util/http/HttpCookie.java

@@ -0,0 +1,39 @@
+package cz.senslog.analyzer.util.http;
+
+import org.apache.http.impl.cookie.BasicClientCookie;
+
+public class HttpCookie extends BasicClientCookie {
+
+    public static HttpCookie empty() {
+        HttpCookie cookie = new HttpCookie("", "", "", "");
+        cookie.setSecure(false);
+        return cookie;
+    }
+
+    public HttpCookie(String name, String value, String domain, String path) {
+        super(name, value);
+        super.setDomain(domain);
+        super.setPath(path);
+        super.setSecure(true);
+    }
+
+    @Override
+    public String getName() {
+        return super.getName();
+    }
+
+    @Override
+    public String getValue() {
+        return super.getValue();
+    }
+
+    @Override
+    public String getDomain() {
+        return super.getDomain();
+    }
+
+    @Override
+    public String getPath() {
+        return super.getPath();
+    }
+}

+ 10 - 0
src/main/java/cz/senslog/analyzer/util/http/HttpHeader.java

@@ -0,0 +1,10 @@
+package cz.senslog.analyzer.util.http;
+
+import org.apache.http.HttpHeaders;
+
+public final class HttpHeader {
+    public static final String AUTHORIZATION = HttpHeaders.AUTHORIZATION;
+    public static final String DATE = HttpHeaders.DATE;
+    public static final String ACCEPT = HttpHeaders.ACCEPT;
+    public static final String CONTENT_TYPE = HttpHeaders.CONTENT_TYPE;
+}

+ 5 - 0
src/main/java/cz/senslog/analyzer/util/http/HttpMethod.java

@@ -0,0 +1,5 @@
+package cz.senslog.analyzer.util.http;
+
/** HTTP request methods supported by {@code HttpClient}. */
public enum HttpMethod {
    GET, POST
}

+ 111 - 0
src/main/java/cz/senslog/analyzer/util/http/HttpRequest.java

@@ -0,0 +1,111 @@
+package cz.senslog.analyzer.util.http;
+
+
+import java.net.URL;
+import java.util.Map;
+
+import static cz.senslog.analyzer.util.json.BasicJson.objectToJson;
+
+
+/**
+ * The class {@code HttpRequest} represents a wrapper for a http request.
+ *
+ * @author Lukas Cerny
+ * @version 1.0
+ * @since 1.0
+ */
+public class HttpRequest {
+
+    public interface Builder {
+        Builder header(String name, String value);
+        Builder url(URL url);
+        Builder POST();
+        Builder GET();
+        Builder contentType(String contentType);
+        Builder body(String body);
+        Builder addCookie(HttpCookie cookie);
+        HttpRequest build();
+    }
+
+    /**
+     * Factory method to create a new builder for {@link HttpRequest}.
+     * @return new instance of builder.
+     */
+    public static Builder newBuilder() {
+        return new HttpRequestBuilder();
+    }
+
+    /**
+     * Factory method to create a new builder for {@link HttpRequest}.
+     * @param url - host url.
+     * @return new instance of builder.
+     */
+    public static Builder newBuilder(URL url) {
+        HttpRequestBuilder builder = new HttpRequestBuilder();
+        builder.url(url);
+        return builder;
+    }
+
+    /** Request url. */
+    private final URL url;
+
+    /** Request headers. */
+    private final Map<String, String> headers;
+
+    /** Request body. */
+    private final String body;
+
+    /** Request method. */
+    private final HttpMethod method;
+
+    /** Request content type. */
+    private final String contentType;
+
+    private final HttpCookie [] cookies;
+
+    /**
+     * Constructors sets all attributes.
+     * @param url - url.
+     * @param headers - headers.
+     * @param body - body.
+     * @param method - method.
+     * @param contentType - content type.
+     */
+    HttpRequest(URL url, Map<String, String> headers, String body, HttpMethod method, String contentType, HttpCookie [] cookies) {
+        this.url = url;
+        this.headers = headers;
+        this.body = body;
+        this.method = method;
+        this.contentType = contentType;
+        this.cookies = cookies;
+    }
+
+    public URL getUrl() {
+        return url;
+    }
+
+    public String getBody() {
+        return body;
+    }
+
+    public HttpMethod getMethod() {
+        return method;
+    }
+
+    public Map<String, String> getHeaders() {
+        return headers;
+    }
+
+    public String getContentType() {
+        return contentType;
+    }
+
+    public HttpCookie[] getCookies() {
+        return cookies;
+    }
+
+    @Override
+    public String toString() {
+        return objectToJson(this);
+    }
+}

+ 80 - 0
src/main/java/cz/senslog/analyzer/util/http/HttpRequestBuilder.java

@@ -0,0 +1,80 @@
+package cz.senslog.analyzer.util.http;
+
+
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * The class {@code HttpRequestBuilder} represents a builder for the {@link HttpRequest}.
+ *
+ * @author Lukas Cerny
+ * @version 1.0
+ * @since 1.0
+ */
+final class HttpRequestBuilder implements HttpRequest.Builder {
+
+    private URL url;
+    private Map<String, String> headers;
+    private final List<HttpCookie> cookies;
+    private String body;
+    private HttpMethod method;
+    private String contentType;
+
+    HttpRequestBuilder() {
+        this.headers = new HashMap<>();
+        this.cookies = new ArrayList<>();
+        this.method = HttpMethod.GET;
+        this.body = "";
+    }
+
+
+    @Override
+    public HttpRequest.Builder header(String name, String value) {
+        this.headers.put(name, value);
+        return this;
+    }
+
+    @Override
+    public HttpRequest.Builder url(URL url) {
+        this.url = url;
+        return this;
+    }
+
+    @Override
+    public HttpRequest.Builder POST() {
+        this.method = HttpMethod.POST;
+        return this;
+    }
+
+    @Override
+    public HttpRequest.Builder GET() {
+        this.method = HttpMethod.GET;
+        return this;
+    }
+
+    @Override
+    public HttpRequest.Builder contentType(String contentType) {
+        this.contentType = contentType;
+        return this;
+    }
+
+    @Override
+    public HttpRequest.Builder body(String body) {
+        this.body = body;
+        return this;
+    }
+
+    @Override
+    public HttpRequest.Builder addCookie(HttpCookie cookie) {
+        this.cookies.add(cookie);
+        return this;
+    }
+
+    @Override
+    public HttpRequest build() {
+        return new HttpRequest(url, headers, body, method, contentType, cookies.toArray(new HttpCookie[0]));
+    }
+}

+ 77 - 0
src/main/java/cz/senslog/analyzer/util/http/HttpResponse.java

@@ -0,0 +1,77 @@
+package cz.senslog.analyzer.util.http;
+
+import cz.senslog.analyzer.util.json.BasicJson;
+
+import java.util.Map;
+
+/**
+ * The class {@code HttpResponse} represents a wrapper for a http response.
+ * Contains basic information like status, headers and body.
+ *
+ * @author Lukas Cerny
+ * @version 1.0
+ * @since 1.0
+ */
+public class HttpResponse {
+
+    public interface Builder {
+        Builder body(String body);
+        Builder headers(Map<String, String> headers);
+        Builder status(int status);
+        HttpResponse build();
+    }
+
+    /**
+     * Factory method to create a new builder for {@link HttpResponse}.
+     * @return new instance of builder.
+     */
+    public static Builder newBuilder() {
+        return new HttpResponseBuilder();
+    }
+
+    /** Response body. */
+    private final String body;
+
+    /** Response headers. */
+    private final Map<String, String> headers;
+
+    /** Response status. */
+    private final int status;
+
+    /**
+     * Constructors sets all attributes.
+     * @param body - body.
+     * @param headers - headers.
+     * @param status - status.
+     */
+    HttpResponse(String body, Map<String, String> headers, int status) {
+        this.body = body;
+        this.headers = headers;
+        this.status = status;
+    }
+
+    public String getBody() {
+        return body;
+    }
+
+    public String getHeader(String value) {
+        return headers.get(value);
+    }
+
+    public int getStatus() {
+        return status;
+    }
+
+    public boolean isOk() {
+        return status == HttpCode.OK;
+    }
+
+    public boolean isError() {
+        return !isOk();
+    }
+
+    @Override
+    public String toString() {
+        return BasicJson.objectToJson(this);
+    }
+}

+ 42 - 0
src/main/java/cz/senslog/analyzer/util/http/HttpResponseBuilder.java

@@ -0,0 +1,42 @@
+package cz.senslog.analyzer.util.http;
+
+import java.util.Map;
+
+/**
+ * The class {@code HttpResponseBuilder} represents a builder for the {@link HttpResponse}.
+ *
+ * @author Lukas Cerny
+ * @version 1.0
+ * @since 1.0
+ */
+class HttpResponseBuilder implements HttpResponse.Builder {
+
+    private String body;
+    private Map<String, String> headers;
+    private int status;
+
+    HttpResponseBuilder(){}
+
+    @Override
+    public HttpResponse.Builder body(String body) {
+        this.body = body;
+        return this;
+    }
+
+    @Override
+    public HttpResponse.Builder headers(Map<String, String> headers) {
+        this.headers = headers;
+        return this;
+    }
+
+    @Override
+    public HttpResponse.Builder status(int status) {
+        this.status = status;
+        return this;
+    }
+
+    @Override
+    public HttpResponse build() {
+        return new HttpResponse(body, headers, status);
+    }
+}

+ 113 - 0
src/main/java/cz/senslog/analyzer/util/http/URLBuilder.java

@@ -0,0 +1,113 @@
+package cz.senslog.analyzer.util.http;
+
+import java.io.UnsupportedEncodingException;
+import java.net.MalformedURLException;
+import java.net.URL;
+
+import static java.net.URLEncoder.encode;
+
/**
 * The class {@code URLBuilder} represents a builder to create a new instance of {@link URL}.
 * Provides a creating a url from domain and path and adding a parameter.
 * Parameter values are URL-encoded as UTF-8.
 *
 * @author Lukas Cerny
 * @version 1.0
 * @since 1.0
 */
public final class URLBuilder {

    /**
     * Factory method to create a new instance of {@code URLBuilder} from base url.
     * @param baseURL - host url.
     * @return new instance of {@code URLBuilder}.
     */
    public static URLBuilder newBuilder(String baseURL) {
        return new URLBuilder(baseURL);
    }

    /**
     * Factory method to create a new instance of {@code URLBuilder} from domain and path.
     * Normalizes domain and path to the form:
     *
     * domain: http://domain.com/
     * path: /host
     * -> url: http://domain.com/host
     *
     * domain: http://domain.com
     * path: host
     * -> url: http://domain.com/host
     *
     * @param domain - domain of host.
     * @param path - path of host.
     * @return new instance of {@code URLBuilder}.
     */
    public static URLBuilder newBuilder(String domain, String path) {
        boolean domainSlash = domain.endsWith("/");
        boolean pathSlash = path.startsWith("/");

        if ((domainSlash && !pathSlash) || (!domainSlash && pathSlash)) {
            return new URLBuilder(domain + path);
        } else if (domainSlash) {
            // Both have a slash: drop the one on the path.
            return new URLBuilder(domain + path.substring(1));
        } else {
            // Neither has a slash: insert one.
            return new URLBuilder(domain + "/" + path);
        }
    }

    /** String builder for url; fields are assigned once, hence final. */
    private final StringBuilder urlBuilder;

    /** String builder for parameters ("&name=value" runs). */
    private final StringBuilder paramsBuilder;

    /**
     * Private constructor initializes builders and normalizes url.
     * If the url ends with slash '/', it is removed.
     * @param baseURL - host url.
     */
    private URLBuilder(String baseURL) {
        String url = baseURL.endsWith("/") ? baseURL.substring(0, baseURL.length() - 1) : baseURL;
        this.urlBuilder = new StringBuilder(url);
        this.paramsBuilder = new StringBuilder();
    }

    /**
     * Adds a new parameter to the url. The value is UTF-8 URL-encoded.
     * @param name - name of parameter.
     * @param value - value of parameter.
     * @return instance of {@code URLBuilder}.
     */
    public URLBuilder addParam(String name, String value) {
        try {
            paramsBuilder.append("&").append(name).append("=").append(encode(value, "UTF-8"));
        } catch (UnsupportedEncodingException e) {
            // UTF-8 is guaranteed by the JVM spec; this cannot happen.
            throw new AssertionError(e.getMessage());
        }
        return this;
    }

    /**
     * Adds a new parameter to the url. A {@code null} value is ignored.
     * @param name - name of parameter.
     * @param value - value of parameter.
     * @return instance of {@code URLBuilder}.
     */
    public URLBuilder addParam(String name, Object value) {
        if (value == null) return this;
        return addParam(name, value.toString());
    }

    /**
     * Creates a new instance of {@link URL}.
     * Unlike the original, this does not mutate the internal builders, so
     * build() is idempotent and may be called repeatedly (the original
     * corrupted its state on a second call).
     * @return new instance of {@link URL}.
     * @throws IllegalArgumentException if the accumulated url is malformed.
     */
    public URL build() {
        // Strip the leading '&' that addParam always prepends.
        String params = paramsBuilder.length() == 0 ? "" : paramsBuilder.substring(1);
        String spec = params.isEmpty() ? urlBuilder.toString() : urlBuilder + "?" + params;
        try {
            return new URL(spec);
        } catch (MalformedURLException e) {
            throw new IllegalArgumentException(e.getMessage(), e);
        }
    }
}
+

+ 213 - 0
src/main/java/cz/senslog/analyzer/util/json/BasicJson.java

@@ -0,0 +1,213 @@
+package cz.senslog.analyzer.util.json;
+
+import com.google.gson.*;
+import com.google.gson.stream.JsonReader;
+import com.google.gson.stream.JsonToken;
+import cz.senslog.analyzer.util.Tuple;
+
+import java.io.IOException;
+import java.io.StringReader;
+import java.lang.reflect.Type;
+import java.time.LocalDateTime;
+import java.time.OffsetDateTime;
+import java.time.ZonedDateTime;
+
+import static com.google.gson.stream.JsonToken.END_DOCUMENT;
+import static java.time.format.DateTimeFormatter.ISO_DATE_TIME;
+import static java.time.format.DateTimeFormatter.ISO_OFFSET_DATE_TIME;
+
+/**
+ * The class {@code BasicJson} represents a basic wrapper for {@link Gson} library.
+ * Provides basic converter from object to string and string to object.
+ *
+ * Configuration contains basic formatters for {@see LocalDateTime}, {@see ZonedDateTime} and {@see Class}.
+ *
+ *
+ * Both time classes are formatter to ISO format e.q. '2011-12-03T10:15:30',
+ * '2011-12-03T10:15:30+01:00' or '2011-12-03T10:15:30+01:00[Europe/Paris]'.
+ *
+ * Class is formatted as the full name of the class.
+ *
+ * @author Lukas Cerny
+ * @version 1.0
+ * @since 1.0
+ */
+public final class BasicJson {
+
+    /** Instance of json converter. */
+    private static final Gson gson = new GsonBuilder()
+            .registerTypeAdapter(LocalDateTime.class, new LocalDateTimeAdapter())
+            .registerTypeAdapter(ZonedDateTime.class, new ZonedDateTimeAdapter())
+            .registerTypeAdapter(OffsetDateTime.class, new OffsetDateTimeAdapter())
+            .registerTypeAdapter(Class.class, new ClassAdapter())
+            .create();
+
+    /** Formatter for {@see LocalDateTime}. */
+    private static class LocalDateTimeAdapter implements JsonSerializer<LocalDateTime>, JsonDeserializer<LocalDateTime> {
+
+        @Override
+        public JsonElement serialize(LocalDateTime localDateTime, Type type, JsonSerializationContext jsonSerializationContext) {
+            return new JsonPrimitive(localDateTime.format(ISO_DATE_TIME));
+        }
+
+        @Override
+        public LocalDateTime deserialize(JsonElement jsonElement, Type type, JsonDeserializationContext jsonDeserializationContext) throws JsonParseException {
+            return LocalDateTime.parse(jsonElement.getAsString(), ISO_DATE_TIME);
+        }
+    }
+
+    /** Formatter for {@see ZonedDateTime}. */
+    private static class ZonedDateTimeAdapter implements JsonSerializer<ZonedDateTime>, JsonDeserializer<ZonedDateTime> {
+
+        @Override
+        public JsonElement serialize(ZonedDateTime zonedDateTime, Type type, JsonSerializationContext jsonSerializationContext) {
+            return new JsonPrimitive(zonedDateTime.format(ISO_DATE_TIME));
+        }
+
+        @Override
+        public ZonedDateTime deserialize(JsonElement jsonElement, Type type, JsonDeserializationContext jsonDeserializationContext) throws JsonParseException {
+            return ZonedDateTime.parse(jsonElement.getAsString(), ISO_DATE_TIME);
+        }
+    }
+
+    /** Formatter for {@see OffsetDateTime}. */
+    private static class OffsetDateTimeAdapter implements JsonSerializer<OffsetDateTime>, JsonDeserializer<OffsetDateTime> {
+
+        @Override
+        public JsonElement serialize(OffsetDateTime offsetDateTime, Type type, JsonSerializationContext jsonSerializationContext) {
+            return new JsonPrimitive(offsetDateTime.format(ISO_OFFSET_DATE_TIME));
+        }
+
+        @Override
+        public OffsetDateTime deserialize(JsonElement jsonElement, Type type, JsonDeserializationContext jsonDeserializationContext) throws JsonParseException {
+            return OffsetDateTime.parse(jsonElement.getAsString(), ISO_OFFSET_DATE_TIME);
+        }
+    }
+
+    /** Formatter for {@see Class}. */
+    private static class ClassAdapter implements JsonSerializer<Class<?>>, JsonDeserializer<Class<?>> {
+
+        @Override
+        public JsonElement serialize(Class<?> aClass, Type type, JsonSerializationContext jsonSerializationContext) {
+            return new JsonPrimitive(aClass.getName());
+        }
+
+        @Override
+        public Class<?> deserialize(JsonElement jsonElement, Type type, JsonDeserializationContext jsonDeserializationContext) throws JsonParseException {
+            try {
+                return Class.forName(jsonElement.getAsString());
+            } catch (ClassNotFoundException e) {
+                return null;
+            }
+        }
+    }
+
+    /**
+     * Deserialize json to a typed object according to class.
+     * @param jsonString - json string.
+     * @param aClass - class of the object.
+     * @param <T> - generic type object.
+     * @return new instance of the input class.
+     */
+    public static <T> T jsonToObject(String jsonString, Class<T> aClass) {
+        try {
+            return gson.fromJson(jsonString, aClass);
+        } catch (JsonSyntaxException e) {
+            throw new RuntimeException(e.getMessage());
+        }
+    }
+
+    /**
+     * Deserialize json to a typed object according to type.
+     * @param jsonString - json string.
+     * @param type - type of the object.
+     * @param <T> - generic type object.
+     * @return new instance of the input type.
+     */
+    public static <T> T jsonToObject(String jsonString, Type type) {
+        try {
+            return gson.fromJson(jsonString, type);
+        } catch (JsonSyntaxException e) {
+            throw new RuntimeException(e.getMessage());
+        }
+    }
+
+    /**
+     * Serialize object to string json.
+     * @param object - input object.
+     * @param <T> - generic type of object.
+     * @return string json.
+     */
+    public static <T> String objectToJson(T object) {
+        try {
+            return gson.toJson(object);
+        } catch (JsonSyntaxException e) {
+            throw new RuntimeException(e.getMessage());
+        }
+    }
+
+    @SafeVarargs
+    public static <R, E> R jsonToObject(String json, Type type, Tuple<Class<E>, FormatFunction<E>>... formatters) {
+        GsonBuilder gsonBuilder = new GsonBuilder();
+        for (Tuple<Class<E>, FormatFunction<E>> formatter : formatters) {
+            gsonBuilder.registerTypeAdapter(formatter.getItem1(), new BasicJsonDeserializer<>(formatter.getItem2()));
+        }
+        Gson gson = gsonBuilder.create();
+        return gson.fromJson(json, type);
+    }
+
+
+    /**
+     * Checks if input string is in json format.
+     * @param json - input json.
+     * @return true - valid, false - invalid.
+     */
+    public static boolean isValid(String json) {
+        return isValid(new JsonReader(new StringReader(json)));
+    }
+
+    /**
+     * Validates input json reader.
+     * @param jsonReader - input json reader.
+     * @return true - valid, false - invalid.
+     */
+    private static boolean isValid(JsonReader jsonReader) {
+        try {
+            JsonToken token;
+            loop:
+            while ( (token = jsonReader.peek()) != END_DOCUMENT && token != null ) {
+                switch ( token ) {
+                    case BEGIN_ARRAY:
+                        jsonReader.beginArray();
+                        break;
+                    case END_ARRAY:
+                        jsonReader.endArray();
+                        break;
+                    case BEGIN_OBJECT:
+                        jsonReader.beginObject();
+                        break;
+                    case END_OBJECT:
+                        jsonReader.endObject();
+                        break;
+                    case NAME:
+                        jsonReader.nextName();
+                        break;
+                    case STRING:
+                    case NUMBER:
+                    case BOOLEAN:
+                    case NULL:
+                        jsonReader.skipValue();
+                        break;
+                    case END_DOCUMENT:
+                        break loop;
+                    default:
+                        throw new AssertionError(token);
+                }
+            }
+            return true;
+        } catch (IOException ignored ) {
+            return false;
+        }
+    }
+
+}

+ 22 - 0
src/main/java/cz/senslog/analyzer/util/json/BasicJsonDeserializer.java

@@ -0,0 +1,22 @@
+package cz.senslog.analyzer.util.json;
+
+import com.google.gson.JsonDeserializationContext;
+import com.google.gson.JsonDeserializer;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonParseException;
+
+import java.lang.reflect.Type;
+
+public class BasicJsonDeserializer<T> implements JsonDeserializer<T> {
+
+    private final FormatFunction<T> formatter;
+
+    public BasicJsonDeserializer(FormatFunction<T> formatter) {
+        this.formatter = formatter;
+    }
+
+    @Override
+    public T deserialize(JsonElement jsonElement, Type type, JsonDeserializationContext jsonDeserializationContext) throws JsonParseException {
+        return formatter.apply(jsonElement.getAsString());
+    }
+}

+ 6 - 0
src/main/java/cz/senslog/analyzer/util/json/FormatFunction.java

@@ -0,0 +1,6 @@
+package cz.senslog.analyzer.util.json;
+
+/**
+ * A function that parses the raw string form of a JSON element into a
+ * typed object. Used by {@code BasicJsonDeserializer} to plug custom
+ * parsing into Gson type adapters.
+ *
+ * @param <T> target type produced from the string element.
+ */
+@FunctionalInterface
+public interface FormatFunction<T> {
+    /**
+     * Converts the given string element into an instance of {@code T}.
+     * @param element - raw string taken from a JSON element.
+     * @return the parsed value.
+     */
+    T apply(String element);
+}

+ 139 - 0
src/main/java/cz/senslog/analyzer/util/json/JsonSchema.java

@@ -0,0 +1,139 @@
+package cz.senslog.analyzer.util.json;
+
+import org.everit.json.schema.Schema;
+import org.everit.json.schema.ValidationException;
+import org.everit.json.schema.loader.SchemaLoader;
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.nio.file.Path;
+import java.util.List;
+import java.util.stream.Collectors;
+
+import static java.lang.String.format;
+import static java.nio.file.Files.readAllBytes;
+
+/**
+ * The class {@code JsonSchema} represents a basic wrapper for {@link Schema}.
+ * Provides functionality of creating a new schema and validation an input json according the schema.
+ * Input json can start as a list or object.
+ *
+ * @author Lukas Cerny
+ * @version 1.0
+ * @since 1.0
+ */
+public final class JsonSchema {
+
+    /** Instance of loaded schema. */
+    private final Schema schema;
+
+    /**
+     * Loads the schema from resources.
+     * @param schemaName - name of the schema.
+     * @return new instance of {@code JsonSchema}.
+     * @throws IOException throws if the schema does not exist or can not be loaded.
+     */
+    public static JsonSchema loadAsResource(String schemaName) throws IOException {
+        InputStream inputStream = ClassLoader.getSystemResourceAsStream(schemaName);
+
+        if (inputStream == null) {
+            throw new IOException(format("Resource file %s was not found.", schemaName));
+        }
+
+        BufferedReader streamReader = new BufferedReader(new InputStreamReader(inputStream));
+        String schema = streamReader.lines().collect(Collectors.joining());
+
+        return create(schema);
+    }
+
+    /**
+     * Loads the schema from file system.
+     * @param schemaPath - path of the schema.
+     * @return new instance of {@code JsonSchema}.
+     * @throws IOException throws if the schema does not exist or can not be loaded.
+     */
+    public static JsonSchema load(Path schemaPath) throws IOException {
+        return create(new String(readAllBytes(schemaPath)));
+    }
+
+    /**
+     *  Creates and build a new schema.
+     * @param jsonSchema - string json schema.
+     * @return new instance of {@code JsonSchema}.
+     */
+    public static JsonSchema create(String jsonSchema) {
+        Schema schema = SchemaLoader.builder()
+                .schemaJson(new JSONObject(jsonSchema)).build()
+                .load().build();
+        return new JsonSchema(schema);
+    }
+
+    /**
+     * Private constructor of the class. Accessible via static method {@link JsonSchema#create(String)}.
+     * @param schema - build schema.
+     */
+    private JsonSchema(Schema schema) {
+        this.schema = schema;
+    }
+
+    /**
+     * Validates input json which starts as an object.
+     * @param json - input object json.
+     * @param errors - list of errors if json is not valid.
+     * @return true - valid, false - invalid.
+     */
+    public boolean validateJsonObject(String json, List<String> errors) {
+        try {
+            return validate(new JSONObject(json), errors);
+        } catch (JSONException e) {
+            if (errors != null) {
+                errors.clear();
+                errors.add(e.getMessage());
+            }
+            return false;
+        }
+    }
+
+    /**
+     * Validates input json which starts as an array.
+     * @param json - input array json.
+     * @param errors - list of errors if json is not valid.
+     * @return true - valid, false - invalid.
+     */
+    public boolean validateJsonArray(String json, List<String> errors) {
+        try {
+            return validate(new JSONArray(json), errors);
+        } catch (JSONException e) {
+            if (errors != null) {
+                errors.clear();
+                errors.add(e.getMessage());
+            }
+            return false;
+        }
+    }
+
+    /**
+     * Validates input json according to build schema. If input json is not valid,
+     * is thrown an exception with messages why json is not valid.
+     * @param json - input json.
+     * @param errors - list of errors if json is not valid.
+     * @return true - valid, false - invalid.
+     */
+    private boolean validate(Object json, List<String> errors) {
+        try {
+            schema.validate(json);
+            return true;
+        } catch (ValidationException e) {
+            if (errors != null) {
+                errors.clear();
+                errors.addAll(e.getAllMessages());
+            }
+            return false;
+        }
+    }
+}

+ 59 - 0
src/main/java/cz/senslog/analyzer/util/schedule/ScheduleTask.java

@@ -0,0 +1,59 @@
+package cz.senslog.analyzer.util.schedule;
+
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.ScheduledFuture;
+
+import static java.util.concurrent.TimeUnit.SECONDS;
+
+/**
+ * A periodic task plus its scheduling metadata (name, period in seconds)
+ * and the handle of its scheduled execution.
+ * NOTE(review): {@code description} and {@code scheduledTask} are read and
+ * written from several threads without synchronization — confirm whether
+ * stale reads are acceptable here.
+ */
+public final class ScheduleTask {
+
+    /** Initial delay (seconds) before the first execution. */
+    private static final int DELAY = 2;
+
+    // Replaced wholesale on status changes; TaskDescription itself is immutable.
+    private TaskDescription description;
+    private final Runnable task;
+    /** Period between executions, in seconds. */
+    private final long period;
+
+    /** Handle of the currently scheduled execution; null until schedule() is called. */
+    private ScheduledFuture<?> scheduledTask;
+
+    public ScheduleTask(String name, Runnable task, long period) {
+        this.description = new TaskDescription(name, Status.STOPPED);
+        this.task = task;
+        this.period = period;
+    }
+
+    public TaskDescription getDescription() {
+        return description;
+    }
+
+    public Runnable getTask() {
+        return task;
+    }
+
+    public long getPeriod() {
+        return period;
+    }
+
+    /**
+     * Cancels the scheduled execution, interrupting it if currently running.
+     * @return true if the task is no longer scheduled (or never was).
+     */
+    public boolean terminate() {
+        if (scheduledTask != null && !scheduledTask.isCancelled()) {
+            return scheduledTask.cancel(true);
+        }
+        return true;
+    }
+
+    /**
+     * Schedules the task at a fixed rate and spawns a watcher thread that
+     * blocks on the future. A fixed-rate future only completes on failure
+     * or cancellation; at that point the watcher counts the latch down and
+     * flips the status back to STOPPED.
+     * @param scheduledService - executor the task is scheduled on.
+     * @param latch - counted down exactly once, when this task terminates.
+     */
+    public void schedule(ScheduledExecutorService scheduledService, CountDownLatch latch) {
+        scheduledTask = scheduledService.scheduleAtFixedRate(task, DELAY, period, SECONDS);
+        description = new TaskDescription(description.getName(), Status.RUNNING);
+        new Thread(() -> {
+            try {
+                scheduledTask.get();
+            } catch (Exception e) {
+                e.printStackTrace();
+            } finally {
+                // Ensure the future is cancelled even when get() threw an
+                // execution failure rather than a cancellation.
+                scheduledTask.cancel(true);
+                latch.countDown();
+                description = new TaskDescription(description.getName(), Status.STOPPED);
+            }
+        }, "thread-"+description.getName()).start();
+    }
+}

+ 24 - 0
src/main/java/cz/senslog/analyzer/util/schedule/Scheduler.java

@@ -0,0 +1,24 @@
+package cz.senslog.analyzer.util.schedule;
+
+import java.util.Set;
+
+/**
+ * Contract for a component that runs a set of periodic tasks.
+ * Instances are assembled via {@link #createBuilder()}.
+ */
+public interface Scheduler {
+
+    /** @return a fresh builder backed by the default implementation. */
+    static SchedulerBuilder createBuilder() {
+        return new SchedulerBuilderImpl();
+    }
+
+    /** Schedules all registered tasks; blocks until they have terminated. */
+    void start();
+    /** Stops execution of the scheduled tasks. */
+    void stop();
+
+    /** @return RUNNING while the underlying executor is active, STOPPED otherwise. */
+    Status getStatus();
+    /** @return current description (name + status) of every registered task. */
+    Set<TaskDescription> getTaskDescriptions();
+
+    /** Fluent builder collecting tasks before {@link #build()}. */
+    interface SchedulerBuilder {
+
+        /** Registers a named task executed every {@code period} seconds. */
+        SchedulerBuilder addTask(String name, Runnable task, long period);
+        /** Registers a task named after its class, executed every {@code period} seconds. */
+        SchedulerBuilder addTask(Runnable task, long period);
+
+        /** @return a scheduler over all tasks added so far. */
+        Scheduler build();
+    }
+}

+ 30 - 0
src/main/java/cz/senslog/analyzer/util/schedule/SchedulerBuilderImpl.java

@@ -0,0 +1,30 @@
+package cz.senslog.analyzer.util.schedule;
+
+import java.util.HashSet;
+import java.util.Set;
+
+public class SchedulerBuilderImpl implements Scheduler.SchedulerBuilder {
+
+    private final Set<ScheduleTask> tasks;
+
+    public SchedulerBuilderImpl() {
+        this.tasks = new HashSet<>();
+    }
+
+    @Override
+    public Scheduler.SchedulerBuilder addTask(String name, Runnable task, long period) {
+        tasks.add(new ScheduleTask(name, task, period));
+        return this;
+    }
+
+    @Override
+    public Scheduler.SchedulerBuilder addTask(Runnable task, long period) {
+        tasks.add(new ScheduleTask(task.getClass().getSimpleName(), task, period));
+        return this;
+    }
+
+    @Override
+    public Scheduler build() {
+        return new SchedulerImpl(tasks);
+    }
+}

+ 67 - 0
src/main/java/cz/senslog/analyzer/util/schedule/SchedulerImpl.java

@@ -0,0 +1,67 @@
+package cz.senslog.analyzer.util.schedule;
+
+import java.util.HashSet;
+import java.util.Set;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+
+public class SchedulerImpl implements Scheduler, Runnable {
+
+    private final Set<ScheduleTask> tasks;
+
+    private ScheduledExecutorService scheduler;
+    private CountDownLatch latch;
+
+    private Thread schedulerThread;
+
+    public SchedulerImpl(Set<ScheduleTask> tasks) {
+        this.tasks = tasks;
+    }
+
+    @Override
+    public void run() {
+        start();
+    }
+
+    @Override
+    public void start() {
+
+        if (!tasks.isEmpty()) {
+            scheduler = Executors.newScheduledThreadPool(tasks.size());
+            latch = new CountDownLatch(tasks.size());
+            tasks.forEach(t -> t.schedule(scheduler, latch));
+
+            try {
+                latch.await();
+            } catch (InterruptedException e) {
+                e.printStackTrace();
+            }
+        } else {
+            // TODO no tasks
+        }
+    }
+
+    @Override
+    public void stop() {
+        if (getStatus() == Status.RUNNING) {
+            scheduler.shutdown();
+            scheduler = null;
+        }
+    }
+
+    @Override
+    public Status getStatus() {
+        boolean active = scheduler != null && !scheduler.isShutdown();
+        return active ? Status.RUNNING : Status.STOPPED;
+    }
+
+    @Override
+    public Set<TaskDescription> getTaskDescriptions() {
+        Set<TaskDescription> descriptions = new HashSet<>(tasks.size());
+        for (ScheduleTask task : tasks) {
+            descriptions.add(task.getDescription());
+        }
+        return descriptions;
+    }
+}

+ 5 - 0
src/main/java/cz/senslog/analyzer/util/schedule/Status.java

@@ -0,0 +1,5 @@
+package cz.senslog.analyzer.util.schedule;
+
+/**
+ * Lifecycle state shared by the scheduler and its tasks.
+ * RUNNING - executor/task is active; STOPPED - not (or no longer) scheduled.
+ * NOTE(review): PREPARED is not referenced in the visible code — confirm use.
+ */
+public enum Status {
+    RUNNING, PREPARED, STOPPED,
+}

+ 29 - 0
src/main/java/cz/senslog/analyzer/util/schedule/TaskDescription.java

@@ -0,0 +1,29 @@
+package cz.senslog.analyzer.util.schedule;
+
+public class TaskDescription {
+
+    private final String name;
+
+    private final Status status;
+
+    public TaskDescription(String name, Status status) {
+        this.name = name;
+        this.status = status;
+    }
+
+    public String getName() {
+        return name;
+    }
+
+    public Status getStatus() {
+        return status;
+    }
+
+    @Override
+    public String toString() {
+        return "TaskDescription{" +
+                "name='" + name + '\'' +
+                ", status=" + status +
+                '}';
+    }
+}

+ 36 - 36
src/main/java/cz/senslog/analyzer/ws/handler/InfoHandler.java

@@ -1,37 +1,37 @@
-package cz.senslog.analyzer.ws.handler;
-
-import com.google.gson.JsonObject;
-import cz.senslog.analyzer.app.Application;
-import cz.senslog.analyzer.ws.vertx.AbstractRestHandler;
-import io.vertx.core.http.HttpServerResponse;
-
-import javax.inject.Inject;
-
-import static cz.senslog.common.http.HttpContentType.APPLICATION_JSON;
-import static cz.senslog.common.http.HttpHeader.CONTENT_TYPE;
-import static cz.senslog.common.json.BasicJson.objectToJson;
-
-public class InfoHandler extends AbstractRestHandler {
-
-    @Inject
-    public InfoHandler(){}
-
-    @Override
-    public void start() {
-
-        router().get().handler(ctx -> {
-            HttpServerResponse response = ctx.response();
-            response.putHeader(CONTENT_TYPE, APPLICATION_JSON);
-
-            JsonObject uptime = new JsonObject();
-            uptime.addProperty("app", Application.uptime() / 1000);
-            uptime.addProperty("jvm", Application.uptimeJVM() / 1000);
-
-            JsonObject json = new JsonObject();
-            json.addProperty("status", "ok");
-            json.add("uptime", uptime);
-
-            response.end(objectToJson(json));
-        });
-    }
+package cz.senslog.analyzer.ws.handler;
+
+import com.google.gson.JsonObject;
+import cz.senslog.analyzer.app.Application;
+import cz.senslog.analyzer.ws.vertx.AbstractRestHandler;
+import io.vertx.core.http.HttpServerResponse;
+
+import javax.inject.Inject;
+
+import static cz.senslog.analyzer.util.http.HttpContentType.APPLICATION_JSON;
+import static cz.senslog.analyzer.util.http.HttpHeader.CONTENT_TYPE;
+import static cz.senslog.analyzer.util.json.BasicJson.objectToJson;
+
+public class InfoHandler extends AbstractRestHandler {
+
+    @Inject
+    public InfoHandler(){}
+
+    @Override
+    public void start() {
+
+        router().get().handler(ctx -> {
+            HttpServerResponse response = ctx.response();
+            response.putHeader(CONTENT_TYPE, APPLICATION_JSON);
+
+            JsonObject uptime = new JsonObject();
+            uptime.addProperty("app", Application.uptime() / 1000);
+            uptime.addProperty("jvm", Application.uptimeJVM() / 1000);
+
+            JsonObject json = new JsonObject();
+            json.addProperty("status", "ok");
+            json.add("uptime", uptime);
+
+            response.end(objectToJson(json));
+        });
+    }
 }

+ 120 - 120
src/main/java/cz/senslog/analyzer/ws/handler/StatisticsHandler.java

@@ -1,121 +1,121 @@
-package cz.senslog.analyzer.ws.handler;
-
-import cz.senslog.analyzer.domain.*;
-import cz.senslog.analyzer.ws.dto.SensorStatisticsData;
-import cz.senslog.analyzer.ws.manager.WSStatisticsManager;
-import cz.senslog.analyzer.ws.vertx.AbstractRestHandler;
-import io.vertx.core.MultiMap;
-import io.vertx.core.http.HttpServerResponse;
-import io.vertx.core.json.JsonArray;
-import io.vertx.core.json.JsonObject;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-
-import javax.inject.Inject;
-import java.util.*;
-
-import static cz.senslog.analyzer.domain.IntervalGroup.parseIntervalGroup;
-import static cz.senslog.analyzer.util.TimestampUtil.parseTimestamp;
-import static cz.senslog.common.http.HttpContentType.APPLICATION_JSON;
-import static cz.senslog.common.http.HttpHeader.CONTENT_TYPE;
-import static cz.senslog.common.util.number.LongUtils.parseLong;
-
-public class StatisticsHandler extends AbstractRestHandler {
-
-    private static final Logger logger = LogManager.getLogger(StatisticsHandler.class);
-
-    private final WSStatisticsManager manager;
-
-    @Inject
-    public StatisticsHandler(WSStatisticsManager manager) {
-        this.manager = manager;
-    }
-
-    @Override
-    public void start() {
-
-        router().get("/analytics").handler(ctx -> {
-            logger.info("Handling '{}' with the params '{}'.", ctx.request().path(), ctx.request().params().entries());
-
-            HttpServerResponse response = ctx.response();
-            response.putHeader(CONTENT_TYPE, APPLICATION_JSON);
-            MultiMap params = ctx.request().params();
-
-            Optional<Long> unitIdOpt = parseLong(params.get("unit_id"));
-            if (!unitIdOpt.isPresent()) {
-                ctx.fail(400, new Throwable(
-                        "Parameter 'unit_id' is not at the correct format. Expected long number."
-                )); return;
-            }
-
-            Optional<Long> sensorIdOpt = parseLong(params.get("sensor_id"));
-
-            Optional<IntervalGroup> intervalGroupOpt = Optional.ofNullable(parseIntervalGroup(params.get("interval")));
-            if (!intervalGroupOpt.isPresent()) {
-                ctx.fail(400, new Throwable(
-                        "Parameter 'interval' is not at the correct format. " +
-                        "Expected one of these values: "+ Arrays.toString(IntervalGroup.values()) +"."
-                )); return;
-            }
-
-            Optional<Timestamp> fromOpt = parseTimestamp(params.get("from"));
-            if (!fromOpt.isPresent()) {
-                Timestamp now = Timestamp.now();
-                ctx.fail(400, new Throwable(
-                        "Parameter 'from' is not at the correct format. " +
-                        "Expected a date at the format 'yyyy-MM-dd HH:mm:ss+HH' or 'yyyy-MM-dd'. " +
-                        "For example: '"+now.format()+"' or '"+now.dateFormat()+"'."
-                )); return;
-            }
-
-            Optional<Timestamp> toOpt = parseTimestamp(params.get("to"));
-            if (!toOpt.isPresent()) {
-                Timestamp now = Timestamp.now();
-                ctx.fail(400, new Throwable(
-                        "Parameter 'to' is not at the correct format. " +
-                        "Expected a date at the format 'yyyy-MM-dd HH:mm:ss+HH' or 'yyyy-MM-dd'. " +
-                        "For example: '"+now.format()+"' or '"+now.dateFormat()+"'."
-                )); return;
-            }
-
-            List<SensorStatisticsData> statisticsData;
-            if (sensorIdOpt.isPresent()) {
-                statisticsData = manager.loadData(unitIdOpt.get(), sensorIdOpt.get(), fromOpt.get(), toOpt.get(), intervalGroupOpt.get());
-            } else {
-                statisticsData = manager.loadData(unitIdOpt.get(), fromOpt.get(), toOpt.get(), intervalGroupOpt.get());
-            }
-
-            if (statisticsData.isEmpty()) {
-                ctx.fail(204, new Throwable(
-                        "No data loaded according to the input parameters."
-                )); return;
-            }
-
-            JsonObject jsonSensors = new JsonObject();
-            for (SensorStatisticsData data : statisticsData) {
-                JsonObject jsonObject = new JsonObject();
-                jsonObject.put("interval", data.getInterval());
-                JsonArray dataArray = new JsonArray();
-                for (SensorStatisticsData.Data stData : data.getData()) {
-                    dataArray.add(new JsonObject()
-                            .put("min", stData.getMin())
-                            .put("max", stData.getMax())
-                            .put("avg", stData.getAvg())
-                            .put("sum", stData.getSum())
-                            .put("timestamp", stData.getTimestamp().format())
-                    );
-                }
-                jsonObject.put("data", dataArray);
-                jsonObject.put("statistics", new JsonObject()
-                        .put("min", data.getStatistics().getMin())
-                        .put("max", data.getStatistics().getMax())
-                        .put("avg", data.getStatistics().getAvg())
-                        .put("sum", data.getStatistics().getSum())
-                );
-                jsonSensors.put(String.valueOf(data.getSensorId()), jsonObject);
-            }
-
-            response.end(jsonSensors.encode());
-        });
-    }
+package cz.senslog.analyzer.ws.handler;
+
+import cz.senslog.analyzer.domain.*;
+import cz.senslog.analyzer.ws.dto.SensorStatisticsData;
+import cz.senslog.analyzer.ws.manager.WSStatisticsManager;
+import cz.senslog.analyzer.ws.vertx.AbstractRestHandler;
+import io.vertx.core.MultiMap;
+import io.vertx.core.http.HttpServerResponse;
+import io.vertx.core.json.JsonArray;
+import io.vertx.core.json.JsonObject;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import javax.inject.Inject;
+import java.util.*;
+
+import static cz.senslog.analyzer.domain.IntervalGroup.parseIntervalGroup;
+import static cz.senslog.analyzer.util.TimestampUtil.parseTimestamp;
+import static cz.senslog.analyzer.util.http.HttpContentType.APPLICATION_JSON;
+import static cz.senslog.analyzer.util.http.HttpHeader.CONTENT_TYPE;
+import static cz.senslog.analyzer.util.LongUtils.parseLong;
+
+/**
+ * REST handler serving aggregated sensor statistics under GET /analytics.
+ * Validates query parameters (unit_id, optional sensor_id, interval, from, to),
+ * loads the data via {@link WSStatisticsManager} and renders it as a JSON
+ * object keyed by sensor id.
+ */
+public class StatisticsHandler extends AbstractRestHandler {
+
+    private static final Logger logger = LogManager.getLogger(StatisticsHandler.class);
+
+    private final WSStatisticsManager manager;
+
+    @Inject
+    public StatisticsHandler(WSStatisticsManager manager) {
+        this.manager = manager;
+    }
+
+    @Override
+    public void start() {
+
+        router().get("/analytics").handler(ctx -> {
+            logger.info("Handling '{}' with the params '{}'.", ctx.request().path(), ctx.request().params().entries());
+
+            HttpServerResponse response = ctx.response();
+            response.putHeader(CONTENT_TYPE, APPLICATION_JSON);
+            MultiMap params = ctx.request().params();
+
+            // unit_id is mandatory and must parse as a long.
+            Optional<Long> unitIdOpt = parseLong(params.get("unit_id"));
+            if (!unitIdOpt.isPresent()) {
+                ctx.fail(400, new Throwable(
+                        "Parameter 'unit_id' is not at the correct format. Expected long number."
+                )); return;
+            }
+
+            // sensor_id is optional; absence means "all sensors of the unit".
+            Optional<Long> sensorIdOpt = parseLong(params.get("sensor_id"));
+
+            // interval must match one of the IntervalGroup enum values.
+            Optional<IntervalGroup> intervalGroupOpt = Optional.ofNullable(parseIntervalGroup(params.get("interval")));
+            if (!intervalGroupOpt.isPresent()) {
+                ctx.fail(400, new Throwable(
+                        "Parameter 'interval' is not at the correct format. " +
+                        "Expected one of these values: "+ Arrays.toString(IntervalGroup.values()) +"."
+                )); return;
+            }
+
+            // from/to accept timestamp or date-only formats; errors include examples.
+            Optional<Timestamp> fromOpt = parseTimestamp(params.get("from"));
+            if (!fromOpt.isPresent()) {
+                Timestamp now = Timestamp.now();
+                ctx.fail(400, new Throwable(
+                        "Parameter 'from' is not at the correct format. " +
+                        "Expected a date at the format 'yyyy-MM-dd HH:mm:ss+HH' or 'yyyy-MM-dd'. " +
+                        "For example: '"+now.format()+"' or '"+now.dateFormat()+"'."
+                )); return;
+            }
+
+            Optional<Timestamp> toOpt = parseTimestamp(params.get("to"));
+            if (!toOpt.isPresent()) {
+                Timestamp now = Timestamp.now();
+                ctx.fail(400, new Throwable(
+                        "Parameter 'to' is not at the correct format. " +
+                        "Expected a date at the format 'yyyy-MM-dd HH:mm:ss+HH' or 'yyyy-MM-dd'. " +
+                        "For example: '"+now.format()+"' or '"+now.dateFormat()+"'."
+                )); return;
+            }
+
+            // Per-sensor lookup when sensor_id is given, otherwise unit-wide.
+            List<SensorStatisticsData> statisticsData;
+            if (sensorIdOpt.isPresent()) {
+                statisticsData = manager.loadData(unitIdOpt.get(), sensorIdOpt.get(), fromOpt.get(), toOpt.get(), intervalGroupOpt.get());
+            } else {
+                statisticsData = manager.loadData(unitIdOpt.get(), fromOpt.get(), toOpt.get(), intervalGroupOpt.get());
+            }
+
+            if (statisticsData.isEmpty()) {
+                ctx.fail(204, new Throwable(
+                        "No data loaded according to the input parameters."
+                )); return;
+            }
+
+            // Render: { "<sensorId>": { interval, data: [...], statistics: {...} }, ... }
+            JsonObject jsonSensors = new JsonObject();
+            for (SensorStatisticsData data : statisticsData) {
+                JsonObject jsonObject = new JsonObject();
+                jsonObject.put("interval", data.getInterval());
+                JsonArray dataArray = new JsonArray();
+                for (SensorStatisticsData.Data stData : data.getData()) {
+                    dataArray.add(new JsonObject()
+                            .put("min", stData.getMin())
+                            .put("max", stData.getMax())
+                            .put("avg", stData.getAvg())
+                            .put("sum", stData.getSum())
+                            .put("timestamp", stData.getTimestamp().format())
+                    );
+                }
+                jsonObject.put("data", dataArray);
+                // Aggregate over the whole requested range.
+                jsonObject.put("statistics", new JsonObject()
+                        .put("min", data.getStatistics().getMin())
+                        .put("max", data.getStatistics().getMax())
+                        .put("avg", data.getStatistics().getAvg())
+                        .put("sum", data.getStatistics().getSum())
+                );
+                jsonSensors.put(String.valueOf(data.getSensorId()), jsonObject);
+            }
+
+            response.end(jsonSensors.encode());
+        });
+    }
 }

+ 110 - 110
src/main/java/cz/senslog/analyzer/ws/manager/WSStatisticsManager.java

@@ -1,110 +1,110 @@
-package cz.senslog.analyzer.ws.manager;
-
-import cz.senslog.analyzer.domain.DoubleStatistics;
-import cz.senslog.analyzer.domain.Group;
-import cz.senslog.analyzer.domain.IntervalGroup;
-import cz.senslog.analyzer.domain.Timestamp;
-import cz.senslog.analyzer.provider.ScheduledDatabaseProvider;
-import cz.senslog.analyzer.storage.permanent.repository.StatisticsConfigRepository;
-import cz.senslog.analyzer.storage.permanent.repository.StatisticsRepository;
-import cz.senslog.analyzer.util.TimestampUtil;
-import cz.senslog.analyzer.ws.dto.SensorStatisticsData;
-import cz.senslog.common.util.Tuple;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-
-import javax.inject.Inject;
-import java.util.*;
-
-import static cz.senslog.analyzer.domain.DoubleStatistics.init;
-import static cz.senslog.analyzer.util.TimestampUtil.truncByGroup;
-import static java.util.Collections.emptyList;
-import static java.util.Collections.singletonList;
-
-public class WSStatisticsManager {
-
-    private static final Logger logger = LogManager.getLogger(ScheduledDatabaseProvider.class);
-
-    private final StatisticsRepository statisticsRepository;
-    private final StatisticsConfigRepository configRepository;
-
-    @Inject
-    public WSStatisticsManager(StatisticsRepository statisticsRepository, StatisticsConfigRepository configRepository) {
-        this.statisticsRepository = statisticsRepository;
-        this.configRepository = configRepository;
-    }
-
-    private static Tuple<Timestamp, Timestamp> createIntervalByIntervalGroup(Timestamp from, Timestamp to, IntervalGroup intervalGroup) {
-        Tuple<Timestamp, Timestamp> interval = TimestampUtil.truncToIntervalByGroup(from, to, intervalGroup);
-
-        if (interval.getItem1().isAfter(interval.getItem2())) {
-            throw new IllegalArgumentException("The input interval is too short according to the configuration. " +
-                    "The 'from' parameter was truncated to '"+interval.getItem1().format()+"' and " +
-                    "the 'to' parameter was truncated to '"+interval.getItem2().format()+"'.");
-        }
-
-        return interval;
-    }
-
-    private static Tuple<DoubleStatistics, Collection<DoubleStatistics>> aggregateStatistics(List<DoubleStatistics> statistics, IntervalGroup intervalGroup) {
-
-        if (statistics == null || statistics.isEmpty()) {
-            return null;
-        }
-
-        DoubleStatistics firstSt = statistics.get(0);
-        Group group = firstSt.getSource();
-        DoubleStatistics aggrSt = init(firstSt.getSource(), firstSt.getTimestamp());
-        Map<Timestamp, DoubleStatistics> intervalsSt = new HashMap<>();
-        for (DoubleStatistics st : statistics) {
-            aggrSt.accept(st);
-            Timestamp tmTrunc = truncByGroup(st.getTimestamp(), intervalGroup);
-            intervalsSt.computeIfAbsent(tmTrunc, tm -> init(group, tm)).accept(st);
-        }
-
-        return Tuple.of(aggrSt, intervalsSt.values());
-    }
-
-    private static List<SensorStatisticsData> mapToStatisticsData(long sensorId, long intervalSec, Tuple<DoubleStatistics, Collection<DoubleStatistics>> statistics) {
-        if (statistics == null) { return emptyList(); }
-
-        DoubleStatistics sumSt = statistics.getItem1();
-        Collection<DoubleStatistics> aggrSt = statistics.getItem2();
-        List<SensorStatisticsData.Data> statisticsData = new ArrayList<>(aggrSt.size());
-        for (DoubleStatistics st : aggrSt) {
-            statisticsData.add(new SensorStatisticsData.Data(st));
-        }
-
-        statisticsData.sort(Comparator.comparing(SensorStatisticsData.Data::getTimestamp));
-        SensorStatisticsData.Data aggregated = new SensorStatisticsData.Data(sumSt);
-        return singletonList(new SensorStatisticsData(sensorId, intervalSec, statisticsData, aggregated));
-    }
-
-    public List<SensorStatisticsData> loadData(long unitId, long sensorId, Timestamp from, Timestamp to, IntervalGroup intervalGroup) {
-        Tuple<Timestamp, Timestamp> timeRange = createIntervalByIntervalGroup(from, to, intervalGroup);
-        long intervalSec = TimestampUtil.differenceByIntervalGroup(timeRange, intervalGroup);
-
-        long groupId = configRepository.special().getGroupIdByUnitSensor(unitId, sensorId, intervalSec);
-        if (groupId <= 0) {
-            throw new IllegalArgumentException(String.format(
-                    "None group for the combination of unit_id '%d' and sensor_id '%d' and interval %d seconds.", unitId, sensorId, intervalSec
-            ));
-        }
-
-        List<DoubleStatistics> statistics = statisticsRepository.getByTimeRange(groupId, timeRange);
-        return mapToStatisticsData(sensorId, intervalSec, aggregateStatistics(statistics, intervalGroup));
-    }
-
-    public List<SensorStatisticsData> loadData(long unitId, Timestamp from, Timestamp to, IntervalGroup intervalGroup) {
-        Tuple<Timestamp, Timestamp> timeRange = createIntervalByIntervalGroup(from, to, intervalGroup);
-        long intervalSec = TimestampUtil.differenceByIntervalGroup(timeRange, intervalGroup);
-        List<Tuple<Long, Long>> groupsBySensor = configRepository.special().getGroupsByUnit(unitId, intervalSec);
-        List<SensorStatisticsData> statisticsDataList = new ArrayList<>(groupsBySensor.size());
-        for (Tuple<Long, Long> tuple : groupsBySensor) {
-            long groupId = tuple.getItem1(); long sensorId = tuple.getItem2();
-            List<DoubleStatistics> statistics = statisticsRepository.getByTimeRange(groupId, timeRange);
-            statisticsDataList.addAll(mapToStatisticsData(sensorId, intervalSec, aggregateStatistics(statistics, intervalGroup)));
-        }
-        return statisticsDataList;
-    }
-}
+package cz.senslog.analyzer.ws.manager;
+
+import cz.senslog.analyzer.domain.DoubleStatistics;
+import cz.senslog.analyzer.domain.Group;
+import cz.senslog.analyzer.domain.IntervalGroup;
+import cz.senslog.analyzer.domain.Timestamp;
+import cz.senslog.analyzer.provider.ScheduledDatabaseProvider;
+import cz.senslog.analyzer.storage.permanent.repository.StatisticsConfigRepository;
+import cz.senslog.analyzer.storage.permanent.repository.StatisticsRepository;
+import cz.senslog.analyzer.util.TimestampUtil;
+import cz.senslog.analyzer.ws.dto.SensorStatisticsData;
+import cz.senslog.analyzer.util.Tuple;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import javax.inject.Inject;
+import java.util.*;
+
+import static cz.senslog.analyzer.domain.DoubleStatistics.init;
+import static cz.senslog.analyzer.util.TimestampUtil.truncByGroup;
+import static java.util.Collections.emptyList;
+import static java.util.Collections.singletonList;
+
+public class WSStatisticsManager {
+
+    private static final Logger logger = LogManager.getLogger(WSStatisticsManager.class);
+
+    private final StatisticsRepository statisticsRepository;
+    private final StatisticsConfigRepository configRepository;
+
+    @Inject
+    public WSStatisticsManager(StatisticsRepository statisticsRepository, StatisticsConfigRepository configRepository) {
+        this.statisticsRepository = statisticsRepository;
+        this.configRepository = configRepository;
+    }
+
+    private static Tuple<Timestamp, Timestamp> createIntervalByIntervalGroup(Timestamp from, Timestamp to, IntervalGroup intervalGroup) {
+        Tuple<Timestamp, Timestamp> interval = TimestampUtil.truncToIntervalByGroup(from, to, intervalGroup);
+
+        if (interval == null) {
+            throw new IllegalArgumentException("The input interval could not be truncated for interval group '" + intervalGroup + "'.");
+        }
+
+        if (interval.getItem1().isAfter(interval.getItem2())) {
+            throw new IllegalArgumentException("The input interval is too short according to the configuration. " +
+                    "The 'from' parameter was truncated to '"+interval.getItem1().format()+"' and " +
+                    "the 'to' parameter was truncated to '"+interval.getItem2().format()+"'.");
+        }
+
+        return interval;
+    }
+
+    private static Tuple<DoubleStatistics, Collection<DoubleStatistics>> aggregateStatistics(List<DoubleStatistics> statistics, IntervalGroup intervalGroup) {
+
+        if (statistics == null || statistics.isEmpty()) {
+            return null;
+        }
+
+        DoubleStatistics firstSt = statistics.get(0);
+        Group group = firstSt.getSource();
+        DoubleStatistics aggrSt = init(firstSt.getSource(), firstSt.getTimestamp());
+        Map<Timestamp, DoubleStatistics> intervalsSt = new HashMap<>();
+        for (DoubleStatistics st : statistics) {
+            aggrSt.accept(st);
+            Timestamp tmTrunc = truncByGroup(st.getTimestamp(), intervalGroup);
+            intervalsSt.computeIfAbsent(tmTrunc, tm -> init(group, tm)).accept(st);
+        }
+
+        return Tuple.of(aggrSt, intervalsSt.values());
+    }
+
+    private static List<SensorStatisticsData> mapToStatisticsData(long sensorId, long intervalSec, Tuple<DoubleStatistics, Collection<DoubleStatistics>> statistics) {
+        if (statistics == null) { return emptyList(); }
+
+        DoubleStatistics sumSt = statistics.getItem1();
+        Collection<DoubleStatistics> aggrSt = statistics.getItem2();
+        List<SensorStatisticsData.Data> statisticsData = new ArrayList<>(aggrSt.size());
+        for (DoubleStatistics st : aggrSt) {
+            statisticsData.add(new SensorStatisticsData.Data(st));
+        }
+
+        statisticsData.sort(Comparator.comparing(SensorStatisticsData.Data::getTimestamp));
+        SensorStatisticsData.Data aggregated = new SensorStatisticsData.Data(sumSt);
+        return singletonList(new SensorStatisticsData(sensorId, intervalSec, statisticsData, aggregated));
+    }
+
+    public List<SensorStatisticsData> loadData(long unitId, long sensorId, Timestamp from, Timestamp to, IntervalGroup intervalGroup) {
+        Tuple<Timestamp, Timestamp> timeRange = createIntervalByIntervalGroup(from, to, intervalGroup);
+        long intervalSec = TimestampUtil.differenceByIntervalGroup(timeRange, intervalGroup);
+
+        long groupId = configRepository.special().getGroupIdByUnitSensor(unitId, sensorId, intervalSec);
+        if (groupId <= 0) {
+            throw new IllegalArgumentException(String.format(
+                    "None group for the combination of unit_id '%d' and sensor_id '%d' and interval %d seconds.", unitId, sensorId, intervalSec
+            ));
+        }
+
+        List<DoubleStatistics> statistics = statisticsRepository.getByTimeRange(groupId, timeRange);
+        return mapToStatisticsData(sensorId, intervalSec, aggregateStatistics(statistics, intervalGroup));
+    }
+
+    public List<SensorStatisticsData> loadData(long unitId, Timestamp from, Timestamp to, IntervalGroup intervalGroup) {
+        Tuple<Timestamp, Timestamp> timeRange = createIntervalByIntervalGroup(from, to, intervalGroup);
+        long intervalSec = TimestampUtil.differenceByIntervalGroup(timeRange, intervalGroup);
+        List<Tuple<Long, Long>> groupsBySensor = configRepository.special().getGroupsByUnit(unitId, intervalSec);
+        List<SensorStatisticsData> statisticsDataList = new ArrayList<>(groupsBySensor.size());
+        for (Tuple<Long, Long> tuple : groupsBySensor) {
+            long groupId = tuple.getItem1(); long sensorId = tuple.getItem2();
+            List<DoubleStatistics> statistics = statisticsRepository.getByTimeRange(groupId, timeRange);
+            statisticsDataList.addAll(mapToStatisticsData(sensorId, intervalSec, aggregateStatistics(statistics, intervalGroup)));
+        }
+        return statisticsDataList;
+    }
+}

+ 101 - 95
src/main/java/cz/senslog/analyzer/ws/vertx/VertxServer.java

@@ -1,96 +1,102 @@
-package cz.senslog.analyzer.ws.vertx;
-
-import cz.senslog.analyzer.ws.Server;
-import cz.senslog.analyzer.ws.handler.GroupsHandler;
-import cz.senslog.analyzer.ws.handler.InfoHandler;
-import cz.senslog.analyzer.ws.handler.StatisticsHandler;
-import io.vertx.core.*;
-import io.vertx.core.http.HttpServerResponse;
-import io.vertx.core.json.JsonObject;
-import io.vertx.ext.web.Router;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-
-import javax.inject.Inject;
-import java.util.HashMap;
-import java.util.Map;
-
-import static cz.senslog.common.http.HttpContentType.APPLICATION_JSON;
-import static cz.senslog.common.http.HttpHeader.CONTENT_TYPE;
-
-
-public class VertxServer extends AbstractVerticle implements Server {
-
-    private static final Logger logger = LogManager.getLogger(VertxServer.class);
-
-    private final Map<String, AbstractRestHandler> restHandlers;
-
-    @Inject
-    public VertxServer(
-            InfoHandler infoHandler,
-            StatisticsHandler statisticsHandler,
-            GroupsHandler groupsHandler
-    ) {
-        this.restHandlers = new HashMap<>();
-
-        registerRestHandler("info", infoHandler);
-        registerRestHandler("analytics", statisticsHandler);
-        registerRestHandler("groups", groupsHandler);
-    }
-
-    private void registerRestHandler(String id, AbstractRestHandler handler) {
-        id = id.charAt(0) == '/' ? id : "/" + id;
-        restHandlers.put(id, handler);
-    }
-
-    @Override
-    public void start(final Promise<Void> promise) {
-
-        Router router = Router.router(vertx);
-        for (Map.Entry<String, AbstractRestHandler> handlerEntry : restHandlers.entrySet()) {
-            router.mountSubRouter(handlerEntry.getKey(), handlerEntry.getValue().start(vertx));
-        }
-
-        router.route().failureHandler(ctx -> {
-            logger.catching(ctx.failure());
-            HttpServerResponse response = ctx.response();
-            response.putHeader(CONTENT_TYPE, APPLICATION_JSON);
-            JsonObject error = new JsonObject()
-                    .put("timestamp", System.nanoTime())
-                    .put("message", ctx.failure().getMessage())
-                    .put("path", ctx.request().path());
-            int code = ctx.statusCode() > 0 ? ctx.statusCode() : 400;
-            response.setStatusCode(code).end(error.encode());
-        });
-
-        vertx.createHttpServer()
-                .requestHandler(router)
-                .listen(config().getInteger("http.server.port"), result -> {
-                    if (result.succeeded()) { promise.complete(); }
-                    else { promise.fail(result.cause()); }
-                });
-    }
-
-    @Override
-    public void stop() {
-        this.vertx.close();
-    }
-
-    @Override
-    public void start(int port) {
-        vertx = Vertx.vertx();
-
-        DeploymentOptions options = new DeploymentOptions().setConfig(new JsonObject()
-                .put("http.server.port", port)
-        );
-
-        vertx.deployVerticle(this, options, res -> {
-            if (res.succeeded()) {
-                logger.info("Deployment id is {} ", res.result());
-                logger.info("The HTTP server running on port {}.", port);
-            } else {
-                logger.error("Could not start the HTTP server: " + res.cause());
-            }
-        });
-    }
+package cz.senslog.analyzer.ws.vertx;
+
+import cz.senslog.analyzer.ws.Server;
+import cz.senslog.analyzer.ws.handler.GroupsHandler;
+import cz.senslog.analyzer.ws.handler.InfoHandler;
+import cz.senslog.analyzer.ws.handler.StatisticsHandler;
+import io.vertx.core.*;
+import io.vertx.core.http.HttpMethod;
+import io.vertx.core.http.HttpServerResponse;
+import io.vertx.core.json.JsonObject;
+import io.vertx.ext.web.Router;
+import io.vertx.ext.web.handler.CorsHandler;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import javax.inject.Inject;
+import java.util.HashMap;
+import java.util.Map;
+
+import static cz.senslog.analyzer.util.http.HttpContentType.APPLICATION_JSON;
+import static cz.senslog.analyzer.util.http.HttpHeader.CONTENT_TYPE;
+
+
+public class VertxServer extends AbstractVerticle implements Server {
+
+    private static final Logger logger = LogManager.getLogger(VertxServer.class);
+
+    private final Map<String, AbstractRestHandler> restHandlers;
+
+    @Inject
+    public VertxServer(
+            InfoHandler infoHandler,
+            StatisticsHandler statisticsHandler,
+            GroupsHandler groupsHandler
+    ) {
+        this.restHandlers = new HashMap<>();
+
+        registerRestHandler("info", infoHandler);
+        registerRestHandler("analytics", statisticsHandler);
+        registerRestHandler("groups", groupsHandler);
+    }
+
+    private void registerRestHandler(String id, AbstractRestHandler handler) {
+        id = id.charAt(0) == '/' ? id : "/" + id;
+        restHandlers.put(id, handler);
+    }
+
+    @Override
+    public void start(final Promise<Void> promise) {
+
+        Router router = Router.router(vertx);
+
+        router.route().handler(CorsHandler.create()
+                .allowedMethod(HttpMethod.GET)
+        );
+
+        for (Map.Entry<String, AbstractRestHandler> handlerEntry : restHandlers.entrySet()) {
+            router.mountSubRouter(handlerEntry.getKey(), handlerEntry.getValue().start(vertx));
+        }
+
+        router.route().failureHandler(ctx -> {
+            logger.catching(ctx.failure());
+            HttpServerResponse response = ctx.response();
+            response.putHeader(CONTENT_TYPE, APPLICATION_JSON);
+            JsonObject error = new JsonObject()
+                    .put("timestamp", System.nanoTime())
+                    .put("message", ctx.failure().getMessage())
+                    .put("path", ctx.request().path());
+            int code = ctx.statusCode() > 0 ? ctx.statusCode() : 400;
+            response.setStatusCode(code).end(error.encode());
+        });
+
+        vertx.createHttpServer()
+                .requestHandler(router)
+                .listen(config().getInteger("http.server.port"), result -> {
+                    if (result.succeeded()) { promise.complete(); }
+                    else { promise.fail(result.cause()); }
+                });
+    }
+
+    @Override
+    public void stop() {
+        this.vertx.close();
+    }
+
+    @Override
+    public void start(int port) {
+        vertx = Vertx.vertx();
+
+        DeploymentOptions options = new DeploymentOptions().setConfig(new JsonObject()
+                .put("http.server.port", port)
+        );
+
+        vertx.deployVerticle(this, options, res -> {
+            if (res.succeeded()) {
+                logger.info("Deployment id is {} ", res.result());
+                logger.info("The HTTP server running on port {}.", port);
+            } else {
+                logger.error("Could not start the HTTP server: " + res.cause());
+            }
+        });
+    }
 }

+ 142 - 142
src/test/java/cz/senslog/analyzer/provider/task/ObservationAnalyzerTaskTest.java

@@ -1,143 +1,143 @@
-package cz.senslog.analyzer.provider.task;
-
-import cz.senslog.analyzer.analysis.Analyzer;
-import cz.senslog.analyzer.domain.Observation;
-import cz.senslog.analyzer.domain.Sensor;
-import cz.senslog.analyzer.domain.Timestamp;
-import cz.senslog.analyzer.provider.AnalyzerTask;
-import cz.senslog.analyzer.storage.Connection;
-import cz.senslog.analyzer.storage.inmemory.TimestampStorage;
-import cz.senslog.analyzer.storage.inmemory.repository.TimestampRepository;
-import cz.senslog.analyzer.storage.permanent.repository.SensLogRepository;
-import org.jdbi.v3.core.Jdbi;
-import org.junit.jupiter.api.Test;
-
-import java.time.LocalDateTime;
-import java.time.OffsetDateTime;
-import java.time.ZoneOffset;
-import java.util.Arrays;
-
-import static cz.senslog.analyzer.domain.TimestampType.*;
-import static java.time.temporal.ChronoUnit.SECONDS;
-import static org.junit.jupiter.api.Assertions.*;
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.Mockito.*;
-
-class ObservationAnalyzerTaskTest {
-
-    @Test
-    void loadData_dataNotAccepted_true() {
-
-        LocalDateTime time = LocalDateTime.of(2020, 1, 1, 0, 0);
-        OffsetDateTime startTime = OffsetDateTime.of(time, ZoneOffset.UTC);
-
-        Analyzer<Observation> analyzer = mock(Analyzer.class);
-        SensLogRepository repository = mock(SensLogRepository.class);
-        when(repository.getObservationsFromTime(Timestamp.of(startTime), true, 100))
-                .then(answer -> {
-                    Timestamp timestamp = answer.getArgument(0);
-                    return Arrays.asList(
-                            new Observation(new Sensor(123, 456), 1.0, timestamp.plus(0, SECONDS)),
-                            new Observation(new Sensor(123, 456), 1.0, timestamp.plus(1, SECONDS)),
-                            new Observation(new Sensor(123, 456), 1.0, timestamp.plus(2, SECONDS)),
-                            new Observation(new Sensor(123, 456), 1.0, timestamp.plus(3, SECONDS))
-                    );
-                });
-
-        Connection<Jdbi> connection = new Connection<>(Jdbi.create("jdbc:h2:mem:loadData_dataNotAccepted_true;DB_CLOSE_DELAY=-1"));
-        TimestampStorage storage = TimestampStorage.createContext(new TimestampRepository(connection));
-
-
-        AnalyzerTask<Observation> task = new ObservationAnalyzerTask(analyzer, storage, repository, startTime);
-
-        task.run();
-        assertEquals(startTime.toInstant(), storage.get(FIRST_PROVIDED_INCLUSIVE).toInstant());
-        assertEquals(startTime.plusSeconds(2).toInstant(), storage.get(LAST_PROVIDED_EXCLUSIVE).toInstant());
-
-        task.run();
-        assertEquals(startTime.toInstant(), storage.get(FIRST_PROVIDED_INCLUSIVE).toInstant());
-        assertEquals(startTime.plusSeconds(2).toInstant(), storage.get(LAST_PROVIDED_EXCLUSIVE).toInstant());
-    }
-
-    @Test
-    void loadData_allDataAccepted_true() {
-
-        LocalDateTime time = LocalDateTime.of(2020, 1, 1, 0, 0);
-        OffsetDateTime startTime = OffsetDateTime.of(time, ZoneOffset.UTC);
-
-        Analyzer<Observation> analyzer = mock(Analyzer.class);
-        SensLogRepository repository = mock(SensLogRepository.class);
-
-        doAnswer(answer -> {
-            Timestamp timestamp = answer.getArgument(0);
-            return Arrays.asList(
-                    new Observation(new Sensor(123, 456), 1.0, timestamp.plus(0, SECONDS)),
-                    new Observation(new Sensor(123, 456), 1.0, timestamp.plus(1, SECONDS)),
-                    new Observation(new Sensor(123, 456), 1.0, timestamp.plus(2, SECONDS)),
-                    new Observation(new Sensor(123, 456), 1.0, timestamp.plus(3, SECONDS))
-            );
-        }).when(repository).getObservationsFromTime(any(Timestamp.class), true, 100);
-
-        Connection<Jdbi> connection = new Connection<>(Jdbi.create("jdbc:h2:mem:loadData_allDataAccepted_true;DB_CLOSE_DELAY=-1"));
-        TimestampStorage storage = TimestampStorage.createContext(new TimestampRepository(connection));
-        AnalyzerTask<Observation> task = new ObservationAnalyzerTask(analyzer, storage, repository, startTime);
-
-        Timestamp startTimestamp;
-
-        // 1. run
-        startTimestamp = Timestamp.of(startTime);
-        task.run();
-        assertEquals(startTime.toInstant(), storage.get(FIRST_PROVIDED_INCLUSIVE).toInstant());
-        assertEquals(startTime.plusSeconds(2).toInstant(), storage.get(LAST_PROVIDED_EXCLUSIVE).toInstant());
-
-        // simulate analyzer
-        storage.update(startTimestamp.plus(2, SECONDS), LAST_COMMITTED_INCLUSIVE);
-
-        // 2. run
-        startTimestamp = startTimestamp.plus(3, SECONDS);
-        task.run();
-        assertEquals(startTimestamp.toInstant(), storage.get(FIRST_PROVIDED_INCLUSIVE).toInstant());
-        assertEquals(startTimestamp.plus(2, SECONDS).toInstant(), storage.get(LAST_PROVIDED_EXCLUSIVE).toInstant());
-    }
-
-    @Test
-    void loadData_processHalted_true() {
-
-        LocalDateTime time = LocalDateTime.of(2020, 1, 1, 0, 0);
-        OffsetDateTime startTime = OffsetDateTime.of(time, ZoneOffset.UTC);
-
-        Analyzer<Observation> analyzer = mock(Analyzer.class);
-        SensLogRepository repository = mock(SensLogRepository.class);
-
-        doAnswer(answer -> {
-            Timestamp timestamp = answer.getArgument(0);
-            return Arrays.asList(
-                    new Observation(new Sensor(123, 456), 1.0, timestamp.plus(0, SECONDS)),
-                    new Observation(new Sensor(123, 456), 1.0, timestamp.plus(1, SECONDS)),
-                    new Observation(new Sensor(123, 456), 1.0, timestamp.plus(2, SECONDS)),
-                    new Observation(new Sensor(123, 456), 1.0, timestamp.plus(3, SECONDS))
-            );
-        }).when(repository).getObservationsFromTime(any(Timestamp.class), true, 100);
-
-        Connection<Jdbi> connection = new Connection<>(Jdbi.create("jdbc:h2:mem:loadData_processHalted_true;DB_CLOSE_DELAY=-1"));
-        TimestampStorage storage = TimestampStorage.createContext(new TimestampRepository(connection));
-        AnalyzerTask<Observation> task = new ObservationAnalyzerTask(analyzer, storage, repository, startTime);
-
-        Timestamp startTimestamp;
-
-        // 1. run
-        task.run();
-        startTimestamp = Timestamp.of(startTime);
-        assertEquals(startTimestamp.toInstant(), storage.get(FIRST_PROVIDED_INCLUSIVE).toInstant());
-        assertEquals(startTimestamp.plus(2, SECONDS).toInstant(), storage.get(LAST_PROVIDED_EXCLUSIVE).toInstant());
-
-        // simulate analyzer
-        // data was accepted but not persisted to the database
-
-        // 2. run
-        task.run();
-        startTimestamp = startTimestamp.plus(0, SECONDS);
-        assertEquals(startTimestamp.toInstant(), storage.get(FIRST_PROVIDED_INCLUSIVE).toInstant());
-        assertEquals(startTimestamp.plus(2, SECONDS).toInstant(), storage.get(LAST_PROVIDED_EXCLUSIVE).toInstant());
-    }
+package cz.senslog.analyzer.provider.task;
+
+import cz.senslog.analyzer.analysis.Analyzer;
+import cz.senslog.analyzer.domain.Observation;
+import cz.senslog.analyzer.domain.Sensor;
+import cz.senslog.analyzer.domain.Timestamp;
+import cz.senslog.analyzer.provider.AnalyzerTask;
+import cz.senslog.analyzer.storage.Connection;
+import cz.senslog.analyzer.storage.inmemory.TimestampStorage;
+import cz.senslog.analyzer.storage.inmemory.repository.TimestampRepository;
+import cz.senslog.analyzer.storage.permanent.repository.SensLogRepository;
+import org.jdbi.v3.core.Jdbi;
+import org.junit.jupiter.api.Test;
+
+import java.time.LocalDateTime;
+import java.time.OffsetDateTime;
+import java.time.ZoneOffset;
+import java.util.Arrays;
+
+import static cz.senslog.analyzer.domain.TimestampType.*;
+import static java.time.temporal.ChronoUnit.SECONDS;
+import static org.junit.jupiter.api.Assertions.*;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.*;
+
+class ObservationAnalyzerTaskTest {
+
+    @Test
+    void loadData_dataNotAccepted_true() {
+
+        LocalDateTime time = LocalDateTime.of(2020, 1, 1, 0, 0);
+        OffsetDateTime startTime = OffsetDateTime.of(time, ZoneOffset.UTC);
+
+        Analyzer<Observation> analyzer = mock(Analyzer.class);
+        SensLogRepository repository = mock(SensLogRepository.class);
+        when(repository.getObservationsFromTime(Timestamp.of(startTime), true, 100))
+                .then(answer -> {
+                    Timestamp timestamp = answer.getArgument(0);
+                    return Arrays.asList(
+                            new Observation(new Sensor(123, 456), 1.0, timestamp.plus(0, SECONDS)),
+                            new Observation(new Sensor(123, 456), 1.0, timestamp.plus(1, SECONDS)),
+                            new Observation(new Sensor(123, 456), 1.0, timestamp.plus(2, SECONDS)),
+                            new Observation(new Sensor(123, 456), 1.0, timestamp.plus(3, SECONDS))
+                    );
+                });
+
+        Connection<Jdbi> connection = new Connection<>(Jdbi.create("jdbc:h2:mem:loadData_dataNotAccepted_true;DB_CLOSE_DELAY=-1"));
+        TimestampStorage storage = TimestampStorage.createContext(new TimestampRepository(connection));
+
+
+        AnalyzerTask<Observation> task = new ObservationAnalyzerTask(analyzer, storage, repository, startTime);
+
+        task.run();
+        assertEquals(startTime.toInstant(), storage.get(FIRST_PROVIDED_INCLUSIVE).toInstant());
+        assertEquals(startTime.plusSeconds(2).toInstant(), storage.get(LAST_PROVIDED_EXCLUSIVE).toInstant());
+
+        task.run();
+        assertEquals(startTime.toInstant(), storage.get(FIRST_PROVIDED_INCLUSIVE).toInstant());
+        assertEquals(startTime.plusSeconds(2).toInstant(), storage.get(LAST_PROVIDED_EXCLUSIVE).toInstant());
+    }
+
+    @Test
+    void loadData_allDataAccepted_true() {
+
+        LocalDateTime time = LocalDateTime.of(2020, 1, 1, 0, 0);
+        OffsetDateTime startTime = OffsetDateTime.of(time, ZoneOffset.UTC);
+
+        Analyzer<Observation> analyzer = mock(Analyzer.class);
+        SensLogRepository repository = mock(SensLogRepository.class);
+
+        doAnswer(answer -> {
+            Timestamp timestamp = answer.getArgument(0);
+            return Arrays.asList(
+                    new Observation(new Sensor(123, 456), 1.0, timestamp.plus(0, SECONDS)),
+                    new Observation(new Sensor(123, 456), 1.0, timestamp.plus(1, SECONDS)),
+                    new Observation(new Sensor(123, 456), 1.0, timestamp.plus(2, SECONDS)),
+                    new Observation(new Sensor(123, 456), 1.0, timestamp.plus(3, SECONDS))
+            );
+        }).when(repository).getObservationsFromTime(any(Timestamp.class), eq(true), eq(100));
+
+        Connection<Jdbi> connection = new Connection<>(Jdbi.create("jdbc:h2:mem:loadData_allDataAccepted_true;DB_CLOSE_DELAY=-1"));
+        TimestampStorage storage = TimestampStorage.createContext(new TimestampRepository(connection));
+        AnalyzerTask<Observation> task = new ObservationAnalyzerTask(analyzer, storage, repository, startTime);
+
+        Timestamp startTimestamp;
+
+        // 1. run
+        startTimestamp = Timestamp.of(startTime);
+        task.run();
+        assertEquals(startTime.toInstant(), storage.get(FIRST_PROVIDED_INCLUSIVE).toInstant());
+        assertEquals(startTime.plusSeconds(2).toInstant(), storage.get(LAST_PROVIDED_EXCLUSIVE).toInstant());
+
+        // simulate analyzer
+        storage.update(startTimestamp.plus(2, SECONDS), LAST_COMMITTED_INCLUSIVE);
+
+        // 2. run
+        startTimestamp = startTimestamp.plus(3, SECONDS);
+        task.run();
+        assertEquals(startTimestamp.toInstant(), storage.get(FIRST_PROVIDED_INCLUSIVE).toInstant());
+        assertEquals(startTimestamp.plus(2, SECONDS).toInstant(), storage.get(LAST_PROVIDED_EXCLUSIVE).toInstant());
+    }
+
+    @Test
+    void loadData_processHalted_true() {
+
+        LocalDateTime time = LocalDateTime.of(2020, 1, 1, 0, 0);
+        OffsetDateTime startTime = OffsetDateTime.of(time, ZoneOffset.UTC);
+
+        Analyzer<Observation> analyzer = mock(Analyzer.class);
+        SensLogRepository repository = mock(SensLogRepository.class);
+
+        doAnswer(answer -> {
+            Timestamp timestamp = answer.getArgument(0);
+            return Arrays.asList(
+                    new Observation(new Sensor(123, 456), 1.0, timestamp.plus(0, SECONDS)),
+                    new Observation(new Sensor(123, 456), 1.0, timestamp.plus(1, SECONDS)),
+                    new Observation(new Sensor(123, 456), 1.0, timestamp.plus(2, SECONDS)),
+                    new Observation(new Sensor(123, 456), 1.0, timestamp.plus(3, SECONDS))
+            );
+        }).when(repository).getObservationsFromTime(any(Timestamp.class), eq(true), eq(100));
+
+        Connection<Jdbi> connection = new Connection<>(Jdbi.create("jdbc:h2:mem:loadData_processHalted_true;DB_CLOSE_DELAY=-1"));
+        TimestampStorage storage = TimestampStorage.createContext(new TimestampRepository(connection));
+        AnalyzerTask<Observation> task = new ObservationAnalyzerTask(analyzer, storage, repository, startTime);
+
+        Timestamp startTimestamp;
+
+        // 1. run
+        task.run();
+        startTimestamp = Timestamp.of(startTime);
+        assertEquals(startTimestamp.toInstant(), storage.get(FIRST_PROVIDED_INCLUSIVE).toInstant());
+        assertEquals(startTimestamp.plus(2, SECONDS).toInstant(), storage.get(LAST_PROVIDED_EXCLUSIVE).toInstant());
+
+        // simulate analyzer
+        // data was accepted but not persisted to the database
+
+        // 2. run
+        task.run();
+        startTimestamp = startTimestamp.plus(0, SECONDS);
+        assertEquals(startTimestamp.toInstant(), storage.get(FIRST_PROVIDED_INCLUSIVE).toInstant());
+        assertEquals(startTimestamp.plus(2, SECONDS).toInstant(), storage.get(LAST_PROVIDED_EXCLUSIVE).toInstant());
+    }
 }

+ 121 - 122
src/test/java/cz/senslog/analyzer/util/TimestampUtilTest.java

@@ -1,123 +1,122 @@
-package cz.senslog.analyzer.util;
-
-import cz.senslog.analyzer.domain.IntervalGroup;
-import cz.senslog.analyzer.domain.Timestamp;
-import cz.senslog.common.util.TimeRange;
-import cz.senslog.common.util.Tuple;
-import org.junit.jupiter.api.Test;
-
-import java.time.Instant;
-import java.time.LocalDateTime;
-import java.time.OffsetDateTime;
-import java.time.ZoneOffset;
-
-import static cz.senslog.analyzer.domain.Timestamp.of;
-import static java.time.ZoneOffset.UTC;
-import static org.junit.jupiter.api.Assertions.*;
-
-class TimestampUtilTest {
-
-    @Test
-    void truncToIntervalByGroup_HOUR_equals_true() {
-
-        LocalDateTime fromLocal = LocalDateTime.of(2020, 1, 30, 14, 30, 15);
-        LocalDateTime toLocal = LocalDateTime.of(2020, 1, 30, 15, 30, 15);
-
-        Timestamp from = of(OffsetDateTime.of(fromLocal, ZoneOffset.UTC));
-        Timestamp to = of(OffsetDateTime.of(toLocal, ZoneOffset.UTC));
-
-        Tuple<Timestamp, Timestamp> interval = TimestampUtil.truncToIntervalByGroup(from, to, IntervalGroup.HOUR);
-
-        assertNull(interval);
-    }
-
-    @Test
-    void diff_1_MONTH_true() {
-        LocalDateTime from = LocalDateTime.of(2020, 1, 1, 0, 0);
-        LocalDateTime to = from.plusMonths(1).minusSeconds(1);
-        Tuple<Timestamp, Timestamp> timeRange = Tuple.of(of(OffsetDateTime.of(from, UTC)), of(OffsetDateTime.of(to, UTC)));
-
-        assertEquals(2_678_400, TimestampUtil.differenceByIntervalGroup(timeRange, IntervalGroup.MONTH));
-    }
-
-    @Test
-    void diff_5_MONTH_true() {
-        LocalDateTime from = LocalDateTime.of(2020, 3, 1, 0, 0);
-        LocalDateTime to = from.plusMonths(10).minusSeconds(1);
-        Tuple<Timestamp, Timestamp> timeRange = Tuple.of(of(OffsetDateTime.of(from, UTC)), of(OffsetDateTime.of(to, UTC)));
-
-        assertEquals(2_592_000, TimestampUtil.differenceByIntervalGroup(timeRange, IntervalGroup.MONTH));
-    }
-
-    @Test
-    void diff_10_MONTH_leapYear_true() {
-        LocalDateTime from = LocalDateTime.of(2000, 1, 1, 0, 0);
-        LocalDateTime to = from.plusMonths(10).minusSeconds(1);
-        Tuple<Timestamp, Timestamp> timeRange = Tuple.of(of(OffsetDateTime.of(from, UTC)), of(OffsetDateTime.of(to, UTC)));
-
-        assertEquals(2_419_200, TimestampUtil.differenceByIntervalGroup(timeRange, IntervalGroup.MONTH));
-    }
-
-    @Test
-    void diff_15_MONTH_leapYear_true() {
-        LocalDateTime from = LocalDateTime.of(2000, 1, 1, 0, 0);
-        LocalDateTime to = from.plusMonths(15).minusSeconds(1);
-        Tuple<Timestamp, Timestamp> timeRange = Tuple.of(of(OffsetDateTime.of(from, UTC)), of(OffsetDateTime.of(to, UTC)));
-
-        assertEquals(2_419_200, TimestampUtil.differenceByIntervalGroup(timeRange, IntervalGroup.MONTH));
-    }
-
-    @Test
-    void diff_15_MONTH_true() {
-        LocalDateTime from = LocalDateTime.of(2000, 3, 1, 0, 0);
-        LocalDateTime to = from.plusMonths(15).minusSeconds(1);
-        Tuple<Timestamp, Timestamp> timeRange = Tuple.of(of(OffsetDateTime.of(from, UTC)), of(OffsetDateTime.of(to, UTC)));
-
-        assertEquals(2_505_600, TimestampUtil.differenceByIntervalGroup(timeRange, IntervalGroup.MONTH));
-    }
-
-    @Test
-    void diff_2_YEARS_2_MONTHS_true() {
-        LocalDateTime from = LocalDateTime.of(2000, 12, 1, 0, 0);
-        LocalDateTime to = LocalDateTime.of(2001, 2, 1, 0, 0).minusSeconds(1);
-        Tuple<Timestamp, Timestamp> timeRange = Tuple.of(of(OffsetDateTime.of(from, UTC)), of(OffsetDateTime.of(to, UTC)));
-
-        assertEquals(2_678_400, TimestampUtil.differenceByIntervalGroup(timeRange, IntervalGroup.MONTH));
-    }
-
-    @Test
-    void diff_3_YEARS_noLeapYear_true() {
-        LocalDateTime from = LocalDateTime.of(2000, 12, 1, 0, 0);
-        LocalDateTime to = LocalDateTime.of(2003, 2, 1, 0, 0).minusSeconds(1);
-        Tuple<Timestamp, Timestamp> timeRange = Tuple.of(of(OffsetDateTime.of(from, UTC)), of(OffsetDateTime.of(to, UTC)));
-
-        assertEquals(2_505_600, TimestampUtil.differenceByIntervalGroup(timeRange, IntervalGroup.MONTH));
-    }
-
-    @Test
-    void diff_4_YEARS_LastYEAR_leapYear_true() {
-        LocalDateTime from = LocalDateTime.of(2000, 12, 1, 0, 0);
-        LocalDateTime to = LocalDateTime.of(2004, 5, 1, 0, 0).minusSeconds(1);
-        Tuple<Timestamp, Timestamp> timeRange = Tuple.of(of(OffsetDateTime.of(from, UTC)), of(OffsetDateTime.of(to, UTC)));
-
-        assertEquals(2_419_200, TimestampUtil.differenceByIntervalGroup(timeRange, IntervalGroup.MONTH));
-    }
-
-    @Test
-    void diff_3_YEARS_FirstYEAR_leapYear_true() {
-        LocalDateTime from = LocalDateTime.of(2000, 1, 1, 0, 0);
-        LocalDateTime to = from.plusYears(3).minusSeconds(1);
-        Tuple<Timestamp, Timestamp> timeRange = Tuple.of(of(OffsetDateTime.of(from, UTC)), of(OffsetDateTime.of(to, UTC)));
-
-        assertEquals(2_419_200, TimestampUtil.differenceByIntervalGroup(timeRange, IntervalGroup.MONTH));
-    }
-
-    @Test
-    void diff_3_YEARS_MiddleYEAR_leapYear_true() {
-        LocalDateTime from = LocalDateTime.of(1999, 12, 1, 0, 0);
-        LocalDateTime to = from.plusYears(3).minusSeconds(1);
-        Tuple<Timestamp, Timestamp> timeRange = Tuple.of(of(OffsetDateTime.of(from, UTC)), of(OffsetDateTime.of(to, UTC)));
-
-        assertEquals(2_419_200, TimestampUtil.differenceByIntervalGroup(timeRange, IntervalGroup.MONTH));
-    }
+package cz.senslog.analyzer.util;
+
+import cz.senslog.analyzer.domain.IntervalGroup;
+import cz.senslog.analyzer.domain.Timestamp;
+import cz.senslog.analyzer.util.Tuple;
+import org.junit.jupiter.api.Test;
+
+import java.time.Instant;
+import java.time.LocalDateTime;
+import java.time.OffsetDateTime;
+import java.time.ZoneOffset;
+
+import static cz.senslog.analyzer.domain.Timestamp.of;
+import static java.time.ZoneOffset.UTC;
+import static org.junit.jupiter.api.Assertions.*;
+
+class TimestampUtilTest {
+
+    @Test
+    void truncToIntervalByGroup_HOUR_equals_true() {
+
+        LocalDateTime fromLocal = LocalDateTime.of(2020, 1, 30, 14, 30, 15);
+        LocalDateTime toLocal = LocalDateTime.of(2020, 1, 30, 15, 30, 15);
+
+        Timestamp from = of(OffsetDateTime.of(fromLocal, ZoneOffset.UTC));
+        Timestamp to = of(OffsetDateTime.of(toLocal, ZoneOffset.UTC));
+
+        Tuple<Timestamp, Timestamp> interval = TimestampUtil.truncToIntervalByGroup(from, to, IntervalGroup.HOUR);
+
+        assertNull(interval);
+    }
+
+    @Test
+    void diff_1_MONTH_true() {
+        LocalDateTime from = LocalDateTime.of(2020, 1, 1, 0, 0);
+        LocalDateTime to = from.plusMonths(1).minusSeconds(1);
+        Tuple<Timestamp, Timestamp> timeRange = Tuple.of(of(OffsetDateTime.of(from, UTC)), of(OffsetDateTime.of(to, UTC)));
+
+        assertEquals(2_678_400, TimestampUtil.differenceByIntervalGroup(timeRange, IntervalGroup.MONTH));
+    }
+
+    @Test
+    void diff_5_MONTH_true() {
+        LocalDateTime from = LocalDateTime.of(2020, 3, 1, 0, 0);
+        LocalDateTime to = from.plusMonths(10).minusSeconds(1);
+        Tuple<Timestamp, Timestamp> timeRange = Tuple.of(of(OffsetDateTime.of(from, UTC)), of(OffsetDateTime.of(to, UTC)));
+
+        assertEquals(2_592_000, TimestampUtil.differenceByIntervalGroup(timeRange, IntervalGroup.MONTH));
+    }
+
+    @Test
+    void diff_10_MONTH_leapYear_true() {
+        LocalDateTime from = LocalDateTime.of(2000, 1, 1, 0, 0);
+        LocalDateTime to = from.plusMonths(10).minusSeconds(1);
+        Tuple<Timestamp, Timestamp> timeRange = Tuple.of(of(OffsetDateTime.of(from, UTC)), of(OffsetDateTime.of(to, UTC)));
+
+        assertEquals(2_419_200, TimestampUtil.differenceByIntervalGroup(timeRange, IntervalGroup.MONTH));
+    }
+
+    @Test
+    void diff_15_MONTH_leapYear_true() {
+        LocalDateTime from = LocalDateTime.of(2000, 1, 1, 0, 0);
+        LocalDateTime to = from.plusMonths(15).minusSeconds(1);
+        Tuple<Timestamp, Timestamp> timeRange = Tuple.of(of(OffsetDateTime.of(from, UTC)), of(OffsetDateTime.of(to, UTC)));
+
+        assertEquals(2_419_200, TimestampUtil.differenceByIntervalGroup(timeRange, IntervalGroup.MONTH));
+    }
+
+    @Test
+    void diff_15_MONTH_true() {
+        LocalDateTime from = LocalDateTime.of(2000, 3, 1, 0, 0);
+        LocalDateTime to = from.plusMonths(15).minusSeconds(1);
+        Tuple<Timestamp, Timestamp> timeRange = Tuple.of(of(OffsetDateTime.of(from, UTC)), of(OffsetDateTime.of(to, UTC)));
+
+        assertEquals(2_505_600, TimestampUtil.differenceByIntervalGroup(timeRange, IntervalGroup.MONTH));
+    }
+
+    @Test
+    void diff_2_YEARS_2_MONTHS_true() {
+        LocalDateTime from = LocalDateTime.of(2000, 12, 1, 0, 0);
+        LocalDateTime to = LocalDateTime.of(2001, 2, 1, 0, 0).minusSeconds(1);
+        Tuple<Timestamp, Timestamp> timeRange = Tuple.of(of(OffsetDateTime.of(from, UTC)), of(OffsetDateTime.of(to, UTC)));
+
+        assertEquals(2_678_400, TimestampUtil.differenceByIntervalGroup(timeRange, IntervalGroup.MONTH));
+    }
+
+    @Test
+    void diff_3_YEARS_noLeapYear_true() {
+        LocalDateTime from = LocalDateTime.of(2000, 12, 1, 0, 0);
+        LocalDateTime to = LocalDateTime.of(2003, 2, 1, 0, 0).minusSeconds(1);
+        Tuple<Timestamp, Timestamp> timeRange = Tuple.of(of(OffsetDateTime.of(from, UTC)), of(OffsetDateTime.of(to, UTC)));
+
+        assertEquals(2_505_600, TimestampUtil.differenceByIntervalGroup(timeRange, IntervalGroup.MONTH));
+    }
+
+    @Test
+    void diff_4_YEARS_LastYEAR_leapYear_true() {
+        LocalDateTime from = LocalDateTime.of(2000, 12, 1, 0, 0);
+        LocalDateTime to = LocalDateTime.of(2004, 5, 1, 0, 0).minusSeconds(1);
+        Tuple<Timestamp, Timestamp> timeRange = Tuple.of(of(OffsetDateTime.of(from, UTC)), of(OffsetDateTime.of(to, UTC)));
+
+        assertEquals(2_419_200, TimestampUtil.differenceByIntervalGroup(timeRange, IntervalGroup.MONTH));
+    }
+
+    @Test
+    void diff_3_YEARS_FirstYEAR_leapYear_true() {
+        LocalDateTime from = LocalDateTime.of(2000, 1, 1, 0, 0);
+        LocalDateTime to = from.plusYears(3).minusSeconds(1);
+        Tuple<Timestamp, Timestamp> timeRange = Tuple.of(of(OffsetDateTime.of(from, UTC)), of(OffsetDateTime.of(to, UTC)));
+
+        assertEquals(2_419_200, TimestampUtil.differenceByIntervalGroup(timeRange, IntervalGroup.MONTH));
+    }
+
+    @Test
+    void diff_3_YEARS_MiddleYEAR_leapYear_true() {
+        LocalDateTime from = LocalDateTime.of(1999, 12, 1, 0, 0);
+        LocalDateTime to = from.plusYears(3).minusSeconds(1);
+        Tuple<Timestamp, Timestamp> timeRange = Tuple.of(of(OffsetDateTime.of(from, UTC)), of(OffsetDateTime.of(to, UTC)));
+
+        assertEquals(2_419_200, TimestampUtil.differenceByIntervalGroup(timeRange, IntervalGroup.MONTH));
+    }
 }

+ 98 - 101
src/test/java/cz/senslog/analyzer/ws/manager/WSStatisticsManagerTest.java

@@ -1,102 +1,99 @@
-package cz.senslog.analyzer.ws.manager;
-
-import cz.senslog.analyzer.domain.*;
-import cz.senslog.analyzer.storage.permanent.repository.StatisticsConfigRepository;
-import cz.senslog.analyzer.storage.permanent.repository.StatisticsRepository;
-import cz.senslog.analyzer.ws.dto.SensorStatisticsData;
-import cz.senslog.common.util.Tuple;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-
-import java.time.Instant;
-import java.time.LocalDateTime;
-import java.time.OffsetDateTime;
-import java.time.ZoneOffset;
-import java.time.temporal.ChronoUnit;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.List;
-
-import static cz.senslog.analyzer.domain.AggregationType.DOUBLE;
-import static java.time.temporal.ChronoUnit.*;
-import static java.util.Collections.emptySet;
-import static java.util.Collections.singletonList;
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.mockito.ArgumentMatchers.*;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
-
-class WSStatisticsManagerTest {
-
-    @BeforeEach
-    void setUp() {
-
-
-    }
-
-    private static WSStatisticsManager createManager(Group group, List<DoubleStatistics> doubleStatistics) {
-        StatisticsConfigRepository.Special specialRepo = mock(StatisticsConfigRepository.Special.class);
-        when(specialRepo.getGroupIdByUnitSensor(anyLong(), anyLong(), anyLong())).thenReturn(group.getId());
-        StatisticsConfigRepository configRepo = mock(StatisticsConfigRepository.class);
-        when(configRepo.special()).thenReturn(specialRepo);
-
-        StatisticsRepository statisticsRepo = mock(StatisticsRepository.class);
-        when(statisticsRepo.getByTimeRange(anyLong(), any(Tuple.class))).thenReturn(doubleStatistics);
-        return new WSStatisticsManager(statisticsRepo, configRepo);
-    }
-
-
-    @Test
-    void loadData() {
-
-        long unitId = 0;
-        long sensorId = 0;
-
-        Group group = new Group(0, 3600, true, DOUBLE,
-                new HashSet<>(singletonList(new Sensor(unitId, sensorId)))
-        );
-
-        LocalDateTime startDate = LocalDateTime.of(2020, 1, 1, 1, 1);
-        Timestamp from = Timestamp.of(OffsetDateTime.of(startDate, ZoneOffset.UTC));
-        Timestamp to = from.plus(1, ChronoUnit.MONTHS);
-        IntervalGroup intervalGroup = IntervalGroup.DAY;
-
-        WSStatisticsManager manager = createManager(group, Arrays.asList(
-                new DoubleStatistics(group, 2, 10, 20, 30, from.plus(1, DAYS).plus(1, HOURS)),
-                new DoubleStatistics(group, 2, 10, 20, 30, from.plus(1, DAYS).plus(22, HOURS)),
-                new DoubleStatistics(group, 2, 10, 20, 30, from.plus(2, DAYS).plus(1, HOURS)),
-                new DoubleStatistics(group, 2, 10, 20, 30, from.plus(2, DAYS).plus(22, HOURS)),
-                new DoubleStatistics(group, 2, 10, 20, 30, from.plus(3, DAYS).plus(1, HOURS)),
-                new DoubleStatistics(group, 2, 10, 20, 30, from.plus(3, DAYS).plus(22, HOURS)),
-                new DoubleStatistics(group, 2, 10, 20, 30, from.plus(4, DAYS).plus(1, HOURS)),
-                new DoubleStatistics(group, 2, 10, 20, 30, from.plus(4, DAYS).plus(22, HOURS)),
-                new DoubleStatistics(group, 2, 10, 20, 30, from.plus(5, DAYS).plus(1, HOURS)),
-                new DoubleStatistics(group, 2, 10, 20, 30, from.plus(5, DAYS).plus(22, HOURS)),
-                new DoubleStatistics(group, 2, 10, 20, 30, from.plus(10, DAYS).plus(1, HOURS)),
-                new DoubleStatistics(group, 2, 10, 20, 30, from.plus(10, DAYS).plus(22, HOURS)),
-                new DoubleStatistics(group, 2, 10, 20, 30, from.plus(11, DAYS).plus(1, HOURS)),
-                new DoubleStatistics(group, 2, 10, 20, 30, from.plus(11, DAYS).plus(22, HOURS)),
-                new DoubleStatistics(group, 2, 10, 20, 30, from.plus(12, DAYS).plus(1, HOURS)),
-                new DoubleStatistics(group, 2, 10, 20, 30, from.plus(12, DAYS).plus(22, HOURS)),
-                new DoubleStatistics(group, 2, 10, 20, 30, from.plus(13, DAYS).plus(1, HOURS)),
-                new DoubleStatistics(group, 2, 10, 20, 30, from.plus(13, DAYS).plus(22, HOURS)),
-                new DoubleStatistics(group, 2, 10, 20, 30, from.plus(14, DAYS).plus(1, HOURS)),
-                new DoubleStatistics(group, 2, 10, 20, 30, from.plus(14, DAYS).plus(22, HOURS))
-            )
-        );
-
-        List<SensorStatisticsData> dataList = manager.loadData(unitId, sensorId, from, to, intervalGroup);
-
-        assertEquals(1, dataList.size());
-
-        SensorStatisticsData sensorData = dataList.get(0);
-        assertEquals(10, sensorData.getData().size());
-
-        SensorStatisticsData.Data statisticsAggr = sensorData.getStatistics();
-        assertEquals(600, statisticsAggr.getSum());
-        assertEquals(10, statisticsAggr.getMin());
-        assertEquals(20, statisticsAggr.getMax());
-        assertEquals(15, statisticsAggr.getAvg());
-    }
+package cz.senslog.analyzer.ws.manager;
+
+import cz.senslog.analyzer.domain.*;
+import cz.senslog.analyzer.storage.permanent.repository.StatisticsConfigRepository;
+import cz.senslog.analyzer.storage.permanent.repository.StatisticsRepository;
+import cz.senslog.analyzer.ws.dto.SensorStatisticsData;
+import cz.senslog.analyzer.util.Tuple;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import java.time.LocalDateTime;
+import java.time.OffsetDateTime;
+import java.time.ZoneOffset;
+import java.time.temporal.ChronoUnit;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+
+import static cz.senslog.analyzer.domain.AggregationType.DOUBLE;
+import static java.time.temporal.ChronoUnit.*;
+import static java.util.Collections.singletonList;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.mockito.ArgumentMatchers.*;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+class WSStatisticsManagerTest {
+
+    @BeforeEach
+    void setUp() {
+
+
+    }
+
+    private static WSStatisticsManager createManager(Group group, List<DoubleStatistics> doubleStatistics) {
+        StatisticsConfigRepository.Special specialRepo = mock(StatisticsConfigRepository.Special.class);
+        when(specialRepo.getGroupIdByUnitSensor(anyLong(), anyLong(), anyLong())).thenReturn(group.getId());
+        StatisticsConfigRepository configRepo = mock(StatisticsConfigRepository.class);
+        when(configRepo.special()).thenReturn(specialRepo);
+
+        StatisticsRepository statisticsRepo = mock(StatisticsRepository.class);
+        when(statisticsRepo.getByTimeRange(anyLong(), any(Tuple.class))).thenReturn(doubleStatistics);
+        return new WSStatisticsManager(statisticsRepo, configRepo);
+    }
+
+
+    @Test
+    void loadData() {
+
+        long unitId = 0;
+        long sensorId = 0;
+
+        Group group = new Group(0, 3600, true, DOUBLE,
+                new HashSet<>(singletonList(new Sensor(unitId, sensorId)))
+        );
+
+        LocalDateTime startDate = LocalDateTime.of(2020, 1, 1, 1, 1);
+        Timestamp from = Timestamp.of(OffsetDateTime.of(startDate, ZoneOffset.UTC));
+        Timestamp to = from.plus(1, ChronoUnit.MONTHS);
+        IntervalGroup intervalGroup = IntervalGroup.DAY;
+
+        WSStatisticsManager manager = createManager(group, Arrays.asList(
+                new DoubleStatistics(group, 2, 10, 20, 30, from.plus(1, DAYS).plus(1, HOURS)),
+                new DoubleStatistics(group, 2, 10, 20, 30, from.plus(1, DAYS).plus(22, HOURS)),
+                new DoubleStatistics(group, 2, 10, 20, 30, from.plus(2, DAYS).plus(1, HOURS)),
+                new DoubleStatistics(group, 2, 10, 20, 30, from.plus(2, DAYS).plus(22, HOURS)),
+                new DoubleStatistics(group, 2, 10, 20, 30, from.plus(3, DAYS).plus(1, HOURS)),
+                new DoubleStatistics(group, 2, 10, 20, 30, from.plus(3, DAYS).plus(22, HOURS)),
+                new DoubleStatistics(group, 2, 10, 20, 30, from.plus(4, DAYS).plus(1, HOURS)),
+                new DoubleStatistics(group, 2, 10, 20, 30, from.plus(4, DAYS).plus(22, HOURS)),
+                new DoubleStatistics(group, 2, 10, 20, 30, from.plus(5, DAYS).plus(1, HOURS)),
+                new DoubleStatistics(group, 2, 10, 20, 30, from.plus(5, DAYS).plus(22, HOURS)),
+                new DoubleStatistics(group, 2, 10, 20, 30, from.plus(10, DAYS).plus(1, HOURS)),
+                new DoubleStatistics(group, 2, 10, 20, 30, from.plus(10, DAYS).plus(22, HOURS)),
+                new DoubleStatistics(group, 2, 10, 20, 30, from.plus(11, DAYS).plus(1, HOURS)),
+                new DoubleStatistics(group, 2, 10, 20, 30, from.plus(11, DAYS).plus(22, HOURS)),
+                new DoubleStatistics(group, 2, 10, 20, 30, from.plus(12, DAYS).plus(1, HOURS)),
+                new DoubleStatistics(group, 2, 10, 20, 30, from.plus(12, DAYS).plus(22, HOURS)),
+                new DoubleStatistics(group, 2, 10, 20, 30, from.plus(13, DAYS).plus(1, HOURS)),
+                new DoubleStatistics(group, 2, 10, 20, 30, from.plus(13, DAYS).plus(22, HOURS)),
+                new DoubleStatistics(group, 2, 10, 20, 30, from.plus(14, DAYS).plus(1, HOURS)),
+                new DoubleStatistics(group, 2, 10, 20, 30, from.plus(14, DAYS).plus(22, HOURS))
+            )
+        );
+
+        List<SensorStatisticsData> dataList = manager.loadData(unitId, sensorId, from, to, intervalGroup);
+
+        assertEquals(1, dataList.size());
+
+        SensorStatisticsData sensorData = dataList.get(0);
+        assertEquals(10, sensorData.getData().size());
+
+        SensorStatisticsData.Data statisticsAggr = sensorData.getStatistics();
+        assertEquals(600, statisticsAggr.getSum());
+        assertEquals(10, statisticsAggr.getMin());
+        assertEquals(20, statisticsAggr.getMax());
+        assertEquals(15, statisticsAggr.getAvg());
+    }
 }