storm-commits mailing list archives

From etha...@apache.org
Subject [1/3] storm git commit: STORM-3133: Extend metrics on Nimbus and LogViewer:
Date Fri, 10 Aug 2018 17:12:44 GMT
Repository: storm
Updated Branches:
  refs/heads/master 16e500844 -> 154173a70


http://git-wip-us.apache.org/repos/asf/storm/blob/bf81b684/storm-webapp/src/main/java/org/apache/storm/daemon/logviewer/utils/LogCleaner.java
----------------------------------------------------------------------
diff --git a/storm-webapp/src/main/java/org/apache/storm/daemon/logviewer/utils/LogCleaner.java b/storm-webapp/src/main/java/org/apache/storm/daemon/logviewer/utils/LogCleaner.java
index b8468ec..ae8aff6 100644
--- a/storm-webapp/src/main/java/org/apache/storm/daemon/logviewer/utils/LogCleaner.java
+++ b/storm-webapp/src/main/java/org/apache/storm/daemon/logviewer/utils/LogCleaner.java
@@ -27,6 +27,8 @@ import static org.apache.storm.DaemonConfig.LOGVIEWER_CLEANUP_INTERVAL_SECS;
 import static org.apache.storm.DaemonConfig.LOGVIEWER_MAX_PER_WORKER_LOGS_SIZE_MB;
 import static org.apache.storm.DaemonConfig.LOGVIEWER_MAX_SUM_WORKER_LOGS_SIZE_MB;
 
+import com.codahale.metrics.Histogram;
+import com.codahale.metrics.Timer;
 import com.google.common.annotations.VisibleForTesting;
 
 import java.io.Closeable;
@@ -48,8 +50,10 @@ import java.util.concurrent.TimeUnit;
 import java.util.function.BinaryOperator;
 import java.util.stream.StreamSupport;
 
+import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.IOUtils;
 import org.apache.storm.StormTimer;
+import org.apache.storm.metric.StormMetricsRegistry;
 import org.apache.storm.utils.ObjectReader;
 import org.apache.storm.utils.Time;
 import org.apache.storm.utils.Utils;
@@ -62,6 +66,9 @@ import org.slf4j.LoggerFactory;
  */
 public class LogCleaner implements Runnable, Closeable {
     private static final Logger LOG = LoggerFactory.getLogger(LogCleaner.class);
+    private static final Timer cleanupRoutineDuration = StormMetricsRegistry.registerTimer("logviewer:cleanup-routine-duration-ms");
+    private static final Histogram numFilesCleanedUp = StormMetricsRegistry.registerHistogram("logviewer:num-files-cleaned-up");
+    private static final Histogram diskSpaceFreed = StormMetricsRegistry.registerHistogram("logviewer:disk-space-freed-in-bytes");
 
     private final Map<String, Object> stormConf;
     private final Integer intervalSecs;
@@ -95,6 +102,8 @@ public class LogCleaner implements Runnable, Closeable {
 
         LOG.info("configured max total size of worker logs: {} MB, max total size of worker logs per directory: {} MB",
                 maxSumWorkerLogsSizeMb, maxPerWorkerLogsSizeMb);
+        //Switch to CachedGauge if this starts to hurt performance
+        StormMetricsRegistry.registerGauge("logviewer:worker-log-dir-size", () -> FileUtils.sizeOf(logRootDir));
     }
 
     /**
@@ -131,7 +140,9 @@ public class LogCleaner implements Runnable, Closeable {
      */
     @Override
     public void run() {
-        try {
+        int numFilesCleaned = 0;
+        long diskSpaceCleaned = 0L;
+        try (Timer.Context t = cleanupRoutineDuration.time()) {
             final long nowMills = Time.currentTimeMillis();
             Set<File> oldLogDirs = selectDirsForCleanup(nowMills);
 
@@ -142,30 +153,41 @@ public class LogCleaner implements Runnable, Closeable {
                     oldLogDirs.stream().map(File::getName).collect(joining(",")),
                     deadWorkerDirs.stream().map(File::getName).collect(joining(",")));
 
-            deadWorkerDirs.forEach(Unchecked.consumer(dir -> {
+            for (File dir : deadWorkerDirs) {
                 String path = dir.getCanonicalPath();
-                LOG.info("Cleaning up: Removing {}", path);
+                long sizeInBytes = FileUtils.sizeOf(dir);
+                LOG.info("Cleaning up: Removing {}, {} KB", path, sizeInBytes * 1e-3);
 
                 try {
                     Utils.forceDelete(path);
                     cleanupEmptyTopoDirectory(dir);
+                    numFilesCleaned++;
+                    diskSpaceCleaned += sizeInBytes;
                 } catch (Exception ex) {
+                    ExceptionMeters.NUM_FILE_REMOVAL_EXCEPTIONS.mark();
                     LOG.error(ex.getMessage(), ex);
                 }
-            }));
+            }
 
-            perWorkerDirCleanup(maxPerWorkerLogsSizeMb * 1024 * 1024);
-            globalLogCleanup(maxSumWorkerLogsSizeMb * 1024 * 1024);
+            final List<DeletionMeta> perWorkerDirCleanupMeta = perWorkerDirCleanup(maxPerWorkerLogsSizeMb * 1024 * 1024);
+            numFilesCleaned += perWorkerDirCleanupMeta.stream().mapToInt(meta -> meta.deletedFiles).sum();
+            diskSpaceCleaned += perWorkerDirCleanupMeta.stream().mapToLong(meta -> meta.deletedSize).sum();
+            final DeletionMeta globalLogCleanupMeta = globalLogCleanup(maxSumWorkerLogsSizeMb * 1024 * 1024);
+            numFilesCleaned += globalLogCleanupMeta.deletedFiles;
+            diskSpaceCleaned += globalLogCleanupMeta.deletedSize;
         } catch (Exception ex) {
+            ExceptionMeters.NUM_CLEANUP_EXCEPTIONS.mark();
             LOG.error("Exception while cleaning up old log.", ex);
         }
+        numFilesCleanedUp.update(numFilesCleaned);
+        diskSpaceFreed.update(diskSpaceCleaned);
     }
 
     /**
      * Delete the oldest files in each overloaded worker log dir.
      */
     @VisibleForTesting
-    List<Integer> perWorkerDirCleanup(long size) {
+    List<DeletionMeta> perWorkerDirCleanup(long size) {
         return workerLogs.getAllWorkerDirs().stream()
                 .map(Unchecked.function(dir ->
                         directoryCleaner.deleteOldestWhileTooLarge(Collections.singletonList(dir), size, true, null)))
@@ -176,7 +198,7 @@ public class LogCleaner implements Runnable, Closeable {
      * Delete the oldest files in overloaded worker-artifacts globally.
      */
     @VisibleForTesting
-    int globalLogCleanup(long size) throws Exception {
+    DeletionMeta globalLogCleanup(long size) throws Exception {
         List<File> workerDirs = new ArrayList<>(workerLogs.getAllWorkerDirs());
         Set<String> aliveWorkerDirs = new HashSet<>(workerLogs.getAliveWorkerDirs());
 
@@ -223,8 +245,8 @@ public class LogCleaner implements Runnable, Closeable {
 
     @VisibleForTesting
     FileFilter mkFileFilterForLogCleanup(long nowMillis) {
-        final long cutoffAgeMillis = cleanupCutoffAgeMillis(nowMillis);
-        return file -> !file.isFile() && lastModifiedTimeWorkerLogdir(file) <= cutoffAgeMillis;
+        //Doesn't it make more sense to do file.isDirectory here?
+        return file -> !file.isFile() && lastModifiedTimeWorkerLogdir(file) <= cleanupCutoffAgeMillis(nowMillis);
     }
 
     /**
@@ -235,7 +257,7 @@ public class LogCleaner implements Runnable, Closeable {
     private long lastModifiedTimeWorkerLogdir(File logDir) {
         long dirModified = logDir.lastModified();
 
-        DirectoryStream<Path> dirStream = null;
+        DirectoryStream<Path> dirStream;
         try {
             dirStream = directoryCleaner.getStreamForDirectory(logDir);
         } catch (IOException e) {
@@ -256,9 +278,7 @@ public class LogCleaner implements Runnable, Closeable {
             LOG.error(ex.getMessage(), ex);
             return dirModified;
         } finally {
-            if (DirectoryStream.class.isInstance(dirStream)) {
-                IOUtils.closeQuietly(dirStream);
-            }
+            IOUtils.closeQuietly(dirStream);
         }
     }
 

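The run() rework above is the standard Dropwizard idiom: a Timer.Context opened in try-with-resources records the routine's duration even when the body throws, and the Histogram updates sit after the try/catch so a pass that fails halfway still reports the files and bytes it did reclaim. A minimal, self-contained sketch of that idiom, using a plain MetricRegistry and made-up metric names rather than Storm's StormMetricsRegistry wrapper:

import com.codahale.metrics.Histogram;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Timer;

public class CleanupMetricsSketch {
    private static final MetricRegistry REGISTRY = new MetricRegistry();
    private static final Timer DURATION = REGISTRY.timer("cleanup-duration-ms");
    private static final Histogram FILES_CLEANED = REGISTRY.histogram("files-cleaned");

    public static void runCleanup() {
        int filesCleaned = 0;
        // Timer.Context is Closeable, so the elapsed time is recorded when the
        // block exits, whether normally or through an exception.
        try (Timer.Context ignored = DURATION.time()) {
            filesCleaned += deleteOldDirs(); // stand-in for the real deletion work
        } catch (Exception e) {
            // log and fall through, mirroring LogCleaner.run()
        }
        // Updated outside the try so partial progress from a failed pass still shows up.
        FILES_CLEANED.update(filesCleaned);
    }

    private static int deleteOldDirs() {
        return 3; // placeholder
    }
}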
http://git-wip-us.apache.org/repos/asf/storm/blob/bf81b684/storm-webapp/src/main/java/org/apache/storm/daemon/logviewer/utils/LogFileDownloader.java
----------------------------------------------------------------------
diff --git a/storm-webapp/src/main/java/org/apache/storm/daemon/logviewer/utils/LogFileDownloader.java b/storm-webapp/src/main/java/org/apache/storm/daemon/logviewer/utils/LogFileDownloader.java
index 67b265d..bfb3065 100644
--- a/storm-webapp/src/main/java/org/apache/storm/daemon/logviewer/utils/LogFileDownloader.java
+++ b/storm-webapp/src/main/java/org/apache/storm/daemon/logviewer/utils/LogFileDownloader.java
@@ -18,12 +18,19 @@
 
 package org.apache.storm.daemon.logviewer.utils;
 
+import com.codahale.metrics.Histogram;
+
 import java.io.File;
 import java.io.IOException;
 
 import javax.ws.rs.core.Response;
 
+import org.apache.commons.io.FileUtils;
+import org.apache.storm.metric.StormMetricsRegistry;
+
+
 public class LogFileDownloader {
+    private static final Histogram fileDownloadSizeDistMB= StormMetricsRegistry.registerHistogram("logviewer:download-file-size-rounded-MB");
 
     private final String logRoot;
     private final String daemonLogRoot;
@@ -55,6 +62,7 @@ public class LogFileDownloader {
         File file = new File(rootDir, fileName).getCanonicalFile();
         if (file.exists()) {
             if (isDaemon || resourceAuthorizer.isUserAllowedToAccessFile(user, fileName)) {
+                fileDownloadSizeDistMB.update(Math.round((double) file.length() / FileUtils.ONE_MB));
                 return LogviewerResponseBuilder.buildDownloadFile(file);
             } else {
                 return LogviewerResponseBuilder.buildResponseUnauthorizedUser(user);

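The new histogram records download sizes rounded to whole megabytes; commons-io's FileUtils.ONE_MB is 1,048,576 bytes. A tiny illustrative snippet (not part of the patch) showing what the rounding does to typical file sizes:

public class DownloadSizeRounding {
    private static final long ONE_MB = 1_048_576L; // same value as FileUtils.ONE_MB

    public static void main(String[] args) {
        // A 1.5 MiB file records 2, while anything under ~512 KiB rounds down to 0,
        // so the histogram tracks coarse MB buckets rather than exact byte counts.
        System.out.println(Math.round((double) 1_572_864L / ONE_MB)); // 2
        System.out.println(Math.round((double) 300_000L / ONE_MB));   // 0
    }
}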
http://git-wip-us.apache.org/repos/asf/storm/blob/bf81b684/storm-webapp/src/main/java/org/apache/storm/daemon/logviewer/utils/LogviewerResponseBuilder.java
----------------------------------------------------------------------
diff --git a/storm-webapp/src/main/java/org/apache/storm/daemon/logviewer/utils/LogviewerResponseBuilder.java b/storm-webapp/src/main/java/org/apache/storm/daemon/logviewer/utils/LogviewerResponseBuilder.java
index b92a559..4c8a191 100644
--- a/storm-webapp/src/main/java/org/apache/storm/daemon/logviewer/utils/LogviewerResponseBuilder.java
+++ b/storm-webapp/src/main/java/org/apache/storm/daemon/logviewer/utils/LogviewerResponseBuilder.java
@@ -76,13 +76,18 @@ public class LogviewerResponseBuilder {
      * @param file file to download
      */
     public static Response buildDownloadFile(File file) throws IOException {
-        // do not close this InputStream in method: it will be used from jetty server
-        InputStream is = new FileInputStream(file);
-        return Response.status(OK)
-                .entity(wrapWithStreamingOutput(is))
-                .type(MediaType.APPLICATION_OCTET_STREAM_TYPE)
-                .header("Content-Disposition", "attachment; filename=\"" + file.getName() + "\"")
-                .build();
+        try {
+            // do not close this InputStream in method: it will be used from jetty server
+            InputStream is = new FileInputStream(file);
+            return Response.status(OK)
+                    .entity(wrapWithStreamingOutput(is))
+                    .type(MediaType.APPLICATION_OCTET_STREAM_TYPE)
+                    .header("Content-Disposition", "attachment; filename=\"" + file.getName() + "\"")
+                    .build();
+        } catch (IOException e) {
+            ExceptionMeters.NUM_FILE_DOWNLOAD_EXCEPTIONS.mark();
+            throw e;
+        }
     }
 
     /**

http://git-wip-us.apache.org/repos/asf/storm/blob/bf81b684/storm-webapp/src/main/java/org/apache/storm/daemon/logviewer/utils/WorkerLogs.java
----------------------------------------------------------------------
diff --git a/storm-webapp/src/main/java/org/apache/storm/daemon/logviewer/utils/WorkerLogs.java b/storm-webapp/src/main/java/org/apache/storm/daemon/logviewer/utils/WorkerLogs.java
index eda2478..d566e3d 100644
--- a/storm-webapp/src/main/java/org/apache/storm/daemon/logviewer/utils/WorkerLogs.java
+++ b/storm-webapp/src/main/java/org/apache/storm/daemon/logviewer/utils/WorkerLogs.java
@@ -22,7 +22,6 @@ import static java.util.stream.Collectors.toCollection;
 import static java.util.stream.Collectors.toMap;
 import static org.apache.storm.Config.SUPERVISOR_RUN_WORKER_AS_USER;
 import static org.apache.storm.Config.TOPOLOGY_SUBMITTER_USER;
-import static org.apache.storm.daemon.utils.ListFunctionalSupport.takeLast;
 
 import com.google.common.collect.Lists;
 
@@ -88,9 +87,14 @@ public class WorkerLogs {
 
         if (runAsUser && topoOwner.isPresent() && file.exists() && !Files.isReadable(file.toPath())) {
             LOG.debug("Setting permissions on file {} with topo-owner {}", fileName, topoOwner);
-            ClientSupervisorUtils.processLauncherAndWait(stormConf, topoOwner.get(),
-                    Lists.newArrayList("blob", file.getCanonicalPath()), null,
-                    "setup group read permissions for file: " + fileName);
+            try {
+                ClientSupervisorUtils.processLauncherAndWait(stormConf, topoOwner.get(),
+                        Lists.newArrayList("blob", file.getCanonicalPath()), null,
+                        "setup group read permissions for file: " + fileName);
+            } catch (IOException e) {
+                ExceptionMeters.NUM_SET_PERMISSION_EXCEPTIONS.mark();
+                throw e;
+            }
         }
     }
 
@@ -127,7 +131,7 @@ public class WorkerLogs {
     /**
      * Return a sorted set of java.io.Files that were written by workers that are now active.
      */
-    public SortedSet<String> getAliveWorkerDirs() throws Exception {
+    public SortedSet<String> getAliveWorkerDirs() {
         Set<String> aliveIds = getAliveIds(Time.currentTimeSecs());
         Set<File> logDirs = getAllWorkerDirs();
         Map<String, File> idToDir = identifyWorkerLogDirs(logDirs);
@@ -177,7 +181,7 @@ public class WorkerLogs {
      *
      * @param nowSecs current time in seconds
      */
-    public Set<String> getAliveIds(int nowSecs) throws Exception {
+    public Set<String> getAliveIds(int nowSecs) {
         return SupervisorUtils.readWorkerHeartbeats(stormConf).entrySet().stream()
                 .filter(entry -> Objects.nonNull(entry.getValue())
                         && !SupervisorUtils.isWorkerHbTimedOut(nowSecs, entry.getValue(), stormConf))

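This mark-and-rethrow wrapping (also used in LogviewerResponseBuilder above and throughout LogviewerResource below) leans on an ExceptionMeters holder that is added in another part of this commit and is not shown here. A plausible sketch, assuming it is an enum of Dropwizard Meters; the constant names below are taken from the call sites, everything else is illustrative:

import com.codahale.metrics.Meter;
import com.codahale.metrics.MetricRegistry;

public enum ExceptionMetersSketch {
    NUM_SET_PERMISSION_EXCEPTIONS("logviewer:num-set-permission-exceptions"),
    NUM_FILE_REMOVAL_EXCEPTIONS("logviewer:num-file-removal-exceptions");

    private final String name;
    private final Meter meter = new Meter();

    ExceptionMetersSketch(String name) {
        this.name = name;
    }

    // Called at the catch site before rethrowing, so the exception still propagates
    // to the caller while the failure is counted.
    public void mark() {
        meter.mark();
    }

    // Register every constant's meter once at daemon start-up.
    public static void registerAll(MetricRegistry registry) {
        for (ExceptionMetersSketch m : values()) {
            registry.register(m.name, m.meter);
        }
    }
}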
http://git-wip-us.apache.org/repos/asf/storm/blob/bf81b684/storm-webapp/src/main/java/org/apache/storm/daemon/logviewer/webapp/LogviewerResource.java
----------------------------------------------------------------------
diff --git a/storm-webapp/src/main/java/org/apache/storm/daemon/logviewer/webapp/LogviewerResource.java b/storm-webapp/src/main/java/org/apache/storm/daemon/logviewer/webapp/LogviewerResource.java
index 85285ac..08881be 100644
--- a/storm-webapp/src/main/java/org/apache/storm/daemon/logviewer/webapp/LogviewerResource.java
+++ b/storm-webapp/src/main/java/org/apache/storm/daemon/logviewer/webapp/LogviewerResource.java
@@ -19,6 +19,7 @@
 package org.apache.storm.daemon.logviewer.webapp;
 
 import com.codahale.metrics.Meter;
+import com.codahale.metrics.Timer;
 
 import java.io.IOException;
 import java.net.URLDecoder;
@@ -38,6 +39,7 @@ import org.apache.storm.daemon.logviewer.handler.LogviewerLogDownloadHandler;
 import org.apache.storm.daemon.logviewer.handler.LogviewerLogPageHandler;
 import org.apache.storm.daemon.logviewer.handler.LogviewerLogSearchHandler;
 import org.apache.storm.daemon.logviewer.handler.LogviewerProfileHandler;
+import org.apache.storm.daemon.logviewer.utils.ExceptionMeters;
 import org.apache.storm.daemon.ui.InvalidRequestException;
 import org.apache.storm.daemon.ui.UIHelpers;
 import org.apache.storm.daemon.ui.resources.StormApiResource;
@@ -62,6 +64,14 @@ public class LogviewerResource {
             "logviewer:num-download-log-daemon-file-http-requests");
     private static final Meter meterListLogsHttpRequests = StormMetricsRegistry.registerMeter("logviewer:num-list-logs-http-requests");
 
+    private static final Meter numSearchLogRequests = StormMetricsRegistry.registerMeter("logviewer:num-search-logs-requests");
+    private static final Meter numDeepSearchArchived = StormMetricsRegistry.registerMeter(
+        "logviewer:num-deep-search-requests-with-archived");
+    private static final Meter numDeepSearchNonArchived = StormMetricsRegistry.registerMeter(
+            "logviewer:num-deep-search-requests-without-archived");
+    private static final Timer searchLogRequestDuration = StormMetricsRegistry.registerTimer("logviewer:search-requests-duration-ms");
+    private static final Timer deepSearchRequestDuration = StormMetricsRegistry.registerTimer("logviewer:deep-search-request-duration-ms");
+
     private final LogviewerLogPageHandler logviewer;
     private final LogviewerProfileHandler profileHandler;
     private final LogviewerLogDownloadHandler logDownloadHandler;
@@ -105,6 +115,9 @@ public class LogviewerResource {
         } catch (InvalidRequestException e) {
             LOG.error(e.getMessage(), e);
             return Response.status(400).entity(e.getMessage()).build();
+        } catch (IOException e) {
+            ExceptionMeters.NUM_READ_LOG_EXCEPTIONS.mark();
+            throw e;
         }
     }
 
@@ -126,6 +139,9 @@ public class LogviewerResource {
         } catch (InvalidRequestException e) {
             LOG.error(e.getMessage(), e);
             return Response.status(400).entity(e.getMessage()).build();
+        } catch (IOException e) {
+            ExceptionMeters.NUM_READ_DAEMON_LOG_EXCEPTIONS.mark();
+            throw e;
         }
     }
 
@@ -158,7 +174,12 @@ public class LogviewerResource {
         String callback = request.getParameter(StormApiResource.callbackParameterName);
         String origin = request.getHeader("Origin");
 
-        return logviewer.listLogFiles(user, portStr != null ? Integer.parseInt(portStr) : null, topologyId, callback, origin);
+        try {
+            return logviewer.listLogFiles(user, portStr != null ? Integer.parseInt(portStr) : null, topologyId, callback, origin);
+        } catch (IOException e) {
+            ExceptionMeters.NUM_LIST_LOG_EXCEPTIONS.mark();
+            throw e;
+        }
     }
 
     /**
@@ -169,7 +190,12 @@ public class LogviewerResource {
     public Response listDumpFiles(@PathParam("topo-id") String topologyId, @PathParam("host-port") String hostPort,
                                   @Context HttpServletRequest request) throws IOException {
         String user = httpCredsHandler.getUserName(request);
-        return profileHandler.listDumpFiles(topologyId, hostPort, user);
+        try {
+            return profileHandler.listDumpFiles(topologyId, hostPort, user);
+        } catch (IOException e) {
+            ExceptionMeters.NUM_LIST_DUMP_EXCEPTIONS.mark();
+            throw e;
+        }
     }
 
     /**
@@ -180,7 +206,12 @@ public class LogviewerResource {
     public Response downloadDumpFile(@PathParam("topo-id") String topologyId, @PathParam("host-port") String hostPort,
                                      @PathParam("filename") String fileName, @Context HttpServletRequest request) throws IOException {
         String user = httpCredsHandler.getUserName(request);
-        return profileHandler.downloadDumpFile(topologyId, hostPort, fileName, user);
+        try {
+            return profileHandler.downloadDumpFile(topologyId, hostPort, fileName, user);
+        } catch (IOException e) {
+            ExceptionMeters.NUM_DOWNLOAD_DUMP_EXCEPTIONS.mark();
+            throw e;
+        }
     }
 
     /**
@@ -194,7 +225,12 @@ public class LogviewerResource {
         String user = httpCredsHandler.getUserName(request);
         String file = request.getParameter("file");
         String decodedFileName = URLDecoder.decode(file);
-        return logDownloadHandler.downloadLogFile(decodedFileName, user);
+        try {
+            return logDownloadHandler.downloadLogFile(decodedFileName, user);
+        } catch (IOException e) {
+            ExceptionMeters.NUM_DOWNLOAD_LOG_EXCEPTIONS.mark();
+            throw e;
+        }
     }
 
     /**
@@ -208,7 +244,12 @@ public class LogviewerResource {
         String user = httpCredsHandler.getUserName(request);
         String file = request.getParameter("file");
         String decodedFileName = URLDecoder.decode(file);
-        return logDownloadHandler.downloadDaemonLogFile(decodedFileName, user);
+        try {
+            return logDownloadHandler.downloadDaemonLogFile(decodedFileName, user);
+        } catch (IOException e) {
+            ExceptionMeters.NUM_DOWNLOAD_DAEMON_LOG_EXCEPTIONS.mark();
+            throw e;
+        }
     }
 
     /**
@@ -217,6 +258,8 @@ public class LogviewerResource {
     @GET
     @Path("/search")
     public Response search(@Context HttpServletRequest request) throws IOException {
+        numSearchLogRequests.mark();
+
         String user = httpCredsHandler.getUserName(request);
         boolean isDaemon = StringUtils.equals(request.getParameter("is-daemon"), "yes");
         String file = request.getParameter("file");
@@ -227,14 +270,17 @@ public class LogviewerResource {
         String callback = request.getParameter(StormApiResource.callbackParameterName);
         String origin = request.getHeader("Origin");
 
-        try {
-            return logSearchHandler.searchLogFile(decodedFileName, user, isDaemon, searchString, numMatchesStr,
-                    startByteOffset, callback, origin);
+        try (Timer.Context t = searchLogRequestDuration.time()) {
+            return logSearchHandler.searchLogFile(decodedFileName, user, isDaemon,
+                searchString, numMatchesStr, startByteOffset, callback, origin);
         } catch (InvalidRequestException e) {
             LOG.error(e.getMessage(), e);
             int statusCode = 400;
             return new JsonResponseBuilder().setData(UIHelpers.exceptionToJson(e, statusCode)).setCallback(callback)
-                    .setStatus(statusCode).build();
+                .setStatus(statusCode).build();
+        } catch (IOException e) {
+            ExceptionMeters.NUM_SEARCH_EXCEPTIONS.mark();
+            throw e;
         }
     }
 
@@ -244,7 +290,7 @@ public class LogviewerResource {
     @GET
     @Path("/deepSearch/{topoId}")
     public Response deepSearch(@PathParam("topoId") String topologyId,
-                               @Context HttpServletRequest request) throws IOException {
+                               @Context HttpServletRequest request) {
         String user = httpCredsHandler.getUserName(request);
         String searchString = request.getParameter("search-string");
         String numMatchesStr = request.getParameter("num-matches");
@@ -255,8 +301,16 @@ public class LogviewerResource {
         String callback = request.getParameter(StormApiResource.callbackParameterName);
         String origin = request.getHeader("Origin");
 
-        return logSearchHandler.deepSearchLogsForTopology(topologyId, user, searchString, numMatchesStr, portStr,
-                startFileOffset, startByteOffset, BooleanUtils.toBooleanObject(searchArchived), callback, origin);
+        Boolean alsoSearchArchived = BooleanUtils.toBooleanObject(searchArchived);
+        if (BooleanUtils.isTrue(alsoSearchArchived)) {
+            numDeepSearchArchived.mark();
+        } else {
+            numDeepSearchNonArchived.mark();
+        }
+        try (Timer.Context t = deepSearchRequestDuration.time()) {
+            return logSearchHandler.deepSearchLogsForTopology(topologyId, user, searchString, numMatchesStr, portStr, startFileOffset,
+                startByteOffset, alsoSearchArchived, callback, origin);
+        }
     }
 
     private int parseIntegerFromMap(Map<String, String[]> map, String parameterKey) throws InvalidRequestException {

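One detail worth noting in deepSearch: the meter is chosen from the raw search-archived parameter via BooleanUtils, so a request that omits the parameter is counted on the "without-archived" meter rather than failing. A small illustration of that behavior (assuming commons-lang3; commons-lang 2.x behaves the same for these calls):

import org.apache.commons.lang3.BooleanUtils;

public class DeepSearchMeterChoice {
    public static void main(String[] args) {
        // toBooleanObject maps "true"/"yes"/"on" to TRUE, "false"/"no"/"off" to FALSE,
        // and anything else (including null) to null; isTrue(null) is false.
        String missing = null; // parameter absent from the request
        System.out.println(BooleanUtils.toBooleanObject("yes"));    // true
        System.out.println(BooleanUtils.toBooleanObject(missing));  // null
        System.out.println(BooleanUtils.isTrue(null));              // false
    }
}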
http://git-wip-us.apache.org/repos/asf/storm/blob/bf81b684/storm-webapp/src/test/java/org/apache/storm/daemon/logviewer/handler/LogviewerLogSearchHandlerTest.java
----------------------------------------------------------------------
diff --git a/storm-webapp/src/test/java/org/apache/storm/daemon/logviewer/handler/LogviewerLogSearchHandlerTest.java b/storm-webapp/src/test/java/org/apache/storm/daemon/logviewer/handler/LogviewerLogSearchHandlerTest.java
index c7a1fd8..0a450be 100644
--- a/storm-webapp/src/test/java/org/apache/storm/daemon/logviewer/handler/LogviewerLogSearchHandlerTest.java
+++ b/storm-webapp/src/test/java/org/apache/storm/daemon/logviewer/handler/LogviewerLogSearchHandlerTest.java
@@ -627,6 +627,7 @@ public class LogviewerLogSearchHandlerTest {
 
     public static class TestDeepSearchLogs {
 
+        public static final int METRIC_SCANNED_FILES = 0;
         private List<File> logFiles;
         private String topoPath;
 
@@ -857,7 +858,7 @@ public class LogviewerLogSearchHandlerTest {
                 int fileOffset = (Integer) arguments[2];
                 String search = (String) arguments[4];
 
-                return new LogviewerLogSearchHandler.Matched(fileOffset, search, Collections.emptyList());
+                return new LogviewerLogSearchHandler.Matched(fileOffset, search, Collections.emptyList(), METRIC_SCANNED_FILES);
             }).when(handler).findNMatches(any(), anyInt(), anyInt(), anyInt(), any());
 
             return handler;

http://git-wip-us.apache.org/repos/asf/storm/blob/bf81b684/storm-webapp/src/test/java/org/apache/storm/daemon/logviewer/utils/LogCleanerTest.java
----------------------------------------------------------------------
diff --git a/storm-webapp/src/test/java/org/apache/storm/daemon/logviewer/utils/LogCleanerTest.java b/storm-webapp/src/test/java/org/apache/storm/daemon/logviewer/utils/LogCleanerTest.java
index 491de54..8b1c0b4 100644
--- a/storm-webapp/src/test/java/org/apache/storm/daemon/logviewer/utils/LogCleanerTest.java
+++ b/storm-webapp/src/test/java/org/apache/storm/daemon/logviewer/utils/LogCleanerTest.java
@@ -159,7 +159,10 @@ public class LogCleanerTest {
             WorkerLogs workerLogs = new WorkerLogs(conf, rootDir);
             LogCleaner logCleaner = new LogCleaner(conf, workerLogs, mockDirectoryCleaner, rootDir);
 
-            List<Integer> deletedFiles = logCleaner.perWorkerDirCleanup(1200);
+            List<Integer> deletedFiles = logCleaner.perWorkerDirCleanup(1200)
+                .stream()
+                .map(deletionMeta -> deletionMeta.deletedFiles)
+                .collect(toList());
             assertEquals(Integer.valueOf(4), deletedFiles.get(0));
             assertEquals(Integer.valueOf(4), deletedFiles.get(1));
             assertEquals(Integer.valueOf(4), deletedFiles.get(deletedFiles.size() - 1));
@@ -218,13 +221,13 @@ public class LogCleanerTest {
             Map<String, Object> conf = Utils.readStormConfig();
             WorkerLogs stubbedWorkerLogs = new WorkerLogs(conf, rootDir) {
                 @Override
-                public SortedSet<String> getAliveWorkerDirs() throws Exception {
+                public SortedSet<String> getAliveWorkerDirs() {
                     return new TreeSet<>(Collections.singletonList("/workers-artifacts/topo1/port1"));
                 }
             };
 
             LogCleaner logCleaner = new LogCleaner(conf, stubbedWorkerLogs, mockDirectoryCleaner, rootDir);
-            int deletedFiles = logCleaner.globalLogCleanup(2400);
+            int deletedFiles = logCleaner.globalLogCleanup(2400).deletedFiles;
             assertEquals(18, deletedFiles);
         } finally {
             Utils.setInstance(prevUtils);


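A final note for readers tracing perWorkerDirCleanup and globalLogCleanup: the DeletionMeta result type is likewise introduced in another part of this commit, so only its usage appears here. From the call sites (deletedFiles summed as an int, deletedSize summed as a long) its shape is presumably close to the sketch below; the field names come from the usage above, the rest is assumed:

public class DeletionMetaSketch {
    public final long deletedSize; // bytes reclaimed by one cleanup pass
    public final int deletedFiles; // files removed in that pass

    public DeletionMetaSketch(long deletedSize, int deletedFiles) {
        this.deletedSize = deletedSize;
        this.deletedFiles = deletedFiles;
    }
}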