nutch-dev mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From "ASF GitHub Bot (JIRA)" <j...@apache.org>
Subject [jira] [Commented] (NUTCH-2242) lastModified not always set
Date Sat, 04 Nov 2017 17:17:00 GMT

    [ https://issues.apache.org/jira/browse/NUTCH-2242?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16239113#comment-16239113
] 

ASF GitHub Bot commented on NUTCH-2242:
---------------------------------------

Omkar20895 closed pull request #238: NUTCH-2242 Injector to stop if job fails to avoid loss
of CrawlDb
URL: https://github.com/apache/nutch/pull/238
 
 
   

This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:

As this is a foreign pull request (from a fork), the diff is supplied
below (as it won't show otherwise due to GitHub magic):

diff --git a/src/java/org/apache/nutch/crawl/Injector.java b/src/java/org/apache/nutch/crawl/Injector.java
index 5f5fd15ff..0603eb4d1 100644
--- a/src/java/org/apache/nutch/crawl/Injector.java
+++ b/src/java/org/apache/nutch/crawl/Injector.java
@@ -414,7 +414,16 @@ public void inject(Path crawlDb, Path urlDir, boolean overwrite,
 
     try {
       // run the job
-      job.waitForCompletion(true);
+      boolean success = job.waitForCompletion(true);
+      if (!success) {
+        String message = "Injector job did not succeed, job status: "
+            + job.getStatus().getState() + ", reason: "
+            + job.getStatus().getFailureInfo();
+        LOG.error(message);
+        cleanupAfterFailure(tempCrawlDb, lock, fs);
+        // throw exception so that calling routine can exit with error
+        throw new RuntimeException(message);
+      }
 
       // save output and perform cleanup
       CrawlDb.install(job, crawlDb);
@@ -452,11 +461,21 @@ public void inject(Path crawlDb, Path urlDir, boolean overwrite,
         LOG.info("Injector: finished at " + sdf.format(end) + ", elapsed: "
             + TimingUtil.elapsedTime(start, end));
       }
-    } catch (IOException e) {
+    } catch (IOException | InterruptedException | ClassNotFoundException e) {
+      LOG.error("Injector job failed", e);
+      cleanupAfterFailure(tempCrawlDb, lock, fs);
+      throw e;
+    }
+  }
+
+  public void cleanupAfterFailure(Path tempCrawlDb, Path lock, FileSystem fs)
+         throws IOException {
+    try{
       if (fs.exists(tempCrawlDb)) {
-        fs.delete(tempCrawlDb, true);
+          fs.delete(tempCrawlDb, true);
       }
-      LockUtil.removeLockFile(conf, lock);
+      LockUtil.removeLockFile(fs, lock);
+    } catch(IOException e) {
       throw e;
     }
   }
diff --git a/src/java/org/apache/nutch/hostdb/ReadHostDb.java b/src/java/org/apache/nutch/hostdb/ReadHostDb.java
index 28a7eb709..257dd6a75 100644
--- a/src/java/org/apache/nutch/hostdb/ReadHostDb.java
+++ b/src/java/org/apache/nutch/hostdb/ReadHostDb.java
@@ -202,8 +202,17 @@ private void readHostDb(Path hostDb, Path output, boolean dumpHomepages, boolean
     job.setNumReduceTasks(0);
 
     try {
-      job.waitForCompletion(true);
-    } catch (Exception e) {
+      boolean success = job.waitForCompletion(true);
+      if(!success){
+        String message = "ReadHostDb job did not succeed, job status: "
+            + job.getStatus().getState() + ", reason: "
+            + job.getStatus().getFailureInfo();
+        LOG.error(message);
+        // throw exception so that calling routine can exit with error
+        throw new RuntimeException(message);
+      }
+    } catch (IOException | InterruptedException | ClassNotFoundException e) {
+      LOG.error("ReadHostDb job failed", e);
       throw e;
     }
 
diff --git a/src/java/org/apache/nutch/util/CrawlCompletionStats.java b/src/java/org/apache/nutch/util/CrawlCompletionStats.java
index 4920fbf32..4b8e1871f 100644
--- a/src/java/org/apache/nutch/util/CrawlCompletionStats.java
+++ b/src/java/org/apache/nutch/util/CrawlCompletionStats.java
@@ -171,8 +171,17 @@ public int run(String[] args) throws Exception {
     job.setNumReduceTasks(numOfReducers);
 
     try {
-      job.waitForCompletion(true);
-    } catch (Exception e) {
+      boolean success = job.waitForCompletion(true);
+      if(!success){
+        String message = jobName + " job did not succeed, job status: "
+            + job.getStatus().getState() + ", reason: "
+            + job.getStatus().getFailureInfo();
+        LOG.error(message);
+        // throw exception so that calling routine can exit with error
+        throw new RuntimeException(message);
+      }
+    } catch (IOException | InterruptedException | ClassNotFoundException e) {
+      LOG.error(jobName + " job failed");
       throw e;
     }
 
diff --git a/src/java/org/apache/nutch/util/ProtocolStatusStatistics.java b/src/java/org/apache/nutch/util/ProtocolStatusStatistics.java
index a18860634..84d892251 100644
--- a/src/java/org/apache/nutch/util/ProtocolStatusStatistics.java
+++ b/src/java/org/apache/nutch/util/ProtocolStatusStatistics.java
@@ -122,8 +122,17 @@ public int run(String[] args) throws Exception {
     job.setNumReduceTasks(numOfReducers);
 
     try {
-      job.waitForCompletion(true);
-    } catch (Exception e) {
+      boolean success = job.waitForCompletion(true);
+      if(!success){
+        String message = jobName + " job did not succeed, job status: "
+            + job.getStatus().getState() + ", reason: "
+            + job.getStatus().getFailureInfo();
+        LOG.error(message);
+        // throw exception so that calling routine can exit with error
+        throw new RuntimeException(message);
+      }
+    } catch (IOException | InterruptedException | ClassNotFoundException e) {
+      LOG.error(jobName + " job failed", e);
       throw e;
     }
 
diff --git a/src/java/org/apache/nutch/util/SitemapProcessor.java b/src/java/org/apache/nutch/util/SitemapProcessor.java
index 8b1d8cccf..ed740807d 100644
--- a/src/java/org/apache/nutch/util/SitemapProcessor.java
+++ b/src/java/org/apache/nutch/util/SitemapProcessor.java
@@ -358,7 +358,16 @@ public void sitemap(Path crawldb, Path hostdb, Path sitemapUrlDir, boolean strict
     job.setReducerClass(SitemapReducer.class);
 
     try {
-      job.waitForCompletion(true);
+      boolean success = job.waitForCompletion(true);
+      if(!success){
+        String message = "SitemapProcessor_" + crawldb.toString() + " job did not succeed, job status: "
+            + job.getStatus().getState() + ", reason: "
+            + job.getStatus().getFailureInfo();
+        LOG.error(message);
+        cleanupAfterFailure(tempCrawlDb, lock, fs);
+        // throw exception so that calling routine can exit with error
+        throw new RuntimeException(message); 
+      }
 
       boolean preserveBackup = conf.getBoolean("db.preserve.backup", true);
       if (!preserveBackup && fs.exists(old))
@@ -385,11 +394,21 @@ public void sitemap(Path crawldb, Path hostdb, Path sitemapUrlDir, boolean strict
         long end = System.currentTimeMillis();
        LOG.info("SitemapProcessor: Finished at {}, elapsed: {}", sdf.format(end), TimingUtil.elapsedTime(start, end));
       }
-    } catch (Exception e) {
-      if (fs.exists(tempCrawlDb))
-        fs.delete(tempCrawlDb, true);
+    } catch (IOException | InterruptedException | ClassNotFoundException e) {
+      LOG.error("SitemapProcessor_" + crawldb.toString(), e);
+      cleanupAfterFailure(tempCrawlDb, lock, fs);
+      throw e;
+    }
+  }
 
+  public void cleanupAfterFailure(Path tempCrawlDb, Path lock, FileSystem fs)
+         throws IOException {
+    try{
+      if (fs.exists(tempCrawlDb)) {
+          fs.delete(tempCrawlDb, true);
+      }
       LockUtil.removeLockFile(fs, lock);
+    } catch(IOException e) {
       throw e;
     }
   }
diff --git a/src/java/org/apache/nutch/util/domain/DomainStatistics.java b/src/java/org/apache/nutch/util/domain/DomainStatistics.java
index af1c2660e..3ff766725 100644
--- a/src/java/org/apache/nutch/util/domain/DomainStatistics.java
+++ b/src/java/org/apache/nutch/util/domain/DomainStatistics.java
@@ -140,8 +140,17 @@ public int run(String[] args) throws Exception {
     job.setNumReduceTasks(numOfReducers);
 
     try {
-      job.waitForCompletion(true);
-    } catch (Exception e) {
+      boolean success = job.waitForCompletion(true);
+      if(!success){
+        String message = "Injector job did not succeed, job status: "
+            + job.getStatus().getState() + ", reason: "
+            + job.getStatus().getFailureInfo();
+        LOG.error(message);
+        // throw exception so that calling routine can exit with error
+        throw new RuntimeException(message);
+      }
+    } catch (IOException | InterruptedException | ClassNotFoundException e) {
+      LOG.error(jobName + " job failed", e);
       throw e;
     }
 


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


> lastModified not always set
> ---------------------------
>
>                 Key: NUTCH-2242
>                 URL: https://issues.apache.org/jira/browse/NUTCH-2242
>             Project: Nutch
>          Issue Type: Bug
>          Components: crawldb
>    Affects Versions: 1.11
>            Reporter: Jurian Broertjes
>            Priority: Minor
>             Fix For: 1.13
>
>         Attachments: NUTCH-2242.patch
>
>
> I observed two issues:
> - When using the DefaultFetchSchedule, CrawlDatum's modifiedTime field is not updated
on the first successful fetch. 
> - When a document modification is detected (protocol- or signature-wise), the modifiedTime
isn't updated
> I can provide a patch later today.



--
This message was sent by Atlassian JIRA
(v6.4.14#64029)

Mime
View raw message