Remove printStackTrace in favour of log.error #97

Open
wants to merge 2 commits into master
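The diff below applies the same pattern throughout: exceptions are passed to the SLF4J logger (renamed to the constant-style LOGGER) instead of being dumped with printStackTrace(). A minimal before/after sketch of that pattern; the class name and the wrapped work here are illustrative, not taken from the diff:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class Example {

    // Constant-style logger name, as introduced by this PR (LOGGER instead of logger).
    private static final Logger LOGGER = LoggerFactory.getLogger(Example.class);

    public void annotateSafely(Runnable work) {
        try {
            work.run();
        } catch (Exception e) {
            // Before: e.printStackTrace();  // writes to stderr, bypassing the logging configuration
            // After: pass the exception as the last argument so SLF4J records the full stack trace
            LOGGER.error("Error when annotating. ", e);
        }
    }
}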
File: Annotator.java
@@ -23,7 +23,7 @@
*/
public class Annotator {

private static final Logger logger = LoggerFactory.getLogger(Annotator.class);
private static final Logger LOGGER = LoggerFactory.getLogger(Annotator.class);

private MongoFileManager mm;

@@ -44,7 +44,7 @@ public void annotate(Processings annotator_type) {
annotateTeiCollection(annotator_type);
}
} catch (UnreachableAnnotateServiceException | AnnotatorNotAvailableException e) {
logger.error("Error when annotating. ", e);
LOGGER.error("Error when annotating. ", e);

}
}
@@ -65,7 +65,7 @@ private void annotateTeiCollection(Processings annotator_type)
while (mm.hasMore()) {
BiblioObject biblioObject = mm.nextBiblioObject();
if (!AnnotateProperties.isReset() && mm.isProcessed(annotator_type)) {
logger.info("\t\t Already annotated by " + annotator_type + ", Skipping...");
LOGGER.info("\t\t Already annotated by " + annotator_type + ", Skipping...");
continue;
}
Runnable worker = null;
@@ -74,7 +74,7 @@ private void annotateTeiCollection(Processings annotator_type)
biblioObject.setTeiCorpus(mm.getTEICorpus(biblioObject));
worker = new NerdAnnotatorWorker(mm, biblioObject);
} else {
logger.info("\t\t No TEI available for " + biblioObject.getRepositoryDocId());
LOGGER.info("\t\t No TEI available for " + biblioObject.getRepositoryDocId());
}
} else if (annotator_type == Processings.KEYTERM) {
if (biblioObject.getIsProcessedByPub2TEI()) {
@@ -83,7 +83,7 @@ private void annotateTeiCollection(Processings annotator_type)
biblioObject.setTeiCorpus(mm.getTEICorpus(biblioObject));
worker = new KeyTermAnnotatorWorker(mm, biblioObject);
} else {
logger.info("\t\t No Grobid TEI available for " + biblioObject.getRepositoryDocId());
LOGGER.info("\t\t No Grobid TEI available for " + biblioObject.getRepositoryDocId());
}
} else if (annotator_type == Processings.QUANTITIES) {
if (biblioObject.getIsProcessedByPub2TEI()) {
@@ -92,7 +92,7 @@ private void annotateTeiCollection(Processings annotator_type)
biblioObject.setTeiCorpus(mm.getTEICorpus(biblioObject));
worker = new QuantitiesAnnotatorWorker(mm, biblioObject);
} else {
logger.info("\t\t No Grobid TEI available for " + biblioObject.getRepositoryDocId());
LOGGER.info("\t\t No Grobid TEI available for " + biblioObject.getRepositoryDocId());
}
} else if (annotator_type == Processings.PDFQUANTITIES) {
if (biblioObject.getIsWithFulltext()) {
@@ -101,7 +101,7 @@ private void annotateTeiCollection(Processings annotator_type)
biblioObject.setPdf(bf);
worker = new PDFQuantitiesAnnotatorWorker(mm, biblioObject);
} else {
logger.info("\t\t No fulltext available for " + biblioObject.getRepositoryDocId());
LOGGER.info("\t\t No fulltext available for " + biblioObject.getRepositoryDocId());
}
}
if (worker != null) {
@@ -111,7 +111,7 @@ private void annotateTeiCollection(Processings annotator_type)
}
}
}
logger.info("Total: " + nb + " documents annotated.");
LOGGER.info("Total: " + nb + " documents annotated.");
} finally {
mm.close();
}
@@ -132,18 +132,18 @@ private void annotateTeiCollectionMultiThreaded(Processings annotator_type)
ThreadPoolExecutor executor = getThreadsExecutor(annotator_type);

if (mm.initObjects(null, getQuery(AnnotateProperties.isReset(), annotator_type))) {
//logger.info("processing teis for :" + date);
//LOGGER.info("processing teis for :" + date);
while (mm.hasMore()) {
BiblioObject biblioObject = mm.nextBiblioObject();
if (!AnnotateProperties.isReset() && mm.isProcessed(annotator_type)) {
logger.info("\t\t Already annotated by " + annotator_type + ", Skipping...");
LOGGER.info("\t\t Already annotated by " + annotator_type + ", Skipping...");
continue;
}

// filter based on document size... we should actually annotate only
// a given length and then stop
if (biblioObject.getTeiCorpus().length() > 300000) {
logger.info("skipping " + biblioObject.getRepositoryDocId() + ": file too large");
LOGGER.info("skipping " + biblioObject.getRepositoryDocId() + ": file too large");
continue;
}
Runnable worker = null;
@@ -152,7 +152,7 @@ private void annotateTeiCollectionMultiThreaded(Processings annotator_type)
biblioObject.setTeiCorpus(mm.getTEICorpus(biblioObject));
worker = new NerdAnnotatorWorker(mm, biblioObject);
} else {
logger.info("\t\t No TEI available for " + biblioObject.getRepositoryDocId());
LOGGER.info("\t\t No TEI available for " + biblioObject.getRepositoryDocId());
}
} else if (annotator_type == Processings.KEYTERM) {
if (biblioObject.getIsProcessedByPub2TEI()) {
@@ -161,7 +161,7 @@ private void annotateTeiCollectionMultiThreaded(Processings annotator_type)
biblioObject.setTeiCorpus(mm.getTEICorpus(biblioObject));
worker = new KeyTermAnnotatorWorker(mm, biblioObject);
} else {
logger.info("\t\t No Grobid TEI available for " + biblioObject.getRepositoryDocId());
LOGGER.info("\t\t No Grobid TEI available for " + biblioObject.getRepositoryDocId());
}
} else if (annotator_type == Processings.QUANTITIES) {
if (biblioObject.getIsProcessedByPub2TEI()) {
@@ -170,7 +170,7 @@ private void annotateTeiCollectionMultiThreaded(Processings annotator_type)
biblioObject.setTeiCorpus(mm.getTEICorpus(biblioObject));
worker = new QuantitiesAnnotatorWorker(mm, biblioObject);
} else {
logger.info("\t\t No Grobid TEI available for " + biblioObject.getRepositoryDocId());
LOGGER.info("\t\t No Grobid TEI available for " + biblioObject.getRepositoryDocId());
}
} else if (annotator_type == Processings.PDFQUANTITIES) {
BinaryFile bf = new BinaryFile();
@@ -180,7 +180,7 @@ private void annotateTeiCollectionMultiThreaded(Processings annotator_type)
biblioObject.setPdf(bf);
worker = new PDFQuantitiesAnnotatorWorker(mm, biblioObject);
} else {
logger.info("\t\t No fulltext available for " + biblioObject.getRepositoryDocId());
LOGGER.info("\t\t No fulltext available for " + biblioObject.getRepositoryDocId());
}
}
if (worker != null) {
@@ -190,16 +190,16 @@ private void annotateTeiCollectionMultiThreaded(Processings annotator_type)
}
}
executor.shutdown();
logger.info("Jobs done, shutting down thread pool. The executor will wait 2 minutes before forcing the shutdown.");
LOGGER.info("Jobs done, shutting down thread pool. The executor will wait 2 minutes before forcing the shutdown.");
try {
if (!executor.awaitTermination(Long.MAX_VALUE, TimeUnit.MINUTES)) {
executor.shutdownNow();
}
} catch (InterruptedException e) {
executor.shutdownNow();
}
logger.info("Finished all threads");
logger.info("Total: " + nb + " documents annotated.");
LOGGER.info("Finished all threads");
LOGGER.info("Total: " + nb + " documents annotated.");
}
} finally {
mm.close();
@@ -248,7 +248,7 @@ private ThreadPoolExecutor getThreadsExecutor(Processings annotator_type) {
} else if (annotator_type == annotator_type.PDFQUANTITIES) {
nbThreads = AnnotateProperties.getQuantitiesNbThreads();
}
logger.info("Number of threads: " + nbThreads);
LOGGER.info("Number of threads: " + nbThreads);
ThreadPoolExecutor executor = new ThreadPoolExecutor(nbThreads, nbThreads, 60000,
TimeUnit.MILLISECONDS, blockingQueue);

@@ -257,15 +257,15 @@ private ThreadPoolExecutor getThreadsExecutor(Processings annotator_type) {
@Override
public void rejectedExecution(Runnable r,
ThreadPoolExecutor executor) {
logger.info("Task Rejected : "
LOGGER.info("Task Rejected : "
+ ((AnnotatorWorker) r).getRepositoryDocId());
logger.info("Waiting for 60 second !!");
LOGGER.info("Waiting for 60 second !!");
try {
Thread.sleep(60000);
} catch (InterruptedException e) {
logger.error("Error when interrupting the thread. ", e);
LOGGER.error("Error when interrupting the thread. ", e);
}
logger.info("Lets add another time : "
LOGGER.info("Lets add another time : "
+ ((AnnotatorWorker) r).getRepositoryDocId());
executor.execute(r);
}
File: AnnotatorWorker.java
@@ -13,7 +13,7 @@
*/
public abstract class AnnotatorWorker implements Runnable {

private static final Logger logger = LoggerFactory.getLogger(AnnotatorWorker.class);
private static final Logger LOGGER = LoggerFactory.getLogger(AnnotatorWorker.class);
protected MongoFileManager mm = null;
protected BiblioObject biblioObject = null;
protected String annotationsCollection;
@@ -29,10 +29,10 @@ public AnnotatorWorker(MongoFileManager mongoManager,
@Override
public void run() {
long startTime = System.nanoTime();
logger.info("\t\t " + Thread.currentThread().getName() + " Start. Processing = "+biblioObject.getRepositoryDocId());
LOGGER.info("\t\t " + Thread.currentThread().getName() + " Start. Processing = "+biblioObject.getRepositoryDocId());
processCommand();
long endTime = System.nanoTime();
logger.info("\t\t " + Thread.currentThread().getName() + " End. :" + (endTime - startTime) / 1000000 + " ms");
LOGGER.info("\t\t " + Thread.currentThread().getName() + " End. :" + (endTime - startTime) / 1000000 + " ms");
}
protected abstract void processCommand() ;
protected abstract String annotateDocument() ;
File: KeyTermAnnotatorWorker.java
@@ -22,7 +22,7 @@
*/
public class KeyTermAnnotatorWorker extends AnnotatorWorker {

private static final Logger logger = LoggerFactory.getLogger(KeyTermAnnotatorWorker.class);
private static final Logger LOGGER = LoggerFactory.getLogger(KeyTermAnnotatorWorker.class);

public KeyTermAnnotatorWorker(MongoFileManager mongoManager,
BiblioObject biblioObject) {
@@ -36,14 +36,14 @@ protected void processCommand() {

if (inserted) {
mm.updateBiblioObjectStatus(biblioObject, Processings.KEYTERM, false);
logger.info("\t\t " + Thread.currentThread().getName() + ": " + biblioObject.getRepositoryDocId() + " annotated by the KeyTerm extraction and disambiguation service.");
LOGGER.info("\t\t " + Thread.currentThread().getName() + ": " + biblioObject.getRepositoryDocId() + " annotated by the KeyTerm extraction and disambiguation service.");
} else {
logger.info("\t\t " + Thread.currentThread().getName() + ": "
LOGGER.info("\t\t " + Thread.currentThread().getName() + ": "
+ biblioObject.getRepositoryDocId() + " error occured trying to annotate Keyterms.");
}
} catch (Exception ex) {
logger.error("\t\t " + Thread.currentThread().getName() + ": TEI could not be processed by the keyterm extractor: " + biblioObject.getRepositoryDocId());
ex.printStackTrace();
LOGGER.error("\t\t " + Thread.currentThread().getName() + ": TEI could not be processed by the keyterm extractor: " + biblioObject.getRepositoryDocId());
LOGGER.error("Error: ", ex);
}
}

@@ -93,7 +93,7 @@ protected String annotateDocument() {
json.append("{} }");
}
} catch (IOException e) {
logger.error(Thread.currentThread().getName() + ": TEI could not be processed by the keyterm extractor: " + biblioObject.getRepositoryDocId(), e);
LOGGER.error(Thread.currentThread().getName() + ": TEI could not be processed by the keyterm extractor: " + biblioObject.getRepositoryDocId(), e);
return null;
}
return json.toString();
File: NerdAnnotatorWorker.java
@@ -40,7 +40,7 @@
*/
public class NerdAnnotatorWorker extends AnnotatorWorker {

private static final Logger logger = LoggerFactory.getLogger(NerdAnnotatorWorker.class);
private static final Logger LOGGER = LoggerFactory.getLogger(NerdAnnotatorWorker.class);

public NerdAnnotatorWorker(MongoFileManager mongoManager,
BiblioObject biblioObject) {
@@ -53,9 +53,9 @@ protected void processCommand() {
boolean inserted = mm.insertAnnotation(annotateDocument(), annotationsCollection);
if (inserted) {
mm.updateBiblioObjectStatus(biblioObject, Processings.NERD, false);
logger.info("\t\t " + Thread.currentThread().getName() + ": " + biblioObject.getRepositoryDocId() + " annotated by the NERD service.");
LOGGER.info("\t\t " + Thread.currentThread().getName() + ": " + biblioObject.getRepositoryDocId() + " annotated by the NERD service.");
} else {
logger.info("\t\t " + Thread.currentThread().getName() + ": "
LOGGER.info("\t\t " + Thread.currentThread().getName() + ": "
+ biblioObject.getRepositoryDocId() + " error occured trying to annotate with NERD.");
}
}
@@ -89,7 +89,7 @@ protected String annotateDocument() {
annotateNode(metadata, true, json, null);
json.append("] }");
} catch (Exception ex) {
ex.printStackTrace();
LOGGER.error("Error: ", ex);
return null;
}

@@ -143,11 +143,11 @@ private boolean annotateNode(Node node,
NerdClient nerdService = new NerdClient(AnnotateProperties.getNerdHost());
jsonText = nerdService.disambiguateText(text.trim(), language).toString();
} catch (Exception ex) {
logger.error("\t\t " + Thread.currentThread().getName() + ": Text could not be annotated by NERD: " + text);
ex.printStackTrace();
LOGGER.error("\t\t " + Thread.currentThread().getName() + ": Text could not be annotated by NERD: " + text);
LOGGER.error("Error: ", ex);
}
if (jsonText == null) {
logger.error("\t\t " + Thread.currentThread().getName() + ": NERD failed annotating text : " + text);
LOGGER.error("\t\t " + Thread.currentThread().getName() + ": NERD failed annotating text : " + text);
}
if (jsonText != null) {
// resulting annotations, with the corresponding id
File: PDFQuantitiesAnnotatorWorker.java
@@ -15,7 +15,7 @@
*/
public class PDFQuantitiesAnnotatorWorker extends AnnotatorWorker {

private static final Logger logger = LoggerFactory.getLogger(PDFQuantitiesAnnotatorWorker.class);
private static final Logger LOGGER = LoggerFactory.getLogger(PDFQuantitiesAnnotatorWorker.class);

public PDFQuantitiesAnnotatorWorker(MongoFileManager mongoManager,
BiblioObject biblioObject) {
@@ -28,10 +28,10 @@ protected void processCommand() {
boolean inserted = mm.insertAnnotation(annotateDocument(), annotationsCollection);
if (inserted) {
mm.updateBiblioObjectStatus(biblioObject, Processings.PDFQUANTITIES, false);
logger.info("\t\t " + Thread.currentThread().getName() + ": "
LOGGER.info("\t\t " + Thread.currentThread().getName() + ": "
+ biblioObject.getRepositoryDocId() + " annotated by the QUANTITIES service.");
} else {
logger.info("\t\t " + Thread.currentThread().getName() + ": "
LOGGER.info("\t\t " + Thread.currentThread().getName() + ": "
+ biblioObject.getRepositoryDocId() + " error occured trying to annotate with QUANTITIES.");
}

@@ -64,8 +64,8 @@ protected String annotateDocument() {
}
biblioObject.getPdf().getStream().close();
} catch (Exception ex) {
logger.error("\t\t " + Thread.currentThread().getName() + ": PDF could not be processed by the quantities extractor: ");
ex.printStackTrace();
LOGGER.error("\t\t " + Thread.currentThread().getName() + ": PDF could not be processed by the quantities extractor: ");
LOGGER.error("Error: ", ex);
return null;
}
return json.toString();