Exit correctly on a scraping error instead of looping endlessly
This commit is contained in:
parent
2b035b6975
commit
b4d37fbc3f
|
@ -19,7 +19,7 @@ public class DocumentReceiver {
|
|||
// use default android user agent
|
||||
String user_agent = "Mozilla/5.0 (X11; Linux x86_64)";
|
||||
|
||||
Log.d("scraperLog", "DocumentReceiver: "+url);
|
||||
Log.d("scraperLog", "DocumentReceiver: " + url);
|
||||
|
||||
Connection connection = Jsoup.connect(url).userAgent(user_agent).followRedirects(true);
|
||||
|
||||
|
@ -27,7 +27,7 @@ public class DocumentReceiver {
|
|||
|
||||
document = response.parse();
|
||||
|
||||
Log.d("scraperLog", "Document title: "+document.title());
|
||||
Log.d("scraperLog", "Document title: " + document.title());
|
||||
|
||||
try {
|
||||
// accept cookies needed?
|
||||
|
|
|
@ -99,9 +99,11 @@ public class FbPageScraper extends AsyncTask<Void, Void, Void> {
|
|||
} catch (IOException e) {
|
||||
e.printStackTrace();
|
||||
this.error = R.string.error_connection;
|
||||
return null;
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
this.error = R.string.error_unknown;
|
||||
return null;
|
||||
}
|
||||
} while (url != null);
|
||||
|
||||
|
|
Loading…
Reference in New Issue