Add some logging

This commit is contained in:
akaessens 2021-08-10 12:33:46 +02:00
parent 6c00e63d1f
commit e8893fd712
2 changed files with 11 additions and 1 deletion

View File

@@ -1,6 +1,7 @@
 package com.akdev.nofbeventscraper;
 import android.os.AsyncTask;
+import android.util.Log;
 import org.json.JSONException;
 import org.json.JSONObject;
@@ -143,6 +144,8 @@ public class FbEventScraper extends AsyncTask<Void, Void, Void> {
     @Override
     protected Void doInBackground(Void... voids) {
+        Log.d("scraperLog", "doInBackground: "+url);
         Document document = DocumentReceiver.getDocument(url);
         try {

View File

@@ -2,6 +2,7 @@ package com.akdev.nofbeventscraper;
 import android.content.SharedPreferences;
 import android.os.AsyncTask;
+import android.util.Log;
 import androidx.preference.PreferenceManager;
@@ -165,6 +166,7 @@ public class FbScraper {
     void scrapeEventResultCallback(FbEvent event, int error) {
         if (event != null) {
+            Log.d("scraperLog", "scrapeEventResultCallback: "+event.url);
            main.get().addEvent(event);
            main.get().input_helper(main.get().getString(R.string.done), false);
        } else if (url_type == url_type_enum.EVENT) {
@@ -193,10 +195,11 @@ public class FbScraper {
     protected void scrapePageResultCallback(List<String> event_urls, int error) {
         if (event_urls.size() > 0) {
+            Log.d("scraperLog", "scrapePageResultCallback: "+event_urls.toString());
             for (String event_url : event_urls) {
                 try {
                     String url = getEventUrl(event_url);
+                    Log.d("scraperLog", "scrapePageResultCallback: "+url);
                     scrapeEvent(url);
                 } catch (URISyntaxException | MalformedURLException e) {
                     // ignore this event
@@ -210,11 +213,15 @@ public class FbScraper {
     protected void redirectUrl (String url) {
         FbRedirectionResolver resolver = new FbRedirectionResolver(this, url);
+        Log.d("scraperLog", "redirectUrl: "+url);
         resolver.execute();
     }

     protected void redirectionResultCallback(String url) {
         this.input_url = url;
+        Log.d("scraperLog", "redirectUrlCb: "+url);
         // now try again with expanded url
         this.run();
     }