mirror of
https://github.com/akaessens/NoFbEventScraper
synced 2025-02-16 11:31:15 +01:00
Prepare event page scraping; improve error and result handling
This commit is contained in:
parent
2750ad86e8
commit
a30756a873
221
app/src/main/java/com/akdev/nofbeventscraper/FbEventScraper.java
Normal file
221
app/src/main/java/com/akdev/nofbeventscraper/FbEventScraper.java
Normal file
@ -0,0 +1,221 @@
|
|||||||
|
package com.akdev.nofbeventscraper;

import android.content.SharedPreferences;
import android.os.AsyncTask;

import androidx.preference.PreferenceManager;

import org.json.JSONException;
import org.json.JSONObject;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;

import java.io.IOException;
import java.lang.ref.WeakReference;
import java.net.MalformedURLException;
import java.net.URISyntaxException;
import java.net.URL;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import static com.akdev.nofbeventscraper.FbEvent.createEventList;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* This class can asynchronously scrape public facebook events
|
||||||
|
* and gather the most important information. It is stored in a FbEvent object.
|
||||||
|
*/
|
||||||
|
public class FbEventScraper extends AsyncTask<Void, Void, Void> {
|
||||||
|
|
||||||
|
private FbScraper scraper;
|
||||||
|
private int error;
|
||||||
|
private String url;
|
||||||
|
private FbEvent event;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Constructor with WeakReference to the main activity, to update it's text fields.
|
||||||
|
*
|
||||||
|
* @param scraper Reference to FbScraper
|
||||||
|
* @param input_url Input url to scrape from
|
||||||
|
*/
|
||||||
|
FbEventScraper(FbScraper scraper, String input_url) {
|
||||||
|
|
||||||
|
this.scraper = scraper;
|
||||||
|
this.url = input_url;
|
||||||
|
this.error = 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Strips the event location from the json string.
|
||||||
|
* This can be a name only or a complete postal address.
|
||||||
|
*
|
||||||
|
* @param location_json JSON formatted string
|
||||||
|
* @return String representation of the location.
|
||||||
|
*/
|
||||||
|
protected String fixLocation(String location_json) {
|
||||||
|
|
||||||
|
String location_name = "";
|
||||||
|
|
||||||
|
try {
|
||||||
|
JSONObject reader = new JSONObject(location_json);
|
||||||
|
|
||||||
|
location_name = reader.getString("name");
|
||||||
|
JSONObject address = reader.getJSONObject("address");
|
||||||
|
|
||||||
|
String type = address.getString("@type");
|
||||||
|
|
||||||
|
if (type.equals("PostalAddress")) {
|
||||||
|
String postal_code = address.getString("postalCode");
|
||||||
|
String address_locality = address.getString("addressLocality");
|
||||||
|
String street_address = address.getString("streetAddress");
|
||||||
|
// included in locality
|
||||||
|
//String address_country = address.getString("addressCountry");
|
||||||
|
|
||||||
|
return location_name + ", "
|
||||||
|
+ street_address + ", "
|
||||||
|
+ postal_code + " "
|
||||||
|
+ address_locality;
|
||||||
|
} else {
|
||||||
|
return location_name;
|
||||||
|
}
|
||||||
|
|
||||||
|
} catch (JSONException e) {
|
||||||
|
e.printStackTrace();
|
||||||
|
return location_name;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parses a time string from the facebook event into a Date
|
||||||
|
*
|
||||||
|
* @param time_in time string from the event
|
||||||
|
* @return Date parsed from input or null
|
||||||
|
*/
|
||||||
|
protected Date parseToDate(String time_in) {
|
||||||
|
|
||||||
|
try {
|
||||||
|
// parse e.g. 2011-12-03T10:15:30+0100
|
||||||
|
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssZ", Locale.getDefault());
|
||||||
|
|
||||||
|
return sdf.parse(time_in);
|
||||||
|
|
||||||
|
} catch (Exception e) {
|
||||||
|
e.printStackTrace();
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Replaces all occurrences of a facebook internal links in
|
||||||
|
* an event description into an actual URL.
|
||||||
|
*
|
||||||
|
* @param description_in description string from the event
|
||||||
|
* @return corrected String with internal links resolved
|
||||||
|
*/
|
||||||
|
protected String fixDescriptionLinks(String description_in) {
|
||||||
|
try {
|
||||||
|
/* @[152580919265:274:SiteDescription]
|
||||||
|
* to
|
||||||
|
* SiteDescription [m.facebook.com/152580919265] */
|
||||||
|
|
||||||
|
return description_in.replaceAll("@\\[([0-9]{10,}):[0-9]{3}:([^]]*)]",
|
||||||
|
"$2 [m.facebook.com/$1]");
|
||||||
|
|
||||||
|
} catch (Exception e) {
|
||||||
|
e.printStackTrace();
|
||||||
|
return description_in;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Read a single field from a JSONObject
|
||||||
|
*
|
||||||
|
* @param reader JSONObject to read from
|
||||||
|
* @param field Which field to read
|
||||||
|
* @return String of the value of the field or empty string
|
||||||
|
*/
|
||||||
|
private String readFromJson(JSONObject reader, String field) {
|
||||||
|
try {
|
||||||
|
return reader.getString(field);
|
||||||
|
} catch (Exception e) {
|
||||||
|
e.printStackTrace();
|
||||||
|
return "";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Started by execute().
|
||||||
|
* Gets the HTML doc from the input string and scrapes the event information from it.
|
||||||
|
*
|
||||||
|
* @param voids
|
||||||
|
* @return
|
||||||
|
*/
|
||||||
|
@Override
|
||||||
|
protected Void doInBackground(Void... voids) {
|
||||||
|
|
||||||
|
try {
|
||||||
|
// use default android user agent
|
||||||
|
String user_agent = "Mozilla/5.0 (X11; Linux x86_64)";
|
||||||
|
Document document = Jsoup.connect(url).userAgent(user_agent).get();
|
||||||
|
|
||||||
|
if (document == null) {
|
||||||
|
}
|
||||||
|
String json = document
|
||||||
|
.select("script[type = application/ld+json]")
|
||||||
|
.first().data();
|
||||||
|
|
||||||
|
JSONObject reader = new JSONObject(json);
|
||||||
|
|
||||||
|
|
||||||
|
String name = readFromJson(reader, "name");
|
||||||
|
Date start_date = parseToDate(readFromJson(reader, "startDate"));
|
||||||
|
Date end_date = parseToDate(readFromJson(reader, "endDate"));
|
||||||
|
String description = fixDescriptionLinks(readFromJson(reader, "description"));
|
||||||
|
String location = fixLocation(readFromJson(reader, "location"));
|
||||||
|
|
||||||
|
String image_url = readFromJson(reader, "image"); // get from json
|
||||||
|
|
||||||
|
try {
|
||||||
|
// possibly get higher res image from event header
|
||||||
|
image_url = document.select("div[id=event_header_primary]")
|
||||||
|
.select("img").first().attr("src");
|
||||||
|
|
||||||
|
} catch (Exception e) {
|
||||||
|
// ignore
|
||||||
|
}
|
||||||
|
|
||||||
|
this.event = new FbEvent(url, name, start_date, end_date, description, location, image_url);
|
||||||
|
|
||||||
|
} catch (JSONException e) {
|
||||||
|
e.printStackTrace();
|
||||||
|
this.error = R.string.error_scraping;
|
||||||
|
} catch (IOException e) {
|
||||||
|
e.printStackTrace();
|
||||||
|
this.error = R.string.error_connection;
|
||||||
|
} catch (Exception e) {
|
||||||
|
e.printStackTrace();
|
||||||
|
this.error = R.string.error_unknown;
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
protected void onPreExecute() {
|
||||||
|
super.onPreExecute();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* When scraping is finished, the scraper callback will receive the Event.
|
||||||
|
* @param aVoid
|
||||||
|
*/
|
||||||
|
protected void onPostExecute(Void aVoid) {
|
||||||
|
super.onPostExecute(aVoid);
|
||||||
|
|
||||||
|
this.scraper.scrapeEventResultCallback(this.event, this.error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
@ -1,42 +1,29 @@
|
|||||||
package com.akdev.nofbeventscraper;
|
package com.akdev.nofbeventscraper;
|
||||||
|
|
||||||
import android.content.SharedPreferences;
|
import android.content.SharedPreferences;
|
||||||
import android.os.AsyncTask;
|
|
||||||
|
|
||||||
import androidx.preference.PreferenceManager;
|
import androidx.preference.PreferenceManager;
|
||||||
|
|
||||||
import org.json.JSONException;
|
|
||||||
import org.json.JSONObject;
|
|
||||||
import org.jsoup.Jsoup;
|
|
||||||
import org.jsoup.nodes.Document;
|
|
||||||
|
|
||||||
import java.io.IOException;
|
|
||||||
import java.lang.ref.WeakReference;
|
import java.lang.ref.WeakReference;
|
||||||
import java.net.MalformedURLException;
|
import java.net.MalformedURLException;
|
||||||
import java.net.URISyntaxException;
|
import java.net.URISyntaxException;
|
||||||
import java.net.URL;
|
import java.net.URL;
|
||||||
import java.text.SimpleDateFormat;
|
|
||||||
import java.util.Date;
|
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.Locale;
|
|
||||||
import java.util.regex.Matcher;
|
import java.util.regex.Matcher;
|
||||||
import java.util.regex.Pattern;
|
import java.util.regex.Pattern;
|
||||||
|
|
||||||
import static com.akdev.nofbeventscraper.FbEvent.createEventList;
|
import static com.akdev.nofbeventscraper.FbEvent.createEventList;
|
||||||
|
|
||||||
/**
|
public class FbScraper {
|
||||||
* This class can asynchronously scrape public facebook events
|
|
||||||
* and gather the most important information. It is stored in a FbEvent object.
|
|
||||||
*/
|
|
||||||
public class FbScraper extends AsyncTask<Void, Void, Void> {
|
|
||||||
|
|
||||||
|
protected List<FbEvent> events;
|
||||||
|
url_type_enum url_type = url_type_enum.EVENT;
|
||||||
private int error;
|
private int error;
|
||||||
private String input_url;
|
private String input_url;
|
||||||
private WeakReference<MainActivity> main; // no context leak with WeakReference
|
private WeakReference<MainActivity> main; // no context leak with WeakReference
|
||||||
private List<FbEvent> events;
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Constructor with WeakReference to the main activity, to update it's text fields.
|
* Constructor with WeakReference to the main activity, to add events.
|
||||||
*
|
*
|
||||||
* @param main WeakReference of main activity to prevent context leak
|
* @param main WeakReference of main activity to prevent context leak
|
||||||
* @param input_url Input url to scrape from
|
* @param input_url Input url to scrape from
|
||||||
@ -45,6 +32,12 @@ public class FbScraper extends AsyncTask<Void, Void, Void> {
|
|||||||
this.main = main;
|
this.main = main;
|
||||||
this.input_url = input_url;
|
this.input_url = input_url;
|
||||||
this.events = createEventList();
|
this.events = createEventList();
|
||||||
|
|
||||||
|
run();
|
||||||
|
}
|
||||||
|
|
||||||
|
protected String getPageUrl(String url) throws URISyntaxException, MalformedURLException {
|
||||||
|
throw new URISyntaxException(url, "not implemented");
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@ -55,7 +48,7 @@ public class FbScraper extends AsyncTask<Void, Void, Void> {
|
|||||||
* @throws URISyntaxException if event not found
|
* @throws URISyntaxException if event not found
|
||||||
* @throws MalformedURLException
|
* @throws MalformedURLException
|
||||||
*/
|
*/
|
||||||
protected String fixURI(String url) throws URISyntaxException, MalformedURLException {
|
protected String getEventUrl(String url) throws URISyntaxException, MalformedURLException {
|
||||||
|
|
||||||
// check for url format
|
// check for url format
|
||||||
new URL(url).toURI();
|
new URL(url).toURI();
|
||||||
@ -76,6 +69,7 @@ public class FbScraper extends AsyncTask<Void, Void, Void> {
|
|||||||
// rewrite url to m.facebook and dismiss any query strings or referrals
|
// rewrite url to m.facebook and dismiss any query strings or referrals
|
||||||
String ret = url_prefix + matcher.group(1);
|
String ret = url_prefix + matcher.group(1);
|
||||||
if (matcher.group(2) != null) {
|
if (matcher.group(2) != null) {
|
||||||
|
// add event time identifier
|
||||||
ret += matcher.group(2);
|
ret += matcher.group(2);
|
||||||
}
|
}
|
||||||
return ret;
|
return ret;
|
||||||
@ -85,188 +79,68 @@ public class FbScraper extends AsyncTask<Void, Void, Void> {
|
|||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
void scrapeEvent(String event_url) {
|
||||||
* Strips the event location from the json string.
|
FbEventScraper scraper = new FbEventScraper(this, event_url);
|
||||||
* This can be a name only or a complete postal address.
|
scraper.execute();
|
||||||
*
|
}
|
||||||
* @param location_json JSON formatted string
|
|
||||||
* @return String representation of the location.
|
|
||||||
*/
|
|
||||||
protected String fixLocation(String location_json) {
|
|
||||||
|
|
||||||
String location_name = "";
|
void scrapeEventResultCallback(FbEvent event, int error) {
|
||||||
|
|
||||||
try {
|
if (url_type == url_type_enum.EVENT) {
|
||||||
JSONObject reader = new JSONObject(location_json);
|
if (event != null) {
|
||||||
|
main.get().addEvent(event);
|
||||||
location_name = reader.getString("name");
|
main.get().input_helper(R.string.done, false);
|
||||||
JSONObject address = reader.getJSONObject("address");
|
|
||||||
|
|
||||||
String type = address.getString("@type");
|
|
||||||
|
|
||||||
if (type.equals("PostalAddress")) {
|
|
||||||
String postal_code = address.getString("postalCode");
|
|
||||||
String address_locality = address.getString("addressLocality");
|
|
||||||
String street_address = address.getString("streetAddress");
|
|
||||||
// included in locality
|
|
||||||
//String address_country = address.getString("addressCountry");
|
|
||||||
|
|
||||||
return location_name + ", "
|
|
||||||
+ street_address + ", "
|
|
||||||
+ postal_code + " "
|
|
||||||
+ address_locality;
|
|
||||||
} else {
|
} else {
|
||||||
return location_name;
|
main.get().input_helper(error, true);
|
||||||
}
|
}
|
||||||
|
} else {
|
||||||
} catch (JSONException e) {
|
main.get().addEvent(event);
|
||||||
e.printStackTrace();
|
|
||||||
return location_name;
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
void scrapePage(String page_url) {
|
||||||
* Parses a time string from the facebook event into a Date
|
/*
|
||||||
*
|
FbPageScraper scraper = new FbPageScraper(this, page_url);
|
||||||
* @param time_in time string from the event
|
|
||||||
* @return Date parsed from input or null
|
|
||||||
*/
|
|
||||||
protected Date parseToDate(String time_in) {
|
|
||||||
|
|
||||||
try {
|
scraper.execute();
|
||||||
// parse e.g. 2011-12-03T10:15:30+0100
|
*/
|
||||||
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssZ", Locale.getDefault());
|
|
||||||
|
|
||||||
return sdf.parse(time_in);
|
|
||||||
|
|
||||||
} catch (Exception e) {
|
|
||||||
e.printStackTrace();
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
protected void scrapePageResultCallback(String[] event_urls, int error) {
|
||||||
* Replaces all occurrences of a facebook internal links in
|
|
||||||
* an event description into an actual URL.
|
|
||||||
*
|
|
||||||
* @param description_in description string from the event
|
|
||||||
* @return corrected String with internal links resolved
|
|
||||||
*/
|
|
||||||
protected String fixDescriptionLinks(String description_in) {
|
|
||||||
try {
|
|
||||||
/* @[152580919265:274:SiteDescription]
|
|
||||||
* to
|
|
||||||
* SiteDescription [m.facebook.com/152580919265] */
|
|
||||||
|
|
||||||
return description_in.replaceAll("@\\[([0-9]{10,}):[0-9]{3}:([^]]*)]",
|
if (event_urls != null) {
|
||||||
"$2 [m.facebook.com/$1]");
|
for (String event_url : event_urls) {
|
||||||
|
scrapeEvent(event_url);
|
||||||
} catch (Exception e) {
|
|
||||||
e.printStackTrace();
|
|
||||||
return description_in;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Read a single field from a JSONObject
|
|
||||||
*
|
|
||||||
* @param reader JSONObject to read from
|
|
||||||
* @param field Which field to read
|
|
||||||
* @return String of the value of the field or empty string
|
|
||||||
*/
|
|
||||||
private String readFromJson(JSONObject reader, String field) {
|
|
||||||
try {
|
|
||||||
return reader.getString(field);
|
|
||||||
} catch (Exception e) {
|
|
||||||
e.printStackTrace();
|
|
||||||
return "";
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Started by scraper.execute().
|
|
||||||
* Gets the HTML doc from the input string and scrapes the event information from it.
|
|
||||||
*
|
|
||||||
* @param voids
|
|
||||||
* @return
|
|
||||||
*/
|
|
||||||
@Override
|
|
||||||
protected Void doInBackground(Void... voids) {
|
|
||||||
|
|
||||||
try {
|
|
||||||
String url = fixURI(input_url);
|
|
||||||
// use default android user agent
|
|
||||||
String user_agent = "Mozilla/5.0 (X11; Linux x86_64)";
|
|
||||||
Document document = Jsoup.connect(url).userAgent(user_agent).get();
|
|
||||||
|
|
||||||
if (document == null) {
|
|
||||||
}
|
}
|
||||||
String json = document
|
} else if (url_type == url_type_enum.PAGE) {
|
||||||
.select("script[type = application/ld+json]")
|
main.get().input_helper(error, true);
|
||||||
.first().data();
|
}
|
||||||
|
}
|
||||||
|
|
||||||
JSONObject reader = new JSONObject(json);
|
void run() {
|
||||||
|
|
||||||
|
try {
|
||||||
|
String event_url = getEventUrl(input_url);
|
||||||
|
url_type = url_type_enum.EVENT;
|
||||||
|
scrapeEvent(event_url);
|
||||||
|
|
||||||
String name = readFromJson(reader, "name");
|
return;
|
||||||
Date start_date = parseToDate(readFromJson(reader, "startDate"));
|
|
||||||
Date end_date = parseToDate(readFromJson(reader, "endDate"));
|
|
||||||
String description = fixDescriptionLinks(readFromJson(reader, "description"));
|
|
||||||
String location = fixLocation(readFromJson(reader, "location"));
|
|
||||||
|
|
||||||
String image_url = readFromJson(reader, "image"); // get from json
|
|
||||||
|
|
||||||
try {
|
|
||||||
// possibly get higher res image from event header
|
|
||||||
image_url = document.select("div[id=event_header_primary]")
|
|
||||||
.select("img").first().attr("src");
|
|
||||||
|
|
||||||
} catch (Exception e) {
|
|
||||||
// ignore
|
|
||||||
}
|
|
||||||
|
|
||||||
FbEvent event = new FbEvent(url, name, start_date, end_date, description, location, image_url);
|
|
||||||
this.events.add(event);
|
|
||||||
this.events.add(new FbEvent());
|
|
||||||
|
|
||||||
} catch (URISyntaxException | MalformedURLException e) {
|
} catch (URISyntaxException | MalformedURLException e) {
|
||||||
e.printStackTrace();
|
url_type = url_type_enum.INVALID;
|
||||||
this.error = R.string.error_url;
|
|
||||||
} catch (JSONException e) {
|
|
||||||
e.printStackTrace();
|
|
||||||
this.error = R.string.error_scraping;
|
|
||||||
} catch (IOException e) {
|
|
||||||
e.printStackTrace();
|
|
||||||
this.error = R.string.error_connection;
|
|
||||||
} catch (Exception e) {
|
|
||||||
e.printStackTrace();
|
|
||||||
this.error = R.string.error_unknown;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return null;
|
try {
|
||||||
}
|
String page_url = getPageUrl(input_url);
|
||||||
|
url_type = url_type_enum.PAGE;
|
||||||
|
scrapePage(page_url);
|
||||||
|
|
||||||
@Override
|
} catch (URISyntaxException | MalformedURLException e) {
|
||||||
protected void onPreExecute() {
|
url_type = url_type_enum.INVALID;
|
||||||
super.onPreExecute();
|
main.get().input_helper(R.string.error_url, true);
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* When scraping is finished, main activity will be updated.
|
|
||||||
* If an error occurred, main activity is given an error string.
|
|
||||||
*
|
|
||||||
* @param aVoid
|
|
||||||
*/
|
|
||||||
protected void onPostExecute(Void aVoid) {
|
|
||||||
super.onPostExecute(aVoid);
|
|
||||||
|
|
||||||
if (main != null) {
|
|
||||||
if (! this.events.isEmpty()) {
|
|
||||||
main.get().addEvents(this.events);
|
|
||||||
} else {
|
|
||||||
main.get().error(error);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
|
enum url_type_enum {EVENT, PAGE, INVALID}
|
||||||
|
}
|
@ -158,7 +158,7 @@ public class MainActivity extends AppCompatActivity {
|
|||||||
startScraping();
|
startScraping();
|
||||||
} catch (Exception e) {
|
} catch (Exception e) {
|
||||||
e.printStackTrace();
|
e.printStackTrace();
|
||||||
error(R.string.error_clipboard_empty);
|
input_helper(R.string.error_clipboard_empty, true);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
@ -166,13 +166,16 @@ public class MainActivity extends AppCompatActivity {
|
|||||||
/*
|
/*
|
||||||
* Error in input: clear input on click
|
* Error in input: clear input on click
|
||||||
*/
|
*/
|
||||||
layout_uri_input.setErrorIconOnClickListener(new View.OnClickListener() {
|
View.OnClickListener listener = new View.OnClickListener() {
|
||||||
@Override
|
@Override
|
||||||
public void onClick(View view) {
|
public void onClick(View view) {
|
||||||
layout_uri_input.setError(null);
|
input_helper(R.string.helper_add_link, true);
|
||||||
edit_text_uri_input.setText(null);
|
edit_text_uri_input.setText(null);
|
||||||
|
input_helper(R.string.helper_add_link, false);
|
||||||
}
|
}
|
||||||
});
|
};
|
||||||
|
layout_uri_input.setErrorIconOnClickListener(listener);
|
||||||
|
layout_uri_input.setEndIconOnClickListener(listener);
|
||||||
|
|
||||||
|
|
||||||
/*
|
/*
|
||||||
@ -213,30 +216,35 @@ public class MainActivity extends AppCompatActivity {
|
|||||||
*/
|
*/
|
||||||
public void startScraping() {
|
public void startScraping() {
|
||||||
|
|
||||||
error(null);
|
input_helper(null, false);
|
||||||
|
|
||||||
String url = Objects.requireNonNull(edit_text_uri_input.getText()).toString();
|
String url = Objects.requireNonNull(edit_text_uri_input.getText()).toString();
|
||||||
|
|
||||||
scraper = new FbScraper(new WeakReference<>(this), url);
|
scraper = new FbScraper(new WeakReference<>(this), url);
|
||||||
scraper.execute();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public void error(Integer resId) {
|
public void input_helper(Integer resId, boolean error) {
|
||||||
if (resId != null) {
|
|
||||||
layout_uri_input.setError(getString(resId));
|
String str = (resId != null) ? getString(resId) : " ";
|
||||||
} else {
|
|
||||||
|
if (error) {
|
||||||
|
layout_uri_input.setError(str);
|
||||||
|
}
|
||||||
|
else {
|
||||||
layout_uri_input.setError(null);
|
layout_uri_input.setError(null);
|
||||||
|
layout_uri_input.setHelperText(str);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Adds new events to the start of the events list.
|
* Adds new events to the start of the events list.
|
||||||
*
|
*
|
||||||
* @param new_events the list of events that was scraped by FbScraper
|
* @param new_event the event that was scraped by FbScraper
|
||||||
*/
|
*/
|
||||||
public void addEvents(List<FbEvent> new_events) {
|
public void addEvent(FbEvent new_event) {
|
||||||
|
|
||||||
if (new_events != null) {
|
if (new_event != null) {
|
||||||
this.events.addAll(0, new_events);
|
this.events.add(0, new_event);
|
||||||
this.adapter.notifyDataSetChanged();
|
this.adapter.notifyDataSetChanged();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -18,4 +18,5 @@
|
|||||||
<string name="preferences_events_header">Veranstaltungen</string>
|
<string name="preferences_events_header">Veranstaltungen</string>
|
||||||
<string name="preferences_event_setting">Veranstaltungsliste löschen</string>
|
<string name="preferences_event_setting">Veranstaltungsliste löschen</string>
|
||||||
<string name="preferences_event_snackbar">"Veranstaltungen gelöscht "</string>
|
<string name="preferences_event_snackbar">"Veranstaltungen gelöscht "</string>
|
||||||
|
<string name="done">Fertig</string>
|
||||||
</resources>
|
</resources>
|
@ -30,5 +30,6 @@
|
|||||||
<string name="preferences_event_setting">Clear event list</string>
|
<string name="preferences_event_setting">Clear event list</string>
|
||||||
<string name="preferences_event_snackbar">Events list cleared</string>
|
<string name="preferences_event_snackbar">Events list cleared</string>
|
||||||
<string name="event_placeholder" translatable="false">Placeholder</string>
|
<string name="event_placeholder" translatable="false">Placeholder</string>
|
||||||
|
<string name="done">Done</string>
|
||||||
|
|
||||||
</resources>
|
</resources>
|
||||||
|
Loading…
x
Reference in New Issue
Block a user