git://wamblee.org
/
utils
/ blobdiff
commit
grep
author
committer
pickaxe
?
search:
re
summary
|
shortlog
|
log
|
commit
|
commitdiff
|
tree
raw
|
inline
| side by side
Programs for which there was already an attempt to record them will no
[utils]
/
crawler
/
kiss
/
src
/
org
/
wamblee
/
crawler
/
kiss
/
main
/
KissCrawler.java
diff --git a/crawler/kiss/src/org/wamblee/crawler/kiss/main/KissCrawler.java b/crawler/kiss/src/org/wamblee/crawler/kiss/main/KissCrawler.java
index b2f6064a06a524394e808f6c0957ade3a35384c9..c522131d8bb8b1ddb49f9040bdc4b6463dfe1d9d 100644
(file)
--- a/crawler/kiss/src/org/wamblee/crawler/kiss/main/KissCrawler.java
+++ b/crawler/kiss/src/org/wamblee/crawler/kiss/main/KissCrawler.java
@@ -19,10 +19,8 @@ package org.wamblee.crawler.kiss.main;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.IOException;
import java.io.InputStream;
-import java.io.PrintStream;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.regex.Matcher;
@@ -57,11 +55,6 @@ public class KissCrawler {
     private static final Log LOG = LogFactory.getLog(KissCrawler.class);
- /**
- * Log file name for the crawler.
- */
- private static final String LOG_FILE = "kiss.log";
-
     /**
      * Start URL of the electronic programme guide.
      */
@@ -121,14 +114,11 @@ public class KissCrawler {
         _pattern = Pattern.compile(TIME_REGEX);
- FileOutputStream fos = new FileOutputStream(new File(LOG_FILE));
- PrintStream os = new PrintStream(fos);
-
         try {
             HttpClient client = new HttpClient();
             // client.getHostConfiguration().setProxy("127.0.0.1", 3128);
-            Crawler crawler = createCrawler(aCrawlerConfig, os, client);
+            Crawler crawler = createCrawler(aCrawlerConfig, client);
             InputStream programConfigFile = new FileInputStream(new File(
                     aProgramConfig));
             ProgramConfigurationParser parser = new ProgramConfigurationParser();
@@ -141,9 +131,7 @@ public class KissCrawler {
             guide.accept(printer);
             processResults(programFilters, guide, parser.getNotifier());
         } finally {
- os.flush();
- os.close();
- System.out.println("Output written on '" + LOG_FILE + "'");
+ System.out.println("Crawler finished");
}
}
}
}
@@ -188,9 +176,9 @@ public class KissCrawler {
      * @throws FileNotFoundException
      *             In case configuration files cannot be found.
      */
-    private Crawler createCrawler(String aCrawlerConfig, PrintStream aOs,
+    private Crawler createCrawler(String aCrawlerConfig,
             HttpClient aClient) throws FileNotFoundException {
-        ConfigurationParser parser = new ConfigurationParser(aOs);
+        ConfigurationParser parser = new ConfigurationParser();
         InputStream crawlerConfigFile = new FileInputStream(new File(
                 aCrawlerConfig));
         Configuration config = parser.parse(crawlerConfigFile);