2005-09-05 jrandom
    * Expose the HTTP headers to EepGet status listeners.
    * Handle DSA key failures properly: if the signature is not invertible, it is
      obviously invalid.
    * Syndie now properly detects whether a remote archive can send a filtered
      export.zip by examining the HTTP response headers for
      "X-Syndie-Export-Capable: true". If the remote archive does not set that
      header (and neither freesites, nor Apache, nor anything other than the
      ArchiveServlet will), Syndie falls back to individual HTTP requests for
      each blog post and metadata fetch.
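For context: EepGet's StatusListener interface gains a headerReceived(url, attemptNum, key, val) callback, invoked for each HTTP response header, and every existing listener picks up a (mostly empty) implementation, as the hunks below show. A minimal standalone sketch of the detection idiom follows; the HeaderSniffer class is hypothetical, and the real code is RemoteArchiveBean's headerReceived() further down:

    import java.util.HashMap;
    import java.util.Map;

    // Hypothetical sketch of the capability-detection idiom introduced here.
    public class HeaderSniffer {
        private final Map<String, String> _headers = new HashMap<String, String>();

        // Mirrors the new EepGet.StatusListener.headerReceived() signature.
        public void headerReceived(String url, int attemptNum, String key, String val) {
            _headers.put(key, val);
        }

        // True only when the archive explicitly advertises filtered export.zip support.
        public boolean isExportCapable() {
            return "true".equals(_headers.get("X-Syndie-Export-Capable"));
        }

        public static void main(String[] args) {
            HeaderSniffer s = new HeaderSniffer();
            s.headerReceived("http://archive.example.i2p/", 1, "X-Syndie-Export-Capable", "true");
            System.out.println("export capable? " + s.isExportCapable());
        }
    }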
@@ -224,4 +224,5 @@ public class NewsFetcher implements Runnable, EepGet.StatusListener {
        File temp = new File(TEMP_NEWS_FILE);
        temp.delete();
    }
    public void headerReceived(String url, int attemptNum, String key, String val) {}
}

@@ -3,6 +3,7 @@ package net.i2p.router.web;
import java.io.File;
import java.text.DecimalFormat;

import net.i2p.I2PAppContext;
import net.i2p.crypto.TrustedUpdate;
import net.i2p.router.Router;
import net.i2p.router.RouterContext;
@@ -165,6 +166,7 @@ public class UpdateHandler {
            _status = "<b>Transfer failed</b><br />";
            System.setProperty("net.i2p.router.web.UpdateHandler.updateInProgress", "false");
        }
        public void headerReceived(String url, int attemptNum, String key, String val) {}
    }

    private void restart() {

@@ -89,9 +89,12 @@ public class ArchiveServlet extends HttpServlet {
        out.close();
    }

    public static final String HEADER_EXPORT_CAPABLE = "X-Syndie-Export-Capable";

    private void renderSummary(HttpServletResponse resp) throws ServletException, IOException {
        resp.setContentType("text/plain;charset=utf-8");
        //resp.setCharacterEncoding("UTF-8");
        resp.setHeader(HEADER_EXPORT_CAPABLE, "true");
        OutputStream out = resp.getOutputStream();
        ArchiveIndex index = BlogManager.instance().getArchive().getIndex();
        out.write(DataHelper.getUTF8(index.toString()));

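Since only the ArchiveServlet emits this header, a client can probe any URL and treat a missing header as "not export capable". A hedged sketch with plain java.net (the archive address is made up; Syndie itself does this through EepGet rather than HttpURLConnection):

    import java.net.HttpURLConnection;
    import java.net.URL;

    public class ProbeArchive {
        public static void main(String[] args) throws Exception {
            // Address is illustrative; a real Syndie archive sits behind an I2P eepproxy.
            URL url = new URL(args.length > 0 ? args[0] : "http://archive.example.i2p/archive/");
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            // Freesites, Apache, etc. never set this header, so null means
            // "fall back to fetching posts and metadata individually".
            String capable = conn.getHeaderField("X-Syndie-Export-Capable");
            System.out.println("X-Syndie-Export-Capable: " + capable);
            conn.disconnect();
        }
    }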
@@ -24,6 +24,7 @@ public class RemoteArchiveBean {
    private ArchiveIndex _remoteIndex;
    private List _statusMessages;
    private boolean _fetchIndexInProgress;
    private boolean _exportCapable;

    public RemoteArchiveBean() {
        reinitialize();
@@ -35,6 +36,7 @@ public class RemoteArchiveBean {
        _fetchIndexInProgress = false;
        _proxyHost = null;
        _proxyPort = -1;
        _exportCapable = false;
        _statusMessages = new ArrayList();
    }

@@ -149,31 +151,57 @@ public class RemoteArchiveBean {
            entries[i] = ((BlogURI)uris.get(i)).toString();
        }
        if ( (entries == null) || (entries.length <= 0) ) return;
        StringBuffer url = new StringBuffer(512);
        url.append(buildExportURL());
        Set meta = new HashSet();
        for (int i = 0; i < entries.length; i++) {
            BlogURI uri = new BlogURI(entries[i]);
            if (uri.getEntryId() >= 0) {
                url.append("entry=").append(uri.toString()).append('&');
                meta.add(uri.getKeyHash());
                _statusMessages.add("Scheduling blog post fetching for " + HTMLRenderer.sanitizeString(entries[i]));
        if (_exportCapable) {
            StringBuffer url = new StringBuffer(512);
            url.append(buildExportURL());
            Set meta = new HashSet();
            for (int i = 0; i < entries.length; i++) {
                BlogURI uri = new BlogURI(entries[i]);
                if (uri.getEntryId() >= 0) {
                    url.append("entry=").append(uri.toString()).append('&');
                    meta.add(uri.getKeyHash());
                    _statusMessages.add("Scheduling bulk blog post fetch of " + HTMLRenderer.sanitizeString(entries[i]));
                }
            }
            for (Iterator iter = meta.iterator(); iter.hasNext(); ) {
                Hash blog = (Hash)iter.next();
                url.append("meta=").append(blog.toBase64()).append('&');
                _statusMessages.add("Scheduling bulk blog metadata fetch of " + blog.toBase64());
            }
            List urls = new ArrayList(1);
            urls.add(url.toString());
            List tmpFiles = new ArrayList(1);
            try {
                File tmp = File.createTempFile("fetchBulk", ".zip", BlogManager.instance().getTempDir());
                tmpFiles.add(tmp);
                fetch(urls, tmpFiles, user, new BulkFetchListener(tmp));
            } catch (IOException ioe) {
                _statusMessages.add("Internal error creating temporary file to fetch " + HTMLRenderer.sanitizeString(url.toString()) + ": " + ioe.getMessage());
            }
        } else {
            List urls = new ArrayList(entries.length+8);
            for (int i = 0; i < entries.length; i++) {
                BlogURI uri = new BlogURI(entries[i]);
                if (uri.getEntryId() >= 0) {
                    String metaURL = buildMetaURL(uri.getKeyHash());
                    if (!urls.contains(metaURL)) {
                        urls.add(metaURL);
                        _statusMessages.add("Scheduling blog metadata fetch of " + HTMLRenderer.sanitizeString(entries[i]));
                    }
                    urls.add(buildEntryURL(uri));
                    _statusMessages.add("Scheduling blog post fetch of " + HTMLRenderer.sanitizeString(entries[i]));
                }
            }
            List tmpFiles = new ArrayList(1);
            try {
                for (int i = 0; i < urls.size(); i++) {
                    File t = File.createTempFile("fetchBulk", ".dat", BlogManager.instance().getTempDir());
                    tmpFiles.add(t);
                }
                fetch(urls, tmpFiles, user, new BlogStatusListener());
            } catch (IOException ioe) {
                _statusMessages.add("Internal error creating temporary file to fetch posts: " + HTMLRenderer.sanitizeString(urls.toString()));
            }
        }
        for (Iterator iter = meta.iterator(); iter.hasNext(); ) {
            Hash blog = (Hash)iter.next();
            url.append("meta=").append(blog.toBase64()).append('&');
            _statusMessages.add("Scheduling blog metadata fetching for " + blog.toBase64());
        }
        List urls = new ArrayList(1);
        urls.add(url.toString());
        List tmpFiles = new ArrayList(1);
        try {
            File tmp = File.createTempFile("fetchBulk", ".zip", BlogManager.instance().getTempDir());
            tmpFiles.add(tmp);
            fetch(urls, tmpFiles, user, new BulkFetchListener(tmp));
        } catch (IOException ioe) {
            _statusMessages.add("Internal error creating temporary file to fetch " + HTMLRenderer.sanitizeString(url.toString()) + ": " + ioe.getMessage());
        }
    }

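The new if (_exportCapable) branch above batches everything into a single export.zip request instead of one request per post plus one per blog's metadata. A rough sketch of the URL it assembles (all values are made up; buildExportURL() supplies the real prefix, and BlogURI.toString() / Hash.toBase64() supply the real parameter values):

    public class BulkUrlShape {
        public static void main(String[] args) {
            StringBuffer url = new StringBuffer(512);
            url.append("http://archive.example.i2p/export.zip?"); // stand-in for buildExportURL()
            url.append("entry=").append("blog://abcd1234/1125878400").append('&'); // one per selected post
            url.append("meta=").append("abcd1234").append('&');                    // one per distinct blog key
            System.out.println(url);
        }
    }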
@@ -233,6 +261,7 @@ public class RemoteArchiveBean {
        _remoteSchema = schema;
        _proxyHost = null;
        _proxyPort = -1;
        _exportCapable = false;

        if ( (schema == null) || (schema.trim().length() <= 0) ||
             (location == null) || (location.trim().length() <= 0) ) {

@@ -295,6 +324,14 @@ public class RemoteArchiveBean {
            _statusMessages.add("Fetch of " + HTMLRenderer.sanitizeString(url) + " failed after " + bytesTransferred);
            _fetchIndexInProgress = false;
        }
        public void headerReceived(String url, int currentAttempt, String key, String val) {
            if (ArchiveServlet.HEADER_EXPORT_CAPABLE.equals(key) && ("true".equals(val))) {
                _statusMessages.add("Remote archive is bulk export capable");
                _exportCapable = true;
            } else {
                System.err.println("Header received: [" + key + "] = [" + val + "]");
            }
        }
    }

    private class MetadataStatusListener implements EepGet.StatusListener {

@@ -305,30 +342,35 @@ public class RemoteArchiveBean {

        public void bytesTransferred(long alreadyTransferred, int currentWrite, long bytesTransferred, long bytesRemaining, String url) {}
        public void transferComplete(long alreadyTransferred, long bytesTransferred, long bytesRemaining, String url, String outputFile) {
            _statusMessages.add("Fetch of " + HTMLRenderer.sanitizeString(url) + " successful");
            File info = new File(outputFile);
            FileInputStream in = null;
            try {
                BlogInfo i = new BlogInfo();
                in = new FileInputStream(info);
                i.load(in);
                boolean ok = BlogManager.instance().getArchive().storeBlogInfo(i);
                if (ok) {
                    _statusMessages.add("Blog info for " + HTMLRenderer.sanitizeString(i.getProperty(BlogInfo.NAME)) + " imported");
                    BlogManager.instance().getArchive().reloadInfo();
                } else {
                    _statusMessages.add("Blog info at " + HTMLRenderer.sanitizeString(url) + " was corrupt / invalid / forged");
                }
            } catch (IOException ioe) {
                ioe.printStackTrace();
            } finally {
                if (in != null) try { in.close(); } catch (IOException ioe) {}
                info.delete();
            }
            handleMetadata(url, outputFile);
        }
        public void transferFailed(String url, long bytesTransferred, long bytesRemaining, int currentAttempt) {
            _statusMessages.add("Fetch of " + HTMLRenderer.sanitizeString(url) + " failed after " + bytesTransferred);
        }
        public void headerReceived(String url, int currentAttempt, String key, String val) {}
    }

    private void handleMetadata(String url, String outputFile) {
        _statusMessages.add("Fetch of " + HTMLRenderer.sanitizeString(url) + " successful");
        File info = new File(outputFile);
        FileInputStream in = null;
        try {
            BlogInfo i = new BlogInfo();
            in = new FileInputStream(info);
            i.load(in);
            boolean ok = BlogManager.instance().getArchive().storeBlogInfo(i);
            if (ok) {
                _statusMessages.add("Blog info for " + HTMLRenderer.sanitizeString(i.getProperty(BlogInfo.NAME)) + " imported");
                BlogManager.instance().getArchive().reloadInfo();
            } else {
                _statusMessages.add("Blog info at " + HTMLRenderer.sanitizeString(url) + " was corrupt / invalid / forged");
            }
        } catch (IOException ioe) {
            ioe.printStackTrace();
        } finally {
            if (in != null) try { in.close(); } catch (IOException ioe) {}
            info.delete();
        }
    }

    private class BlogStatusListener implements EepGet.StatusListener {

@@ -339,6 +381,10 @@ public class RemoteArchiveBean {

        public void bytesTransferred(long alreadyTransferred, int currentWrite, long bytesTransferred, long bytesRemaining, String url) {}
        public void transferComplete(long alreadyTransferred, long bytesTransferred, long bytesRemaining, String url, String outputFile) {
            if (url.endsWith(".snm")) {
                handleMetadata(url, outputFile);
                return;
            }
            _statusMessages.add("Fetch of " + HTMLRenderer.sanitizeString(url) + " successful");
            File file = new File(outputFile);
            FileInputStream in = null;
@@ -375,6 +421,7 @@ public class RemoteArchiveBean {
        public void transferFailed(String url, long bytesTransferred, long bytesRemaining, int currentAttempt) {
            _statusMessages.add("Fetch of " + HTMLRenderer.sanitizeString(url) + " failed after " + bytesTransferred);
        }
        public void headerReceived(String url, int currentAttempt, String key, String val) {}
    }

    /**

@@ -455,6 +502,7 @@ public class RemoteArchiveBean {
            _statusMessages.add("Fetch of " + HTMLRenderer.sanitizeString(url) + " failed after " + bytesTransferred);
            _tmp.delete();
        }
        public void headerReceived(String url, int currentAttempt, String key, String val) {}
    }

    public void postSelectedEntries(User user, Map parameters) {