From 9fce8bf2a56b2d9f35209aa56fb7c132552eedc7 Mon Sep 17 00:00:00 2001
From: Michael Peter Christen
Date: Wed, 24 Dec 2014 12:23:59 +0100
Subject: [PATCH] crawling of multi-page pdfs with an artificial post part on
 smb or ftp shares is not possible with the disabled setting; this is now
 temporarily disabled until a better solution is at hand.

---
 htroot/js/IndexCreate.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/htroot/js/IndexCreate.js b/htroot/js/IndexCreate.js
index bdb367d96..ff6f9c14a 100644
--- a/htroot/js/IndexCreate.js
+++ b/htroot/js/IndexCreate.js
@@ -77,7 +77,7 @@ function loadInfos() {
 	document.getElementById("ajax").setAttribute("src",AJAX_ON);
 
 	url=document.getElementById("crawlingURL").value;
-	if (url.indexOf("ftp") == 0 || url.indexOf("smb") == 0) document.getElementById("crawlingQ").disabled=true; else document.getElementById("crawlingQ").disabled=false;
+	//if (url.indexOf("ftp") == 0 || url.indexOf("smb") == 0) document.getElementById("crawlingQ").disabled=true; else document.getElementById("crawlingQ").disabled=false;
 	sndReq('/api/getpageinfo_p.xml?actions=title,robots&url='+url);
 	document.getElementById("api").innerHTML = "<a href='http://localhost:8090/api/getpageinfo_p.xml?actions=title,robots&url=" + url + "' id='apilink'><img src='env/grafics/api.png' width='60' height='40' alt='API' /></a><span>See the page info about the start url.</span>";
 }
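
For context, the line commented out above blanket-disabled the crawlingQ option (acceptance of URLs with a query part) whenever the start URL begins with ftp or smb, which also blocked the artificial post part used to address the individual pages of multi-page PDFs on such shares. One possible shape for the "better solution" the commit message defers to is sketched below: keep the share check, but exempt PDF start URLs. This is only an illustration under stated assumptions, not code from the repository; the isPdfUrl helper and the exemption rule are invented for the sketch.

    // Hypothetical refinement (not part of this commit): keep the ftp/smb
    // safeguard, but exempt PDF start URLs, whose multi-page content is
    // addressed via an artificial post/query part.
    function isPdfUrl(u) {
        return /\.pdf(\?.*)?$/i.test(u);  // crude suffix check for .pdf URLs
    }

    // Drop-in replacement for the commented-out line in loadInfos();
    // "url" and the crawlingQ element come from the surrounding code.
    var onShare = url.indexOf("ftp") == 0 || url.indexOf("smb") == 0;
    document.getElementById("crawlingQ").disabled = onShare && !isPdfUrl(url);

With a variant like this, the query option would stay usable for exactly the case the commit message describes, while the original safeguard would remain in place for other share URLs.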