updated Solr 6.6.6 -> 7.7.3

dropped GSA support (the GSA API is still available in YaCy Grid).
An existing Solr 6.6.6 index also works with 7.7.3 without migration.
pull/402/head
Michael Peter Christen 4 years ago
parent c0d9a3e9a7
commit 43a9f4f574

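Since this commit drops the GSA servlet (see the web.xml and header template hunks below), clients that consumed GSA-formatted results from /gsa/search need to move to one of the Solr endpoints that remain mapped, such as /solr/select or /solr/collection1/select. The following is only a minimal query sketch, not part of the commit; the host and port (a local YaCy peer on its default port 8090) and the class name are assumptions, while the query parameters are taken from the API links kept in the header templates.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class SolrSelectExample {
    public static void main(String[] args) throws Exception {
        // Query the Solr default core instead of the removed /gsa/search endpoint.
        // Host and port are assumptions: a local YaCy peer on its default port.
        String url = "http://localhost:8090/solr/select"
                + "?hl=false&wt=yjson&facet=true&facet.mincount=1"
                + "&facet.field=url_file_ext_s&start=0&rows=10&query=www";
        HttpRequest request = HttpRequest.newBuilder(URI.create(url)).GET().build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.body()); // YaCy search result in its JSON (yjson) format
    }
}

The /solr/collection1/select and /solr/webgraph/select mappings kept in web.xml below can be queried the same way, e.g. with defType=edismax for XML output.
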
@@ -47,29 +47,10 @@
<classpathentry kind="lib" path="lib/chardet.jar"/>
<classpathentry kind="lib" path="lib/httpclient-4.5.12.jar"/>
<classpathentry kind="lib" path="lib/httpmime-4.5.12.jar"/>
<classpathentry kind="lib" path="lib/noggit-0.6.jar"/>
<classpathentry kind="lib" path="lib/stax2-api-3.1.4.jar"/>
<classpathentry kind="lib" path="lib/woodstox-core-asl-4.4.1.jar"/>
<classpathentry kind="lib" path="lib/apache-mime4j-0.6.jar"/>
<classpathentry kind="lib" path="lib/commons-math3-3.4.1.jar"/>
<classpathentry kind="lib" path="lib/lucene-analyzers-common-6.6.6.jar"/>
<classpathentry kind="lib" path="lib/lucene-analyzers-phonetic-6.6.6.jar"/>
<classpathentry kind="lib" path="lib/lucene-backward-codecs-6.6.6.jar"/>
<classpathentry kind="lib" path="lib/lucene-classification-6.6.6.jar"/>
<classpathentry kind="lib" path="lib/lucene-codecs-6.6.6.jar"/>
<classpathentry kind="lib" path="lib/lucene-core-6.6.6.jar"/>
<classpathentry kind="lib" path="lib/lucene-grouping-6.6.6.jar"/>
<classpathentry kind="lib" path="lib/lucene-highlighter-6.6.6.jar"/>
<classpathentry kind="lib" path="lib/lucene-join-6.6.6.jar"/>
<classpathentry kind="lib" path="lib/lucene-memory-6.6.6.jar"/>
<classpathentry kind="lib" path="lib/lucene-misc-6.6.6.jar"/>
<classpathentry kind="lib" path="lib/lucene-queries-6.6.6.jar"/>
<classpathentry kind="lib" path="lib/lucene-queryparser-6.6.6.jar"/>
<classpathentry kind="lib" path="lib/lucene-spatial-extras-6.6.6.jar"/>
<classpathentry kind="lib" path="lib/lucene-suggest-6.6.6.jar"/>
<classpathentry kind="lib" path="lib/solr-core-6.6.6.jar"/>
<classpathentry kind="lib" path="lib/solr-solrj-6.6.6.jar"/>
<classpathentry kind="lib" path="lib/zookeeper-3.4.10.jar"/>
<classpathentry kind="lib" path="lib/metrics-core-3.2.2.jar"/>
<classpathentry kind="lib" path="lib/spatial4j-0.6.jar"/>
<classpathentry kind="lib" path="lib/jsoup-1.12.1.jar"/>
@@ -87,6 +68,28 @@
<classpathentry kind="lib" path="lib/jetty-util-9.4.35.v20201120.jar"/>
<classpathentry kind="lib" path="lib/jetty-webapp-9.4.35.v20201120.jar"/>
<classpathentry kind="lib" path="lib/jetty-xml-9.4.35.v20201120.jar"/>
<classpathentry kind="lib" path="lib/zookeeper-3.4.14.jar"/>
<classpathentry kind="lib" path="lib/lucene-analyzers-common-7.7.3.jar"/>
<classpathentry kind="lib" path="lib/lucene-analyzers-phonetic-7.7.3.jar"/>
<classpathentry kind="lib" path="lib/solr-core-7.7.3.jar"/>
<classpathentry kind="lib" path="lib/solr-solrj-7.7.3.jar"/>
<classpathentry kind="lib" path="lib/noggit-0.8.jar"/>
<classpathentry kind="lib" path="lib/lucene-backward-codecs-7.7.3.jar"/>
<classpathentry kind="lib" path="lib/lucene-classification-7.7.3.jar"/>
<classpathentry kind="lib" path="lib/lucene-codecs-7.7.3.jar"/>
<classpathentry kind="lib" path="lib/lucene-core-7.7.3.jar"/>
<classpathentry kind="lib" path="lib/lucene-grouping-7.7.3.jar"/>
<classpathentry kind="lib" path="lib/lucene-highlighter-7.7.3.jar"/>
<classpathentry kind="lib" path="lib/lucene-join-7.7.3.jar"/>
<classpathentry kind="lib" path="lib/lucene-memory-7.7.3.jar"/>
<classpathentry kind="lib" path="lib/lucene-misc-7.7.3.jar"/>
<classpathentry kind="lib" path="lib/lucene-queries-7.7.3.jar"/>
<classpathentry kind="lib" path="lib/lucene-queryparser-7.7.3.jar"/>
<classpathentry kind="lib" path="lib/lucene-spatial-extras-7.7.3.jar"/>
<classpathentry kind="lib" path="lib/lucene-suggest-7.7.3.jar"/>
<classpathentry kind="lib" path="lib/langdetect.jar"/>
<classpathentry kind="lib" path="lib/rrd4j-3.2.jar"/>
<classpathentry kind="lib" path="lib/commons-collections-3.2.2.jar"/>
<classpathentry kind="con" path="org.eclipse.jdt.junit.JUNIT_CONTAINER/4"/>
<classpathentry kind="lib" path="lib/icu4j-63.1.jar"/>
<classpathentry kind="lib" path="lib/javax.servlet-api-3.1.0.jar"/>
@@ -99,12 +102,10 @@
<classpathentry kind="lib" path="lib/imageio-tiff-3.3.2.jar"/>
<classpathentry kind="lib" path="lib/imageio-bmp-3.3.2.jar"/>
<classpathentry kind="lib" path="lib/jsonic-1.3.10.jar"/>
<classpathentry kind="lib" path="lib/langdetect.jar"/>
<classpathentry kind="lib" path="lib/jwat-common-1.1.1.jar"/>
<classpathentry kind="lib" path="lib/jwat-gzip-1.1.1.jar"/>
<classpathentry kind="lib" path="lib/jwat-archive-common-1.1.1.jar"/>
<classpathentry kind="lib" path="lib/jwat-warc-1.1.1.jar"/>
<classpathentry kind="lib" path="lib/commons-collections4-4.1.jar"/>
<classpathentry kind="lib" path="lib/jaudiotagger-2.2.5.jar"/>
<classpathentry kind="lib" path="libt/hamcrest-2.2.jar"/>
<classpathentry kind="lib" path="libt/hamcrest-core-2.2.jar"/>

@@ -58,7 +58,7 @@
<property name="git" location=".git"/>
<property name="defaults" location="defaults"/>
<property name="RDFaParser" location="RDFaParser"/>
<!-- pseudo default branch as fallback -->
<property name="branch" value="" />
@@ -69,8 +69,8 @@
<property name="jquery" location="htroot/jquery/"/>
<property name="portalsearch" location="htroot/portalsearch/"/>
<property name="img-2" location="htroot/yacy/ui/img-2"/>
<!-- variables for installonlinux target-->
<!-- variables for installonlinux target-->
<property name="DESTDIR" value="DESTDIR"/>
<property name="PKGMANAGER" value="false"/>
<property name="RESTARTCMD" value="/etc/init.d/yacy restart"/>
@@ -83,14 +83,14 @@
<target name="buildGitRevTask">
<delete file="${libbuild}/GitRevTask.jar" failonerror="false" />
<javac srcdir="${libbuild}/GitRevTask" includeantruntime="true" encoding="UTF-8">
<classpath>
<pathelement location="${libbuild}/org.eclipse.jgit-4.5.0.201609210915-r.jar" />
<!-- Next are the jgit dependencies -->
<pathelement location="${libbuild}/httpclient-4.3.6.jar" />
<pathelement location="${libbuild}/JavaEWAH-0.7.9.jar" />
<pathelement location="${libbuild}/jsch-0.1.53.jar" />
<pathelement location="${libbuild}/slf4j-api-1.7.2.jar" />
</classpath>
<classpath>
<pathelement location="${libbuild}/org.eclipse.jgit-4.5.0.201609210915-r.jar" />
<!-- Next are the jgit dependencies -->
<pathelement location="${libbuild}/httpclient-4.3.6.jar" />
<pathelement location="${libbuild}/JavaEWAH-0.7.9.jar" />
<pathelement location="${libbuild}/jsch-0.1.53.jar" />
<pathelement location="${libbuild}/slf4j-api-1.7.2.jar" />
</classpath>
</javac>
<jar destfile="${libbuild}/GitRevTask.jar" basedir="${libbuild}/GitRevTask">
<manifest>
@@ -100,18 +100,18 @@
</target>
<target name="determineGitRevision" if="isGit" depends="buildGitRevTask">
<taskdef resource="GitRevTask.properties">
<classpath>
<pathelement location="${libbuild}/GitRevTask.jar" />
<pathelement location="${libbuild}/org.eclipse.jgit-4.5.0.201609210915-r.jar" />
<!-- Next are the jgit dependencies -->
<pathelement location="${libbuild}/httpclient-4.3.6.jar" />
<pathelement location="${libbuild}/JavaEWAH-0.7.9.jar" />
<pathelement location="${libbuild}/jsch-0.1.53.jar" />
<pathelement location="${libbuild}/slf4j-api-1.7.2.jar" />
</classpath>
</taskdef>
<gitRev repoPath="${yacyroot}" branchprop="branch" revprop="releaseNr" dateprop="DSTAMP" />
<taskdef resource="GitRevTask.properties">
<classpath>
<pathelement location="${libbuild}/GitRevTask.jar" />
<pathelement location="${libbuild}/org.eclipse.jgit-4.5.0.201609210915-r.jar" />
<!-- Next are the jgit dependencies -->
<pathelement location="${libbuild}/httpclient-4.3.6.jar" />
<pathelement location="${libbuild}/JavaEWAH-0.7.9.jar" />
<pathelement location="${libbuild}/jsch-0.1.53.jar" />
<pathelement location="${libbuild}/slf4j-api-1.7.2.jar" />
</classpath>
</taskdef>
<gitRev repoPath="${yacyroot}" branchprop="branch" revprop="releaseNr" dateprop="DSTAMP" />
</target>
<!-- reading the build properties from file -->
@@ -128,9 +128,9 @@
<!-- YaCy Release Date -->
<filter token="REPL_DATE" value="${DSTAMP}"/>
<filter token="REPL_RELEASE" value="${stdReleaseFile}"/>
<!-- YaCy Brach -->
<echo message="YaCy Branch: ${branch}" />
<!-- YaCy Brach -->
<echo message="YaCy Branch: ${branch}" />
<!-- YaCy Release Version number -->
<echo message="YaCy Version number: ${releaseVersion}" />
@@ -165,43 +165,44 @@
</fileset>
</javadoc>
</target>
<!-- define the classpath that should be used for compiling -->
<!-- when changing paths here, please also update the paths in /addon/YaCy.app/Contents/Info.plist -->
<path id="project.class.path">
<pathelement location="${build}" />
<pathelement location="${lib}/apache-mime4j-0.6.jar" />
<pathelement location="${lib}/bcmail-jdk15on-1.60.jar" />
<pathelement location="${lib}/bcprov-jdk15on-1.60.jar" />
<pathelement location="${lib}/bcpkix-jdk15on-1.60.jar" />
<pathelement location="${lib}/chardet.jar" />
<pathelement location="${lib}/common-image-3.3.2.jar" />
<pathelement location="${lib}/common-io-3.3.2.jar" />
<pathelement location="${lib}/common-lang-3.3.2.jar" />
<pathelement location="${lib}/commons-codec-1.14.jar" />
<pathelement location="${lib}/commons-compress-1.20.jar" />
<pathelement location="${lib}/xz-1.8.jar" />
<pathelement location="${lib}/commons-fileupload-1.4.jar" />
<pathelement location="${lib}/commons-io-2.7.jar" />
<pathelement location="${lib}/commons-lang-2.6.jar" />
<pathelement location="${lib}/commons-logging-1.2.jar" />
<pathelement location="${lib}/commons-math3-3.4.1.jar" />
<pathelement location="${lib}/fontbox-2.0.15.jar" />
<pathelement location="${lib}/apache-mime4j-0.6.jar" />
<pathelement location="${lib}/bcmail-jdk15on-1.60.jar" />
<pathelement location="${lib}/bcprov-jdk15on-1.60.jar" />
<pathelement location="${lib}/bcpkix-jdk15on-1.60.jar" />
<pathelement location="${lib}/chardet.jar" />
<pathelement location="${lib}/common-image-3.3.2.jar" />
<pathelement location="${lib}/common-io-3.3.2.jar" />
<pathelement location="${lib}/common-lang-3.3.2.jar" />
<pathelement location="${lib}/commons-codec-1.14.jar" />
<pathelement location="${lib}/commons-collections-3.2.2.jar" />
<pathelement location="${lib}/commons-compress-1.20.jar" />
<pathelement location="${lib}/xz-1.8.jar" />
<pathelement location="${lib}/commons-fileupload-1.4.jar" />
<pathelement location="${lib}/commons-io-2.7.jar" />
<pathelement location="${lib}/commons-lang-2.6.jar" />
<pathelement location="${lib}/commons-logging-1.2.jar" />
<pathelement location="${lib}/commons-math3-3.4.1.jar" />
<pathelement location="${lib}/fontbox-2.0.15.jar" />
<pathelement location="${lib}/guava-18.0.jar" />
<pathelement location="${lib}/httpclient-4.5.12.jar" />
<pathelement location="${lib}/httpcore-4.4.13.jar" />
<pathelement location="${lib}/httpmime-4.5.12.jar" />
<pathelement location="${lib}/icu4j-63.1.jar" />
<pathelement location="${lib}/imageio-bmp-3.3.2.jar" />
<pathelement location="${lib}/imageio-core-3.3.2.jar" />
<pathelement location="${lib}/imageio-metadata-3.3.2.jar" />
<pathelement location="${lib}/imageio-tiff-3.3.2.jar" />
<pathelement location="${lib}/imageio-bmp-3.3.2.jar" />
<pathelement location="${lib}/imageio-core-3.3.2.jar" />
<pathelement location="${lib}/imageio-metadata-3.3.2.jar" />
<pathelement location="${lib}/imageio-tiff-3.3.2.jar" />
<pathelement location="${lib}/J7Zip-modified.jar" />
<pathelement location="${lib}/jakarta-oro-2.0.8.jar" />
<pathelement location="${lib}/jaudiotagger-2.2.5.jar" />
<pathelement location="${lib}/jaudiotagger-2.2.5.jar" />
<pathelement location="${lib}/javax.servlet-api-3.1.0.jar" />
<pathelement location="${lib}/jcifs-1.3.17.jar" />
<pathelement location="${lib}/jcl-over-slf4j-1.7.25.jar" />
<pathelement location="${lib}/jcifs-1.3.17.jar" />
<pathelement location="${lib}/jcl-over-slf4j-1.7.25.jar" />
<pathelement location="${lib}/jetty-client-9.4.35.v20201120.jar" />
<pathelement location="${lib}/jetty-continuation-9.4.35.v20201120.jar" />
<pathelement location="${lib}/jetty-deploy-9.4.35.v20201120.jar" />
@@ -216,53 +217,55 @@
<pathelement location="${lib}/jetty-util-9.4.35.v20201120.jar" />
<pathelement location="${lib}/jetty-webapp-9.4.35.v20201120.jar" />
<pathelement location="${lib}/jetty-xml-9.4.35.v20201120.jar" />
<pathelement location="${lib}/jsch-0.1.54.jar" />
<pathelement location="${lib}/json-simple-1.1.1.jar" />
<pathelement location="${lib}/jsonic-1.3.10.jar" />
<pathelement location="${lib}/jsoup-1.12.1.jar" />
<pathelement location="${lib}/jsch-0.1.54.jar" />
<pathelement location="${lib}/json-simple-1.1.1.jar" />
<pathelement location="${lib}/jsonic-1.3.10.jar" />
<pathelement location="${lib}/jsoup-1.12.1.jar" />
<pathelement location="${lib}/jwat-archive-common-1.1.1.jar" />
<pathelement location="${lib}/jwat-common-1.1.1.jar" />
<pathelement location="${lib}/jwat-gzip-1.1.1.jar" />
<pathelement location="${lib}/jwat-warc-1.1.1.jar" />
<pathelement location="${lib}/log4j-over-slf4j-1.7.25.jar" />
<pathelement location="${lib}/lucene-analyzers-common-6.6.6.jar" />
<pathelement location="${lib}/lucene-analyzers-phonetic-6.6.6.jar" />
<pathelement location="${lib}/lucene-backward-codecs-6.6.6.jar" />
<pathelement location="${lib}/lucene-classification-6.6.6.jar" />
<pathelement location="${lib}/lucene-codecs-6.6.6.jar" />
<pathelement location="${lib}/lucene-core-6.6.6.jar" />
<pathelement location="${lib}/lucene-grouping-6.6.6.jar" />
<pathelement location="${lib}/lucene-highlighter-6.6.6.jar" />
<pathelement location="${lib}/lucene-join-6.6.6.jar" />
<pathelement location="${lib}/lucene-memory-6.6.6.jar" />
<pathelement location="${lib}/lucene-misc-6.6.6.jar" />
<pathelement location="${lib}/lucene-queries-6.6.6.jar" />
<pathelement location="${lib}/lucene-queryparser-6.6.6.jar" />
<pathelement location="${lib}/lucene-spatial-extras-6.6.6.jar" />
<pathelement location="${lib}/lucene-suggest-6.6.6.jar" />
<pathelement location="${lib}/metadata-extractor-2.11.0.jar" />
<pathelement location="${lib}/metrics-core-3.2.2.jar" />
<pathelement location="${lib}/noggit-0.6.jar" />
<pathelement location="${lib}/org.restlet.jar" />
<pathelement location="${lib}/langdetect.jar" />
<pathelement location="${lib}/log4j-over-slf4j-1.7.25.jar" />
<pathelement location="${lib}/lucene-analyzers-common-7.7.3.jar" />
<pathelement location="${lib}/lucene-analyzers-phonetic-7.7.3.jar" />
<pathelement location="${lib}/lucene-backward-codecs-7.7.3.jar" />
<pathelement location="${lib}/lucene-classification-7.7.3.jar" />
<pathelement location="${lib}/lucene-codecs-7.7.3.jar" />
<pathelement location="${lib}/lucene-core-7.7.3.jar" />
<pathelement location="${lib}/lucene-grouping-7.7.3.jar" />
<pathelement location="${lib}/lucene-highlighter-7.7.3.jar" />
<pathelement location="${lib}/lucene-join-7.7.3.jar" />
<pathelement location="${lib}/lucene-memory-7.7.3.jar" />
<pathelement location="${lib}/lucene-misc-7.7.3.jar" />
<pathelement location="${lib}/lucene-queries-7.7.3.jar" />
<pathelement location="${lib}/lucene-queryparser-7.7.3.jar" />
<pathelement location="${lib}/lucene-spatial-extras-7.7.3.jar" />
<pathelement location="${lib}/lucene-suggest-7.7.3.jar" />
<pathelement location="${lib}/metadata-extractor-2.11.0.jar" />
<pathelement location="${lib}/metrics-core-3.2.2.jar" />
<pathelement location="${lib}/noggit-0.8.jar" />
<pathelement location="${lib}/org.restlet.jar" />
<pathelement location="${lib}/langdetect.jar" />
<pathelement location="${lib}/pdfbox-2.0.15.jar" />
<pathelement location="${lib}/poi-3.17.jar" />
<pathelement location="${lib}/poi-scratchpad-3.17.jar" />
<pathelement location="${lib}/poi-3.17.jar" />
<pathelement location="${lib}/poi-scratchpad-3.17.jar" />
<pathelement location="${lib}/rrd4j-3.2.jar" />
<pathelement location="${lib}/slf4j-api-1.7.25.jar" />
<pathelement location="${lib}/slf4j-jdk14-1.7.25.jar" />
<pathelement location="${lib}/solr-core-6.6.6.jar" />
<pathelement location="${lib}/solr-solrj-6.6.6.jar" />
<pathelement location="${lib}/spatial4j-0.6.jar" />
<pathelement location="${lib}/solr-core-7.7.3.jar" />
<pathelement location="${lib}/solr-solrj-7.7.3.jar" />
<pathelement location="${lib}/spatial4j-0.6.jar" />
<pathelement location="${lib}/stax2-api_3.1.4.jar" />
<pathelement location="${lib}/weupnp-0.1.4.jar" />
<pathelement location="${lib}/woodstox-core-asl-4.4.1.jar" />
<pathelement location="${lib}/xml-apis.jar" />
<pathelement location="${lib}/weupnp-0.1.4.jar" />
<pathelement location="${lib}/woodstox-core-asl-4.4.1.jar" />
<pathelement location="${lib}/xml-apis.jar" />
<pathelement location="${lib}/xmpcore-5.1.3.jar" />
<pathelement location="${lib}/zookeeper-3.4.10.jar" />
<pathelement location="${lib}/xz-1.8.jar" />
<pathelement location="${lib}/zookeeper-3.4.14.jar" />
</path>
<target name="compile-core" depends="init" description="compile YaCy core">
<!-- compile yacyBuildProperties.java -->
<javac srcdir="${build}" destdir="${build}" sourcepath="${src}"
debug="true" debuglevel="lines,vars,source" includeantruntime="false"
@@ -281,7 +284,7 @@
<classpath refid="project.class.path" />
<compilerarg value="-Xlint"/>
</javac>
<!-- prepare classpath for MANIFEST (manifest cp is relative to jar) -->
<path id="project.classpath.runtime">
<fileset dir="./lib">
@@ -293,7 +296,7 @@
<flattenmapper/>
</pathconvert>
<!-- make the jacycore jar -->
<!-- make the jacycore jar -->
<jar destfile="${lib}/yacycore.jar" basedir="${build}/">
<manifest>
<attribute name="Main-Class" value="net.yacy.yacy"/>
@@ -302,11 +305,11 @@
</jar>
</target>
<target name="compile" depends="compile-core" description="compile YaCy core and YaCy servlets">
<!-- compile htroot, htroot/yacy and htroot/htdocsdefault -->
<javac srcdir="${htroot}/"
excludes="processing/**"
excludes="processing/**"
source="${javacSource}" target="${javacTarget}"
debug="true" debuglevel="lines,vars,source" includeantruntime="false" encoding="UTF-8">
<classpath refid="project.class.path" />
@@ -317,19 +320,19 @@
<target name="all" depends="compile">
</target>
<target name="copyMain4Dist" depends="compile">
<!-- copy all libs -->
<copy todir="${release_main}/lib">
<fileset dir="${lib}"
includes="**/*"/>
</copy>
<!-- copy build libs -->
<copy todir="${release_main}/libbuild">
<fileset dir="${libbuild}"
includes="**/*"
excludes="**/target/**"/>
</copy>
<target name="copyMain4Dist" depends="compile">
<!-- copy all libs -->
<copy todir="${release_main}/lib">
<fileset dir="${lib}"
includes="**/*"/>
</copy>
<!-- copy build libs -->
<copy todir="${release_main}/libbuild">
<fileset dir="${libbuild}"
includes="**/*"
excludes="**/target/**"/>
</copy>
<!-- copy configuration files -->
<copy todir="${release_main}">
@@ -341,13 +344,13 @@
</fileset>
</copy>
<!-- copy defaults -->
<!-- copy defaults -->
<copy todir="${release_main}/defaults">
<fileset dir="${defaults}"
includes="**/*"/>
</copy>
<!-- copy Unix wrappers -->
<!-- copy Unix wrappers -->
<copy todir="${release_main}">
<fileset dir=".">
<include name="startYACY.sh"/>
@@ -367,7 +370,7 @@
<include name="startYACY.bat"/>
<include name="startYACY_debug.bat"/>
<include name="stopYACY.bat"/>
<include name="getWin32MaxHeap.bat"/>
<include name="getWin32MaxHeap.bat"/>
<include name="installYaCyWindowsService.bat" />
<include name="uninstallYaCyWindowsService.bat" />
</fileset>
@@ -400,9 +403,9 @@
<include name="readme.txt"/>
<include name="gpl.txt"/>
<include name="ChangeLog"/>
<include name="AUTHORS"/>
<include name="COPYRIGHT"/>
<include name="NOTICE"/>
<include name="AUTHORS"/>
<include name="COPYRIGHT"/>
<include name="NOTICE"/>
</fileset>
</copy>
@@ -434,7 +437,7 @@
<include name="build.properties"/>
</fileset>
</copy>
<!-- copy searchtest -->
<copy todir="${release_main}/bin">
<fileset dir="bin">
@@ -447,7 +450,7 @@
<!-- run unittests-->
<target name="compileTest" depends="compile" description="run unittests">
<javac srcdir="${test}" destdir="${test}"
<javac srcdir="${test}" destdir="${test}"
debug="true" debuglevel="lines,vars,source"
source="${javacSource}" target="${javacTarget}" encoding="UTF-8">
<classpath>
@@ -465,14 +468,14 @@
<formatter type="plain"/>
<batchtest>
<fileset dir="${test}">
<include name="**/*Test*.java"/>
<include name="**/*Test*.java"/>
</fileset>
</batchtest>
<classpath>
<pathelement location="${test}"/>
<pathelement location="${build}"/>
<pathelement location="${htroot}"/>
<fileset dir="${libt}" includes="**/*.jar" />
<fileset dir="${libt}" includes="**/*.jar" />
<fileset dir="${lib}" includes="**/*.jar" />
</classpath>
</junit>
@@ -487,8 +490,8 @@
<!-- packing all files into a gzipped tar -->
<tarfileset dir="${release_main}" prefix="${releaseFileParentDir}/" dirmode="${accessRightsDir}" mode="${accessRightsExecutable}">
<include name="**/*.sh"/>
<include name="**/*.command"/>
<include name="**/*.sh"/>
<include name="**/*.command"/>
</tarfileset>
<tarfileset dir="${release_main}" prefix="${releaseFileParentDir}/" dirmode="${accessRightsDir}" mode="${accessRightsFile}" >
<include name="**/*"/>
@@ -501,22 +504,22 @@
<delete dir="${release_windows}" failonerror="false" />
<delete dir="${release}/SPECS" failonerror="false" />
</target>
<!-- packing YaCy search widget for static hosting -->
<target name="portalsearch" description="pack YaCy search widget for static hosting">
<tar destfile="${release}/yacy-portalsearch.tar.gz" compression="gzip" defaultexcludes="yes" longfile="fail">
<tarfileset dir="${portalsearch}" prefix="yacy/portalsearch/">
<include name="*.*"/>
<tar destfile="${release}/yacy-portalsearch.tar.gz" compression="gzip" defaultexcludes="yes" longfile="fail">
<tarfileset dir="${portalsearch}" prefix="yacy/portalsearch/">
<include name="*.*"/>
</tarfileset>
<tarfileset dir="${jquery}" prefix="yacy/jquery/">
<include name="**/*"/>
<include name="**/*"/>
</tarfileset>
<tarfileset dir="${img-2}" prefix="yacy/ui/img-2/">
<include name="magnify.png"/>
<include name="cancel_round.png"/>
<include name="stop.png"/>
<include name="magnify.png"/>
<include name="cancel_round.png"/>
<include name="stop.png"/>
</tarfileset>
</tar>
</tar>
</target>
<!-- make clean -->
@@ -554,10 +557,10 @@
</fileset>
</copy>
<!-- copy defaults -->
<copy todir="${DESTDIR}/usr/share/yacy/defaults">
<fileset dir="${defaults}" includes="**/*"/>
</copy>
<!-- copy defaults -->
<copy todir="${DESTDIR}/usr/share/yacy/defaults">
<fileset dir="${defaults}" includes="**/*"/>
</copy>
<!-- copy locales -->
<copy todir="${DESTDIR}/usr/share/yacy/locales">
@@ -595,9 +598,9 @@
<fileset dir=".">
<include name="readme.txt"/>
<include name="ChangeLog"/>
<include name="AUTHORS"/>
<include name="COPYRIGHT"/>
<include name="NOTICE"/>
<include name="AUTHORS"/>
<include name="COPYRIGHT"/>
<include name="NOTICE"/>
</fileset>
</copy>
@@ -620,11 +623,11 @@
<mkdir dir="${DESTDIR}/etc" />
<mkdir dir="${DESTDIR}/var/log" />
<symlink link="${DESTDIR}/usr/share/yacy/DATA"
resource="../../../var/lib/yacy"/>
resource="../../../var/lib/yacy"/>
<symlink link="${DESTDIR}/etc/yacy"
resource="../var/lib/yacy/SETTINGS"/>
resource="../var/lib/yacy/SETTINGS"/>
<symlink link="${DESTDIR}/var/log/yacy"
resource="../lib/yacy/LOG"/>
resource="../lib/yacy/LOG"/>
</target>
@@ -759,35 +762,35 @@
<delete dir="${release}/RPMS" failonerror="false" />
<delete dir="${release_windows}" failonerror="false" />
</target>
<target name="distWinInstaller" depends="copyMain4Dist" description="Creates an NSIS Windows installer">
<!-- http://nsisant.sourceforge.net/ -->
<mkdir dir="${release_windows}" />
<copy file="build.nsi" tofile="${release_windows}/build.nsi" overwrite="true" filtering="true" />
<taskdef name="nsis" classname="net.sf.nsisant.Task">
<classpath location="${libbuild}/nsisant-1.2.jar"/>
<classpath location="${libbuild}/nsisant-1.2.jar"/>
</taskdef>
<nsis script="${release_windows}/build.nsi" nocd="yes" verbosity="2" />
<move file="${release_windows}/yacy_v${releaseVersion}_${DSTAMP}_${releaseNr}.exe"
tofile="${release}/yacy_v${releaseVersion}_${DSTAMP}_${releaseNr}.exe" />
<delete dir="${release_main}"/>
<delete dir="${release_windows}" failonerror="false" />
</target>
<target name="distMacApp" depends="copyMain4Dist" description="Creates a YaCy.app">
<copy todir="${release_mac}/YaCy.app">
<fileset dir="${addon}/YaCy.app"></fileset>
</copy>
<copy file="${addon}/YaCy.app/Contents/Info.plist" tofile="${release_mac}/YaCy.app/Contents/Info.plist" filtering="true" overwrite="true" />
<move file="${release_main}" tofile="${release_mac}/YaCy.app/Contents/MacOS" verbose="false" />
<!-- startYACY.sh and startYACYMacOS.sh will be the main entry points : we set permissions to make it executable files -->
<chmod file="${release_mac}/YaCy.app/Contents/MacOS/startYACYMacOS.sh" perm="755"/>
<chmod file="${release_mac}/YaCy.app/Contents/MacOS/startYACY.sh" perm="755"/>
<exec executable="hdiutil">
<arg line="create -srcfolder ${release_mac}/YaCy.app ${release}/yacy_v${releaseVersion}_${DSTAMP}_${releaseNr}.dmg"/>
</exec>
<delete dir="${release_mac}" failonerror="false" verbose="false" />
<copy todir="${release_mac}/YaCy.app">
<fileset dir="${addon}/YaCy.app"></fileset>
</copy>
<copy file="${addon}/YaCy.app/Contents/Info.plist" tofile="${release_mac}/YaCy.app/Contents/Info.plist" filtering="true" overwrite="true" />
<move file="${release_main}" tofile="${release_mac}/YaCy.app/Contents/MacOS" verbose="false" />
<!-- startYACY.sh and startYACYMacOS.sh will be the main entry points : we set permissions to make it executable files -->
<chmod file="${release_mac}/YaCy.app/Contents/MacOS/startYACYMacOS.sh" perm="755"/>
<chmod file="${release_mac}/YaCy.app/Contents/MacOS/startYACY.sh" perm="755"/>
<exec executable="hdiutil">
<arg line="create -srcfolder ${release_mac}/YaCy.app ${release}/yacy_v${releaseVersion}_${DSTAMP}_${releaseNr}.dmg"/>
</exec>
<delete dir="${release_mac}" failonerror="false" verbose="false" />
</target>
<!-- to use the deb command the following debian packages must be installed: dpkg-dev debhelper m4 fakeroot -->
@@ -807,7 +810,7 @@
match="yacy \(.*\) unstable; urgency=low"
replace="yacy (*auto-git-version*) unstable; urgency=low" />
</target>
<target name="sign" depends="readBuildProperties" description="sign current release file in RELEASE/ with ${privateKeyFile}">
<fail message="There is no release file (${release}/${stdReleaseFile}) that could be signed !">
<condition>
@@ -825,7 +828,7 @@
<target name="genkey" depends="readBuildProperties" description="generate a pair of keys and write it to ${privateKeyFile} and ${privateKeyFile}.pub">
<fail message="There is already a private key file (${privateKeyFile})!">
<condition>
<available file="${privateKeyFile}" type="file" />
<available file="${privateKeyFile}" type="file" />
</condition>
</fail>
<java classname="net.yacy.utils.CryptoLib" failonerror="true">

debian/changelog

@@ -1,4 +1,4 @@
yacy (1.2.9018) unstable; urgency=low
yacy (1.922.10016) unstable; urgency=low
* SVN Update

@@ -50,12 +50,6 @@
<servlet-class>net.yacy.http.servlets.SolrServlet</servlet-class>
</servlet>
<!-- servlet to provide Google Search Appliance (GSA) formatted search results -->
<servlet>
<servlet-name>GSAServlet</servlet-name>
<servlet-class>net.yacy.http.servlets.GSAsearchServlet</servlet-class>
</servlet>
<!-- servlet to provide searchresults via proxy -->
<servlet>
<servlet-name>URLProxyServlet</servlet-name>
@@ -74,11 +68,6 @@
<url-pattern>/solr/collection1/select</url-pattern>
<url-pattern>/solr/webgraph/select</url-pattern>
</servlet-mapping>
<servlet-mapping>
<servlet-name>GSAServlet</servlet-name>
<url-pattern>/gsa/search</url-pattern>
</servlet-mapping>
<servlet-mapping>
<servlet-name>URLProxyServlet</servlet-name>

@@ -85,36 +85,7 @@
</tr>
</tbody>
</table>
<table style="border-width:0px;">
<caption><strong>Solr Resources</strong></caption>
<thead>
<tr class="TableHeader" style="vertical-align:bottom;">
<th>Class</th>
<th>Type</th>
<th>Description</th>
<th>Statistics</th>
<th>Size</th>
</tr>
</thead>
<tbody>
#{SolrList}#
<tr class="TableCellLight">
<td style="text-align:left;" class="TableCellDark">#[class]#</td>
<td style="text-align:left;" class="TableCellDark">#[type]#</td>
<td style="text-align:left;">#[description]#</td>
<td style="text-align:left;">#[statistics]#</td>
<td style="text-align:right;">#[size]#</td>
</tr>
#{/SolrList}#
</tbody>
<tfoot>
<tr class="TableCellDark">
<td colspan="5">Total Cache Count = #[SolrCacheCount]#</td>
</tr>
</tfoot>
</table>
<table style="border-width:0px;">
<caption><strong>Table RAM Index</strong></caption>
<thead>

@@ -28,10 +28,6 @@
import java.util.ConcurrentModificationException;
import java.util.Iterator;
import java.util.Map;
import java.util.TreeMap;
import org.apache.solr.core.SolrInfoMBean;
import org.apache.solr.search.SolrCache;
import net.yacy.cora.protocol.Domains;
import net.yacy.cora.protocol.RequestHeader;
@@ -50,11 +46,9 @@ public class PerformanceMemory_p {
private static final long KB = 1024;
private static final long MB = 1024 * KB;
public static serverObjects respond(@SuppressWarnings("unused") final RequestHeader header, final serverObjects post, final serverSwitch env) {
// return variable that accumulates replacements
Switchboard sb = (Switchboard) env;
final serverObjects prop = new serverObjects();
prop.put("gc", "0");
@@ -73,7 +67,7 @@ public class PerformanceMemory_p {
MemoryControl.setStandardStrategy(std);
}
}
prop.put("simulatedshortmemory.checked", MemoryControl.getSimulatedShortStatus() ? 1 : 0);
prop.put("useStandardmemoryStrategy.checked", env.getConfigBool("memory.standardStrategy", true) ? 1 : 0);
prop.put("memoryStrategy", MemoryControl.getStrategyName());
@@ -103,37 +97,18 @@ public class PerformanceMemory_p {
prop.putNum("memoryUsedAfterInitAGC", (memoryTotalAfterInitAGC - memoryFreeAfterInitAGC) / MB);
prop.putNum("memoryUsedNow", MemoryControl.used() / MB);
final Map<String, SolrInfoMBean> solrInfoMBeans = sb.index.fulltext().getSolrInfoBeans();
final TreeMap<String, Map.Entry<String, SolrInfoMBean>> solrBeanOM = new TreeMap<String, Map.Entry<String, SolrInfoMBean>>();
int c = 0;
for (Map.Entry<String, SolrInfoMBean> sc: solrInfoMBeans.entrySet()) solrBeanOM.put(sc.getValue().getName() + "$" + sc.getKey() + "$" + c++, sc);
c = 0;
int scc = 0;
for (Map.Entry<String, SolrInfoMBean> sc: solrBeanOM.values()) {
prop.put("SolrList_" + c + "_class", sc.getValue().getName());
prop.put("SolrList_" + c + "_type", sc.getKey());
prop.put("SolrList_" + c + "_description", sc.getValue().getDescription());
prop.put("SolrList_" + c + "_statistics", sc.getValue().getStatistics() == null ? "" : sc.getValue().getStatistics().toString().replaceAll(",", ", "));
prop.put("SolrList_" + c + "_size", sc.getValue() instanceof SolrCache ? Integer.toString(((SolrCache<?,?>)sc.getValue()).size()) : "");
if (sc.getValue() instanceof SolrCache) scc++;
c++;
}
prop.put("SolrList", c);
prop.put("SolrCacheCount", scc);
// write table for Table index sizes
Iterator<String> i = Table.filenames();
String filename;
TableStatistics stats;
int p;
c = 0;
int c = 0;
long totalmem = 0;
while (i.hasNext()) {
filename = i.next();
stats = Table.memoryStats(filename);
totalmem += stats.getTotalMem();
totalmem += stats.getTotalMem();
prop.put("EcoList_" + c + "_tableIndexPath", ((p = filename.indexOf("DATA",0)) < 0) ? filename : filename.substring(p));
prop.putNum("EcoList_" + c + "_tableSize", stats.getTableSize());

@@ -50,7 +50,6 @@
<li><a href="solr/select?hl=false&wt=yjson&facet=true&facet.mincount=1&facet.field=url_file_ext_s&start=0&rows=10&query=www" target="_blank"><i>API</i>&nbsp;&nbsp;&nbsp;Solr Default Core / JSON</a></li>
<li><a href="solr/collection1/select?q=*:*&defType=edismax&start=0&rows=3" target="_blank"><i>API</i>&nbsp;&nbsp;&nbsp;Solr Default Core / XML</a></li>
<li><a href="solr/webgraph/select?q=*:*&defType=edismax&start=0&rows=3" target="_blank"><i>API</i>&nbsp;&nbsp;&nbsp;Solr Webgraph Core / XML</a></li>
<li><a href="gsa/search?q=www&num=3" target="_blank"><i>API</i>&nbsp;&nbsp;&nbsp;Google Appliance API / XML</a></li>
</ul>
</li>
<li id="header_help" class="dropdown">

@@ -30,7 +30,6 @@
<li><a href="solr/select?hl=false&wt=yjson&facet=true&facet.mincount=1&facet.field=url_file_ext_s&start=0&rows=10&query=www" target="_blank"><i>API</i>&nbsp;&nbsp;&nbsp;Solr Default Core / JSON</a></li>
<li><a href="solr/collection1/select?q=*:*&defType=edismax&start=0&rows=3" target="_blank"><i>API</i>&nbsp;&nbsp;&nbsp;Solr Default Core / XML</a></li>
<li><a href="solr/webgraph/select?q=*:*&defType=edismax&start=0&rows=3" target="_blank"><i>API</i>&nbsp;&nbsp;&nbsp;Solr Webgraph Core / XML</a></li>
<li><a href="gsa/search?q=www&num=3" target="_blank"><i>API</i>&nbsp;&nbsp;&nbsp;Google Appliance API / XML</a></li>
</ul>
</li>
<li id="header_help" class="dropdown">

@@ -1,202 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

@@ -53,7 +53,6 @@ import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.core.SolrConfig;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrInfoMBean;
import org.apache.solr.handler.component.SearchHandler;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.request.SolrQueryRequestBase;
@@ -122,10 +121,6 @@ public class EmbeddedSolrConnector extends SolrServerConnector implements SolrCo
return 0;
}
public Map<String, SolrInfoMBean> getSolrInfoBeans() {
return this.core.getInfoRegistry();
}
@Override
public void clearCaches() {
SolrConfig solrConfig = this.core.getSolrConfig();
@@ -141,10 +136,6 @@ public class EmbeddedSolrConnector extends SolrServerConnector implements SolrCo
@SuppressWarnings("unchecked")
SolrCache<Integer, Document> documentCache = solrConfig.documentCacheConfig == null ? null : solrConfig.documentCacheConfig.newInstance();
if (documentCache != null) documentCache.clear();
for (SolrInfoMBean ib: this.core.getInfoRegistry().values()) {
// clear 'lost' caches
if (ib instanceof SolrCache) ((SolrCache<?,?>) ib).clear();
}
}
public SolrInstance getInstance() {
@@ -405,7 +396,7 @@ public class EmbeddedSolrConnector extends SolrServerConnector implements SolrCo
@Override
public long getCountByQuery(String querystring) {
int numFound = 0;
long numFound = 0;
DocListSearcher docListSearcher = null;
try {
docListSearcher = new DocListSearcher(querystring, null, 0, 0, CollectionSchema.id.getSolrFieldName());

@@ -58,7 +58,6 @@ import org.apache.solr.client.solrj.impl.ConcurrentUpdateSolrClient;
import org.apache.solr.client.solrj.impl.HttpClientUtil;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.update.UpdateShardHandler.IdleConnectionsEvictor;
import net.yacy.cora.document.id.MultiProtocolURL;
import net.yacy.cora.protocol.HeaderFramework;
@@ -97,20 +96,6 @@ public class RemoteInstance implements SolrInstance {
public static final AtomicBoolean ENABLE_SNI_EXTENSION = new AtomicBoolean(
Boolean.parseBoolean(System.getProperty("jsse.enableSNIExtension", Boolean.toString(ENABLE_SNI_EXTENSION_DEFAULT))));
/**
* Background daemon thread evicting expired idle connections from the pool.
* This may be eventually already done by the pool itself on connection request,
* but this background task helps when no request is made to the pool for a long
* time period.
*/
private static final IdleConnectionsEvictor EXPIRED_CONNECTIONS_EVICTOR = new IdleConnectionsEvictor(
CONNECTION_MANAGER, DEFAULT_CONNECTION_EVICTOR_SLEEP_TIME, TimeUnit.SECONDS,
DEFAULT_POOLED_CONNECTION_TIME_TO_LIVE, TimeUnit.SECONDS);
static {
EXPIRED_CONNECTIONS_EVICTOR.start();
}
/** A custom scheme registry allowing https connections to servers using self-signed certificate */
private static final org.apache.http.conn.scheme.SchemeRegistry SCHEME_REGISTRY = buildTrustSelfSignedSchemeRegistry();
@@ -286,7 +271,7 @@ public class RemoteInstance implements SolrInstance {
params.set(HttpClientUtil.PROP_SO_TIMEOUT, this.timeout);
this.client = HttpClientUtil.createClient(params, CONNECTION_MANAGER);
this.client = HttpClientUtil.createClient(params);
if(this.client instanceof org.apache.http.impl.client.DefaultHttpClient) {
if(this.client.getParams() != null) {
/* Set the maximum time to get a connection from the shared connections pool */
@@ -587,14 +572,6 @@
*/
public static void closeConnectionManager() {
try {
if (EXPIRED_CONNECTIONS_EVICTOR != null) {
// Shut down the evictor thread
EXPIRED_CONNECTIONS_EVICTOR.shutdown();
try {
EXPIRED_CONNECTIONS_EVICTOR.awaitTermination(1L, TimeUnit.SECONDS);
} catch (final InterruptedException ignored) {
}
}
} finally {
if (CONNECTION_MANAGER != null) {
CONNECTION_MANAGER.shutdown();
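
The deleted evictor depended on org.apache.solr.update.UpdateShardHandler.IdleConnectionsEvictor, which is gone in Solr 7.7.3. If background eviction of idle pooled connections is still wanted, it can be scheduled against the HTTP connection manager itself. A minimal sketch, assuming Apache HttpClient 4.x and that the shared pool is a PoolingHttpClientConnectionManager (the actual type of CONNECTION_MANAGER and the eviction intervals are assumptions, not taken from this commit):

import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;

public class IdleConnectionEvictorSketch {

    /** Periodically drop expired and long-idle connections from the shared pool. */
    public static ScheduledExecutorService start(final PoolingHttpClientConnectionManager pool) {
        final ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor(r -> {
            final Thread t = new Thread(r, "idle-connection-evictor");
            t.setDaemon(true);
            return t;
        });
        // Intervals are placeholder values, not the constants used by RemoteInstance.
        scheduler.scheduleAtFixedRate(() -> {
            pool.closeExpiredConnections();
            pool.closeIdleConnections(60, TimeUnit.SECONDS);
        }, 5, 5, TimeUnit.SECONDS);
        return scheduler;
    }
}

HttpClient 4.4+ also ships org.apache.http.impl.client.IdleConnectionEvictor, which could serve the same purpose without a hand-rolled scheduler.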

@@ -1,584 +0,0 @@
/**
* GSAResponseWriter
* Copyright 2012 by Michael Peter Christen
* First released 14.08.2012 at http://yacy.net
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program in the file lgpl21.txt
* If not, see <http://www.gnu.org/licenses/>.
*/
package net.yacy.cora.federate.solr.responsewriter;
import java.io.IOException;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.time.DateTimeException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.regex.Pattern;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexableField;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.XML;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.QueryResponseWriter;
import org.apache.solr.response.ResultContext;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.search.DocIterator;
import org.apache.solr.search.DocList;
import org.apache.solr.search.SolrIndexSearcher;
import net.yacy.cora.date.ISO8601Formatter;
import net.yacy.cora.protocol.HeaderFramework;
import net.yacy.cora.util.CommonPattern;
import net.yacy.http.servlets.GSAsearchServlet;
import net.yacy.peers.operation.yacyVersion;
import net.yacy.search.Switchboard;
import net.yacy.search.schema.CollectionSchema;
/**
* implementation of a GSA search result.
* example: GET /gsa/searchresult?q=chicken+teriyaki&output=xml&client=test&site=test&sort=date:D:S:d1
* for a xml reference, see https://developers.google.com/search-appliance/documentation/614/xml_reference
*/
public class GSAResponseWriter implements QueryResponseWriter, SolrjResponseWriter {
private static String YaCyVer = null;
private static final char lb = '\n';
private enum GSAToken {
CACHE_LAST_MODIFIED, // Date that the document was crawled, as specified in the Date HTTP header when the document was crawled for this index.
CRAWLDATE, // An optional element that shows the date when the page was crawled. It is shown only for pages that have been crawled within the past two days.
U, // The URL of the search result.
UE, // The URL-encoded version of the URL that is in the U parameter.
GD, // Contains the description of a KeyMatch result..
T, // The title of the search result.
RK, // Provides a ranking number used internally by the search appliance.
ENT_SOURCE, // Identifies the application ID (serial number) of the search appliance that contributes to a result. Example: <ENT_SOURCE>S5-KUB000F0ADETLA</ENT_SOURCE>
FS, // Additional details about the search result.
R, // details of an individual search result.
S, // The snippet for the search result. Query terms appear in bold in the results. Line breaks are included for proper text wrapping.
LANG, // Indicates the language of the search result. The LANG element contains a two-letter language code.
HAS; // Encapsulates special features that are included for this search result.
}
private static final char[] XML_START = (
"<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>\n<GSP VER=\"3.2\">\n<!-- This is a Google Search Appliance API result, provided by YaCy. See https://developers.google.com/search-appliance/documentation/614/xml_reference -->\n").toCharArray();
private static final char[] XML_STOP = "</GSP>\n".toCharArray();
// pre-select a set of YaCy schema fields for the solr searcher which should cause a better caching
private static final CollectionSchema[] extrafields = new CollectionSchema[]{
CollectionSchema.id, CollectionSchema.sku, CollectionSchema.title, CollectionSchema.description_txt,
CollectionSchema.last_modified, CollectionSchema.load_date_dt, CollectionSchema.size_i,
CollectionSchema.language_s, CollectionSchema.collection_sxt
};
private static final Set<String> SOLR_FIELDS = new HashSet<>();
static {
SOLR_FIELDS.add(CollectionSchema.language_s.getSolrFieldName());
for (CollectionSchema field: extrafields) SOLR_FIELDS.add(field.getSolrFieldName());
}
private static class ResHead {
public long offset, numFound;
public int rows;
//public int status, QTime;
//public String df, q, wt;
//public float maxScore;
}
public static class Sort {
public String sort = null, action = null, direction = null, mode = null, format = null;
public Sort(String d) {
this.sort = d;
String[] s = CommonPattern.DOUBLEPOINT.split(d);
if (s.length < 1) return;
this.action = s[0]; // date
this.direction = s.length > 1 ? s[1] : "D"; // A or D
this.mode = s.length > 2 ? s[2] : "S"; // S, R, L
this.format = s.length > 3 ? s[3] : "d1"; // d1
}
public String toSolr() {
if (this.action != null && "date".equals(this.action)) {
return CollectionSchema.last_modified.getSolrFieldName() + " " + (("D".equals(this.direction) ? "desc" : "asc"));
}
return null;
}
}
@Override
public String getContentType(final SolrQueryRequest request, final SolrQueryResponse response) {
return CONTENT_TYPE_XML_UTF8;
}
@Override
public void init(@SuppressWarnings("rawtypes") NamedList n) {
}
@Override
public void write(final Writer writer, final SolrQueryRequest request, final SolrQueryResponse rsp) throws IOException {
final long start = System.currentTimeMillis();
final Object responseObj = rsp.getResponse();
if(responseObj instanceof ResultContext) {
/* Regular response object */
final DocList documents = ((ResultContext) responseObj).getDocList();
final Object highlightingObj = rsp.getValues().get("highlighting");
final Map<String, Collection<String>> snippets = highlightingObj instanceof NamedList
? OpensearchResponseWriter.snippetsFromHighlighting((NamedList<?>) highlightingObj)
: new HashMap<>();
// parse response header
final ResHead resHead = new ResHead();
resHead.rows = request.getParams().getInt(CommonParams.ROWS, 0);
resHead.offset = documents.offset(); // equal to 'start'
resHead.numFound = documents.matches();
//resHead.df = (String) val0.get("df");
//resHead.q = (String) val0.get("q");
//resHead.wt = (String) val0.get("wt");
//resHead.status = (Integer) responseHeader.get("status");
//resHead.QTime = (Integer) responseHeader.get("QTime");
//resHead.maxScore = response.maxScore();
// write header
writeHeader(writer, request, resHead, start);
// body introduction
writeBodyIntro(writer, request, resHead, documents.size());
writeDocs(writer, request, documents, snippets, resHead);
writer.write("</RES>"); writer.write(lb);
writer.write(XML_STOP);
} else if(responseObj instanceof SolrDocumentList) {
/*
* The response object can be a SolrDocumentList when the response is partial,
* for example when the allowed processing time has been exceeded
*/
final SolrDocumentList documents = (SolrDocumentList) responseObj;
final Object highlightingObj = rsp.getValues().get("highlighting");
final Map<String, Collection<String>> snippets = highlightingObj instanceof NamedList
? OpensearchResponseWriter.snippetsFromHighlighting((NamedList<?>) highlightingObj)
: new HashMap<>();
writeSolrDocumentList(writer, request, snippets, start, documents);
} else {
throw new IOException("Unable to process Solr response format");
}
}
@Override
public void write(Writer writer, SolrQueryRequest request, String coreName, QueryResponse rsp) throws IOException {
final long start = System.currentTimeMillis();
writeSolrDocumentList(writer, request, snippetsFromHighlighting(rsp.getHighlighting()), start,
rsp.getResults());
}
/**
* Produce snippets from Solr (they call that 'highlighting')
*
* @param sorlHighlighting highlighting from Solr
* @return a map from urlhashes to a list of snippets for that url
*/
private Map<String, Collection<String>> snippetsFromHighlighting(
final Map<String, Map<String, List<String>>> sorlHighlighting) {
final Map<String, Collection<String>> snippets = new HashMap<>();
if (sorlHighlighting == null) {
return snippets;
}
for (final Entry<String, Map<String, List<String>>> highlightingEntry : sorlHighlighting.entrySet()) {
final String urlHash = highlightingEntry.getKey();
final Map<String, List<String>> highlights = highlightingEntry.getValue();
final LinkedHashSet<String> urlSnippets = new LinkedHashSet<>();
for (final List<String> texts : highlights.values()) {
urlSnippets.addAll(texts);
}
snippets.put(urlHash, urlSnippets);
}
return snippets;
}
/**
* Append to the writer a representation of a list of Solr documents. All
* parameters are required and must not be null.
*
* @param writer an open output writer
* @param request the Solr request
* @param snippets the snippets computed from the Solr highlighting
* @param start the results start index
* @param documents the Solr documents to process
* @throws IOException when a write error occurred
*/
private void writeSolrDocumentList(final Writer writer, final SolrQueryRequest request,
final Map<String, Collection<String>> snippets, final long start, final SolrDocumentList documents)
throws IOException {
// parse response header
final ResHead resHead = new ResHead();
resHead.rows = request.getParams().getInt(CommonParams.ROWS, 0);
resHead.offset = documents.getStart();
resHead.numFound = documents.getNumFound();
// write header
writeHeader(writer, request, resHead, start);
// body introduction
writeBodyIntro(writer, request, resHead, documents.size());
writeDocs(writer, documents, snippets, resHead, request.getParams().get("originalQuery"));
writer.write("</RES>"); writer.write(lb);
writer.write(XML_STOP);
}
/**
* Append the response header to the writer. All parameters are required and
* must not be null.
*
* @param writer an open output writer
* @param request the Solr request
* @param resHead results header information
* @param startTime this writer processing start time in milliseconds since
* Epoch
* @throws IOException when a write error occurred
*/
private void writeHeader(final Writer writer, final SolrQueryRequest request, final ResHead resHead,
final long startTime) throws IOException {
final Map<Object,Object> context = request.getContext();
writer.write(XML_START);
final String query = request.getParams().get("originalQuery");
final String site = getContextString(context, "site", "");
final String sort = getContextString(context, "sort", "");
final String client = getContextString(context, "client", "");
final String ip = getContextString(context, "ip", "");
final String access = getContextString(context, "access", "");
final String entqr = getContextString(context, "entqr", "");
OpensearchResponseWriter.solitaireTag(writer, "TM", Long.toString(System.currentTimeMillis() - startTime));
OpensearchResponseWriter.solitaireTag(writer, "Q", query);
paramTag(writer, "sort", sort);
paramTag(writer, "output", "xml_no_dtd");
paramTag(writer, "ie", StandardCharsets.UTF_8.name());
paramTag(writer, "oe", StandardCharsets.UTF_8.name());
paramTag(writer, "client", client);
paramTag(writer, "q", query);
paramTag(writer, "site", site);
paramTag(writer, "start", Long.toString(resHead.offset));
paramTag(writer, "num", Integer.toString(resHead.rows));
paramTag(writer, "ip", ip);
paramTag(writer, "access", access); // p - search only public content, s - search only secure content, a - search all content, both public and secure
paramTag(writer, "entqr", entqr); // query expansion policy; (entqr=1) -- Uses only the search appliance's synonym file, (entqr=1) -- Uses only the search appliance's synonym file, (entqr=3) -- Uses both standard and local synonym files.
}
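/*
 * Hedged sketch of the header emitted above for a request like q=yacy&num=10 (the TM value is
 * illustrative; empty parameters such as site or sort are skipped by paramTag):
 *   <TM>7</TM>
 *   <Q>yacy</Q>
 *   <PARAM name="output" value="xml_no_dtd" original_value="xml_no_dtd"/>
 *   <PARAM name="ie" value="UTF-8" original_value="UTF-8"/>
 *   <PARAM name="oe" value="UTF-8" original_value="UTF-8"/>
 *   <PARAM name="q" value="yacy" original_value="yacy"/>
 *   <PARAM name="start" value="0" original_value="0"/>
 *   <PARAM name="num" value="10" original_value="10"/>
 */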
/**
* Append the response body introduction to the writer. All parameters are
* required and must not be null.
*
* @param writer an open output writer
* @param request the Solr request
* @param resHead results header information
* @param responseCount the number of result documents
* @throws IOException when a write error occurred
*/
private void writeBodyIntro(final Writer writer, final SolrQueryRequest request, final ResHead resHead,
final int responseCount) throws IOException {
final Map<Object,Object> context = request.getContext();
final String site = getContextString(context, "site", "");
final String sort = getContextString(context, "sort", "");
final String client = getContextString(context, "client", "");
final String access = getContextString(context, "access", "");
writer.write("<RES SN=\"" + (resHead.offset + 1) + "\" EN=\"" + (resHead.offset + responseCount) + "\">"); writer.write(lb); // The index (1-based) of the first and last search result returned in this result set.
writer.write("<M>" + resHead.numFound + "</M>"); writer.write(lb); // The estimated total number of results for the search.
writer.write("<FI/>"); writer.write(lb); // Indicates that document filtering was performed during this search.
long nextStart = resHead.offset + responseCount;
long nextNum = Math.min(resHead.numFound - nextStart, responseCount < resHead.rows ? 0 : resHead.rows);
long prevStart = resHead.offset - resHead.rows;
if (prevStart >= 0 || nextNum > 0) {
writer.write("<NB>");
if (prevStart >= 0) {
writer.write("<PU>");
XML.escapeCharData("/gsa/search?q=" + request.getParams().get(CommonParams.Q) + "&site=" + site +
"&lr=&ie=UTF-8&oe=UTF-8&output=xml_no_dtd&client=" + client + "&access=" + access +
"&sort=" + sort + "&start=" + prevStart + "&sa=N", writer); // a relative URL pointing to the NEXT results page.
writer.write("</PU>");
}
if (nextNum > 0) {
writer.write("<NU>");
XML.escapeCharData("/gsa/search?q=" + request.getParams().get(CommonParams.Q) + "&site=" + site +
"&lr=&ie=UTF-8&oe=UTF-8&output=xml_no_dtd&client=" + client + "&access=" + access +
"&sort=" + sort + "&start=" + nextStart + "&num=" + nextNum + "&sa=N", writer); // a relative URL pointing to the NEXT results page.
writer.write("</NU>");
}
writer.write("</NB>");
}
writer.write(lb);
}
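/*
 * Hedged sketch of the body introduction written above for an illustrative first page
 * (offset 0, 10 documents returned out of 123 matches; the next-page URL is shortened):
 *   <RES SN="1" EN="10">
 *   <M>123</M>
 *   <FI/>
 *   <NB><NU>/gsa/search?q=...&start=10&num=10&sa=N</NU></NB>
 * No <PU> element appears on the first page because prevStart would be negative; the closing
 * </RES> tag is written later by the caller.
 */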
/**
* Append to the writer a representation of a list of Solr documents. All
* parameters are required and must not be null.
*
* @param writer an open output writer
* @param request the Solr request
* @param documents the Solr documents to process
* @param snippets the snippets computed from the Solr highlighting
* @param resHead results header information
* @throws IOException when a write error occurred
*/
private void writeDocs(final Writer writer, final SolrQueryRequest request, final DocList documents,
final Map<String, Collection<String>> snippets, final ResHead resHead)
throws IOException {
// parse body
final String query = request.getParams().get("originalQuery");
SolrIndexSearcher searcher = request.getSearcher();
DocIterator iterator = documents.iterator();
String urlhash = null;
final int responseCount = documents.size();
for (int i = 0; i < responseCount; i++) {
int id = iterator.nextDoc();
Document doc = searcher.doc(id, SOLR_FIELDS);
List<IndexableField> fields = doc.getFields();
// pre-scan the fields to get the mime-type
String mime = "";
for (IndexableField value: fields) {
String fieldName = value.name();
if (CollectionSchema.content_type.getSolrFieldName().equals(fieldName)) {
mime = value.stringValue();
break;
}
}
// write the R header for a search result
writer.write("<R N=\"" + (resHead.offset + i + 1) + "\"" + (i == 1 ? " L=\"2\"" : "") + (mime != null && mime.length() > 0 ? " MIME=\"" + mime + "\"" : "") + ">"); writer.write(lb);
List<String> descriptions = new ArrayList<>();
List<String> collections = new ArrayList<>();
int size = 0;
boolean title_written = false; // the solr index may contain several; we take only the first which should be the visible tag in <title></title>
String title = null;
for (IndexableField value: fields) {
String fieldName = value.name();
if (CollectionSchema.language_s.getSolrFieldName().equals(fieldName)) {
OpensearchResponseWriter.solitaireTag(writer, GSAToken.LANG.name(), value.stringValue());
} else if (CollectionSchema.id.getSolrFieldName().equals(fieldName)) {
urlhash = value.stringValue();
} else if (CollectionSchema.sku.getSolrFieldName().equals(fieldName)) {
OpensearchResponseWriter.solitaireTag(writer, GSAToken.U.name(), value.stringValue());
OpensearchResponseWriter.solitaireTag(writer, GSAToken.UE.name(), value.stringValue());
} else if (CollectionSchema.title.getSolrFieldName().equals(fieldName) && !title_written) {
title = value.stringValue();
OpensearchResponseWriter.solitaireTag(writer, GSAToken.T.name(), highlight(title, query));
title_written = true;
} else if (CollectionSchema.description_txt.getSolrFieldName().equals(fieldName)) {
descriptions.add(value.stringValue());
} else if (CollectionSchema.last_modified.getSolrFieldName().equals(fieldName)) {
Date d = new Date(Long.parseLong(value.stringValue()));
writer.write("<FS NAME=\"date\" VALUE=\"" + formatGSAFS(d) + "\"/>\n");
} else if (CollectionSchema.load_date_dt.getSolrFieldName().equals(fieldName)) {
Date d = new Date(Long.parseLong(value.stringValue()));
OpensearchResponseWriter.solitaireTag(writer, GSAToken.CRAWLDATE.name(), HeaderFramework.formatRFC1123(d));
} else if (CollectionSchema.size_i.getSolrFieldName().equals(fieldName)) {
size = value.stringValue() != null && value.stringValue().length() > 0 ? Integer.parseInt(value.stringValue()) : -1;
} else if (CollectionSchema.collection_sxt.getSolrFieldName().equals(fieldName)) {
collections.add(value.stringValue());
}
}
// compute snippet from texts
Collection<String> snippet = urlhash == null ? null : snippets.get(urlhash);
OpensearchResponseWriter.removeSubsumedTitle(snippet, title);
OpensearchResponseWriter.solitaireTag(writer, GSAToken.S.name(), snippet == null || snippet.size() == 0 ? (descriptions.size() > 0 ? descriptions.get(0) : "") : OpensearchResponseWriter.getLargestSnippet(snippet));
OpensearchResponseWriter.solitaireTag(writer, GSAToken.GD.name(), descriptions.size() > 0 ? descriptions.get(0) : "");
String cols = collections.toString();
if (collections.size() > 0) OpensearchResponseWriter.solitaireTag(writer, "COLS" /*SPECIAL!*/, collections.size() > 1 ? cols.substring(1, cols.length() - 1).replaceAll(" ", "") : collections.get(0));
writer.write("<HAS><L/><C SZ=\""); writer.write(Integer.toString(size / 1024)); writer.write("k\" CID=\""); writer.write(urlhash); writer.write("\" ENC=\"UTF-8\"/></HAS>\n");
if (YaCyVer == null) YaCyVer = yacyVersion.thisVersion().getName() + "/" + Switchboard.getSwitchboard().peers.mySeed().hash;
OpensearchResponseWriter.solitaireTag(writer, GSAToken.ENT_SOURCE.name(), YaCyVer);
OpensearchResponseWriter.closeTag(writer, "R");
}
}
/**
* Append to the writer a representation of a list of Solr documents. All
* parameters are required and must not be null.
*
* @param writer an open output writer
* @param documents the Solr documents to process
* @param snippets the snippets computed from the Solr highlighting
* @param resHead results header information
* @param query the original search query
* @throws IOException when a write error occurred
*/
private void writeDocs(final Writer writer, final SolrDocumentList documents,
final Map<String, Collection<String>> snippets, final ResHead resHead, final String query)
throws IOException {
// parse body
String urlhash = null;
int i = 0;
for (final SolrDocument doc : documents) {
// pre-scan the fields to get the mime-type
final Object contentTypeObj = doc.getFirstValue(CollectionSchema.content_type.getSolrFieldName());
final String mime = contentTypeObj != null ? contentTypeObj.toString() : "";
// write the R header for a search result
writer.write("<R N=\"" + (resHead.offset + i + 1) + "\"" + (i == 1 ? " L=\"2\"" : "") + (mime != null && mime.length() > 0 ? " MIME=\"" + mime + "\"" : "") + ">"); writer.write(lb);
final List<String> descriptions = new ArrayList<>();
final List<String> collections = new ArrayList<>();
int size = 0;
String title = null;
for (final Entry<String, Object> field : doc.entrySet()) {
final String fieldName = field.getKey();
final Object value = field.getValue();
if (CollectionSchema.language_s.getSolrFieldName().equals(fieldName)) {
OpensearchResponseWriter.solitaireTag(writer, GSAToken.LANG.name(), value.toString());
} else if (CollectionSchema.id.getSolrFieldName().equals(fieldName)) {
urlhash = value.toString();
} else if (CollectionSchema.sku.getSolrFieldName().equals(fieldName)) {
OpensearchResponseWriter.solitaireTag(writer, GSAToken.U.name(), value.toString());
OpensearchResponseWriter.solitaireTag(writer, GSAToken.UE.name(), value.toString());
} else if (CollectionSchema.title.getSolrFieldName().equals(fieldName)) {
if(value instanceof Iterable) {
for(final Object titleObj : (Iterable<?>)value) {
if(titleObj != null) {
/* get only the first title */
title = titleObj.toString();
break;
}
}
} else if(value != null) {
title = value.toString();
}
if(title != null) {
OpensearchResponseWriter.solitaireTag(writer, GSAToken.T.name(), highlight(title, query));
}
} else if (CollectionSchema.description_txt.getSolrFieldName().equals(fieldName)) {
if(value instanceof Iterable) {
for(final Object descriptionObj : (Iterable<?>)value) {
if(descriptionObj != null) {
descriptions.add(descriptionObj.toString());
}
}
} else if(value != null) {
descriptions.add(value.toString());
}
} else if (CollectionSchema.last_modified.getSolrFieldName().equals(fieldName) && value instanceof Date) {
writer.write("<FS NAME=\"date\" VALUE=\"" + formatGSAFS((Date)value) + "\"/>\n");
} else if (CollectionSchema.load_date_dt.getSolrFieldName().equals(fieldName) && value instanceof Date) {
OpensearchResponseWriter.solitaireTag(writer, GSAToken.CRAWLDATE.name(), HeaderFramework.formatRFC1123((Date)value));
} else if (CollectionSchema.size_i.getSolrFieldName().equals(fieldName)) {
size = value instanceof Integer ? (Integer)value : -1;
} else if (CollectionSchema.collection_sxt.getSolrFieldName().equals(fieldName)) { // handle collection
if(value instanceof Iterable) {
for(final Object collectionObj : (Iterable<?>)value) {
if(collectionObj != null) {
collections.add(collectionObj.toString());
}
}
} else if(value != null) {
collections.add(value.toString());
}
}
}
// compute snippet from texts
Collection<String> snippet = urlhash == null ? null : snippets.get(urlhash);
OpensearchResponseWriter.removeSubsumedTitle(snippet, title);
OpensearchResponseWriter.solitaireTag(writer, GSAToken.S.name(), snippet == null || snippet.size() == 0 ? (descriptions.size() > 0 ? descriptions.get(0) : "") : OpensearchResponseWriter.getLargestSnippet(snippet));
OpensearchResponseWriter.solitaireTag(writer, GSAToken.GD.name(), descriptions.size() > 0 ? descriptions.get(0) : "");
String cols = collections.toString();
if (!collections.isEmpty()) {
OpensearchResponseWriter.solitaireTag(writer, "COLS" /*SPECIAL!*/, collections.size() > 1 ? cols.substring(1, cols.length() - 1).replaceAll(" ", "") : collections.get(0));
}
writer.write("<HAS><L/><C SZ=\""); writer.write(Integer.toString(size / 1024)); writer.write("k\" CID=\""); writer.write(urlhash); writer.write("\" ENC=\"UTF-8\"/></HAS>\n");
if (YaCyVer == null) YaCyVer = yacyVersion.thisVersion().getName() + "/" + Switchboard.getSwitchboard().peers.mySeed().hash;
OpensearchResponseWriter.solitaireTag(writer, GSAToken.ENT_SOURCE.name(), YaCyVer);
OpensearchResponseWriter.closeTag(writer, "R");
i++;
}
}
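/*
 * Hedged sketch of a single <R> record as written above (URL, hash, date and size are placeholder
 * values; the order of the per-field tags follows the document field order, while S, GD, HAS and
 * ENT_SOURCE always follow at the end):
 *   <R N="1" MIME="text/html">
 *   <LANG>en</LANG>
 *   <U>http://example.org/</U>
 *   <UE>http://example.org/</UE>
 *   <T>Example <b>page</b></T>
 *   <FS NAME="date" VALUE="2021-01-01"/>
 *   <S>a text snippet ...</S>
 *   <GD>a description ...</GD>
 *   <HAS><L/><C SZ="12k" CID="placeholderhash" ENC="UTF-8"/></HAS>
 *   <ENT_SOURCE>YaCy-version/peerhash</ENT_SOURCE>
 *   </R>
 */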
private static String getContextString(Map<Object,Object> context, String key, String dflt) {
Object v = context.get(key);
if (v == null) return dflt;
if (v instanceof String) return (String) v;
if (v instanceof String[]) {
String[] va = (String[]) v;
return va.length == 0 ? dflt : va[0];
}
return dflt;
}
public static void paramTag(final Writer writer, final String tagname, String value) throws IOException {
if (value == null || value.length() == 0) return;
writer.write("<PARAM name=\"");
writer.write(tagname);
writer.write("\" value=\"");
XML.escapeAttributeValue(value, writer);
writer.write("\" original_value=\"");
XML.escapeAttributeValue(value, writer);
writer.write("\"/>"); writer.write(lb);
}
public static String highlight(String text, String query) {
if (query != null) {
String[] q = CommonPattern.SPACE.split(CommonPattern.PLUS.matcher(query.trim().toLowerCase()).replaceAll(" "));
for (String s: q) {
int p = text.toLowerCase().indexOf(s.toLowerCase());
if (p < 0) continue;
text = text.substring(0, p) + "<b>" + text.substring(p, p + s.length()) + "</b>" + text.substring(p + s.length());
}
return text.replaceAll(Pattern.quote("</b> <b>"), " ");
}
return text;
}
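/*
 * Usage sketch (illustrative input): highlight("YaCy Web Search", "web search") yields
 * "YaCy <b>Web Search</b>", because the two adjacent <b>..</b> ranges produced for "web"
 * and "search" are merged by the final replaceAll.
 */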
/**
* Format date for GSA (short form of ISO8601 date format)
* @param date
* @return datestring "yyyy-mm-dd"
* @see ISO8601Formatter
*/
public final String formatGSAFS(final Date date) {
if (date == null) {
return "";
}
try {
return GSAsearchServlet.FORMAT_GSAFS.format(date.toInstant());
} catch (final DateTimeException e) {
return "";
}
}
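/*
 * Example (illustrative): formatGSAFS(new Date(0L)) returns "1970-01-01" when the system
 * default time zone is UTC; other zones may shift the rendered day.
 */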
}

@ -244,6 +244,7 @@ public class GrepHTMLResponseWriter implements QueryResponseWriter, SolrjRespons
private void writeApiLink(final Writer writer, final SolrParams solrParams) throws IOException {
final NamedList<Object> paramsList = solrParams.toNamedList();
paramsList.remove("wt");
@SuppressWarnings("deprecation")
String xmlquery = dqp.matcher("select?" + SolrParams.toSolrParams(paramsList).toString()).replaceAll("%22");
writer.write("<div id=\"api\"><a href=\"" + xmlquery + "\"><img src=\"../env/grafics/api.png\" width=\"60\" height=\"40\" alt=\"API\" /></a>\n");

@ -125,6 +125,7 @@ public class HTMLResponseWriter implements QueryResponseWriter, SolrjResponseWri
final NamedList<Object> paramsList = solrParams.toNamedList();
paramsList.remove("wt");
@SuppressWarnings("deprecation")
final String xmlquery = dqp.matcher("select?" + SolrParams.toSolrParams(paramsList).toString() + "&core=" + coreName).replaceAll("%22");
writer.write("<div id=\"api\"><a href=\"" + xmlquery + "\"><img src=\"../env/grafics/api.png\" width=\"60\" height=\"40\" alt=\"API\" /></a>\n");

@ -1,286 +0,0 @@
/**
* search
* Copyright 2012 by Michael Peter Christen, mc@yacy.net, Frankfurt am Main, Germany
* First released 30.10.2013 at http://yacy.net
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program in the file lgpl21.txt
* If not, see <http://www.gnu.org/licenses/>.
*/
package net.yacy.http.servlets;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.time.LocalDate;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.DisMaxParams;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.request.SolrRequestInfo;
import org.apache.solr.response.QueryResponseWriter;
import org.apache.solr.response.ResultContext;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.util.FastWriter;
import net.yacy.cora.date.ISO8601Formatter;
import net.yacy.cora.federate.solr.Ranking;
import net.yacy.cora.federate.solr.connector.EmbeddedSolrConnector;
import net.yacy.cora.federate.solr.responsewriter.GSAResponseWriter;
import net.yacy.cora.protocol.HeaderFramework;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.data.UserDB;
import net.yacy.search.Switchboard;
import net.yacy.search.query.AccessTracker;
import net.yacy.search.query.QueryGoal;
import net.yacy.search.query.QueryModifier;
import net.yacy.search.query.SearchEvent;
import net.yacy.search.schema.CollectionSchema;
import net.yacy.server.serverObjects;
/**
* This is a gsa result formatter for solr search results.
* The result format is implemented according to
* https://developers.google.com/search-appliance/documentation/614/xml_reference
*/
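/*
 * Hedged example of a request handled by this servlet (host, port, collection and client names
 * are placeholders, not taken from this source):
 *   http://localhost:8090/gsa/search?q=yacy&num=10&site=col1|col2&client=myfrontend&output=xml_no_dtd
 * The answer is the GSA XML format produced by GSAResponseWriter.
 */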
public class GSAsearchServlet extends HttpServlet {
private static final long serialVersionUID = 7835985518515673885L;
/** GSA date formatter (short form of ISO8601 date format) */
private static final String PATTERN_GSAFS = "uuuu-MM-dd";
public static final DateTimeFormatter FORMAT_GSAFS = DateTimeFormatter.ofPattern(PATTERN_GSAFS)
.withLocale(Locale.US).withZone(ZoneId.systemDefault());
private final static GSAResponseWriter responseWriter = new GSAResponseWriter();
@Override
protected void doPost(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
doGet(request, response);
}
@Override
protected void doGet(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
response.setContentType(QueryResponseWriter.CONTENT_TYPE_XML_UTF8);
response.setStatus(HttpServletResponse.SC_OK);
respond(request, Switchboard.getSwitchboard(), response.getOutputStream());
}
// ------------------------------------------
/**
* from here copy of old htroot/gsa/gsasearchresult.java
* with modification to use HttpServletRequest instead of (yacy) RequestHeader
*/
private void respond(final HttpServletRequest header, final Switchboard sb, final OutputStream out) {
// remember the peer contact for peer statistics
String clientip = RequestHeader.client(header);
if (clientip == null) clientip = "<unknown>"; // read an artificial header addendum
String userAgent = header.getHeader(HeaderFramework.USER_AGENT);
if (userAgent == null) userAgent = "<unknown>";
sb.peers.peerActions.setUserAgent(clientip, userAgent);
// --- handled by Servlet securityHandler
// check if user is allowed to search (can be switched in /ConfigPortal_p.html)
boolean authenticated = header.isUserInRole(UserDB.AccessRight.ADMIN_RIGHT.toString()); //sb.adminAuthenticated(header) >= 2;
// final boolean searchAllowed = authenticated || sb.getConfigBool(SwitchboardConstants.PUBLIC_SEARCHPAGE, true);
// if (!searchAllowed) return null;
// create post
serverObjects post = new serverObjects();
post.put(CommonParams.Q, ""); post.put("num", "0");
// convert servletrequest parameter to old style serverObjects map
Map<String, String[]> map = header.getParameterMap();
Iterator<Map.Entry<String, String[]>> it = map.entrySet().iterator();
while (it.hasNext()) {
Map.Entry<String, String[]> param = it.next();
post.put(param.getKey(), param.getValue()); // hint: post.put uses String[] for String value anyways
}
ConcurrentLog.info("GSA Query", post.toString());
sb.intermissionAllThreads(3000); // tell all threads to do nothing for a specific time
// rename post fields according to result style
//post.put(CommonParams.Q, post.remove("q")); // same as solr
//post.put(CommonParams.START, post.remove("start")); // same as solr
//post.put(, post.remove("client"));//required, example: myfrontend
//post.put(, post.remove("output"));//required, example: xml,xml_no_dtd
String originalQuery = post.get(CommonParams.Q, "");
post.put("originalQuery", originalQuery);
// get a solr query string
QueryGoal qg = new QueryGoal(originalQuery);
List<String> solrFQ = qg.collectionTextFilterQuery(false);
StringBuilder solrQ = qg.collectionTextQuery();
post.put("defType", "edismax");
for (String fq: solrFQ) post.add(CommonParams.FQ, fq);
post.put(CommonParams.Q, solrQ.toString());
post.put(CommonParams.ROWS, post.remove("num"));
post.put(CommonParams.ROWS, Math.min(post.getInt(CommonParams.ROWS, 10), (authenticated) ? 100000000 : 100));
// set ranking
final Ranking ranking = sb.index.fulltext().getDefaultConfiguration().getRanking(0);
final String qf = ranking.getQueryFields();
if (!qf.isEmpty()) post.put(DisMaxParams.QF, qf);
if (post.containsKey(CommonParams.SORT)) {
// if a gsa-style sort attribute is given, use this to set the solr sort attribute
GSAResponseWriter.Sort sort = new GSAResponseWriter.Sort(post.get(CommonParams.SORT, ""));
String sorts = sort.toSolr();
if (sorts == null) {
post.remove(CommonParams.SORT);
} else {
post.put(CommonParams.SORT, sorts);
}
} else {
// if no such sort attribute is given, use the ranking as configured for YaCy
String fq = ranking.getFilterQuery();
String bq = ranking.getBoostQuery();
String bf = ranking.getBoostFunction();
if (fq.length() > 0) post.put(CommonParams.FQ, fq);
if (bq.length() > 0) post.put(DisMaxParams.BQ, bq);
if (bf.length() > 0) post.put("boost", bf); // a boost function extension, see http://wiki.apache.org/solr/ExtendedDisMax#bf_.28Boost_Function.2C_additive.29
}
String daterange[] = post.remove("daterange");
if (daterange != null) {
String origfq = post.get(CommonParams.FQ);
String datefq = "";
for (String dr: daterange) {
String from_to[] = dr.endsWith("..") ? new String[]{dr.substring(0, dr.length() - 2), ""} : dr.startsWith("..") ? new String[]{"", dr.substring(2)} : dr.split("\\.\\.");
if (from_to.length != 2) continue;
Date from = this.parseGSAFS(from_to[0]);
if (from == null) from = new Date(0);
Date to = this.parseGSAFS(from_to[1]);
if (to == null) to = new Date();
to.setTime(to.getTime() + 24L * 60L * 60L * 1000L); // we add a day because the day is inclusive
String z = CollectionSchema.last_modified.getSolrFieldName() + ":[" + ISO8601Formatter.FORMATTER.format(from) + " TO " + ISO8601Formatter.FORMATTER.format(to) + "]";
datefq = datefq.length() == 0 ? z : datefq + " OR " + z;
}
if (datefq.length() > 0) post.put(CommonParams.FQ, origfq == null || origfq.length() == 0 ? datefq : "(" + origfq + ") AND (" + datefq + ")");
}
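/*
 * Illustrative daterange values accepted by the loop above (hypothetical input):
 *   "2013-01-01..2013-12-31"  -> last_modified between the two days (end day inclusive)
 *   "2013-01-01.."            -> from that day until now
 *   "..2013-12-31"            -> from the epoch until that day (inclusive)
 */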
post.put(CommonParams.FL,
CollectionSchema.content_type.getSolrFieldName() + ',' +
CollectionSchema.id.getSolrFieldName() + ',' +
CollectionSchema.sku.getSolrFieldName() + ',' +
CollectionSchema.title.getSolrFieldName() + ',' +
CollectionSchema.description_txt.getSolrFieldName() + ',' +
CollectionSchema.load_date_dt.getSolrFieldName() + ',' +
CollectionSchema.last_modified.getSolrFieldName() + ',' +
CollectionSchema.size_i.getSolrFieldName());
post.put("hl", "true");
post.put("hl.q", originalQuery);
post.put("hl.fl", CollectionSchema.description_txt + "," + CollectionSchema.h4_txt.getSolrFieldName() + "," + CollectionSchema.h3_txt.getSolrFieldName() + "," + CollectionSchema.h2_txt.getSolrFieldName() + "," + CollectionSchema.h1_txt.getSolrFieldName() + "," + CollectionSchema.text_t.getSolrFieldName());
post.put("hl.alternateField", CollectionSchema.description_txt.getSolrFieldName());
post.put("hl.simple.pre", "<b>");
post.put("hl.simple.post", "</b>");
post.put("hl.fragsize", Integer.toString(SearchEvent.SNIPPET_MAX_LENGTH));
//String[] access = post.remove("access");
//String[] entqr = post.remove("entqr");
// add sites operator
String[] site = post.remove("site"); // example: col1|col2
if (site != null && site[0].length() > 0) {
String origfq = post.get(CommonParams.FQ);
String sitefq = QueryModifier.parseCollectionExpression(site[0]);
post.put(CommonParams.FQ, origfq == null || origfq.length() == 0 ? sitefq : "(" + origfq + ") AND (" + sitefq + ")");
}
// get the embedded connector
EmbeddedSolrConnector connector = sb.index.fulltext().getDefaultEmbeddedConnector();
if (connector == null) return;
// do the solr request
SolrQueryRequest req = connector.request(post.toSolrParams(null));
SolrQueryResponse response = null;
Exception e = null;
try {response = connector.query(req);} catch (final SolrException ee) {e = ee;}
if (response != null) e = response.getException();
if (e != null) {
ConcurrentLog.logException(e);
if (req != null) req.close();
SolrRequestInfo.clearRequestInfo();
return;
}
// set some context for the writer
/*
Map<Object,Object> context = req.getContext();
context.put("ip", header.get("CLIENTIP", ""));
context.put("client", "vsm_frontent");
context.put("sort", sort.sort);
context.put("site", site == null ? "" : site);
context.put("access", access == null ? "p" : access[0]);
context.put("entqr", entqr == null ? "3" : entqr[0]);
*/
// write the result directly to the output stream
Writer ow = new FastWriter(new OutputStreamWriter(out, StandardCharsets.UTF_8));
try {
responseWriter.write(ow, req, response);
ow.flush();
} catch (final IOException e1) {
} finally {
req.close();
SolrRequestInfo.clearRequestInfo();
try {ow.close();} catch (final IOException e1) {}
}
// log result
Object rv = response.getValues().get("response");
int matches = 0;
if (rv != null && rv instanceof ResultContext) {
matches = ((ResultContext) rv).getDocList().matches();
} else if (rv != null && rv instanceof SolrDocumentList) {
matches = (int) ((SolrDocumentList) rv).getNumFound();
}
AccessTracker.addToDump(originalQuery, matches);
ConcurrentLog.info("GSA Query", "results: " + matches + ", for query:" + post.toString());
}
/**
* Parse GSA date string (short form of ISO8601 date format)
* @param datestring
* @return date or null
* @see ISO8601Formatter
*/
public final Date parseGSAFS(final String datestring) {
try {
return Date
.from(LocalDate.parse(datestring, FORMAT_GSAFS).atStartOfDay(ZoneId.systemDefault()).toInstant());
} catch (final RuntimeException e) {
return null;
}
}
}

@ -42,7 +42,6 @@ import net.yacy.cora.federate.solr.connector.EmbeddedSolrConnector;
import net.yacy.cora.federate.solr.connector.SolrConnector;
import net.yacy.cora.federate.solr.responsewriter.EmbeddedSolrResponseWriter;
import net.yacy.cora.federate.solr.responsewriter.EnhancedXMLResponseWriter;
import net.yacy.cora.federate.solr.responsewriter.GSAResponseWriter;
import net.yacy.cora.federate.solr.responsewriter.GrepHTMLResponseWriter;
import net.yacy.cora.federate.solr.responsewriter.HTMLResponseWriter;
import net.yacy.cora.federate.solr.responsewriter.OpensearchResponseWriter;
@ -86,7 +85,6 @@ import org.apache.solr.search.DocList;
import org.apache.solr.servlet.SolrRequestParsers;
import org.apache.solr.servlet.cache.HttpCacheHeaderUtil;
import org.apache.solr.servlet.cache.Method;
import org.apache.solr.util.FastWriter;
/*
* taken from the Solr 3.6.0 code, which is now deprecated;
@ -95,7 +93,7 @@ import org.apache.solr.util.FastWriter;
*/
public class SolrSelectServlet extends HttpServlet {
private static final long serialVersionUID = 1L;
public final Map<String, QueryResponseWriter> RESPONSE_WRITER = new HashMap<String, QueryResponseWriter>();
/**
@ -117,7 +115,6 @@ public class SolrSelectServlet extends HttpServlet {
RESPONSE_WRITER.put("rss", opensearchResponseWriter); //try http://localhost:8090/solr/select?wt=rss&q=olympia&hl=true&hl.fl=text_t,h1,h2
RESPONSE_WRITER.put("opensearch", opensearchResponseWriter); //try http://localhost:8090/solr/select?wt=rss&q=olympia&hl=true&hl.fl=text_t,h1,h2
RESPONSE_WRITER.put("yjson", new YJsonResponseWriter()); //try http://localhost:8090/solr/select?wt=yjson&q=olympia&hl=true&hl.fl=text_t,h1,h2
RESPONSE_WRITER.put("gsa", new GSAResponseWriter());
}
@Override
@ -128,7 +125,7 @@ public class SolrSelectServlet extends HttpServlet {
SolrQueryRequest req = null;
final Method reqMethod = Method.getMethod(hrequest.getMethod());
Writer out = null;
try {
// prepare request to solr
@ -137,16 +134,16 @@ public class SolrSelectServlet extends HttpServlet {
Switchboard sb = Switchboard.getSwitchboard();
// TODO: isUserInRole needs a login to jetty container (not done automatically on admin from localhost)
boolean authenticated = hrequest.isUserInRole(UserDB.AccessRight.ADMIN_RIGHT.toString());
// count remote searches if this was part of a p2p search
if (mmsp.getMap().containsKey("partitions")) {
final int partitions = mmsp.getInt("partitions", 30);
sb.searchQueriesGlobal += 1.0f / partitions; // increase query counter
}
// get the ranking profile id
int profileNr = mmsp.getInt("profileNr", 0);
// rename post fields according to result style
String querystring = "";
if (!mmsp.getMap().containsKey(CommonParams.Q) && mmsp.getMap().containsKey(CommonParams.QUERY)) {
@ -158,7 +155,7 @@ public class SolrSelectServlet extends HttpServlet {
QueryGoal qg = new QueryGoal(querystring);
StringBuilder solrQ = qg.collectionTextQuery();
mmsp.getMap().put(CommonParams.Q, new String[]{solrQ.toString()}); // sru patch
// experimental p2p enrichment if flag to do so is set
/*
final String p2pQuery = querystring;
@ -185,7 +182,7 @@ public class SolrSelectServlet extends HttpServlet {
mmsp.getMap().put(CommonParams.ROWS, new String[]{Integer.toString(maximumRecords)}); // sru patch
}
mmsp.getMap().put(CommonParams.ROWS, new String[]{Integer.toString(Math.min(mmsp.getInt(CommonParams.ROWS, CommonParams.ROWS_DEFAULT), (authenticated) ? 100000000 : 100))});
// set ranking according to profile number if ranking attributes are not given in the request
Ranking ranking = sb.index.fulltext().getDefaultConfiguration().getRanking(profileNr);
if (!mmsp.getMap().containsKey(CommonParams.SORT) && !mmsp.getMap().containsKey(DisMaxParams.BQ) && !mmsp.getMap().containsKey(DisMaxParams.BF) && !mmsp.getMap().containsKey("boost")) {
@ -210,7 +207,7 @@ public class SolrSelectServlet extends HttpServlet {
"") : sb.getConfig(SwitchboardConstants.GREETING, "");
((OpensearchResponseWriter) responseWriter).setTitle(promoteSearchPageGreeting);
}
// if this is a call to YaCys special search formats, enhance the query with field assignments
if ((responseWriter instanceof YJsonResponseWriter || responseWriter instanceof OpensearchResponseWriter) && "true".equals(mmsp.get("hl", "true"))) {
// add options for snippet generation
@ -262,10 +259,10 @@ public class SolrSelectServlet extends HttpServlet {
final SolrQueryResponse rsp;
if (connector instanceof EmbeddedSolrConnector) {
req = ((EmbeddedSolrConnector) connector).request(mmsp);
/* Add the servlet request URI to the context for eventual computation of relative paths in writers */
req.getContext().put("requestURI", requestURI);
rsp = ((EmbeddedSolrConnector) connector).query(req);
// prepare response
@ -278,22 +275,21 @@ public class SolrSelectServlet extends HttpServlet {
sendError(hresponse, rsp.getException());
return;
}
final Object responseObj = rsp.getResponse();
if(responseObj instanceof ResultContext) {
/* Regular response object */
final DocList r = ((ResultContext) responseObj).getDocList();
/* Regular response object */
final DocList r = ((ResultContext) responseObj).getDocList();
AccessTracker.addToDump(querystring, r.matches(), new Date(), "sq");
} else if(responseObj instanceof SolrDocumentList){
/*
* The response object can be a SolrDocumentList when the response is partial,
* for example when the allowed processing time has been exceeded
*/
final SolrDocumentList r = (SolrDocumentList) responseObj;
/*
* The response object can be a SolrDocumentList when the response is partial,
* for example when the allowed processing time has been exceeded
*/
final SolrDocumentList r = (SolrDocumentList) responseObj;
AccessTracker.addToDump(querystring, r.getNumFound(), new Date(), "sq");
}
// write response header
final String contentType = responseWriter.getContentType(req, rsp);
if (null != contentType) response.setContentType(contentType);
@ -306,78 +302,78 @@ public class SolrSelectServlet extends HttpServlet {
if (responseWriter instanceof BinaryResponseWriter) {
((BinaryResponseWriter) responseWriter).write(response.getOutputStream(), req, rsp);
} else {
out = new FastWriter(new OutputStreamWriter(response.getOutputStream(), StandardCharsets.UTF_8));
responseWriter.write(out, req, rsp);
out.flush();
OutputStreamWriter osw = new OutputStreamWriter(response.getOutputStream(), StandardCharsets.UTF_8);
responseWriter.write(osw, req, rsp);
osw.close();
}
} else {
if (responseWriter instanceof EmbeddedSolrResponseWriter || responseWriter instanceof CSVResponseWriter
|| responseWriter instanceof XSLTResponseWriter || responseWriter instanceof RawResponseWriter) {
/* These writers need a non null req.getSearcher(), req.getSchema() and/or req.getCore() */
throw new ServletException("The writer " + responseWriter.getClass().getSimpleName() + " can only process responses from an embedded Solr server.");
if (responseWriter instanceof EmbeddedSolrResponseWriter || responseWriter instanceof CSVResponseWriter
|| responseWriter instanceof XSLTResponseWriter || responseWriter instanceof RawResponseWriter) {
/* These writers need a non null req.getSearcher(), req.getSchema() and/or req.getCore() */
throw new ServletException("The writer " + responseWriter.getClass().getSimpleName() + " can only process responses from an embedded Solr server.");
}
QueryResponse queryRsp = connector.getResponseByParams(ModifiableSolrParams.of(mmsp));
QueryResponse queryRsp = connector.getResponseByParams(ModifiableSolrParams.of(mmsp));
/* Create SolrQueryRequestBase and SolrQueryResponse instances as these types are requited by Solr standard writers.
* WARNING : the SolrQueryRequestBase instance will return null for the getSearcher(), getCore() and getSchema() functions.
* Be sure thath the responseWriter instance can handle this properly. */
req = new SolrQueryRequestBase(null, mmsp) {};
/* Add the servlet request URI to the context for eventual computation of relative paths in writers */
req.getContext().put("requestURI", requestURI);
rsp = new SolrQueryResponse();
req = new SolrQueryRequestBase(null, mmsp) {};
/* Add the servlet request URI to the context for eventual computation of relative paths in writers */
req.getContext().put("requestURI", requestURI);
rsp = new SolrQueryResponse();
rsp.setHttpCaching(false);
rsp.setAllValues(queryRsp.getResponse());
if(!mmsp.getBool(CommonParams.OMIT_HEADER, false)) {
NamedList<Object> responseHeader = rsp.getResponseHeader();
if (responseHeader == null) {
/* The remote Solr provided no response header ? Not likely to happen but let's add one */
responseHeader = new SimpleOrderedMap<Object>();
responseHeader.add("params", mmsp.toNamedList());
rsp.addResponseHeader(responseHeader);
} else {
final int paramsIndex = responseHeader.indexOf("params", 0);
if (paramsIndex >= 0) {
/* Write this Solr servlet initial params to the response header and not the params sent to the remote Solr that differ a little (notably the wt param) */
responseHeader.setVal(paramsIndex, mmsp.toNamedList());
} else {
responseHeader.add("params", mmsp.toNamedList());
}
}
NamedList<Object> responseHeader = rsp.getResponseHeader();
if (responseHeader == null) {
/* The remote Solr provided no response header ? Not likely to happen but let's add one */
responseHeader = new SimpleOrderedMap<Object>();
responseHeader.add("params", mmsp.toNamedList());
rsp.addResponseHeader(responseHeader);
} else {
final int paramsIndex = responseHeader.indexOf("params", 0);
if (paramsIndex >= 0) {
/* Write this Solr servlet initial params to the response header and not the params sent to the remote Solr that differ a little (notably the wt param) */
responseHeader.setVal(paramsIndex, mmsp.toNamedList());
} else {
responseHeader.add("params", mmsp.toNamedList());
}
}
}
// prepare response
hresponse.setHeader("Cache-Control", "no-cache, no-store");
final SolrDocumentList documentsList = queryRsp.getResults();
long numFound = documentsList.getNumFound();
AccessTracker.addToDump(querystring, numFound, new Date(), "sq");
// write response header
final String contentType = responseWriter.getContentType(req, rsp);
if (null != contentType) {
response.setContentType(contentType);
response.setContentType(contentType);
}
if (Method.HEAD == reqMethod) {
return;
}
// write response body
if (responseWriter instanceof SolrjResponseWriter) {
out = new OutputStreamWriter(response.getOutputStream(), StandardCharsets.UTF_8);
((SolrjResponseWriter) responseWriter).write(out, req,
defaultConnector ? CollectionSchema.CORE_NAME : WebgraphSchema.CORE_NAME, queryRsp);
out = new OutputStreamWriter(response.getOutputStream(), StandardCharsets.UTF_8);
((SolrjResponseWriter) responseWriter).write(out, req,
defaultConnector ? CollectionSchema.CORE_NAME : WebgraphSchema.CORE_NAME, queryRsp);
} else if(responseWriter instanceof BinaryResponseWriter) {
((BinaryResponseWriter) responseWriter).write(response.getOutputStream(), req, rsp);
} else {
out = new FastWriter(new OutputStreamWriter(response.getOutputStream(), StandardCharsets.UTF_8));
responseWriter.write(out, req, rsp);
out.flush();
}
((BinaryResponseWriter) responseWriter).write(response.getOutputStream(), req, rsp);
} else {
OutputStreamWriter osw = new OutputStreamWriter(response.getOutputStream(), StandardCharsets.UTF_8);
responseWriter.write(osw, req, rsp);
osw.close();
}
}
} catch (final Throwable ex) {
sendError(hresponse, ex);
@ -387,12 +383,12 @@ public class SolrSelectServlet extends HttpServlet {
}
SolrRequestInfo.clearRequestInfo();
if (out != null) {
try {
out.close();
} catch (final IOException e1) {
ConcurrentLog.info("SolrSelect", "Could not close output writer."
+ (e1.getMessage() != null ? "Cause : " + e1.getMessage() : ""));
}
try {
out.close();
} catch (final IOException e1) {
ConcurrentLog.info("SolrSelect", "Could not close output writer."
+ (e1.getMessage() != null ? "Cause : " + e1.getMessage() : ""));
}
}
}
}

@ -84,7 +84,6 @@ import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.core.SolrInfoMBean;
import org.apache.lucene.util.Version;
public final class Fulltext {
@ -236,12 +235,6 @@ public final class Fulltext {
}
}
public Map<String, SolrInfoMBean> getSolrInfoBeans() {
EmbeddedSolrConnector esc = this.solrInstances.getDefaultEmbeddedConnector();
if (esc == null) return new HashMap<String, SolrInfoMBean>();
return esc.getSolrInfoBeans();
}
public int bufferSize() {
return this.solrInstances.bufferSize();
}
