# robots.txt for #[clientname]#.yacy
User-agent: *
#(all)#
#{dirs}#
# dirs
Disallow: /#[dir]##{/dirs}#
#{locked}#
# locked
Disallow: /#[page]##{/locked}#
#(wiki)#::
# wiki
Disallow: /Wiki.html#(/wiki)#
#(blog)#::
# blog
Disallow: /Blog.html
Disallow: /Blog.rss
Disallow: /Blog.xml#(/blog)#
#(news)#::
# news
Disallow: /News.html#(/news)#
#(status)#::
# status
Disallow: /Status.html#(/status)#
#(network)#::
# network
Disallow: /Network.html
Disallow: /Network.csv
Disallow: /Network.xml#(/network)#
#(homepage)#::
# homepage
Disallow: /www#(/homepage)#
#(fileshare)#::
# fileshare
Disallow: /share#(/fileshare)#
#(surftips)#::
# surftips
Disallow: /Surftips.html#(/surftips)#
::
# all
Disallow: /
#(/all)#