# The FULL URL to the DSpace sitemaps
# XML sitemap is listed first as it is preferred by most search engines
# Make sure to replace "[dspace.url]" with the value of your 'dspace.url' setting in your dspace.cfg file.
Sitemap: https://researchspace.auckland.ac.nz/sitemap
Sitemap: https://researchspace.auckland.ac.nz/htmlmap

##########################
# Default Access Group
# (NOTE: blank lines are not allowable in a group record)
##########################
User-agent: *
# Disable access to Discovery search and filters
Disallow: /discover
Disallow: /*discover*
Disallow: /search-filter
Disallow: /*search-filter*
Disallow: /handle/*/browse*
Disallow: /*stats*
Disallow: /*/*.pdf.txt*
Disallow: /*/license.txt*
Disallow: /statistics

User-agent: *
Crawl-delay: 180
Disallow: /advanced-search
Disallow: /community-list
Disallow: /controlledvocabulary
Disallow: /dspace-oai
Disallow: /feedback
Disallow: /feed
Disallow: /forgot
Disallow: /internal-error
Disallow: /ldap-login
Disallow: /metadata
Disallow: /mydspace
Disallow: /openURL
Disallow: /password-login
Disallow: /profile
Disallow: /register
Disallow: /retrieve
Disallow: /search
Disallow: /simple-search
Disallow: /shibboleth-login
Disallow: /statistics
Disallow: /subscribe
Disallow: /subject-search
Disallow: /suggest
Disallow: /view-workspaceitem
Disallow: /workspace
Disallow: /xmlui
Disallow: /contact
Disallow: /login

User-agent: SemrushBot
Disallow: /

User-agent: Baiduspider
Disallow: /

User-agent: PetalBot
Disallow: /

User-agent: Lucidworks-Anda/2.0
Disallow: /browse
Disallow: /discover
Disallow: /*discover*
Disallow: /search-filter
Disallow: /*search-filter*
Disallow: /handle/*/browse*
Disallow: /*browse*
Disallow: /feed
Disallow: /statistics
Disallow: /dstats
Disallow: /stats
Disallow: /forgot
Disallow: /login
Disallow: /password-login
Disallow: /ldap-login
Disallow: /shibboleth-login
Disallow: /register
Disallow: /search
Disallow: /deposit
Disallow: /profile
Disallow: /mydspace
Disallow: /subscribe
Disallow: /feedback
Disallow: /metadata
Disallow: /simple-search
Disallow: /advanced-search
Disallow: /contact-us
Disallow: /dspace-oai
Disallow: /*stats*
Disallow: /*/*.pdf.txt*
Disallow: /*/license.txt*
Disallow: /bitstream

User-agent: gsa-crawler
Disallow: /discover
Disallow: /*discover*
Disallow: /search-filter
Disallow: /*search-filter*
Disallow: /feed
Disallow: /statistics
Disallow: /stats
Disallow: /forgot
Disallow: /login
Disallow: /password-login
Disallow: /ldap-login
Disallow: /shibboleth-login
Disallow: /register
Disallow: /search
Disallow: /deposit
Disallow: /profile
Disallow: /mydspace
Disallow: /subscribe
Disallow: /feedback
Disallow: /metadata
Disallow: /simple-search
Disallow: /advanced-search
Disallow: /contact-us
Disallow: /dspace-oai
Disallow: /*stats*
Disallow: /*/*.pdf.txt*
Disallow: /*/license.txt*
Disallow: /bitstream

##############################
# from Wikipedia's robots.txt
##############################

# Funnelback crawler at 225.168.15.103.squiz.net (103.15.168.225)
User-agent: RPT-HTTPClient
Disallow: /

# advertising-related bots:
User-agent: Mediapartners-Google*
Disallow: /

# Crawlers that are kind enough to obey, but which we'd rather not have
# unless they're feeding search engines.
User-agent: UbiCrawler
Disallow: /

User-agent: DOC
Disallow: /

User-agent: Zao
Disallow: /

# Some bots are known to be trouble, particularly those designed to copy
# entire sites. Please obey robots.txt.
User-agent: sitecheck.internetseer.com
Disallow: /

User-agent: Zealbot
Disallow: /

User-agent: MSIECrawler
Disallow: /

User-agent: SiteSnagger
Disallow: /

User-agent: WebStripper
Disallow: /

User-agent: WebCopier
Disallow: /

User-agent: Fetch
Disallow: /

User-agent: Offline Explorer
Disallow: /

User-agent: Teleport
Disallow: /

User-agent: TeleportPro
Disallow: /

User-agent: WebZIP
Disallow: /

User-agent: linko
Disallow: /

User-agent: HTTrack
Disallow: /

User-agent: Microsoft.URL.Control
Disallow: /

User-agent: Xenu
Disallow: /

User-agent: larbin
Disallow: /

User-agent: libwww
Disallow: /

User-agent: ZyBORG
Disallow: /

User-agent: Download Ninja
Disallow: /

# Misbehaving: requests much too fast:
User-agent: fast
Disallow: /

# recursive wget,
User-agent: wget
Disallow: /

# The 'grub' distributed client has been *very* poorly behaved.
User-agent: grub-client
Disallow: /

# Doesn't follow robots.txt anyway, but...
User-agent: k2spider
Disallow: /

# http://www.nameprotect.com/botinfo.html
User-agent: NPBot
Disallow: /

# A capture bot, downloads gazillions of pages with no public benefit
# http://www.webreaper.net/
User-agent: WebReaper
Disallow: /

# ecommerce / pagerank bot
User-agent: dotbot
Disallow: /

# https://www.semrush.com/bot/
User-agent: SemrushBot
Disallow: /

User-agent: SemrushBot-SA
Disallow: /

User-agent: MauiBot
Disallow: /