Allow robots file for crawlers
Signed-off-by: Paolo Di Tommaso <[email protected]>
pditommaso committed Mar 3, 2025
1 parent b4df7d7 commit 1cf23c9
Showing 2 changed files with 59 additions and 1 deletion.
@@ -63,7 +63,7 @@ class DenyCrawlerFilter implements HttpServerFilter {
     Publisher<MutableHttpResponse<?>> doFilter(HttpRequest<?> request, ServerFilterChain chain) {
         final userAgent = request.getHeaders().get("User-Agent")?.toLowerCase()
         // Check if the request path matches any of the ignored paths
-        if (isCrawler(userAgent)) {
+        if (isCrawler(userAgent) && request.path!='/robots.txt') {
             // Return immediately without processing the request
             log.warn("Request denied [${request.methodName}] ${request.uri}\n- Headers:${RegHelper.dumpHeaders(request)}")
             return Flux.just(HttpResponse.status(HttpStatus.METHOD_NOT_ALLOWED))
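
This one-line change lets requests for /robots.txt pass through the crawler filter, so crawlers can fetch the site's exclusion rules instead of receiving a bare 405 on every path. The handler that actually serves the file is outside this diff; a minimal sketch of such an endpoint in Micronaut could look like the following (the controller name and the disallow-all policy are illustrative assumptions, not part of this commit):

import io.micronaut.http.MediaType
import io.micronaut.http.annotation.Controller
import io.micronaut.http.annotation.Get

/**
 * Hypothetical controller serving a crawler policy at /robots.txt.
 * Not part of this commit; shown only to illustrate the path the
 * filter now lets through.
 */
@Controller("/robots.txt")
class RobotsTxtController {

    @Get(produces = MediaType.TEXT_PLAIN)
    String robots() {
        // Tell all crawlers to stay away from every path
        "User-agent: *\nDisallow: /\n"
    }
}
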
58 changes: 58 additions & 0 deletions src/test/groovy/io/seqera/wave/filter/DenyCrawlerFilterTest.groovy
@@ -0,0 +1,58 @@
/*
* Wave, containers provisioning service
* Copyright (c) 2023-2024, Seqera Labs
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/

package io.seqera.wave.filter

import spock.lang.Specification

import io.micronaut.http.HttpRequest
import io.micronaut.http.client.HttpClient
import io.micronaut.http.client.annotation.Client
import io.micronaut.http.client.exceptions.HttpClientResponseException
import io.micronaut.test.extensions.spock.annotation.MicronautTest
import jakarta.inject.Inject

/**
*
* @author Paolo Di Tommaso <[email protected]>
*/
@MicronautTest
class DenyCrawlerFilterTest extends Specification {

    @Inject
    @Client("/")
    HttpClient client

    def 'should allow robots.txt' () {
        when:
        def request = HttpRequest.GET("/robots.txt").header("User-Agent", "Googlebot")
        def resp = client.toBlocking().exchange(request, String)
        then:
        resp.status.code == 200
    }

    def 'should disallow anything else' () {
        when:
        def request = HttpRequest.GET("/service-info").header("User-Agent", "Googlebot")
        client.toBlocking().exchange(request, String)
        then:
        HttpClientResponseException e = thrown(HttpClientResponseException)
        e.status.code == 405
    }

}
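
A natural follow-up case, not included in this commit, would assert that ordinary (non-crawler) user agents are unaffected by the filter. A sketch of such an extra feature method, under the assumption that /service-info answers 200 for regular clients:

    def 'should allow regular user agents' () {
        when:
        def request = HttpRequest.GET("/service-info")
                .header("User-Agent", "Mozilla/5.0 (X11; Linux x86_64) Firefox/124.0")
        def resp = client.toBlocking().exchange(request, String)
        then:
        // the filter only rejects known crawler user agents
        resp.status.code == 200
    }
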
