Commit c626d5ea authored by Arkadiusz Hiler

Add robots.txt

Disallow all crawling.
Signed-off-by: Arkadiusz Hiler <arkadiusz.hiler@intel.com>
parent 69391d0d
Pipeline #87601 failed with stage
in 1 minute and 12 seconds
User-agent: *
Disallow: /
......@@ -24,6 +24,7 @@ from django.contrib import admin
from django.contrib.auth import views as auth_views
from rest_framework_nested import routers
from patchwork.views.series import SeriesListView, SeriesView
from django.views.generic import TemplateView
import patchwork.views.api as api
import patchwork.views
import patchwork.views.bundle
......@@ -77,6 +78,8 @@ patch_results_router.register(r'test-results', api.PatchResultViewSet,
admin.autodiscover()
urlpatterns = [
path('robots.txt', TemplateView.as_view(template_name="robots.txt", content_type="text/plain"), name="robots_file"),
path('admin/', admin.site.urls),
# API
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment