Commit c626d5ea authored by Arkadiusz Hiler

Add robots.txt

Disallow all crawling.
Signed-off-by: Arkadiusz Hiler <arkadiusz.hiler@intel.com>
parent 69391d0d
Pipeline #87601 failed in 1 minute and 12 seconds
robots.txt (new file):
User-agent: *
Disallow: /
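These two rules tell every crawler that the whole site is off limits. As a minimal sketch (not part of the commit), Python's standard urllib.robotparser shows how a well-behaved crawler would read them; the bot name below is a placeholder:

from urllib.robotparser import RobotFileParser

# Feed the two rules above straight into the standard-library parser.
rp = RobotFileParser()
rp.parse(["User-agent: *", "Disallow: /"])

# "Disallow: /" covers every path, so any fetch is refused.
print(rp.can_fetch("ExampleBot", "/series/1/"))  # False
print(rp.can_fetch("*", "/"))                    # False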
@@ -24,6 +24,7 @@ from django.contrib import admin
 from django.contrib.auth import views as auth_views
 from rest_framework_nested import routers
 from patchwork.views.series import SeriesListView, SeriesView
+from django.views.generic import TemplateView
 import patchwork.views.api as api
 import patchwork.views
 import patchwork.views.bundle
@@ -77,6 +78,8 @@ patch_results_router.register(r'test-results', api.PatchResultViewSet,
 admin.autodiscover()
 urlpatterns = [
+    path('robots.txt', TemplateView.as_view(template_name="robots.txt", content_type="text/plain"), name="robots_file"),
     path('admin/', admin.site.urls),
     # API
...
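With the new URL pattern in place, the route can be sanity-checked with Django's test client. A minimal sketch, not part of the commit; the test class and method names are illustrative and assume the project's usual test settings:

from django.test import TestCase

class RobotsTxtTest(TestCase):
    def test_robots_txt_disallows_all_crawling(self):
        # The TemplateView registered above renders robots.txt as plain text.
        response = self.client.get("/robots.txt")
        self.assertEqual(response.status_code, 200)
        self.assertTrue(response["Content-Type"].startswith("text/plain"))
        self.assertContains(response, "User-agent: *")
        self.assertContains(response, "Disallow: /")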