diff --git a/docs/configs/settings.md b/docs/configs/settings.md
index 62df7e91c..035da32cd 100644
--- a/docs/configs/settings.md
+++ b/docs/configs/settings.md
@@ -571,3 +571,18 @@ or per service widget (`services.yaml`) with:
```
If either value is set to true, the error message will be hidden.
+
+## Disable Search Engine Indexing
+
+You can request that search engines not index your Homepage instance by enabling the `disableIndexing` setting.
+
+```yaml
+disableIndexing: true
+```
+
+When enabled, this will:
+
+- Disallow all crawlers in `robots.txt`
+- Add `<meta name="robots" content="noindex" />` tags to prevent indexing
+
+By default this feature is disabled.
diff --git a/src/pages/index.jsx b/src/pages/index.jsx
index e1893e70a..e66a19d9e 100644
--- a/src/pages/index.jsx
+++ b/src/pages/index.jsx
@@ -400,6 +400,7 @@ function Home({ initialSettings }) {
"A highly customizable homepage (or startpage / application dashboard) with Docker and service API integrations."
}
/>
+ {settings.disableIndexing && <meta name="robots" content="noindex" />}
{settings.base && <base href={settings.base} />}
{settings.favicon ? (
<>
diff --git a/src/pages/robots.txt.js b/src/pages/robots.txt.js
new file mode 100644
index 000000000..bced34f8b
--- /dev/null
+++ b/src/pages/robots.txt.js
@@ -0,0 +1,19 @@
+import { getSettings } from "utils/config/config";
+
+export async function getServerSideProps({ res }) {
+ const settings = getSettings();
+ const content = ["User-agent: *", !!settings.disableIndexing ? "Disallow: /" : "Allow: /"].join("\n");
+
+ res.setHeader("Content-Type", "text/plain");
+ res.write(content);
+ res.end();
+
+ return {
+ props: {},
+ };
+}
+
+export default function RobotsTxt() {
+ // placeholder component
+ return null;
+}