fixed robots.txt

Daniele Polencic
2019-10-13 19:45:37 +08:00
parent eb6448a438
commit b77c3a7053
2 changed files with 3 additions and 2 deletions

robots.txt

@@ -1,4 +1,4 @@
 User-agent: *
 Sitemap: https://learnk8s.io/sitemap.xml
-Disallow: /404/
+Disallow: /404

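The change drops the trailing slash from the Disallow rule. robots.txt rules are plain path-prefix matches, so Disallow: /404/ only covered URLs under the /404/ directory and missed the bare /404 page itself, while Disallow: /404 covers both. A minimal TypeScript sketch of that matching behaviour (the isDisallowed helper is illustrative only, not part of this repository):

// Illustrative only: robots.txt Disallow rules act as path-prefix filters
// (an empty Disallow value disallows nothing).
function isDisallowed(path: string, rule: string): boolean {
  return rule.length > 0 && path.startsWith(rule)
}

console.log(isDisallowed('/404', '/404/'))           // false – the old rule missed the bare page
console.log(isDisallowed('/404', '/404'))            // true  – the new rule blocks it
console.log(isDisallowed('/404/index.html', '/404')) // true  – sub-paths stay blocked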

@@ -1,5 +1,5 @@
 import React from 'react'
-import { writeFileSync, readFileSync, existsSync } from 'fs'
+import { writeFileSync, readFileSync, existsSync, copyFileSync } from 'fs'
 import { resolve, extname, basename } from 'path'
 import { mkdir, cp } from 'shelljs'
 import { syncEvents } from './eventbrite'
@@ -416,6 +416,7 @@ run({
 })(Sitemap)
 writeFileSync('_site/sitemap.xml', runSiteMap(Sitemap, 'https://learnk8s.io'))
+copyFileSync('robots.txt', resolve('_site', 'robots.txt'))
 function injectGoogleAnalytics({ $, gaId }: { gaId: string; $: Cheerio }): Cheerio {
   $.find('head').append(
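
The second changed file is the site-generator script (its path is not shown in this diff view). It already writes _site/sitemap.xml, and the new copyFileSync call ships the hand-written robots.txt alongside it in the build output. A standalone sketch of that step, assuming robots.txt lives at the repository root and _site is the output directory, as the copied line suggests:

import { copyFileSync, existsSync, mkdirSync } from 'fs'
import { resolve } from 'path'

// Ensure the output directory exists (the real script uses shelljs's mkdir elsewhere).
const outDir = resolve('_site')
if (!existsSync(outDir)) {
  mkdirSync(outDir, { recursive: true })
}

// Copy the hand-written robots.txt next to the generated sitemap.xml.
copyFileSync(resolve('robots.txt'), resolve(outDir, 'robots.txt'))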