Update example: with-sitemap-and-robots-express-server (#4579)
I simplified the example by removing `.eslintrc.js` and related packages, as well as `nodemon`. I also added a description in the README to address the question by @kachkaev in the original pull request (#4163).
Parent: d4a54b6122
Commit: 64f3720e7f
examples/with-sitemap-and-robots-express-server/.eslintrc.js (deleted)

@@ -1,42 +0,0 @@
-module.exports = {
-  parser: "babel-eslint",
-  extends: "airbnb",
-  env: {
-    browser: true,
-    jest: true
-  },
-  plugins: ["react", "jsx-a11y", "import"],
-  rules: {
-    "max-len": ["error", 100],
-    semi: ["error", "never"],
-    quotes: ["error", "single"],
-    "comma-dangle": ["error", "never"],
-    "space-before-function-paren": ["error", "always"],
-    "no-underscore-dangle": ["error", { allow: ["_id"] }],
-    "prefer-destructuring": [
-      "error",
-      {
-        VariableDeclarator: {
-          array: false,
-          object: true
-        },
-        AssignmentExpression: {
-          array: true,
-          object: false
-        }
-      },
-      {
-        enforceForRenamedProperties: false
-      }
-    ],
-    "import/prefer-default-export": "off",
-    "jsx-a11y/anchor-is-valid": "off",
-    "react/react-in-jsx-scope": "off",
-    "react/jsx-filename-extension": [
-      "error",
-      {
-        extensions: [".js"]
-      }
-    ]
-  }
-};
examples/with-sitemap-and-robots-express-server/README.md

@@ -27,10 +27,10 @@ Install it and run:
 
 ```bash
 npm install
-npm run dev
+npm run start
 # or
 yarn
-yarn dev
+yarn start
 ```
 
 Deploy it to the cloud with [now](https://zeit.co/now) ([download](https://zeit.co/download))

@@ -48,7 +48,8 @@ The app is deployed at: https://sitemap-robots.now.sh. Open the page and click t
 
 Notes:
 - routes `/a` and `/b` are added to sitemap manually
-- routes that start with `/posts` are added automatically to sitemap; in a real application, you will get post slugs from a database
+- routes that start with `/posts` are added automatically to sitemap; the current example creates an array of posts (see `server/posts.js`), but in a production-level web app, you would want to update `sitemap.xml` dynamically by getting posts from a database:
+  - see [this app](https://github.com/builderbook/builderbook/blob/5f33772b8896d646cff89493853f34e61de6179a/server/sitemapAndRobots.js#L11) in which posts are fetched from a database
 
 When you start this example locally:
 - your app will run at https://localhost:8000
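The README note above describes the database-backed approach only in prose, so here is a minimal sketch of what serving `sitemap.xml` from the example's Express server could look like. It is not code from this commit: only the `sitemap@^1.13.0` dependency comes from the example's package.json, while `setupSitemap` and `fetchPostSlugs` are hypothetical names standing in for the example's `server/posts.js` array or a real database query.

```js
// Sketch (not part of the commit): serve /sitemap.xml built from post slugs.
const sm = require('sitemap') // sitemap@1.x, as declared in package.json

// Static routes are listed up front, mirroring the README note about /a and /b.
const sitemap = sm.createSitemap({
  hostname: 'https://sitemap-robots.now.sh',
  cacheTime: 600000, // serve the same XML for 10 minutes before regenerating
  urls: [
    { url: '/a', changefreq: 'daily', priority: 1 },
    { url: '/b', changefreq: 'daily', priority: 1 }
  ]
})

// `fetchPostSlugs` is a hypothetical data-access function; in the example it
// would read server/posts.js, in production it would query a database.
async function setupSitemap (server, fetchPostSlugs) {
  const slugs = await fetchPostSlugs()
  slugs.forEach((slug) => {
    sitemap.add({ url: `/posts/${slug}`, changefreq: 'daily', priority: 0.9 })
  })

  server.get('/sitemap.xml', (req, res) => {
    sitemap.toXML((err, xml) => {
      if (err) {
        res.status(500).end()
        return
      }
      res.header('Content-Type', 'application/xml')
      res.send(xml)
    })
  })
}

module.exports = setupSitemap
```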
examples/with-sitemap-and-robots-express-server/package.json

@@ -5,7 +5,6 @@
   "scripts": {
     "build": "next build",
     "start": "node server/app.js",
-    "dev": "nodemon server/app.js --watch server",
     "now": "now && now alias"
   },
   "dependencies": {

@@ -14,13 +13,5 @@
     "react": "^16.2.0",
     "react-dom": "^16.2.0",
     "sitemap": "^1.13.0"
-  },
-  "devDependencies": {
-    "eslint": "^4.15.0",
-    "eslint-config-airbnb": "^16.1.0",
-    "eslint-plugin-import": "^2.8.0",
-    "eslint-plugin-jsx-a11y": "^6.0.3",
-    "eslint-plugin-react": "^7.5.1",
-    "nodemon": "^1.14.11"
-  }
+  }
 }
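The `start` script above points at `server/app.js`, which this diff does not touch. For orientation, this is a rough sketch of the usual Express + Next.js custom-server wiring such a script implies; the robots.txt handling and file layout are assumptions, and only the port matches the "localhost:8000" mentioned in the README.

```js
// Sketch (not from this commit) of a server/app.js-style entry point.
const express = require('express')
const next = require('next')
const path = require('path')

const dev = process.env.NODE_ENV !== 'production'
const port = process.env.PORT || 8000 // README says the app runs on port 8000
const app = next({ dev })
const handle = app.getRequestHandler()

app.prepare().then(() => {
  const server = express()

  // Serve robots.txt directly from Express so crawlers never hit Next.js.
  // A static file is assumed here; the real example may build it differently.
  server.get('/robots.txt', (req, res) => {
    res.sendFile(path.join(__dirname, 'robots.txt'))
  })

  // Hand every other request (/, /a, /b, /posts/...) to Next.js.
  server.get('*', (req, res) => handle(req, res))

  server.listen(port, (err) => {
    if (err) throw err
    console.log(`> Ready on http://localhost:${port}`)
  })
})
```

With the `dev` script removed, file-watching restarts during development can still be had ad hoc with something like `npx nodemon server/app.js --watch server`, which is the same command the removed script used.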
examples/with-sitemap-and-robots-express-server/pages/index.js

@@ -1,25 +1,20 @@
-/* eslint-disable */
 import React from 'react'
 import Head from 'next/head'
 
-function Index () {
-  return (
-    <div style={{ padding: '10px 45px' }}>
-      <Head>
-        <title>Index page</title>
-        <meta name='description' content='description for indexing bots' />
-      </Head>
-      <p>
-        <a href='/sitemap.xml' target='_blank'>
-          Sitemap
-        </a>
-        <br />
-        <a href='/robots.txt' target='_blank'>
-          Robots
-        </a>
-      </p>
-    </div>
-  )
-}
-
-export default Index
+export default () => (
+  <div style={{ padding: '10px 45px' }}>
+    <Head>
+      <title>Index page</title>
+      <meta name='description' content='description for indexing bots' />
+    </Head>
+    <p>
+      <a href='/sitemap.xml' target='_blank'>
+        Sitemap
+      </a>
+      <br />
+      <a href='/robots.txt' target='_blank'>
+        Robots
+      </a>
+    </p>
+  </div>
+)