# Created by Yifan @ 2008/11/3
# Sorry! You will see no welcome description other than this line, because we want to limit the total output bytes.

# Section A: Version 1.0 standard
# Please add disallows in this section!
# DO NOT add allows in this section, since "Allow" is only allowed in robots.txt version 2.0.
User-agent: *
Disallow: /web/vcodeimg.aspx           # verification code image
Disallow: /plugin/doc/docEdit.aspx     # user comment edit page
Disallow: /~/plugin/doc/docEdit.aspx   # user comment edit page

# Section B: Non-standard
Crawl-delay: 30
Sitemap: http://www.widemax.com.cn/web/sitemapsxml.aspx?sid=10041228   # automatically generated by robots.aspx

# Section C: Version 2.0 standard
# Please check the following URL:
# http://www.conman.org/people/spc/robots2.html#format.directives.robot-version
# You can add allows here, e.g.:
# Allow: /

# Request-rate hints:
# Request-rate for the given UTC time window; period unit: h = hour, m = minute, omitted = second
Request-rate: 1/1h 0030-0329   # 0830-1129 Beijing time (UTC+8)
Request-rate: 3/1h 0330-0529   # 1130-1329 Beijing time (UTC+8)
Request-rate: 1/1h 0530-1029   # 1330-1829 Beijing time (UTC+8)
Request-rate: 1/6m 1030-1329   # 1830-2129 Beijing time (UTC+8)
Request-rate: 1/1m 1330-0029   # 2130-0829 (+1 day) Beijing time (UTC+8)

# End of file