(Node.js) Fetch robots.txt for a Site

The Chilkat Spider library is robots.txt compliant. It automatically fetches a site's robots.txt file and adheres to it: it will not download pages denied by robots.txt, and pages excluded by robots.txt will not appear in the Spider's "unspidered" list. This example shows how to explicitly download and review the robots.txt for a given site.
var os = require('os');
if (os.platform() == 'win32') {
    var chilkat = require('@chilkat/ck-node23-win64');
} else if (os.platform() == 'linux') {
    if (os.arch() == 'arm') {
        var chilkat = require('@chilkat/ck-node23-linux-arm');
    } else if (os.arch() == 'arm64') {
        var chilkat = require('@chilkat/ck-node23-linux-arm64');
    } else {
        var chilkat = require('@chilkat/ck-node23-linux-x64');
    }
} else if (os.platform() == 'darwin') {
    var chilkat = require('@chilkat/ck-node23-mac-universal');
}

function chilkatExample() {
    var spider = new chilkat.Spider();

    // Initialize the spider with the domain of the site to be crawled.
    spider.Initialize("www.chilkatsoft.com");

    // Explicitly download the site's robots.txt and return its contents as a string.
    var robotsText = spider.FetchRobotsText();
    console.log(robotsText);
}

chilkatExample();
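Once the robots.txt text has been fetched, it can be reviewed programmatically as well as printed. The helper below is not part of the Chilkat API; it is a minimal sketch in plain JavaScript, assuming standard robots.txt syntax, that checks whether a given path is disallowed for all user agents by scanning the text returned by FetchRobotsText(). The function name isDisallowedForAll and its parameters are hypothetical, introduced only for illustration.

// Hypothetical helper (not a Chilkat method): returns true if robots.txt
// disallows the given path for the wildcard user agent ("User-agent: *").
function isDisallowedForAll(robotsText, path) {
    var lines = robotsText.split(/\r?\n/);
    var appliesToAll = false;
    for (var i = 0; i < lines.length; i++) {
        var line = lines[i].trim();
        if (/^user-agent:\s*\*/i.test(line)) {
            // Entering a rule group that applies to all user agents.
            appliesToAll = true;
        } else if (/^user-agent:/i.test(line)) {
            // A rule group for a specific user agent; skip its rules.
            appliesToAll = false;
        } else if (appliesToAll && /^disallow:/i.test(line)) {
            var rule = line.substring(line.indexOf(':') + 1).trim();
            // An empty Disallow line allows everything; otherwise match by prefix.
            if (rule.length > 0 && path.indexOf(rule) === 0) {
                return true;
            }
        }
    }
    return false;
}

// Example usage with the text fetched above:
// console.log(isDisallowedForAll(robotsText, "/cgi-bin/"));

Note that this is only illustrative. The Spider object already applies robots.txt rules automatically while crawling, so such a check is unnecessary for normal use.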