dirs.append(robots_url)
for line in http_response.get_body().split("\n"):
    line = line.strip()
    if len(line) > 0 and line[0] != "#" and \
        (line.upper().find("ALLOW") == 0 or
         line.upper().find("DISALLOW") == 0):
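The body of this loop is not shown in the fragment above. As a rough, self-contained sketch of how such Allow/Disallow lines are typically turned into absolute URLs (the extract_robots_urls name, the urljoin call, and the example URL are illustrative assumptions, not part of the original plugin):

from urllib.parse import urljoin

def extract_robots_urls(base_url, robots_body):
    # Collect an absolute URL for every Allow/Disallow entry in a robots.txt body.
    urls = []
    for line in robots_body.split("\n"):
        line = line.strip()
        # Skip blank lines and "#" comments; keep only Allow/Disallow directives.
        if len(line) > 0 and line[0] != "#" and \
            (line.upper().find("ALLOW") == 0 or
             line.upper().find("DISALLOW") == 0):
            # The path is whatever follows the first ":", e.g. "Disallow: /admin/".
            path = line[line.find(":") + 1:].strip()
            if path:
                urls.append(urljoin(base_url, path))
    return urls

# Example: yields ["http://example.com/admin/", "http://example.com/public/"]
print(extract_robots_urls("http://example.com/",
                          "Disallow: /admin/\nAllow: /public/"))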
After Change
return
# Send the new knowledge to the core!
self.worker_pool.map(self.http_get_and_parse, urls)
# Save it to the kb!
desc = ("A robots.txt file was found at: \"%s\", this file might"
        " expose private URLs and requires a manual review. The"