luasocket/samples/check-links.lua

-----------------------------------------------------------------------------
-- Little program that checks links in HTML files, using coroutines and
-- non-blocking I/O via the dispatcher module.
-- LuaSocket sample files
-- Author: Diego Nehab
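-- Invoke as 'luasocket check-links.lua [-n] {<url>}' (see the usage message
-- below); the optional '-n' flag switches from blocking to non-blocking I/O.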
-----------------------------------------------------------------------------
local url = require("socket.url")
local dispatch = require("dispatch")
local http = require("socket.http")
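-- per-connection timeout used by the dispatcher (LuaSocket timeouts are in seconds)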
dispatch.TIMEOUT = 10
-- make sure the user knows how to invoke us
arg = arg or {}
if #arg < 1 then
    print("Usage:\n luasocket check-links.lua [-n] {<url>}")
    os.exit()
end
-- '-n' means we are running in non-blocking mode
if arg[1] == "-n" then
    -- if non-blocking I/O was requested, use real dispatcher interface
    table.remove(arg, 1)
    handler = dispatch.newhandler("coroutine")
else
    -- if using blocking I/O, use fake dispatcher interface
    handler = dispatch.newhandler("sequential")
end
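-- number of HEAD requests still in progress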
local nthreads = 0
-- get the status of a URL using the dispatcher
function getstatus(link)
    local parsed = url.parse(link, {scheme = "file"})
    if parsed.scheme == "http" then
        nthreads = nthreads + 1
        handler:start(function()
            local r, c, h, s = http.request{
                method = "HEAD",
                url = link,
                create = handler.tcp
            }
            if r and c == 200 then io.write('\t', link, '\n')
            else io.write('\t', link, ': ', tostring(c), '\n') end
            nthreads = nthreads - 1
        end)
    end
end
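-- read the contents of a local file (used for "file:" URLs)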
function readfile(path)
    path = url.unescape(path)
    local file, error = io.open(path, "r")
    if file then
        local body = file:read("*a")
        file:close()
        return body
    else return nil, error end
end
function load(u)
    local parsed = url.parse(u, { scheme = "file" })
    local body, headers, code, error
    local base = u
    if parsed.scheme == "http" then
        body, code, headers = http.request(u)
        if code == 200 then
            -- if there was a redirect, update base to reflect it
            base = headers.location or base
        end
        if not body then
            error = code
        end
    elseif parsed.scheme == "file" then
        body, error = readfile(parsed.path)
    else error = string.format("unhandled scheme '%s'", parsed.scheme) end
    return base, body, error
end
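-- extract every href in the body, made absolute with respect to base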
function getlinks(body, base)
    -- get rid of comments
    body = string.gsub(body, "%<%!%-%-.-%-%-%>", "")
    local links = {}
    -- extract links
    body = string.gsub(body, '[Hh][Rr][Ee][Ff]%s*=%s*"([^"]*)"', function(href)
        table.insert(links, url.absolute(base, href))
    end)
    body = string.gsub(body, "[Hh][Rr][Ee][Ff]%s*=%s*'([^']*)'", function(href)
        table.insert(links, url.absolute(base, href))
    end)
    string.gsub(body, "[Hh][Rr][Ee][Ff]%s*=%s*(.-)>", function(href)
        table.insert(links, url.absolute(base, href))
    end)
    return links
end
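-- load a page and queue a status check for each link found in it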
function checklinks(address)
    local base, body, error = load(address)
    if not body then print(error) return end
    print("Checking ", base)
    local links = getlinks(body, base)
    for _, link in ipairs(links) do
        getstatus(link)
    end
end
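-- check every address given on the command line; bare paths are resolved
-- into "file:" URLs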
for _, address in ipairs(arg) do
    checklinks(url.absolute("file:", address))
end
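-- keep driving the dispatcher until every pending request has finished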
while nthreads > 0 do
    handler:step()
end