Move cache into a global directory
Old approach vs. new:

* Silently creating hidden files in a project directory by default is not very nice, so caching was opt-in. Creating files under ~/.cache, on the other hand, is fine; still, keep it opt-in for one more version.
* Iterating through a single cache file adds complexity. With the new scheme it is easy to compute the cache path for each input path separately (SHA1(absolute filename)) and read only the relevant check result.
* .luacheckcache had to store modification times of all files. With one cache file per input file, the cache file's modification time can simply be compared with the input file's.
* The single-file :get and :put interface of the new cache object is easier to work with than the old one, which required building an array of files that can be cached.

Test changes: treat the cache as a black box.
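Taken together, the new interface looks roughly like the sketch below. It is assembled from the src/luacheck/cache.lua changes further down in this diff; the example file name and the empty report table are placeholders, not part of the commit.

-- Minimal usage sketch of the per-file cache interface introduced here.
local cache = require "luacheck.cache"

-- Platform-dependent default directory: $XDG_CACHE_HOME/luacheck or ~/.cache/luacheck
-- on Linux, ~/Library/Caches/Luacheck on macOS, %LOCALAPPDATA%\Luacheck\Cache on Windows.
local dir = cache.get_default_dir()

-- cache.new creates the directory if needed; on failure it returns nil
-- plus an error message.
local c, err = cache.new(dir)
assert(c, err)

-- put: stores the check result in <dir>/<SHA1(absolute filename)>;
-- returns true on success, nothing on an I/O error.
c:put("src/foo.lua", {warnings = {}, inline_options = {}, line_lengths = {}})

-- get: returns the cached result, nothing on a miss or when the source file
-- is newer than its cache entry, and nil, true on malformed cache data.
local report, malformed = c:get("src/foo.lua")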
parent 5b488ac685
commit 352a29f4ec
@ -1,5 +1,6 @@
|
||||
std = "min"
|
||||
cache = true
|
||||
include_files = {"src", "spec/*.lua", "scripts/*.lua", "*.rockspec", "*.luacheckrc"}
|
||||
exclude_files = {"src/luacheck/vendor"}
|
||||
|
||||
files["src/luacheck/unicode_printability_boundaries.lua"].max_line_length = false
|
||||
|
@ -27,6 +27,8 @@ script:
|
||||
- lua -e 'package.path="./src/?.lua;./src/?/init.lua;"..package.path' -lluacov bin/luacheck.lua luacheck-dev-1.rockspec -j2
|
||||
- lua -e 'package.preload.lanes=error;package.path="./src/?.lua;./src/?/init.lua;"..package.path' -lluacov bin/luacheck.lua --version | grep 'Not found'
|
||||
- lua -e 'package.path="./src/?.lua;./src/?/init.lua;"..package.path' -lluacov bin/luacheck.lua spec/*.lua
|
||||
- luacheck .
|
||||
- luacheck .
|
||||
|
||||
after_script:
|
||||
- luacov
|
||||
|
@ -24,7 +24,10 @@ before_build:
|
||||
build_script:
|
||||
- luarocks make
|
||||
|
||||
test_script: busted -c
|
||||
test_script:
|
||||
- busted -c
|
||||
- luacheck .
|
||||
- luacheck .
|
||||
|
||||
after_test:
|
||||
- luacov
|
||||
|
@ -79,7 +79,7 @@ bin/luacheck.lua.c: $(LUA_A) $(LFS_A) $(LANES_A)
|
||||
cp $(LANES_A) .
|
||||
cp $(LANES_DIR)/src/lanes.lua .
|
||||
cp -r ../src/luacheck .
|
||||
CC="" luastatic bin/luacheck.lua luacheck/*.lua luacheck/*/*.lua argparse.lua lanes.lua liblua.a lfs.a lanes.a
|
||||
CC="" luastatic bin/luacheck.lua luacheck/*.lua luacheck/*/*.lua luacheck/*/*/*.lua argparse.lua lanes.lua liblua.a lfs.a lanes.a
|
||||
|
||||
$(TARGET): bin/luacheck.lua.c
|
||||
$(CC) $(if $(LINUX),-static) -Os $< $(LUA_A) $(LFS_A) $(LANES_A) -I$(LUA_DIR)/src -lm $(if $(LINUX),-lpthread) -o $@
|
||||
|
@ -66,6 +66,12 @@ build = {
|
||||
["luacheck.unicode"] = "src/luacheck/unicode.lua",
|
||||
["luacheck.unicode_printability_boundaries"] = "src/luacheck/unicode_printability_boundaries.lua",
|
||||
["luacheck.utils"] = "src/luacheck/utils.lua",
|
||||
["luacheck.vendor.sha1"] = "src/luacheck/vendor/sha1/init.lua",
|
||||
["luacheck.vendor.sha1.bit_ops"] = "src/luacheck/vendor/sha1/bit_ops.lua",
|
||||
["luacheck.vendor.sha1.bit32_ops"] = "src/luacheck/vendor/sha1/bit32_ops.lua",
|
||||
["luacheck.vendor.sha1.common"] = "src/luacheck/vendor/sha1/common.lua",
|
||||
["luacheck.vendor.sha1.lua53_ops"] = "src/luacheck/vendor/sha1/lua53_ops.lua",
|
||||
["luacheck.vendor.sha1.pure_lua_ops"] = "src/luacheck/vendor/sha1/pure_lua_ops.lua",
|
||||
["luacheck.version"] = "src/luacheck/version.lua"
|
||||
},
|
||||
install = {
|
||||
|
@ -1,35 +1,51 @@
|
||||
local cache = require "luacheck.cache"
|
||||
local utils = require "luacheck.utils"
|
||||
|
||||
local actual_format_version
|
||||
local fs = require "luacheck.fs"
|
||||
local lfs = require "lfs"
|
||||
local sha1 = require "luacheck.vendor.sha1"
|
||||
|
||||
setup(function()
|
||||
actual_format_version = cache.format_version
|
||||
cache.format_version = 0
|
||||
end)
|
||||
|
||||
teardown(function()
|
||||
cache.format_version = actual_format_version
|
||||
end)
|
||||
|
||||
describe("cache", function()
|
||||
describe("update", function()
|
||||
local tmpname
|
||||
describe("get_default_dir", function()
|
||||
it("returns a string", function()
|
||||
assert.is_string(cache.get_default_dir())
|
||||
end)
|
||||
end)
|
||||
|
||||
describe("new", function()
|
||||
it("returns nil, error message on failure to init cache", function()
|
||||
local c, err = cache.new("LICENSE")
|
||||
assert.is_nil(c)
|
||||
assert.is_string(err)
|
||||
end)
|
||||
|
||||
it("returns Cache object on success", function()
|
||||
local c = cache.new("src")
|
||||
assert.is_table(c)
|
||||
end)
|
||||
end)
|
||||
|
||||
describe("Cache", function()
|
||||
local filename = "spec/caches/file.lua"
|
||||
local normalized_filename = fs.normalize(fs.join(fs.get_current_dir(), filename))
|
||||
local cache_dir = "spec/caches"
|
||||
local cache_filename = fs.join(cache_dir, sha1.sha1(normalized_filename))
|
||||
|
||||
local c
|
||||
|
||||
before_each(function()
|
||||
tmpname = os.tmpname()
|
||||
|
||||
-- Work around os.tmpname being broken on Windows sometimes.
|
||||
if utils.is_windows and not tmpname:find(':') then
|
||||
tmpname = os.getenv("TEMP") .. tmpname
|
||||
end
|
||||
c = cache.new(cache_dir)
|
||||
assert.is_table(c)
|
||||
end)
|
||||
|
||||
after_each(function()
|
||||
os.remove(tmpname)
|
||||
os.remove(filename)
|
||||
os.remove(cache_filename)
|
||||
end)
|
||||
|
||||
local function report(code)
|
||||
local function make_report(code)
|
||||
return {
|
||||
warnings = {
|
||||
code and {code = code}
|
||||
@ -39,156 +55,42 @@ describe("cache", function()
|
||||
}
|
||||
end
|
||||
|
||||
it("creates new cache", function()
|
||||
cache.update(tmpname, {"foo", "bar", "foo"}, {1, 2, 1}, {report "111", report(), report "112"})
|
||||
local data = utils.read_file(tmpname)
|
||||
assert.equals([[
|
||||
|
||||
0
|
||||
foo
|
||||
1
|
||||
24
|
||||
return {{{"112"}},{},{}}
|
||||
bar
|
||||
2
|
||||
17
|
||||
return {{},{},{}}
|
||||
]], data)
|
||||
end)
|
||||
|
||||
it("appends new entries", function()
|
||||
cache.update(tmpname, {"foo", "bar", "foo"}, {1, 2, 1}, {report "111", report(), report "112"})
|
||||
local ok, appended = cache.update(tmpname, {"baz"}, {3}, {report "122"})
|
||||
assert.is_true(ok)
|
||||
assert.is_true(appended)
|
||||
local data = utils.read_file(tmpname)
|
||||
assert.equals([[
|
||||
|
||||
0
|
||||
foo
|
||||
1
|
||||
24
|
||||
return {{{"112"}},{},{}}
|
||||
bar
|
||||
2
|
||||
17
|
||||
return {{},{},{}}
|
||||
baz
|
||||
3
|
||||
24
|
||||
return {{{"122"}},{},{}}
|
||||
]], data)
|
||||
end)
|
||||
|
||||
it("overwrites old entries", function()
|
||||
cache.update(tmpname, {"foo", "bar", "foo"}, {1, 2, 1}, {report "111", report(), report "112"})
|
||||
local ok, appended = cache.update(tmpname, {"baz", "foo"}, {3, 4}, {report "122", report()})
|
||||
assert.is_true(ok)
|
||||
assert.is_false(appended)
|
||||
local data = utils.read_file(tmpname)
|
||||
assert.equals([[
|
||||
|
||||
0
|
||||
foo
|
||||
4
|
||||
17
|
||||
return {{},{},{}}
|
||||
bar
|
||||
2
|
||||
17
|
||||
return {{},{},{}}
|
||||
baz
|
||||
3
|
||||
24
|
||||
return {{{"122"}},{},{}}
|
||||
]], data)
|
||||
end)
|
||||
end)
|
||||
|
||||
describe("load", function()
|
||||
describe("error handling", function()
|
||||
it("returns {} on cache with bad version", function()
|
||||
assert.same({}, cache.load("spec/caches/different_format.cache", {"foo"}, {123}))
|
||||
describe("put", function()
|
||||
it("returns nil on failure to store cache", function()
|
||||
lfs.mkdir(cache_filename)
|
||||
local ok = c:put(filename, make_report())
|
||||
assert.is_nil(ok)
|
||||
end)
|
||||
|
||||
it("returns {} on cache without version", function()
|
||||
assert.same({}, cache.load("spec/caches/old_format.cache", {"foo"}, {123}))
|
||||
end)
|
||||
|
||||
it("returns nil on cache with bad number of lines", function()
|
||||
assert.is_nil(cache.load("spec/caches/bad_lines.cache", {"foo"}, {123}))
|
||||
end)
|
||||
|
||||
it("returns nil on cache with bad mtime", function()
|
||||
assert.is_nil(cache.load("spec/caches/bad_mtime.cache", {"foo"}, {123}))
|
||||
end)
|
||||
|
||||
it("returns nil on cache with bad result", function()
|
||||
assert.is_nil(cache.load("spec/caches/bad_result.cache", {"foo"}, {123}))
|
||||
assert.is_nil(cache.load("spec/caches/bad_result2.cache", {"foo"}, {123}))
|
||||
it("returns true on successfull cache store", function()
|
||||
local ok = c:put(filename, make_report())
|
||||
assert.is_true(ok)
|
||||
end)
|
||||
end)
|
||||
|
||||
describe("loading", function()
|
||||
local tmpname
|
||||
|
||||
local foo_report = {
|
||||
warnings = {
|
||||
{code = "111", name = "not_print", line = 1, column = 1},
|
||||
{code = "111", name = "not_print", line = 4, column = 1},
|
||||
{code = "111", name = "print", line = 5, column = 1},
|
||||
{code = "111", name = "print", line = 7, column = 1},
|
||||
},
|
||||
inline_options = {
|
||||
{options = {std = "none"}, line = 3, column = 1},
|
||||
{options = {ignore = {",*"}}, line = 4, column = 10},
|
||||
{pop_count = 1, line = 5},
|
||||
{pop_count = 1, line = 6},
|
||||
{options = {std = "bad_std"}, line = 8, column = 1},
|
||||
{options = {std = "max"}, line = 1000, column = 1},
|
||||
{pop_count = 1, options = {std = "another_bad_std"}, line = 1001, column = 20},
|
||||
{pop_count = 1, line = 1002},
|
||||
},
|
||||
line_lengths = {10, 20, 30}
|
||||
}
|
||||
|
||||
local bar_report = {
|
||||
warnings = {{code = "011", line = 2, column = 4, msg = "message"}},
|
||||
inline_options = {},
|
||||
line_lengths = {40, 50}
|
||||
}
|
||||
|
||||
before_each(function()
|
||||
tmpname = os.tmpname()
|
||||
cache.update(tmpname,
|
||||
{"foo", "bar"},
|
||||
{1, 2},
|
||||
{foo_report, bar_report})
|
||||
describe("get", function()
|
||||
it("returns nil on cache miss", function()
|
||||
local report, err = c:get(filename)
|
||||
assert.is_nil(report)
|
||||
assert.is_nil(err)
|
||||
end)
|
||||
|
||||
after_each(function()
|
||||
os.remove(tmpname)
|
||||
it("returns nil on outdated cache", function()
|
||||
assert.is_true(c:put(filename, make_report()))
|
||||
io.open(filename, "w"):close()
|
||||
assert.is_true(lfs.touch(filename, os.time() + 100000))
|
||||
local report, err = c:get(filename)
|
||||
assert.is_nil(report)
|
||||
assert.is_nil(err)
|
||||
end)
|
||||
|
||||
it("loads {} from non-existent cache", function()
|
||||
assert.same({}, cache.load("non-existent.file", {"foo"}))
|
||||
end)
|
||||
|
||||
it("loads cached results", function()
|
||||
assert.same({
|
||||
foo = foo_report,
|
||||
bar = bar_report
|
||||
}, cache.load(tmpname, {"foo", "bar"}, {1, 2}))
|
||||
end)
|
||||
|
||||
it("does not load results for missing files", function()
|
||||
assert.same({foo = foo_report}, cache.load(tmpname, {"foo", "baz"}, {1, 2}))
|
||||
end)
|
||||
|
||||
it("does not load outdated results", function()
|
||||
assert.same(
|
||||
{bar = bar_report},
|
||||
cache.load(tmpname, {"foo", "bar", "baz"}, {2, 2}))
|
||||
it("returns report on success", function()
|
||||
local original_report = make_report("111")
|
||||
assert.is_true(c:put(filename, original_report))
|
||||
io.open(filename, "w"):close()
|
||||
assert.is_true(lfs.touch(filename, os.time() - 100000))
|
||||
local cached_report = c:get(filename)
|
||||
assert.same(original_report, cached_report)
|
||||
end)
|
||||
end)
|
||||
end)
|
||||
|
@ -1,4 +0,0 @@
|
||||
|
||||
0
|
||||
foo
|
||||
123
|
@ -1,5 +0,0 @@
|
||||
|
||||
0
|
||||
foo
|
||||
bar
|
||||
return {}
|
@ -1,5 +0,0 @@
|
||||
|
||||
0
|
||||
foo
|
||||
123
|
||||
return {
|
@ -1,5 +0,0 @@
|
||||
|
||||
0
|
||||
foo
|
||||
123
|
||||
return (nil)()
|
@ -1,5 +0,0 @@
|
||||
|
||||
-1
|
||||
foo
|
||||
123
|
||||
return {}
|
@ -1,3 +0,0 @@
|
||||
foo
|
||||
123
|
||||
return {}
|
@ -1,4 +1,3 @@
|
||||
local fs = require "luacheck.fs"
|
||||
local utils = require "luacheck.utils"
|
||||
local multithreading = require "luacheck.multithreading"
|
||||
local helper = require "spec.helper"
|
||||
@ -832,152 +831,6 @@ Total: 3 warnings / 0 errors in 1 file
|
||||
]], get_output "spec/samples/custom_std_inline_options.lua --config=spec/configs/custom_stds_config.luacheckrc")
|
||||
end)
|
||||
|
||||
describe("caching", function()
|
||||
local tmpname
|
||||
|
||||
before_each(function()
|
||||
tmpname = os.tmpname()
|
||||
|
||||
-- Work around os.tmpname being broken on Windows sometimes.
|
||||
if utils.is_windows and not tmpname:find(':') then
|
||||
tmpname = os.getenv("TEMP") .. tmpname
|
||||
end
|
||||
end)
|
||||
|
||||
after_each(function()
|
||||
os.remove(tmpname)
|
||||
end)
|
||||
|
||||
it("caches results", function()
|
||||
local normal_output = [[
|
||||
Checking spec/samples/good_code.lua OK
|
||||
Checking spec/samples/bad_code.lua 5 warnings
|
||||
|
||||
spec/samples/bad_code.lua:3:16: unused function 'helper'
|
||||
spec/samples/bad_code.lua:3:23: unused variable length argument
|
||||
spec/samples/bad_code.lua:7:10: setting non-standard global variable 'embrace'
|
||||
spec/samples/bad_code.lua:8:10: variable 'opt' was previously defined as an argument on line 7
|
||||
spec/samples/bad_code.lua:9:11: accessing undefined variable 'hepler'
|
||||
|
||||
Checking spec/samples/python_code.lua 1 error
|
||||
|
||||
spec/samples/python_code.lua:1:6: expected '=' near '__future__'
|
||||
|
||||
Total: 5 warnings / 1 error in 3 files
|
||||
]]
|
||||
|
||||
local mocked_output = [[
|
||||
Checking spec/samples/good_code.lua 1 error
|
||||
|
||||
spec/samples/good_code.lua:5:7: this code is actually bad
|
||||
|
||||
Checking spec/samples/bad_code.lua 5 warnings
|
||||
|
||||
spec/samples/bad_code.lua:3:16: unused function 'helper'
|
||||
spec/samples/bad_code.lua:3:23: unused variable length argument
|
||||
spec/samples/bad_code.lua:7:10: setting non-standard global variable 'embrace'
|
||||
spec/samples/bad_code.lua:8:10: variable 'opt' was previously defined as an argument on line 7
|
||||
spec/samples/bad_code.lua:9:11: accessing undefined variable 'hepler'
|
||||
|
||||
Checking spec/samples/python_code.lua 2 warnings
|
||||
|
||||
spec/samples/python_code.lua:1:1: setting non-standard global variable 'global'
|
||||
spec/samples/python_code.lua:6:8: accessing uninitialized variable 'uninit'
|
||||
|
||||
Checking spec/samples/unused_code.lua 9 warnings
|
||||
|
||||
spec/samples/unused_code.lua:3:18: unused argument 'baz'
|
||||
spec/samples/unused_code.lua:4:8: unused loop variable 'i'
|
||||
spec/samples/unused_code.lua:5:13: unused variable 'q'
|
||||
spec/samples/unused_code.lua:7:11: unused loop variable 'a'
|
||||
spec/samples/unused_code.lua:7:14: unused loop variable 'b'
|
||||
spec/samples/unused_code.lua:7:17: unused loop variable 'c'
|
||||
spec/samples/unused_code.lua:13:7: value assigned to variable 'x' is overwritten on line 14 before use
|
||||
spec/samples/unused_code.lua:14:1: value assigned to variable 'x' is overwritten on line 15 before use
|
||||
spec/samples/unused_code.lua:21:7: variable 'z' is never accessed
|
||||
|
||||
Total: 16 warnings / 1 error in 4 files
|
||||
]]
|
||||
|
||||
assert.equal(normal_output, get_output("spec/samples/good_code.lua spec/samples/bad_code.lua spec/samples/python_code.lua --std=lua52 --no-config --cache "..tmpname))
|
||||
|
||||
local cache = utils.read_file(tmpname)
|
||||
assert.string(cache)
|
||||
|
||||
local function replace_abspath(s)
|
||||
return (s:gsub("abspath{(.-)}", function(p) return fs.normalize(fs.join(fs.get_current_dir(), p)) end))
|
||||
end
|
||||
|
||||
-- luacheck: push no max string line length
|
||||
local format_version, good_mtime, bad_mtime, python_mtime = cache:match(replace_abspath([[
|
||||
|
||||
(%d+)
|
||||
abspath{spec/samples/good_code.lua}
|
||||
(%d+)
|
||||
%d+
|
||||
local A,B="561","function";return {{{A,1,1,1,1,"main_chunk"},{A,3,7,23,1,B,"helper"},{A,7,1,30,2,B,"embracer.embrace"}},{},{19,0,23,17,3,0,30,25,26,3,0,15,0},{[4]="comment"}}
|
||||
abspath{spec/samples/bad_code.lua}
|
||||
(%d+)
|
||||
%d+
|
||||
local A,B,C,D="561","helper","function","embrace";return {{{"112",1,1,7,"package",{"loaded",true}},{A,1,1,1,1,"main_chunk"},{A,3,7,26,1,C,B},{"211",3,16,21,B,true},{"212",3,23,25,"..."},{A,7,1,21,2,C,D},{"111",7,10,16,D,nil,nil,true},{"412",8,10,12,"opt",7,18,20},{"113",9,11,16,"hepler"}},{},{24,0,26,9,3,0,21,31,26,3,0,0},{[4]="comment"}}
|
||||
abspath{spec/samples/python_code.lua}
|
||||
(%d+)
|
||||
%d+
|
||||
return {{{"011",1,6,15,"expected '=' near '__future__'"}},{},{},{}}
|
||||
]]):gsub("[%[%]%-]", "%%%0"), nil)
|
||||
-- luacheck: pop
|
||||
|
||||
format_version = tonumber(format_version)
|
||||
assert.number(format_version, "Cache string is:" .. cache)
|
||||
assert.string(good_mtime)
|
||||
assert.string(bad_mtime)
|
||||
assert.string(python_mtime)
|
||||
|
||||
assert.equal(normal_output, get_output("spec/samples/good_code.lua spec/samples/bad_code.lua spec/samples/python_code.lua --std=lua52 --no-config --cache "..tmpname))
|
||||
|
||||
local python_result = 'return {{{"111", 1, 1, nil, "global"}, {"321", 6, 8, nil, "uninit"}},{},{1, 1, 1, 1, 1, 1},{}}'
|
||||
local good_result = 'return {{{"011",5,7,nil, "this code is actually bad"}},{},{},{}}'
|
||||
local bad_result = 'return {{},{},{}}'
|
||||
|
||||
local function write_new_cache(version)
|
||||
local fh = io.open(tmpname, "wb")
|
||||
assert.userdata(fh)
|
||||
|
||||
fh:write(replace_abspath([[
|
||||
%s
|
||||
abspath{spec/samples/python_code.lua}
|
||||
%s
|
||||
%d
|
||||
%s
|
||||
abspath{spec/samples/good_code.lua}
|
||||
%s
|
||||
%d
|
||||
%s
|
||||
abspath{spec/samples/bad_code.lua}
|
||||
%s
|
||||
%d
|
||||
%s
|
||||
]]):format(version,
|
||||
python_mtime, #python_result, python_result,
|
||||
good_mtime, #good_result, good_result,
|
||||
tostring(tonumber(bad_mtime) - 1), #bad_result, bad_result))
|
||||
fh:close()
|
||||
end
|
||||
|
||||
write_new_cache("\n"..tostring(format_version))
|
||||
assert.equal(mocked_output,
|
||||
get_output("spec/samples/good_code.lua spec/samples/bad_code.lua spec/samples/python_code.lua spec/samples/unused_code.lua --std=lua52 --no-config --cache "..tmpname))
|
||||
assert.equal(mocked_output,
|
||||
get_output("spec/samples/good_code.lua spec/samples/bad_code.lua spec/samples/python_code.lua spec/samples/unused_code.lua --std=lua52 --no-config --cache "..tmpname))
|
||||
|
||||
write_new_cache("\n"..tostring(format_version + 1))
|
||||
assert.equal(normal_output, get_output("spec/samples/good_code.lua spec/samples/bad_code.lua spec/samples/python_code.lua --std=lua52 --no-config --cache "..tmpname))
|
||||
|
||||
write_new_cache("")
|
||||
assert.equal(normal_output, get_output("spec/samples/good_code.lua spec/samples/bad_code.lua spec/samples/python_code.lua --std=lua52 --no-config --cache "..tmpname))
|
||||
end)
|
||||
end)
|
||||
|
||||
if not multithreading.has_lanes then
|
||||
pending("uses multithreading")
|
||||
else
|
||||
|
@ -26,7 +26,8 @@ function helper.luacov_config(prefix)
|
||||
modules = {
|
||||
luacheck = "src/luacheck/init.lua",
|
||||
["luacheck.*"] = "src",
|
||||
["luacheck.*.*"] = "src"
|
||||
["luacheck.*.*"] = "src",
|
||||
["luacheck.*.*.*"] = "src"
|
||||
},
|
||||
exclude = {
|
||||
"bin/luacheck$"
|
||||
|
@ -1,219 +1,139 @@
|
||||
local fs = require "luacheck.fs"
|
||||
local serializer = require "luacheck.serializer"
|
||||
local sha1 = require "luacheck.vendor.sha1"
|
||||
local utils = require "luacheck.utils"
|
||||
|
||||
local cache = {}
|
||||
|
||||
-- Cache file contains check results for n unique filenames.
|
||||
-- Header format:
|
||||
-- \n(cache format version number)\n
|
||||
-- File record format:
|
||||
-- (file name)\n(file modification time)\n(serialized result length)\n(serialized result)\n
|
||||
-- Check results can be cached inside a given cache directory.
|
||||
-- Check result for a file is stored in `<cache_dir>/<SHA1(filename)>`.
|
||||
-- Cache file format: <format_version>\n<filename>\n<serialized check result`.
|
||||
|
||||
cache.format_version = 35
|
||||
-- Returns default cache directory or nothing.
|
||||
function cache.get_default_dir()
|
||||
if utils.is_windows then
|
||||
local local_app_data_dir = os.getenv("LOCALAPPDATA")
|
||||
|
||||
-- Reads a file record (table with fields `filename`, `mtime`, and `serialized_result`).
|
||||
-- Returns file record or nil + flag indicating whether EOF was reached.
|
||||
local function read_record(fh)
|
||||
local filename = fh:read()
|
||||
if not local_app_data_dir then
|
||||
local user_profile_dir = os.getenv("USERPROFILE")
|
||||
|
||||
if not filename then
|
||||
return nil, true
|
||||
end
|
||||
|
||||
if filename:sub(-1) == "\r" then
|
||||
filename = filename:sub(1, -2)
|
||||
end
|
||||
|
||||
local mtime = tonumber((fh:read()))
|
||||
|
||||
if not mtime then
|
||||
return nil, false
|
||||
end
|
||||
|
||||
local serialized_result_length = tonumber((fh:read()))
|
||||
|
||||
if not serialized_result_length then
|
||||
return nil, false
|
||||
end
|
||||
|
||||
local serialized_result = fh:read(serialized_result_length)
|
||||
|
||||
if not serialized_result or #serialized_result ~= serialized_result_length then
|
||||
return nil, false
|
||||
end
|
||||
|
||||
if not fh:read() then
|
||||
return nil, false
|
||||
end
|
||||
|
||||
return {
|
||||
filename = filename,
|
||||
mtime = mtime,
|
||||
serialized_result = serialized_result
|
||||
}
|
||||
end
|
||||
|
||||
-- Returns array of file records from cache fh.
|
||||
local function read_records(fh)
|
||||
local records = {}
|
||||
|
||||
while true do
|
||||
local record = read_record(fh)
|
||||
|
||||
if not record then
|
||||
break
|
||||
if user_profile_dir then
|
||||
local_app_data_dir = fs.join(user_profile_dir, "Local Settings", "Application Data")
|
||||
end
|
||||
end
|
||||
|
||||
table.insert(records, record)
|
||||
end
|
||||
if local_app_data_dir then
|
||||
return fs.join(local_app_data_dir, "Luacheck", "Cache")
|
||||
end
|
||||
else
|
||||
local fh = assert(io.popen("uname -s"))
|
||||
local system = fh:read("*l")
|
||||
fh:close()
|
||||
|
||||
return records
|
||||
end
|
||||
if system == "Darwin" then
|
||||
local home_dir = os.getenv("HOME")
|
||||
|
||||
-- Writes an array of file records into fh.
|
||||
local function write_records(fh, records)
|
||||
for _, record in ipairs(records) do
|
||||
fh:write(record.filename, "\n")
|
||||
fh:write(tonumber(record.mtime), "\n")
|
||||
fh:write(tonumber(#record.serialized_result), "\n")
|
||||
fh:write(record.serialized_result, "\n")
|
||||
if home_dir then
|
||||
return fs.join(home_dir, "Library", "Caches", "Luacheck")
|
||||
end
|
||||
else
|
||||
local config_home_dir = os.getenv("XDG_CACHE_HOME")
|
||||
|
||||
if not config_home_dir then
|
||||
local home_dir = os.getenv("HOME")
|
||||
|
||||
if home_dir then
|
||||
config_home_dir = fs.join(home_dir, ".cache")
|
||||
end
|
||||
end
|
||||
|
||||
if config_home_dir then
|
||||
return fs.join(config_home_dir, "luacheck")
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
local function check_version_header(fh)
|
||||
local first_line = fh:read()
|
||||
local format_version = "1"
|
||||
|
||||
if first_line ~= "" and first_line ~= "\r" then
|
||||
return false
|
||||
local Cache = utils.class()
|
||||
|
||||
function Cache:__init(cache_directory)
|
||||
local ok, err = fs.make_dirs(cache_directory)
|
||||
|
||||
if not ok then
|
||||
return nil, ("Couldn't initialize cache in %s: %s"):format(cache_directory, err)
|
||||
end
|
||||
|
||||
local second_line = fh:read()
|
||||
return tonumber(second_line) == cache.format_version
|
||||
self._dir = cache_directory
|
||||
self._current_dir = fs.get_current_dir()
|
||||
end
|
||||
|
||||
local function write_version_header(fh)
|
||||
fh:write("\n", tostring(cache.format_version), "\n")
|
||||
-- Caches check result for a file. Returns true on success, nothing on error.
|
||||
function Cache:put(filename, check_result)
|
||||
local normalized_filename = fs.normalize(fs.join(self._current_dir, filename))
|
||||
local cache_filename = fs.join(self._dir, sha1.sha1(normalized_filename))
|
||||
|
||||
local fh = io.open(cache_filename, "wb")
|
||||
|
||||
if not fh then
|
||||
return
|
||||
end
|
||||
|
||||
local serialized_result = serializer.dump_check_result(check_result)
|
||||
fh:write(format_version, "\n", normalized_filename, "\n", serialized_result)
|
||||
fh:close()
|
||||
return true
|
||||
end
|
||||
|
||||
-- Loads cache for filenames given mtimes from cache cache_filename.
|
||||
-- Returns table mapping filenames to cached check results.
|
||||
-- On corrupted cache returns nil, on version mismatch returns {}.
|
||||
function cache.load(cache_filename, filenames, mtimes)
|
||||
-- Retrieves cached check result for a file.
|
||||
-- Returns check result on cache hit, nothing on cache miss,
|
||||
-- nil and true on malformed cache data.
|
||||
function Cache:get(filename)
|
||||
local normalized_filename = fs.normalize(fs.join(self._current_dir, filename))
|
||||
local cache_filename = fs.join(self._dir, sha1.sha1(normalized_filename))
|
||||
|
||||
local file_mtime = fs.get_mtime(filename)
|
||||
local cache_mtime = fs.get_mtime(cache_filename)
|
||||
|
||||
if not file_mtime or not cache_mtime or file_mtime >= cache_mtime then
|
||||
return
|
||||
end
|
||||
|
||||
local fh = io.open(cache_filename, "rb")
|
||||
|
||||
if not fh then
|
||||
return {}
|
||||
return
|
||||
end
|
||||
|
||||
if not check_version_header(fh) then
|
||||
if fh:read() ~= format_version then
|
||||
fh:close()
|
||||
return {}
|
||||
return
|
||||
end
|
||||
|
||||
local result = {}
|
||||
local not_yet_found = utils.array_to_set(filenames)
|
||||
|
||||
while next(not_yet_found) do
|
||||
local record, reached_eof = read_record(fh)
|
||||
|
||||
if not record then
|
||||
fh:close()
|
||||
return reached_eof and result or nil
|
||||
end
|
||||
|
||||
if not_yet_found[record.filename] then
|
||||
if mtimes[not_yet_found[record.filename]] == record.mtime then
|
||||
local check_result = serializer.load_check_result(record.serialized_result)
|
||||
|
||||
if not check_result then
|
||||
fh:close()
|
||||
return
|
||||
end
|
||||
|
||||
result[record.filename] = check_result
|
||||
end
|
||||
|
||||
not_yet_found[record.filename] = nil
|
||||
end
|
||||
if fh:read() ~= normalized_filename then
|
||||
fh:close()
|
||||
return
|
||||
end
|
||||
|
||||
local serialized_result = fh:read("*a")
|
||||
fh:close()
|
||||
|
||||
if not serialized_result then
|
||||
return nil, true
|
||||
end
|
||||
|
||||
local result = serializer.load_check_result(serialized_result)
|
||||
|
||||
if not result then
|
||||
return nil, true
|
||||
end
|
||||
|
||||
return result
|
||||
end
|
||||
|
||||
-- Updates cache at cache_filename with results for filenames.
|
||||
-- Returns success flag + whether update was append-only.
|
||||
function cache.update(cache_filename, filenames, mtimes, results)
|
||||
local old_records = {}
|
||||
local can_append = false
|
||||
local fh = io.open(cache_filename, "rb")
|
||||
|
||||
if fh then
|
||||
if check_version_header(fh) then
|
||||
old_records = read_records(fh)
|
||||
can_append = true
|
||||
end
|
||||
|
||||
fh:close()
|
||||
end
|
||||
|
||||
local filename_set = utils.array_to_set(filenames)
|
||||
local old_filename_set = {}
|
||||
|
||||
-- Update old cache for files which got a new result.
|
||||
for _, record in ipairs(old_records) do
|
||||
old_filename_set[record.filename] = true
|
||||
local file_index = filename_set[record.filename]
|
||||
|
||||
if file_index then
|
||||
can_append = false
|
||||
record.mtime = mtimes[file_index]
|
||||
record.serialized_result = serializer.dump_check_result(results[file_index])
|
||||
end
|
||||
end
|
||||
|
||||
local new_records = {}
|
||||
|
||||
for _, filename in ipairs(filenames) do
|
||||
-- Use unique index (there could be duplicate filenames).
|
||||
local file_index = filename_set[filename]
|
||||
|
||||
if file_index and not old_filename_set[filename] then
|
||||
table.insert(new_records, {
|
||||
filename = filename,
|
||||
mtime = mtimes[file_index],
|
||||
serialized_result = serializer.dump_check_result(results[file_index])
|
||||
})
|
||||
-- Do not save result for this filename again.
|
||||
filename_set[filename] = nil
|
||||
end
|
||||
end
|
||||
|
||||
if can_append then
|
||||
if #new_records > 0 then
|
||||
fh = io.open(cache_filename, "ab")
|
||||
|
||||
if not fh then
|
||||
return false
|
||||
end
|
||||
|
||||
write_records(fh, new_records)
|
||||
fh:close()
|
||||
end
|
||||
else
|
||||
fh = io.open(cache_filename, "wb")
|
||||
|
||||
if not fh then
|
||||
return false
|
||||
end
|
||||
|
||||
write_version_header(fh)
|
||||
write_records(fh, old_records)
|
||||
write_records(fh, new_records)
|
||||
fh:close()
|
||||
end
|
||||
|
||||
return true, can_append
|
||||
function cache.new(cache_directory)
|
||||
return Cache(cache_directory)
|
||||
end
|
||||
|
||||
return cache
|
||||
|
@ -1,3 +1,4 @@
|
||||
local cache = require "luacheck.cache"
|
||||
local options = require "luacheck.options"
|
||||
local builtin_standards = require "luacheck.builtin_standards"
|
||||
local fs = require "luacheck.fs"
|
||||
@ -474,7 +475,7 @@ function ConfigStack:get_top_options()
|
||||
if conf.options.cache ~= nil then
|
||||
if conf.options.cache == true then
|
||||
if not res.cache then
|
||||
res.cache = fs.normalize(fs.join(last_anchor_dir or current_dir, ".luacheckcache"))
|
||||
res.cache = fs.normalize(fs.join(last_anchor_dir or current_dir, cache.get_default_dir()))
|
||||
end
|
||||
elseif conf.options.cache == false then
|
||||
res.cache = false
|
||||
|
@ -174,6 +174,38 @@ function fs.extract_files(dir_path, pattern)
|
||||
return res, err_map
|
||||
end
|
||||
|
||||
local function make_absolute_dirs(dir_path)
|
||||
if fs.is_dir(dir_path) then
|
||||
return true
|
||||
end
|
||||
|
||||
local upper_dir = fs.normalize(fs.join(dir_path, ".."))
|
||||
|
||||
if upper_dir == dir_path then
|
||||
return nil, ("Filesystem root %s is not a directory"):format(upper_dir)
|
||||
end
|
||||
|
||||
local upper_ok, upper_err = make_absolute_dirs(upper_dir)
|
||||
|
||||
if not upper_ok then
|
||||
return nil, upper_err
|
||||
end
|
||||
|
||||
local make_ok, make_error = lfs.mkdir(dir_path)
|
||||
|
||||
if not make_ok then
|
||||
return nil, ("Couldn't make directory %s: %s"):format(dir_path, make_error)
|
||||
end
|
||||
|
||||
return true
|
||||
end
|
||||
|
||||
-- Ensures that a given path is a directory, creating intermediate directories if necessary.
|
||||
-- Returns true on success, nil and an error message on failure.
|
||||
function fs.make_dirs(dir_path)
|
||||
return make_absolute_dirs(fs.normalize(fs.join(fs.get_current_dir(), dir_path)))
|
||||
end
|
||||
|
||||
-- Returns modification time for a file.
|
||||
function fs.get_mtime(path)
|
||||
return lfs.attributes(path, "modification")
|
||||
|
@ -1,4 +1,5 @@
|
||||
local argparse = require "argparse"
|
||||
local cache = require "luacheck.cache"
|
||||
local config = require "luacheck.config"
|
||||
local luacheck = require "luacheck"
|
||||
local multithreading = require "luacheck.multithreading"
|
||||
@ -206,7 +207,8 @@ Links:
|
||||
|
||||
parser:option("--filename", "Use another filename in output and for selecting configuration overrides.")
|
||||
|
||||
local cache_opt = parser:option("--cache", "Path to cache file (default: .luacheckcache).")
|
||||
local cache_opt = parser:option("--cache", ("Path to cache directory. (default: %s)"):format(
|
||||
cache.get_default_dir()))
|
||||
:args "?"
|
||||
|
||||
local no_cache_opt = parser:flag("--no-cache", "Do not use cache.")
|
||||
|
@ -10,6 +10,7 @@ local metrics = {
|
||||
}
|
||||
|
||||
local functions = {
|
||||
{name = "sha1", module = "vendor.sha1"},
|
||||
{name = "load", module = "cache"},
|
||||
{name = "update", module = "cache"},
|
||||
{name = "decode", module = "decoder"},
|
||||
|
@ -177,41 +177,22 @@ function Runner:_prepare_inputs(inputs)
|
||||
return res
|
||||
end
|
||||
|
||||
-- Adds `mtime` field to inputs eligible for caching.
|
||||
-- On failure no field is added, most likely the file doesn't exist
|
||||
-- or is unreadable and it's better to get the error when trying to read it.
|
||||
local function add_mtimes(inputs)
|
||||
for _, input in ipairs(inputs) do
|
||||
if input.path and not input.fatal then
|
||||
input.mtime = fs.get_mtime(input.path)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- Loads cached reports for input with `mtime` field, assigns them to `cached_report` field.
|
||||
-- Returns true on success or nil and an error message on failure.
|
||||
-- Loads cached reports for inputs with `path` field, assigns them to `cached_report` field.
|
||||
-- For each file on cache load error sets its `fatal` and `msg` fields.
|
||||
function Runner:_add_cached_reports(inputs)
|
||||
local potentially_cached_filenames = {}
|
||||
local mtimes = {}
|
||||
|
||||
for _, input in ipairs(inputs) do
|
||||
if input.mtime then
|
||||
table.insert(potentially_cached_filenames, input.abs_path)
|
||||
table.insert(mtimes, input.mtime)
|
||||
if not input.fatal and input.path then
|
||||
local report, err = self._cache:get(input.path)
|
||||
|
||||
if err then
|
||||
input.fatal = "I/O"
|
||||
input.msg = ("Couldn't load cache for %s from %s: malformed data"):format(
|
||||
self._top_opts.cache, input.path)
|
||||
else
|
||||
input.cached_report = report
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
local filename_to_cached_report = cache.load(self._top_opts.cache, potentially_cached_filenames, mtimes)
|
||||
|
||||
if not filename_to_cached_report then
|
||||
return nil, ("Couldn't load cache from %s: data corrupted"):format(self._top_opts.cache)
|
||||
end
|
||||
|
||||
for _, input in ipairs(inputs) do
|
||||
input.cached_report = filename_to_cached_report[input.abs_path]
|
||||
end
|
||||
|
||||
return true
|
||||
end
|
||||
|
||||
-- Adds report as `new_report` field to all inputs that don't have a fatal error or a cached report.
|
||||
@ -250,29 +231,17 @@ end
|
||||
-- Saves `new_report` for files eligible for caching to cache.
|
||||
-- Returns true on success or nil and an error message on failure.
|
||||
function Runner:_save_new_reports_to_cache(inputs)
|
||||
local filenames = {}
|
||||
local mtimes = {}
|
||||
local reports = {}
|
||||
|
||||
for _, input in ipairs(inputs) do
|
||||
if input.new_report and input.path then
|
||||
-- If report for a file could be cached but getting its `mtime` has failed,
|
||||
-- ignore the error - report is already here, might as well return it.
|
||||
if input.mtime then
|
||||
table.insert(filenames, input.abs_path)
|
||||
table.insert(mtimes, input.mtime)
|
||||
table.insert(reports, input.new_report)
|
||||
local ok = self._cache:put(input.path, input.new_report)
|
||||
|
||||
if not ok then
|
||||
return nil, ("Couldn't save cache for %s from %s: I/O error"):format(input.path, self._top_opts.cache)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
local ok = cache.update(self._top_opts.cache, filenames, mtimes, reports)
|
||||
|
||||
if ok then
|
||||
return true
|
||||
else
|
||||
return nil, ("Couldn't save cache to %s: I/O error"):format(self._top_opts.cache)
|
||||
end
|
||||
return true
|
||||
end
|
||||
|
||||
-- Inputs are prepared here, see `Runner:_prepare_inputs`.
|
||||
@ -280,12 +249,14 @@ end
|
||||
-- On critical error returns nil and an error message.
|
||||
function Runner:_get_reports(inputs)
|
||||
if self._top_opts.cache then
|
||||
add_mtimes(inputs)
|
||||
local ok, err = self:_add_cached_reports(inputs)
|
||||
local err
|
||||
self._cache, err = cache.new(self._top_opts.cache)
|
||||
|
||||
if not ok then
|
||||
if not self._cache then
|
||||
return nil, err
|
||||
end
|
||||
|
||||
self:_add_cached_reports(inputs)
|
||||
end
|
||||
|
||||
self:_add_new_reports(inputs)
|
||||
|
@ -1,4 +1,7 @@
|
||||
local unpack = table.unpack or unpack -- luacheck: compat
|
||||
-- luacheck: push compat
|
||||
local unpack = table.unpack or unpack
|
||||
local pack = table.pack or function(...) return {n = select("#", ...), ...} end
|
||||
-- luacheck: pop
|
||||
|
||||
local utils = {}
|
||||
|
||||
@ -126,7 +129,11 @@ function class_metatable.__call(class, ...)
|
||||
local obj = setmetatable({}, class)
|
||||
|
||||
if class.__init then
|
||||
class.__init(obj, ...)
|
||||
local init_returns = pack(class.__init(obj, ...))
|
||||
|
||||
if init_returns.n > 0 then
|
||||
return unpack(init_returns, 1, init_returns.n)
|
||||
end
|
||||
end
|
||||
|
||||
return obj
|
||||
|
src/luacheck/vendor/sha1/LICENSE (vendored, new file, 21 lines)
@ -0,0 +1,21 @@
|
||||
Copyright (c) 2013 Enrique García Cota, Eike Decker, Jeffrey Friedl
|
||||
Copyright (c) 2018 Peter Melnichenko
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a
|
||||
copy of this software and associated documentation files (the
|
||||
"Software"), to deal in the Software without restriction, including
|
||||
without limitation the rights to use, copy, modify, merge, publish,
|
||||
distribute, sublicense, and/or sell copies of the Software, and to
|
||||
permit persons to whom the Software is furnished to do so, subject to
|
||||
the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included
|
||||
in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||||
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
||||
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
src/luacheck/vendor/sha1/bit32_ops.lua (vendored, new file, 24 lines)
@ -0,0 +1,24 @@
|
||||
local bit32 = require "bit32"
|
||||
|
||||
local ops = {}
|
||||
|
||||
local band = bit32.band
|
||||
local bor = bit32.bor
|
||||
local bxor = bit32.bxor
|
||||
|
||||
ops.uint32_lrot = bit32.lrotate
|
||||
ops.byte_xor = bxor
|
||||
ops.uint32_xor_3 = bxor
|
||||
ops.uint32_xor_4 = bxor
|
||||
|
||||
function ops.uint32_ternary(a, b, c)
|
||||
-- c ~ (a & (b ~ c)) has less bitwise operations than (a & b) | (~a & c).
|
||||
return bxor(c, band(a, bxor(b, c)))
|
||||
end
|
||||
|
||||
function ops.uint32_majority(a, b, c)
|
||||
-- (a & (b | c)) | (b & c) has less bitwise operations than (a & b) | (a & c) | (b & c).
|
||||
return bor(band(a, bor(b, c)), band(b, c))
|
||||
end
|
||||
|
||||
return ops
|
src/luacheck/vendor/sha1/bit_ops.lua (vendored, new file, 24 lines)
@ -0,0 +1,24 @@
|
||||
local bit = require "bit"
|
||||
|
||||
local ops = {}
|
||||
|
||||
local band = bit.band
|
||||
local bor = bit.bor
|
||||
local bxor = bit.bxor
|
||||
|
||||
ops.uint32_lrot = bit.rol
|
||||
ops.byte_xor = bxor
|
||||
ops.uint32_xor_3 = bxor
|
||||
ops.uint32_xor_4 = bxor
|
||||
|
||||
function ops.uint32_ternary(a, b, c)
|
||||
-- c ~ (a & (b ~ c)) has less bitwise operations than (a & b) | (~a & c).
|
||||
return bxor(c, band(a, bxor(b, c)))
|
||||
end
|
||||
|
||||
function ops.uint32_majority(a, b, c)
|
||||
-- (a & (b | c)) | (b & c) has less bitwise operations than (a & b) | (a & c) | (b & c).
|
||||
return bor(band(a, bor(b, c)), band(b, c))
|
||||
end
|
||||
|
||||
return ops
|
src/luacheck/vendor/sha1/common.lua (vendored, new file, 20 lines)
@ -0,0 +1,20 @@
|
||||
local common = {}
|
||||
|
||||
-- Merges four bytes into a uint32 number.
|
||||
function common.bytes_to_uint32(a, b, c, d)
|
||||
return a * 0x1000000 + b * 0x10000 + c * 0x100 + d
|
||||
end
|
||||
|
||||
-- Splits a uint32 number into four bytes.
|
||||
function common.uint32_to_bytes(a)
|
||||
local a4 = a % 256
|
||||
a = (a - a4) / 256
|
||||
local a3 = a % 256
|
||||
a = (a - a3) / 256
|
||||
local a2 = a % 256
|
||||
local a1 = (a - a2) / 256
|
||||
return a1, a2, a3, a4
|
||||
end
|
||||
|
||||
|
||||
return common
|
src/luacheck/vendor/sha1/init.lua (vendored, new file, 195 lines)
@ -0,0 +1,195 @@
|
||||
local common = require "luacheck.vendor.sha1.common"
|
||||
|
||||
local sha1 = {
|
||||
-- Meta fields retained for compatibility.
|
||||
_VERSION = "sha.lua 0.6.0",
|
||||
_URL = "https://github.com/mpeterv/sha1",
|
||||
_DESCRIPTION = [[
|
||||
SHA-1 secure hash and HMAC-SHA1 signature computation in Lua,
|
||||
using bit and bit32 modules and Lua 5.3 operators when available
|
||||
and falling back to a pure Lua implementation on Lua 5.1.
|
||||
Based on code orignally by Jeffrey Friedl and modified by
|
||||
Eike Decker and Enrique García Cota.]],
|
||||
_LICENSE = [[
|
||||
MIT LICENSE
|
||||
|
||||
Copyright (c) 2013 Enrique García Cota, Eike Decker, Jeffrey Friedl
|
||||
Copyright (c) 2018 Peter Melnichenko
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a
|
||||
copy of this software and associated documentation files (the
|
||||
"Software"), to deal in the Software without restriction, including
|
||||
without limitation the rights to use, copy, modify, merge, publish,
|
||||
distribute, sublicense, and/or sell copies of the Software, and to
|
||||
permit persons to whom the Software is furnished to do so, subject to
|
||||
the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included
|
||||
in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||||
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
||||
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.]]
|
||||
}
|
||||
|
||||
sha1.version = "0.6.0"
|
||||
|
||||
local function choose_ops()
|
||||
if _VERSION:find("5%.3") then
|
||||
return "lua53_ops"
|
||||
elseif pcall(require, "bit") then
|
||||
return "bit_ops"
|
||||
elseif pcall(require, "bit32") then
|
||||
return "bit32_ops"
|
||||
else
|
||||
return "pure_lua_ops"
|
||||
end
|
||||
end
|
||||
|
||||
local ops = require("sha1." .. choose_ops())
|
||||
local uint32_lrot = ops.uint32_lrot
|
||||
local byte_xor = ops.byte_xor
|
||||
local uint32_xor_3 = ops.uint32_xor_3
|
||||
local uint32_xor_4 = ops.uint32_xor_4
|
||||
local uint32_ternary = ops.uint32_ternary
|
||||
local uint32_majority = ops.uint32_majority
|
||||
|
||||
local bytes_to_uint32 = common.bytes_to_uint32
|
||||
local uint32_to_bytes = common.uint32_to_bytes
|
||||
|
||||
local sbyte = string.byte
|
||||
local schar = string.char
|
||||
local sformat = string.format
|
||||
local srep = string.rep
|
||||
|
||||
local function hex_to_binary(hex)
|
||||
return (hex:gsub("..", function(hexval)
|
||||
return schar(tonumber(hexval, 16))
|
||||
end))
|
||||
end
|
||||
|
||||
-- Calculates SHA1 for a string, returns it encoded as 40 hexadecimal digits.
|
||||
function sha1.sha1(str)
|
||||
-- Input preprocessing.
|
||||
-- First, append a `1` bit and seven `0` bits.
|
||||
local first_append = schar(0x80)
|
||||
|
||||
-- Next, append some zero bytes to make the length of the final message a multiple of 64.
|
||||
-- Eight more bytes will be added next.
|
||||
local non_zero_message_bytes = #str + 1 + 8
|
||||
local second_append = srep(schar(0), -non_zero_message_bytes % 64)
|
||||
|
||||
-- Finally, append the length of the original message in bits as a 64-bit number.
|
||||
-- Assume that it fits into the lower 32 bits.
|
||||
local third_append = schar(0, 0, 0, 0, uint32_to_bytes(#str * 8))
|
||||
|
||||
str = str .. first_append .. second_append .. third_append
|
||||
assert(#str % 64 == 0)
|
||||
|
||||
-- Initialize hash value.
|
||||
local h0 = 0x67452301
|
||||
local h1 = 0xEFCDAB89
|
||||
local h2 = 0x98BADCFE
|
||||
local h3 = 0x10325476
|
||||
local h4 = 0xC3D2E1F0
|
||||
|
||||
local w = {}
|
||||
|
||||
-- Process the input in successive 64-byte chunks.
|
||||
for chunk_start = 1, #str, 64 do
|
||||
-- Load the chunk into W[0..15] as uint32 numbers.
|
||||
local uint32_start = chunk_start
|
||||
|
||||
for i = 0, 15 do
|
||||
w[i] = bytes_to_uint32(sbyte(str, uint32_start, uint32_start + 3))
|
||||
uint32_start = uint32_start + 4
|
||||
end
|
||||
|
||||
-- Extend the input vector.
|
||||
for i = 16, 79 do
|
||||
w[i] = uint32_lrot(uint32_xor_4(w[i - 3], w[i - 8], w[i - 14], w[i - 16]), 1)
|
||||
end
|
||||
|
||||
-- Initialize hash value for this chunk.
|
||||
local a = h0
|
||||
local b = h1
|
||||
local c = h2
|
||||
local d = h3
|
||||
local e = h4
|
||||
|
||||
-- Main loop.
|
||||
for i = 0, 79 do
|
||||
local f
|
||||
local k
|
||||
|
||||
if i <= 19 then
|
||||
f = uint32_ternary(b, c, d)
|
||||
k = 0x5A827999
|
||||
elseif i <= 39 then
|
||||
f = uint32_xor_3(b, c, d)
|
||||
k = 0x6ED9EBA1
|
||||
elseif i <= 59 then
|
||||
f = uint32_majority(b, c, d)
|
||||
k = 0x8F1BBCDC
|
||||
else
|
||||
f = uint32_xor_3(b, c, d)
|
||||
k = 0xCA62C1D6
|
||||
end
|
||||
|
||||
local temp = (uint32_lrot(a, 5) + f + e + k + w[i]) % 0x100000000
|
||||
e = d
|
||||
d = c
|
||||
c = uint32_lrot(b, 30)
|
||||
b = a
|
||||
a = temp
|
||||
end
|
||||
|
||||
-- Add this chunk's hash to result so far.
|
||||
h0 = (h0 + a) % 0x100000000
|
||||
h1 = (h1 + b) % 0x100000000
|
||||
h2 = (h2 + c) % 0x100000000
|
||||
h3 = (h3 + d) % 0x100000000
|
||||
h4 = (h4 + e) % 0x100000000
|
||||
end
|
||||
|
||||
return sformat("%08x%08x%08x%08x%08x", h0, h1, h2, h3, h4)
|
||||
end
|
||||
|
||||
function sha1.binary(str)
|
||||
return hex_to_binary(sha1.sha1(str))
|
||||
end
|
||||
|
||||
-- Precalculate replacement tables.
|
||||
local xor_with_0x5c = {}
|
||||
local xor_with_0x36 = {}
|
||||
|
||||
for i = 0, 0xff do
|
||||
xor_with_0x5c[schar(i)] = schar(byte_xor(0x5c, i))
|
||||
xor_with_0x36[schar(i)] = schar(byte_xor(0x36, i))
|
||||
end
|
||||
|
||||
-- 512 bits.
|
||||
local BLOCK_SIZE = 64
|
||||
|
||||
function sha1.hmac(key, text)
|
||||
if #key > BLOCK_SIZE then
|
||||
key = sha1.binary(key)
|
||||
end
|
||||
|
||||
local key_xord_with_0x36 = key:gsub('.', xor_with_0x36) .. srep(schar(0x36), BLOCK_SIZE - #key)
|
||||
local key_xord_with_0x5c = key:gsub('.', xor_with_0x5c) .. srep(schar(0x5c), BLOCK_SIZE - #key)
|
||||
|
||||
return sha1.sha1(key_xord_with_0x5c .. sha1.binary(key_xord_with_0x36 .. text))
|
||||
end
|
||||
|
||||
function sha1.hmac_binary(key, text)
|
||||
return hex_to_binary(sha1.hmac(key, text))
|
||||
end
|
||||
|
||||
setmetatable(sha1, {__call = function(_, str) return sha1.sha1(str) end})
|
||||
|
||||
return sha1
|
src/luacheck/vendor/sha1/lua53_ops.lua (vendored, new file, 29 lines)
@ -0,0 +1,29 @@
|
||||
local ops = {}
|
||||
|
||||
function ops.uint32_lrot(a, bits)
|
||||
return ((a << bits) & 0xFFFFFFFF) | (a >> (32 - bits))
|
||||
end
|
||||
|
||||
function ops.byte_xor(a, b)
|
||||
return a ~ b
|
||||
end
|
||||
|
||||
function ops.uint32_xor_3(a, b, c)
|
||||
return a ~ b ~ c
|
||||
end
|
||||
|
||||
function ops.uint32_xor_4(a, b, c, d)
|
||||
return a ~ b ~ c ~ d
|
||||
end
|
||||
|
||||
function ops.uint32_ternary(a, b, c)
|
||||
-- c ~ (a & (b ~ c)) has less bitwise operations than (a & b) | (~a & c).
|
||||
return c ~ (a & (b ~ c))
|
||||
end
|
||||
|
||||
function ops.uint32_majority(a, b, c)
|
||||
-- (a & (b | c)) | (b & c) has less bitwise operations than (a & b) | (a & c) | (b & c).
|
||||
return (a & (b | c)) | (b & c)
|
||||
end
|
||||
|
||||
return ops
|
src/luacheck/vendor/sha1/pure_lua_ops.lua (vendored, new file, 144 lines)
@ -0,0 +1,144 @@
|
||||
local common = require "luacheck.vendor.sha1.common"
|
||||
|
||||
local ops = {}
|
||||
|
||||
local bytes_to_uint32 = common.bytes_to_uint32
|
||||
local uint32_to_bytes = common.uint32_to_bytes
|
||||
|
||||
function ops.uint32_lrot(a, bits)
|
||||
local power = 2 ^ bits
|
||||
local inv_power = 0x100000000 / power
|
||||
local lower_bits = a % inv_power
|
||||
return (lower_bits * power) + ((a - lower_bits) / inv_power)
|
||||
end
|
||||
|
||||
-- Build caches for bitwise `and` and `xor` over bytes to speed up uint32 operations.
|
||||
-- Building the cache by simply applying these operators over all pairs is too slow and
|
||||
-- duplicates a lot of work over different bits of inputs.
|
||||
-- Instead, when building a cache over bytes, for each pair of bytes split both arguments
|
||||
-- into two 4-bit numbers, calculate values over these two halves, then join the results into a byte again.
|
||||
-- While there are 256 * 256 = 65536 pairs of bytes, there are only 16 * 16 = 256 pairs
|
||||
-- of 4-bit numbers, so that building an 8-bit cache given a 4-bit cache is rather efficient.
|
||||
-- The same logic is applied recursively to make a 4-bit cache from a 2-bit cache and a 2-bit
|
||||
-- cache from a 1-bit cache, which is calculated given the 1-bit version of the operator.
|
||||
|
||||
-- Returns a cache containing all values of a bitwise operator over numbers with given number of bits,
|
||||
-- given an operator over single bits.
|
||||
-- Value of `op(a, b)` is stored in `cache[a * (2 ^ bits) + b]`.
|
||||
local function make_op_cache(bit_op, bits)
|
||||
if bits == 1 then
|
||||
return {[0] = bit_op(0, 0), bit_op(0, 1), bit_op(1, 0), bit_op(1, 1)}
|
||||
end
|
||||
|
||||
local half_bits = bits / 2
|
||||
local size = 2 ^ bits
|
||||
local half_size = 2 ^ half_bits
|
||||
local half_cache = make_op_cache(bit_op, half_bits)
|
||||
|
||||
local cache = {}
|
||||
|
||||
-- The implementation used is an optimized version of the following reference one,
|
||||
-- with intermediate calculations reused and moved to the outermost loop possible.
|
||||
-- It's possible to reorder the loops and move the calculation of one of the
|
||||
-- half-results one level up, but then the cache is not filled in a proper array order
|
||||
-- and its access performance suffers.
|
||||
|
||||
-- for a1 = 0, half_size - 1 do
|
||||
-- for a2 = 0, half_size - 1 do
|
||||
-- for b1 = 0, half_size - 1 do
|
||||
-- for b2 = 0, half_size - 1 do
|
||||
-- local a = a1 * half_size + a2
|
||||
-- local b = b1 * half_size + b2
|
||||
-- local v1 = half_cache[a1 * half_size + b1]
|
||||
-- local v2 = half_cache[a2 * half_size + b2]
|
||||
-- local v = v1 * half_size + v2
|
||||
-- cache[a * size + b] = v
|
||||
-- end
|
||||
-- end
|
||||
-- end
|
||||
-- end
|
||||
|
||||
for a1 = 0, half_size - 1 do
|
||||
local a1_half_size = a1 * half_size
|
||||
|
||||
for a2 = 0, half_size - 1 do
|
||||
local a2_size = a2 * half_size
|
||||
local a_size = (a1_half_size + a2) * size
|
||||
|
||||
for b1 = 0, half_size - 1 do
|
||||
local a_size_plus_b1_half_size = a_size + b1 * half_size
|
||||
local v1_half_size = half_cache[a1_half_size + b1] * half_size
|
||||
|
||||
for b2 = 0, half_size - 1 do
|
||||
cache[a_size_plus_b1_half_size + b2] = v1_half_size + half_cache[a2_size + b2]
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
return cache
|
||||
end
|
||||
|
||||
local byte_and_cache = make_op_cache(function(a, b) return a * b end, 8)
|
||||
local byte_xor_cache = make_op_cache(function(a, b) return a == b and 0 or 1 end, 8)
|
||||
|
||||
function ops.byte_xor(a, b)
|
||||
return byte_xor_cache[a * 256 + b]
|
||||
end
|
||||
|
||||
function ops.uint32_xor_3(a, b, c)
|
||||
local a1, a2, a3, a4 = uint32_to_bytes(a)
|
||||
local b1, b2, b3, b4 = uint32_to_bytes(b)
|
||||
local c1, c2, c3, c4 = uint32_to_bytes(c)
|
||||
|
||||
return bytes_to_uint32(
|
||||
byte_xor_cache[a1 * 256 + byte_xor_cache[b1 * 256 + c1]],
|
||||
byte_xor_cache[a2 * 256 + byte_xor_cache[b2 * 256 + c2]],
|
||||
byte_xor_cache[a3 * 256 + byte_xor_cache[b3 * 256 + c3]],
|
||||
byte_xor_cache[a4 * 256 + byte_xor_cache[b4 * 256 + c4]]
|
||||
)
|
||||
end
|
||||
|
||||
function ops.uint32_xor_4(a, b, c, d)
|
||||
local a1, a2, a3, a4 = uint32_to_bytes(a)
|
||||
local b1, b2, b3, b4 = uint32_to_bytes(b)
|
||||
local c1, c2, c3, c4 = uint32_to_bytes(c)
|
||||
local d1, d2, d3, d4 = uint32_to_bytes(d)
|
||||
|
||||
return bytes_to_uint32(
|
||||
byte_xor_cache[a1 * 256 + byte_xor_cache[b1 * 256 + byte_xor_cache[c1 * 256 + d1]]],
|
||||
byte_xor_cache[a2 * 256 + byte_xor_cache[b2 * 256 + byte_xor_cache[c2 * 256 + d2]]],
|
||||
byte_xor_cache[a3 * 256 + byte_xor_cache[b3 * 256 + byte_xor_cache[c3 * 256 + d3]]],
|
||||
byte_xor_cache[a4 * 256 + byte_xor_cache[b4 * 256 + byte_xor_cache[c4 * 256 + d4]]]
|
||||
)
|
||||
end
|
||||
|
||||
function ops.uint32_ternary(a, b, c)
|
||||
local a1, a2, a3, a4 = uint32_to_bytes(a)
|
||||
local b1, b2, b3, b4 = uint32_to_bytes(b)
|
||||
local c1, c2, c3, c4 = uint32_to_bytes(c)
|
||||
|
||||
-- (a & b) + (~a & c) has less bitwise operations than (a & b) | (~a & c).
|
||||
return bytes_to_uint32(
|
||||
byte_and_cache[b1 * 256 + a1] + byte_and_cache[c1 * 256 + 255 - a1],
|
||||
byte_and_cache[b2 * 256 + a2] + byte_and_cache[c2 * 256 + 255 - a2],
|
||||
byte_and_cache[b3 * 256 + a3] + byte_and_cache[c3 * 256 + 255 - a3],
|
||||
byte_and_cache[b4 * 256 + a4] + byte_and_cache[c4 * 256 + 255 - a4]
|
||||
)
|
||||
end
|
||||
|
||||
function ops.uint32_majority(a, b, c)
|
||||
local a1, a2, a3, a4 = uint32_to_bytes(a)
|
||||
local b1, b2, b3, b4 = uint32_to_bytes(b)
|
||||
local c1, c2, c3, c4 = uint32_to_bytes(c)
|
||||
|
||||
-- (a & b) + (c & (a ~ b)) has less bitwise operations than (a & b) | (a & c) | (b & c).
|
||||
return bytes_to_uint32(
|
||||
byte_and_cache[a1 * 256 + b1] + byte_and_cache[c1 * 256 + byte_xor_cache[a1 * 256 + b1]],
|
||||
byte_and_cache[a2 * 256 + b2] + byte_and_cache[c2 * 256 + byte_xor_cache[a2 * 256 + b2]],
|
||||
byte_and_cache[a3 * 256 + b3] + byte_and_cache[c3 * 256 + byte_xor_cache[a3 * 256 + b3]],
|
||||
byte_and_cache[a4 * 256 + b4] + byte_and_cache[c4 * 256 + byte_xor_cache[a4 * 256 + b4]]
|
||||
)
|
||||
end
|
||||
|
||||
return ops
|