luci-app-passwall: optimize rule update

ShanStone 2022-10-09 19:27:38 +08:00 committed by sbwml
parent 8641672350
commit 4d93b86d8f


@@ -56,11 +56,14 @@ local function trim(text)
 end
 -- curl
-local function curl(url, file)
+local function curl(url, file, valifile)
 	local cmd = "curl -skL -w %{http_code} --retry 3 --connect-timeout 3 '" .. url .. "'"
 	if file then
 		cmd = cmd .. " -o " .. file
 	end
+	if valifile then
+		cmd = cmd .. " --dump-header " .. valifile
+	end
 	local stdout = luci.sys.exec(cmd)
 	if file then
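The new optional third argument makes curl() also dump the HTTP response headers to a separate file via --dump-header, so callers can later compare the server-advertised Content-Length against the file that was actually written. A simplified, self-contained sketch of how the patched helper is called; the URL and paths below are placeholders, and the return handling is reduced to just the status code:

	-- Sketch only: mirrors the patched curl() helper; URL and paths are placeholders.
	local sys = require "luci.sys"

	local function curl(url, file, valifile)
		local cmd = "curl -skL -w %{http_code} --retry 3 --connect-timeout 3 '" .. url .. "'"
		if file then
			cmd = cmd .. " -o " .. file                 -- response body goes to `file`
		end
		if valifile then
			cmd = cmd .. " --dump-header " .. valifile  -- response headers go to `valifile`
		end
		return tonumber(sys.exec(cmd)) or 0             -- -w %{http_code} prints the status code
	end

	-- Example call (placeholder URL and temp paths):
	local code = curl("https://example.com/rules.txt", "/tmp/example_dl1", "/tmp/example_vali1")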
@@ -87,12 +90,22 @@ local function line_count(file_path)
 	return num;
 end
-local function non_file_check(file_path)
+local function non_file_check(file_path, vali_file)
 	if nixio.fs.readfile(file_path, 1000) then
-		return nil;
-	else
-		return true;
-	end
+		local remote_file_size = luci.sys.exec("cat " .. vali_file .. " | grep -i Content-Length | awk '{print $2}'")
+		local local_file_size = luci.sys.exec("ls -l " .. file_path .. "| awk '{print $5}'")
+		if remote_file_size then
+			if tonumber(remote_file_size) == tonumber(local_file_size) then
+				return nil;
+			else
+				return true;
+			end
+		else
+			return nil;
+		end
+	else
+		return true;
+	end
 end
 --fetch rule
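non_file_check() now cross-checks the size of the downloaded file (ls -l, field 5) against the Content-Length recorded in the dumped headers: a mismatch returns true (broken download), a match returns nil (accept). Note that luci.sys.exec() yields an empty string rather than nil when no Content-Length line is present, so the tonumber() comparison is what actually decides the outcome. For reference only, the same check can be written without the cat and ls pipelines; this is a sketch using nixio.fs.stat and is not the code the commit adds:

	local nixio = require "nixio"
	local sys = require "luci.sys"

	-- Returns true when the download looks broken, nil when it looks complete.
	local function non_file_check(file_path, vali_file)
		if not nixio.fs.readfile(file_path, 1000) then
			return true                      -- empty or unreadable body
		end
		-- Content-Length advertised by the server, taken from the dumped headers.
		local remote = tonumber(sys.exec("grep -i 'Content-Length' " .. vali_file .. " | awk '{print $2}'"))
		local stat = nixio.fs.stat(file_path)
		local local_size = stat and stat.size or 0
		if remote and remote ~= local_size then
			return true                      -- sizes disagree: treat as truncated
		end
		return nil                           -- no header or sizes match
	end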
@@ -101,16 +114,26 @@ local function fetch_rule(rule_name,rule_type,url,exclude_domain)
 	local sret_tmp = 0
 	local domains = {}
 	local file_tmp = "/tmp/" ..rule_name.. "_tmp"
+	local vali_file = "/tmp/" ..rule_name.. "_vali"
 	local download_file_tmp = "/tmp/" ..rule_name.. "_dl"
 	local unsort_file_tmp = "/tmp/" ..rule_name.. "_unsort"
 	log(rule_name.. " 开始更新...")
 	for k,v in ipairs(url) do
-		sret_tmp = curl(v, download_file_tmp..k)
-		if sret_tmp == 200 and non_file_check(download_file_tmp..k) then
-			sret = 0
-			log(rule_name.. " 第" ..k.. "条规则:" ..v.. "下载文件读取出错,请检查网络或下载链接后重试!")
-		elseif sret_tmp == 200 then
+		sret_tmp = curl(v, download_file_tmp..k, vali_file..k)
+		if sret_tmp == 200 and non_file_check(download_file_tmp..k, vali_file..k) then
+			log(rule_name.. " 第" ..k.. "条规则:" ..v.. "下载文件读取出错,尝试重新下载。")
+			os.remove(download_file_tmp..k)
+			os.remove(vali_file..k)
+			sret_tmp = curl(v, download_file_tmp..k, vali_file..k)
+			if sret_tmp == 200 and non_file_check(download_file_tmp..k, vali_file..k) then
+				sret = 0
+				sret_tmp = 0
+				log(rule_name.. " 第" ..k.. "条规则:" ..v.. "下载文件读取出错,请检查网络或下载链接后重试!")
+			end
+		end
+		if sret_tmp == 200 then
 			if rule_name == "gfwlist" then
 				local domains = {}
 				local gfwlist = io.open(download_file_tmp..k, "r")
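Previously a 200 response with an unreadable or truncated body failed the rule update immediately; now the temporary body and header files are removed and the download is attempted a second time (the first log message reads, roughly, "rule #k: error reading the downloaded file, retrying"), and only when the retry also fails are sret and sret_tmp reset to 0 and the final error logged. A hedged sketch of that retry-once flow, factored into helpers; fetch_once and fetch_with_retry are illustrative names, not part of the commit:

	-- Sketch of the retry-once flow above. curl() and non_file_check() are the
	-- functions from this file; the helper names are hypothetical.
	local function fetch_once(url, body_file, header_file)
		local code = curl(url, body_file, header_file)
		local broken = (code == 200) and non_file_check(body_file, header_file)
		return code, broken
	end

	local function fetch_with_retry(url, body_file, header_file)
		local code, broken = fetch_once(url, body_file, header_file)
		if broken then
			os.remove(body_file)            -- discard the truncated body
			os.remove(header_file)          -- and its header dump
			code, broken = fetch_once(url, body_file, header_file)
		end
		if code == 200 and not broken then
			return 200                      -- usable download
		end
		return 0                            -- give up after the second failure
	end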
@@ -168,6 +191,7 @@ local function fetch_rule(rule_name,rule_type,url,exclude_domain)
 			log(rule_name.. " 第" ..k.. "条规则:" ..v.. "下载失败,请检查网络或下载链接后重试!")
 		end
 		os.remove(download_file_tmp..k)
+		os.remove(vali_file..k)
 	end
 	if sret == 200 then