Add obj cache clearing

Redox 2023-04-22 19:07:39 +02:00
parent bfa0bfb7bd
commit 59451716d0
2 changed files with 50 additions and 39 deletions

Changed file 1 of 2

@@ -6,12 +6,12 @@ local function CreateCache(cacheId)
 	local cache = {}
 	setmetatable (cache, { __index = CACHE })
-	cache:Initialize (cacheId)
+	cache:Initialize(cacheId)
 	return cache
 end
 
-function CACHE:Initialize (cacheId)
+function CACHE:Initialize(cacheId)
 	self.Version = 3 -- Update this if the crypto library changes
 	self.Path = "pac3_cache/" .. string.lower (cacheId)
@@ -19,7 +19,7 @@ function CACHE:Initialize (cacheId)
 	file.CreateDir (self.Path)
 end
 
-function CACHE:AddItem (itemId, data)
+function CACHE:AddItem(itemId, data)
 	local hash = self:GetItemIdHash (itemId)
 	local path = self.Path .. "/" .. hash .. ".txt"
@@ -44,26 +44,34 @@ function CACHE:AddItem (itemId, data)
 	f:Close ()
 end
 
-function CACHE:Clear ()
-	for _, fileName in ipairs (file.Find (self.Path .. "/*", "DATA")) do
+function CACHE:Clear()
+	for _, fileName in ipairs(file.Find (self.Path .. "/*", "DATA")) do
 		file.Delete (self.Path .. "/" .. fileName)
 	end
 end
 
-function CACHE:ContainsItem (itemId)
-	return self:GetItem (itemId) ~= nil
+function CACHE:ClearBefore(time)
+	for _, fileName in ipairs(file.Find(self.Path .. "/*", "DATA")) do
+		if file.Time (self.Path .. "/" .. fileName, "DATA") < time then
+			file.Delete (self.Path .. "/" .. fileName)
+		end
+	end
+end
+
+function CACHE:ContainsItem(itemId)
+	return self:GetItem(itemId) ~= nil
 end
 
-function CACHE:GetItem (itemId)
-	local hash = self:GetItemIdHash (itemId)
+function CACHE:GetItem(itemId)
+	local hash = self:GetItemIdHash(itemId)
 	local path = self.Path .. "/" .. hash .. ".txt"
 
-	if not file.Exists (path, "DATA") then return nil end
+	if not file.Exists(path, "DATA") then return nil end
 
-	local f = file.Open (path, "rb", "DATA")
+	local f = file.Open(path, "rb", "DATA")
 	if not f then return nil end
 
-	local key = self:GetItemIdEncryptionKey (itemId)
+	local key = self:GetItemIdEncryptionKey(itemId)
 
 	-- Version
 	local version = f:ReadLong ()
@@ -74,8 +82,8 @@ function CACHE:GetItem (itemId)
 
 	-- Header
 	local entryItemIdLength = f:ReadLong ()
-	local entryItemId = crypto.DecryptString (f:Read (entryItemIdLength), key)
-	entryItemId = util.Decompress (entryItemId)
+	local entryItemId = crypto.DecryptString (f:Read(entryItemIdLength), key)
+	entryItemId = util.Decompress(entryItemId)
 
 	if itemId ~= entryItemId then
 		f:Close ()
@@ -84,22 +92,22 @@ function CACHE:GetItem (itemId)
 	-- Data
 	local dataLength = f:ReadLong ()
-	local data = f:Read (dataLength, key)
+	local data = f:Read(dataLength, key)
 	f:Close ()
 
-	data = crypto.DecryptString (data, key)
-	data = util.Decompress (data)
+	data = crypto.DecryptString(data, key)
+	data = util.Decompress(data)
 
 	return data
 end
 
-function CACHE:GetItemIdEncryptionKey (itemId)
-	return crypto.GenerateKey (string.reverse (itemId))
+function CACHE:GetItemIdEncryptionKey(itemId)
+	return crypto.GenerateKey(string.reverse (itemId))
 end
 
 function CACHE:GetItemIdHash(itemId)
-	return string.format ("%08x", tonumber (util.CRC (itemId)))
+	return string.format("%08x", tonumber (util.CRC (itemId)))
 end
 
 return CreateCache
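
For orientation, a minimal usage sketch of the cache API after this change, assuming CreateCache is the factory this file returns; the cache id, item id and payload below are made up, and the one-week cutoff simply mirrors the pac_obj_cache_maxage default used by the obj downloader in the second file:

local cache = CreateCache("examplecache")   -- made-up id; urlobj below uses "objcache"

cache:AddItem("some_item_id", "payload")    -- unchanged: writes an encrypted entry under pac3_cache/
print(cache:GetItem("some_item_id"))        -- unchanged: reads back "payload", or nil if missing

-- New in this commit: drop cache files whose file.Time is older than the cutoff.
cache:ClearBefore(os.time() - 604800)       -- keep only entries touched within the last week

cache:Clear()                               -- unchanged: delete everything in the cache directory

The second file wires this in for downloaded obj files: it creates the pac_obj_cache_maxage convar (default 604800 seconds) and calls urlobj.DataCache:ClearBefore(os.time() - maxAgeConvar:GetFloat()) at load time, while the existing pac_urlobj_clear_disk command keeps using Clear().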

Changed file 2 of 2

@@ -9,12 +9,15 @@ _G.pac_urlobj = nil
 urlobj.DataCache = CreateCache("objcache")
 
+local maxAgeConvar = CreateConVar("pac_obj_cache_maxage", "604800", FCVAR_ARCHIVE, "Maximum age of cache entries in seconds, default is 1 week.")
+urlobj.DataCache:ClearBefore(os.time() - maxAgeConvar:GetFloat())
+
 concommand.Add("pac_urlobj_clear_disk", function()
 	urlobj.DataCache:Clear()
-	pac.Message('Disk cache cleared')
-end, nil, 'Clears obj file cache on disk')
+	pac.Message("Disk cache cleared")
+end, nil, "Clears obj file cache on disk")
 
-local SIMULATENOUS_DOWNLOADS = CreateConVar('pac_objdl_streams', '4', {FCVAR_ARCHIVE}, 'OBJ files download streams')
+local SIMULATENOUS_DOWNLOADS = CreateConVar("pac_objdl_streams", "4", {FCVAR_ARCHIVE}, "OBJ files download streams")
 local CURRENTLY_DOWNLOADING = 0
 
 urlobj.Cache = {}
@@ -56,7 +59,7 @@ end
 function urlobj.GetObjFromURL(url, forceReload, generateNormals, callback, statusCallback)
 	if not pac_enable_urlobj:GetBool() then return end
 
-	-- if it's already downloaded just return it
+	-- if it"s already downloaded just return it
 	if callback and not forceReload and urlobj.Cache[url] then
 		callback(urlobj.Cache[url])
 		return
@@ -91,13 +94,13 @@ function urlobj.GetObjFromURL(url, forceReload, generateNormals, callback, statu
 	if callback then urlobj.Queue[url]:AddCallback (callback ) end
 	if statusCallback then
 		urlobj.Queue[url]:AddStatusCallback(function(isFinished, mStatus)
-			statusCallback(isFinished, mStatus ~= '' and mStatus or 'Queued for processing')
+			statusCallback(isFinished, mStatus ~= "" and mStatus or "Queued for processing")
 		end)
 	end
 end
 
 local thinkThreads = {}
-local PARSING_THERSOLD = CreateConVar('pac_obj_runtime', '0.002', {FCVAR_ARCHIVE}, 'Maximal parse runtime in seconds')
+local PARSING_THERSOLD = CreateConVar("pac_obj_runtime", "0.002", {FCVAR_ARCHIVE}, "Maximal parse runtime in seconds")
 local PARSE_CHECK_LINES = 30
 
 local function Think()
@@ -135,7 +138,7 @@ local function Think()
 	end
 end
 
-pac.AddHook('Think', 'parse_obj', Think)
+pac.AddHook("Think", "parse_obj", Think)
 
 local nextParsingHookId = 0
 function urlobj.CreateModelFromObjData(objData, generateNormals, statusCallback)
@@ -158,7 +161,7 @@ function urlobj.CreateModelFromObjData(objData, generateNormals, statusCallback)
 		mesh = mesh
 	})
 
-	statusCallback(false, 'Queued')
+	statusCallback(false, "Queued")
 
 	return { mesh }
 end
@@ -186,11 +189,11 @@ local table_insert = table.insert
 local Vector = Vector
 
-local facesMapper = '([0-9]+)/?([0-9]*)/?([0-9]*)'
-local numberMatch = '(-?[0-9.+-e0-9]+)'
-local vMatch = '^ *v *' .. numberMatch .. ' +' .. numberMatch .. ' +' .. numberMatch
-local vtMatch = '^ *vt *' .. numberMatch .. ' +' .. numberMatch
-local vnMatch = '^ *vn *' .. numberMatch .. ' +' .. numberMatch .. ' +' .. numberMatch
+local facesMapper = "([0-9]+)/?([0-9]*)/?([0-9]*)"
+local numberMatch = "(-?[0-9.+-e0-9]+)"
+local vMatch = "^ *v *" .. numberMatch .. " +" .. numberMatch .. " +" .. numberMatch
+local vtMatch = "^ *vt *" .. numberMatch .. " +" .. numberMatch
+local vnMatch = "^ *vn *" .. numberMatch .. " +" .. numberMatch .. " +" .. numberMatch
 
 function urlobj.ParseObj(data, generateNormals)
 	local coroutine_yield = coroutine.running () and coroutine.yield or function () end
@@ -218,8 +221,8 @@ function urlobj.ParseObj(data, generateNormals)
 	for line in string_gmatch (data, "(.-)\r?\n") do
 		if #line > 3 then
 			local first = string_sub(line, 1, 1)
-			if first ~= '#' and first ~= 'l' and first ~= 'g' and first ~= 'u' then
-				if string_sub(line, #line) == '\\' then
+			if first ~= "#" and first ~= "l" and first ~= "g" and first ~= "u" then
+				if string_sub(line, #line) == "\\" then
 					line = string_sub (line, 1, #line - 1)
 					if inContinuation then
 						continuationLines[#continuationLines + 1] = line
@@ -242,13 +245,13 @@ function urlobj.ParseObj(data, generateNormals)
 				local second = string_sub(currLine, 1, 2)
 
-				if second == 'vt' then
+				if second == "vt" then
 					vtLines[#vtLines + 1] = currLine
-				elseif second == 'vn' then
+				elseif second == "vn" then
 					vnLines[#vnLines + 1] = currLine
-				elseif first == 'v' then
+				elseif first == "v" then
 					vLines[#vLines + 1] = currLine
-				elseif first == 'f' then
+				elseif first == "f" then
 					facesPreprocess[#facesPreprocess + 1] = currLine
 				else
 					lines[#lines + 1] = currLine