lunch-games/source/lunch/update.d

module lunch.update;
import std.datetime;
import std.stdio;
import std.traits;
import std.json;
import std.conv;
import std.file : dirEntries;
import std.path;
import std.file;
import std.math;
import std.string;
import std.digest.sha;
import std.array;
import std.format;
import std.algorithm;
import std.parallelism;
import std.datetime.stopwatch;
import lunch.conf;
import lunch.term;
import lunch.color;
import lunch.http;
import lunch.logger;
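/// A single file entry from the remote seed: path relative to the work directory,
/// download source, content hash, size and modification time.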
private struct RemoteFile
{
string file;
string source;
string hash;
ulong size;
long mtime;
}
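/// The parsed remote seed: its source, total size, latest mtime and the file list.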
private struct Remote
{
string source;
ulong size;
long mtime;
RemoteFile[] files;
}
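/// A file found in the local work directory. Its SHA-256 hash is computed lazily
/// on first access and cached.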
private struct LocalFile
{
string file;
ulong size;
long mtime;
string hash() inout @property
{
if (_hash)
return _hash;
synchronized
{
auto path = chainPath(config.updater.workdir, file).array;
auto file = File(path, "rb");
auto result = digest!SHA256(file.byChunk(4096 * 1024));
cast()_hash = result.toHexString().toLower.idup;
}
return _hash;
}
private string _hash = null;
}
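/// Snapshot of the local work directory: total size, newest mtime and the files it contains.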
private struct Local
{
ulong size;
long mtime;
LocalFile[] files;
}
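/// A single update step: either remove a local file or download a remote one to `file`.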
private struct Action
{
enum What
{
remove,
download,
}
What what;
string file;
string url;
long mtime;
alias this = what;
}
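/// All pending update steps plus the total number of bytes that will be downloaded.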
private struct Actions
{
Action[] actions;
ulong total_download_size;
alias this = actions;
}
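// Cached results of remote(), local() and actions(); the actions cache is cleared again
// once update() finishes.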
private bool _remote_set = false;
private immutable(Remote) _remote;
private bool _local_set = false;
private immutable(Local) _local;
private bool _actions_set = false;
private immutable(Actions) _actions;
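/// Returns the object stored in a JSONValue, raising an error if it is not a JSON object.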
private JSONValue[string] safe_object(JSONValue value) @trusted
{
if (value.type != JSONType.object)
errorf("JSON format error, object expected got %s", value.type);
return value.object;
}
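/// Returns the array stored in a JSONValue, raising an error if it is not a JSON array.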
private JSONValue[] safe_array(JSONValue value) @trusted
{
if (value.type != JSONType.array)
errorf("JSON format error, array expected got %s", value.type);
return value.array;
}
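/// Converts a JSON scalar to the requested basic type or string, raising an error on any mismatch.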
private T safe_get(T)(JSONValue value) @trusted
{
static if (!isBasicType!T && !isSomeString!T)
static assert(0, "Only basic types and strings are supported");
final switch (value.type)
{
case JSONType.array:
error("Arrays are not supported");
break;
case JSONType.false_:
static if (isBoolean!T)
return false;
else
errorf("Can't map boolean to %s", typeid(T));
break;
case JSONType.float_:
static if (isFloatingPoint!T)
return value.get!T;
else
errorf("Can't map float to %s", typeid(T));
break;
case JSONType.integer:
static if (isIntegral!T && isSigned!T)
return value.get!T;
else static if (isIntegral!T && isUnsigned!T)
return value.get!(Signed!T).to!T;
else
errorf("Can't map integer to %s", typeid(T));
break;
case JSONType.null_:
error("Null references are not supported");
break;
case JSONType.object:
error("Objects are not supported");
break;
case JSONType.string:
static if (isSomeString!T)
return value.get!T;
else
errorf("Can't map string to %s", typeid(T));
break;
case JSONType.true_:
static if (isBoolean!T)
return true;
else
errorf("Can't map boolean to %s", typeid(T));
break;
case JSONType.uinteger:
static if (isIntegral!T && isUnsigned!T)
return value.get!T;
else
errorf("Can't map uinteger to %s", typeid(T));
}
assert(0);
}
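/// Fills the fields of `wrapper` from `json` by matching field names to JSON keys,
/// recursing into nested structs and arrays.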
void wrapJSON(T)(ref T wrapper, JSONValue[string] json, bool ignore_missing = false) @safe
{
static foreach (name; __traits(allMembers, T))
{{
alias member = __traits(getMember, wrapper, name);
alias type = typeof(__traits(getMember, T, name));
type* field = &__traits(getMember, wrapper, name);
enum key = name;
JSONValue* value = key in json;
if (!value)
{
if (!ignore_missing)
error("Missing field \""~key~"\" of type "~type.stringof);
}
else
{
static if (isBasicType!type || isSomeString!type)
{
*field = safe_get!type(*value);
}
else static if (isArray!type)
{
alias el_type = ForeachType!type;
static if (isBasicType!el_type)
{
*field = [];
foreach (_, entry; safe_array(*value))
*field ~= entry.get!el_type;
}
else
{
*field = [];
foreach (_, entry; safe_array(*value))
{
el_type el;
wrapJSON(el, safe_object(entry));
*field ~= el;
}
}
}
else
{
wrapJSON(*field, safe_object(*value), ignore_missing);
}
}
}}
}
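/// Resolves a path relative to the configured work directory.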
private string workdir(string path)
{
return chainPath(config.updater.workdir, path).array;
}
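/// Sets the modification time of a file while preserving its access time.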
private void touch(string path, long mtime = Clock.currTime().toUnixTime)
{
infof("Touching %s", path);
SysTime old_atime, old_mtime;
getTimes(path, old_atime, old_mtime);
setTimes(path, old_atime, SysTime.fromUnixTime(mtime));
}
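/// Fetches and parses the remote seed, caching the result for the rest of the run.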
private immutable(Remote) remote()
{
if (_remote_set)
return _remote;
info("Fetching seed");
char[] seed_json;
try
seed_json = get(config.updater.seedurl);
catch (Exception ex)
errore("Failed to fetch seed", ex);
if (!seed_json)
error("Failed fetching seed : %s", config.updater.seedurl);
info("Parsing seed");
Remote rem;
try
wrapJSON(rem, parseJSON(seed_json).safe_object);
catch (Exception ex)
errore("Received malformed seed data", ex);
cast()_remote = rem;
_remote_set = true;
return _remote;
}
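/// Scans the work directory and caches a listing of the local files.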
private immutable(Local) local()
{
if (_local_set)
return _local;
info("Scanning files");
Local loc;
string workdir = absolutePath(config.updater.workdir);
if (!workdir.exists)
return Local();
if (!workdir.isDir)
errorf("\"%s\" exists and is not a directory", workdir);
foreach (entry; dirEntries(workdir, SpanMode.breadth))
{
if (!entry.isFile)
continue;
loc.files ~= LocalFile(
file: entry.name.relativePath(workdir).replace("\\", "/"),
size: entry.size.to!ulong,
mtime: entry.timeLastModified.toUnixTime,
);
loc.size += entry.size;
}
loc.mtime = loc.files.map!"a.mtime".maxElement(0);
cast()_local = loc;
_local_set = true;
return _local;
}
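/// Diffs the remote seed against the local files and caches the resulting removals and
/// downloads. Files matching an exclude pattern are ignored; files whose content is
/// unchanged but whose mtime differs are touched instead of re-downloaded.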
private immutable(Actions) actions()
{
if (_actions_set)
return _actions;
info("Checking for updates");
RemoteFile[string] rem;
foreach (file; remote.files)
rem[file.file] = file;
LocalFile[string] loc;
foreach (file; local.files)
loc[file.file] = file;
Actions act;
auto exclude = (string file) => config.updater.exclude.map!(
pat => globMatch(file, pat)
).any;
bool[string] to_be_deleted;
foreach (pair; loc.byKeyValue)
if ( !exclude(pair.value.file) )
to_be_deleted[pair.key] = true;
foreach (this_rem; rem.byValue)
{
if ( exclude(this_rem.file) )
continue;
auto this_act = Action(
Action.download,
workdir(this_rem.file),
this_rem.source,
this_rem.mtime
);
auto this_loc = this_rem.file in loc;
if (!this_loc)
{
act ~= this_act;
act.total_download_size += this_rem.size;
continue;
}
to_be_deleted.remove(this_loc.file);
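// The file exists both locally and remotely: re-download only when the content
// actually differs, otherwise just fix the timestamp.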
if (this_loc.mtime != this_rem.mtime || this_loc.size != this_rem.size)
{
if (this_loc.hash != this_rem.hash || this_loc.size != this_rem.size)
{
act ~= this_act;
act.total_download_size += this_rem.size;
}
else
{
touch(workdir(this_loc.file), this_rem.mtime);
}
}
}
foreach (file; to_be_deleted.byKey)
act ~= Action(Action.remove, workdir(file), "", 0);
cast()_actions = act;
_actions_set = true;
if (_actions.length == 0)
info("Already up to date");
else
info("Update available");
return _actions;
}
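/// True if the computed diff produced at least one action.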
public bool updateAvailable()
{
return actions.length != 0;
}
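/// Executes all pending actions: downloads run in parallel with a progress bar,
/// removed files are deleted, and empty directories are cleaned up afterwards.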
public void update()
{
info("Updating");
scope (exit) cast() _actions = Actions();
auto writeSW = StopWatch(AutoStart.yes);
auto speedSW = StopWatch(AutoStart.yes);
const dl_total = actions.actions.count!"a.what == b"(Action.download);
const dl_pad = cast(int)log10(cast(float)dl_total) + 1;
uint dl_count = 0;
ulong dl_size = 0;
const dl_spad = cast(int)log10(cast(float)actions.total_download_size) + 1;
ulong dl_speed = 0;
ulong dl_speed_old_size = 0;
const old_defaultpool = defaultPoolThreads;
scope (exit) defaultPoolThreads = old_defaultpool;
defaultPoolThreads = config.updater.parallel_downloads.to!uint - 1;
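// Renders one status line: byte counter, download speed, progress bar and percentage.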
auto writeProgress = delegate()
{
string numbers = format(
"[%s%*d%s|%s%d%s]",
magenta, dl_spad, dl_size, reset,
magenta, actions.total_download_size, reset,
);
int barSpace = termWidth - dl_spad*2 - 3 - 2 - 10 - 11;
float progress = dl_size.to!float / actions.total_download_size;
long filledSpace = lround(progress * barSpace);
long emptySpace = lround((1.0 - progress) * barSpace);
string progress_bar = "["~yellow;
for (int n = 0; n < filledSpace; n++)
progress_bar ~= '=';
for (int n = 0; n < emptySpace; n++)
progress_bar ~= ' ';
progress_bar ~= reset~"]";
string percent = format(
"[%7.2f%%]", progress * 100
);
int speed_rank = 0;
float speed = dl_speed.to!float;
for (speed_rank = 0; speed >= 1000; speed_rank += 1)
speed /= 1000;
string unit = [
" B",
"KB",
"MB",
"GB",
"TB",
"PB",
"EB",
"ZB",
"YB",
"RB",
"QB",
][speed_rank];
string download_speed = (speed < 100)
? format(
"[%s%4.1f %s%s/s]", cyan, speed, reset, unit
)
: format(
"[%s%5.1f%s%s/s]", cyan, speed, reset, unit
);
writef("\r%s%s%s%s\b", numbers, download_speed, progress_bar, percent);
};
foreach (action; parallel(actions.actions, 1))
final switch(action.what)
{
case Action.remove:
info("Remove %s", action.file);
remove(action.file);
break;
case Action.download:
info("Update %s", action.file);
mkdirRecurse(dirName(action.file));
download(action.url, action.file, (size_t amount){
synchronized
{
dl_size += amount;
if (speedSW.peek.total!"msecs" >= 1000)
{
long time = speedSW.peek.total!"msecs";
dl_speed = lround((dl_size - dl_speed_old_size) * 1000.0 / time);
dl_speed_old_size = dl_size;
speedSW.reset();
}
if (writeSW.peek.total!"msecs" < 100)
return;
writeSW.reset();
}
writeProgress();
});
synchronized dl_count += 1;
writefln(
"\33[2K\r[%s%*d%s|%s%d%s] %s",
cyan, dl_pad, dl_count, reset,
cyan, dl_total, reset,
action.file.replace("/", dirSeparator).green,
);
writeProgress();
touch(action.file, action.mtime);
break;
}
foreach (ent; dirEntries(config.updater.workdir, SpanMode.depth))
{
if (!ent.isDir)
continue;
if (dirEntries(ent, SpanMode.shallow).empty)
rmdir(ent);
}
}