Diffstat (limited to 'src')
-rw-r--r--  src/main.zig  608
-rw-r--r--  src/root.zig   24
2 files changed, 632 insertions, 0 deletions
diff --git a/src/main.zig b/src/main.zig
new file mode 100644
index 0000000..3d40078
--- /dev/null
+++ b/src/main.zig
@@ -0,0 +1,608 @@
+const std = @import("std");
+const w = std.os.windows;
+const hook = @import("hook");
+
+const c = @cImport({
+ @cInclude("libavcodec/avcodec.h");
+ @cInclude("libavformat/avformat.h");
+ @cInclude("libswscale/swscale.h");
+ @cInclude("libswresample/swresample.h");
+ @cInclude("libavutil/opt.h");
+});
+
+// winsock doesn't export this symbol, but libsrt expects it, so just define
+// it here ourselves
+export const in6addr_any: [32]u8 = .{ 0 } ** 32;
+
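+// Builds, at comptime, a wrapper for a virtual method: it constructs the
+// matching x86 thiscall function-pointer type, looks up the slot index
+// `vtidx_<func_name>` for this type in the active gamedata entry, and calls
+// through the object's vtable.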
+fn virtualFunc(This: type, comptime func_name: []const u8, Params: []const type, RT: type) fn (*This, anytype) RT {
+ var params: [Params.len + 1]std.builtin.Type.Fn.Param = undefined;
+ params[0] = .{ .is_generic = false, .is_noalias = false, .type = *This };
+ for (params[1..], Params) |*param, Param| param.* = .{
+ .is_generic = false,
+ .is_noalias = false,
+ .type = Param,
+ };
+ const Fn = *@Type(.{
+ .@"fn" = .{
+ .is_generic = false,
+ .is_var_args = false,
+ .params = &params,
+ .return_type = RT,
+ .calling_convention = .{ .x86_thiscall = .{} },
+ },
+ });
+
+ return struct {
+ pub fn func(this: *This, args: anytype) RT {
+ const vtable: *[*]Fn = @ptrCast(@constCast(this));
+ const type_name = comptime name: {
+ const fqn = @typeName(This);
+ // find the last .
+ var iter = std.mem.splitScalar(u8, fqn, '.');
+ var shortName: []const u8 = undefined;
+ while (iter.next()) |seg| shortName = seg;
+ break :name shortName;
+ };
+ const idx = @field(@field(gamedata, type_name), "vtidx_" ++ func_name);
+ const target_location = vtable.*[idx];
+ std.log.debug("calling {s}::{s} @ 0x{x}",
+ .{ type_name, func_name, @intFromPtr(target_location) });
+ return @call(.auto, target_location, .{this} ++ args);
+ }
+ }.func;
+}
+
+const CreateInterfaceFn = *const fn ([*:0]const u8, ?*c_int) callconv(.c) *align(4) anyopaque;
+
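+// Per-game-build knowledge: interface version strings, vtable slot indices,
+// and raw offsets for things that can't be resolved through CreateInterface.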
+const GameData = struct {
+ const HL2PRE20TH = GameData{
+ .IVEngineClient = .{
+ .vtidx_IsPlayingDemo = 76,
+ .vtidx_ClientCmd = 7,
+ .vtidx_Time = 14,
+ .vtidx_GetAppID = 97,
+ .iface_name = "VEngineClient014",
+ },
+ .off_S_TransferStereo16 = 0x7fac0,
+ };
+
+ IVEngineClient: struct {
+ vtidx_IsPlayingDemo: usize,
+ vtidx_Time: usize,
+ vtidx_GetAppID: usize,
+ vtidx_ClientCmd: usize,
+ iface_name: [:0]const u8,
+ },
+ CDemoPlayer: struct {
+ vtidx_GetDemoFile: usize = 2,
+ vtidx_GetPlaybackTicks: usize = 3,
+ vtidx_GetTotalTicks: usize = 4,
+ vtidx_StartPlayback: usize = 5,
+ vtidx_IsPlayingBack: usize = 6,
+ } = .{},
+ CEngineAPI: struct {
+ iface_name: [:0]const u8 = "VENGINE_LAUNCHER_API_VERSION004",
+ vtidx_SetEngineWindow: usize = 7,
+ } = .{},
+ CEngineVGui: struct {
+ iface_name: [:0]const u8 = "VEngineVGui001",
+ vtidx_Init: usize = 3,
+ } = .{},
+ CEngineTool: struct {
+ iface_name: [:0]const u8 = "VENGINETOOL003",
+ vtidx_StartMovieRecording: usize = 65,
+ } = .{},
+ // TODO: implement a good way to find this
+ off_S_TransferStereo16: usize,
+ CVideoMode_MaterialSystem: struct {
+ vtidx_WriteMovieFrame: usize = 27,
+ vtidx_ReadScreenPixels: usize = 30,
+ } = .{},
+};
+
+const IVEngineClient = extern struct {
+ vt: [*]usize,
+ const isPlayingDemo =
+ virtualFunc(IVEngineClient, "IsPlayingDemo", &.{}, bool);
+ const clientCmd =
+ virtualFunc(IVEngineClient, "ClientCmd", &.{[*:0]const u8}, void);
+ const time = virtualFunc(IVEngineClient, "Time", &.{}, f32);
+ const getAppId = virtualFunc(IVEngineClient, "GetAppID", &.{}, c_int);
+};
+
+const CDemoPlayer = extern struct {
+ vt: [*]usize,
+ const getPlaybackTicks =
+ virtualFunc(CDemoPlayer, "GetPlaybackTicks", &.{}, c_int);
+ const getTotalTicks =
+ virtualFunc(CDemoPlayer, "GetTotalTicks", &.{}, c_int);
+ const startPlayback =
+ virtualFunc(CDemoPlayer, "StartPlayback", &.{[*]const u8, bool}, bool);
+ const isPlayingBack =
+ virtualFunc(CDemoPlayer, "IsPlayingBack", &.{}, bool);
+};
+
+const CEngineAPI = extern struct {
+ vt: [*]usize,
+};
+
+const CEngineVGui = extern struct {
+ vt: [*]*const anyopaque,
+};
+
+const CVideoMode_MaterialSystem = extern struct {
+ vt: [*]*const anyopaque,
+ const readScreenPixels = virtualFunc(CVideoMode_MaterialSystem, "ReadScreenPixels",
+ &.{c_int, c_int, c_int, c_int, *anyopaque, i32}, void);
+};
+
+const CEngineTool = extern struct {
+ vt: [*]*const anyopaque,
+};
+
+const Sample = extern struct { left: i32, right: i32 };
+
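+// Pointers into the engine, resolved at runtime: some come straight from the
+// CreateInterface factory, the rest are recovered by walking known
+// instruction sequences (demoplayer, videomode, movieinfo) or by a fixed
+// offset from the module base (S_TransferStereo16).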
+var api: struct {
+ engine_factory: CreateInterfaceFn,
+ engclient: *IVEngineClient,
+ demoplayer: *CDemoPlayer,
+ engineapi: *CEngineAPI,
+ videomode: *CVideoMode_MaterialSystem,
+ engvgui: *CEngineVGui,
+ enginetool: *CEngineTool,
+ S_TransferStereo16: *const fn(*anyopaque, [*]Sample, u32, u32) callconv(.c) void,
+ movieinfo: *MovieInfo,
+
+ fn init(this: *@This()) !void {
+ var engine_dll = try std.DynLib.open("engine");
+ defer engine_dll.close();
+
+ const engine_factory = engine_dll.lookup(CreateInterfaceFn, "CreateInterface").?;
+ this.engclient =
+ @ptrCast(engine_factory(gamedata.IVEngineClient.iface_name, null));
+ this.engineapi =
+ @ptrCast(engine_factory(gamedata.CEngineAPI.iface_name, null));
+ this.engvgui =
+ @ptrCast(engine_factory(gamedata.CEngineVGui.iface_name, null));
+ this.enginetool =
+ @ptrCast(engine_factory(gamedata.CEngineTool.iface_name, null));
+ // CEngineClient::IsPlayingDemo immediately loads &demoplayer into ECX
+ const isplayingdemo: [*]u8 =
+ @ptrFromInt(this.engclient.vt[gamedata.IVEngineClient.vtidx_IsPlayingDemo]);
+ std.debug.assert(isplayingdemo[0] == hook.x86.Opcode.Op1Mrm.movrmw);
+ std.debug.assert(isplayingdemo[1] == hook.x86.modrm(0, 1, 5));
+ // +2 is the first byte past the mov opcode and modr/m byte
+ this.demoplayer = @as(*align(1) **CDemoPlayer, @ptrCast(isplayingdemo + 2)).*.*;
+ std.log.debug("demoplayer: {x}", .{@intFromPtr(this.demoplayer)});
+ // CEngineAPI::SetEngineWindow makes two virtual calls: one to unset the old
+ // window and one to apply the new one. videomode is loaded into ECX for the
+ // second one.
+ var p: [*]u8 =
+ @ptrFromInt(this.engineapi.vt[gamedata.CEngineAPI.vtidx_SetEngineWindow]);
+ var mov_ecx_count: u8 = 0;
+ while (hook.x86.x86_len(p)) |len| {
+ if (p[0] == hook.x86.Opcode.Op1Mrm.movrmw and
+ p[1] == hook.x86.modrm(0, 1, 5))
+ mov_ecx_count += 1;
+ if (mov_ecx_count == 2) {
+ this.videomode = @as(*align(1) **CVideoMode_MaterialSystem, @ptrCast(p + 2)).*.*;
+ break;
+ }
+ p += len;
+ } else |err| return err;
+ std.log.debug("videomode: {x}", .{ @intFromPtr(this.videomode) });
+ // CEngineTool::StartMovieRecording first checks whether a movie is already
+ // being recorded, so follow its first call to reach CL_IsRecordingMovie.
+ p = @constCast(@ptrCast(this.enginetool.vt[gamedata.CEngineTool.vtidx_StartMovieRecording]));
+ while (hook.x86.x86_len(p)) |len| {
+ if (p[0] == hook.x86.Opcode.Op1IW.call) {
+ const offset = @as(*align(1) usize, @ptrCast(p + 1)).*;
+ p = @ptrFromInt(@addWithOverflow(@intFromPtr(p + 5), offset)[0]);
+ break;
+ }
+ p += len;
+ } else |err| return err;
+ // CL_IsRecordingMovie tests the first byte of movieinfo's first field, so
+ // the operand it reads is effectively a pointer to the whole struct.
+ std.log.debug("CL_IsRecordingMovie: {*}", .{p});
+ std.debug.assert(p[0] == hook.x86.Opcode.Op1MrmI8.alumi8);
+ std.debug.assert(p[1] == hook.x86.modrm(0, 7, 5));
+ this.movieinfo = @as(*align(1) *MovieInfo, @ptrCast(p + 2)).*;
+ std.log.debug("movieinfo: {x}", .{ @intFromPtr(this.movieinfo) });
+ }
+} = undefined;
+
+// the gamedata entry in use; hardcoded to HL2PRE20TH for now
+const gamedata = GameData.HL2PRE20TH;
+
+const Config = struct {
+ pub const GameBuild = enum { Hl2Pre20th };
+
+ build: GameBuild,
+ width: u16 = 1920,
+ height: u16 = 1080,
+ framerate: u9 = 60,
+ bitrate: u32 = 20000,
+ mod: []const u8 = "hl2",
+ video_codec: [:0]const u8 = "libx264",
+ extraargs: []const u8 = "",
+
+ pub fn readFromFile(path: []const u8, allocator: std.mem.Allocator) !Config {
+ const cfg = try std.fs.cwd()
+ .readFileAllocOptions(allocator, path, 4096, null, .@"1", 0);
+ var diag: std.zon.parse.Diagnostics = .{};
+ return std.zon.parse.fromSlice(Config, allocator, cfg, &diag, .{}) catch |err|
+ switch (err) {
+ error.ParseZon => {
+ var stderr = std.fs.File.stderr().writer(&.{});
+ try diag.format(&stderr.interface);
+ return err;
+ },
+ else => return err,
+ };
+ }
+};
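+// An example config.zon might look like this (illustrative values; every
+// field except .build has a default):
+// .{
+//     .build = .Hl2Pre20th,
+//     .width = 1280,
+//     .height = 720,
+//     .framerate = 60,
+//     .mod = "hl2",
+// }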
+
+const MovieInfo = extern struct {
+ name: [256:0]u8,
+ curframe: c_int,
+ kind: c_int,
+ jpeg_quality: c_int,
+};
+
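+// All FFmpeg encoding/muxing state plus the hooks that feed it: video frames
+// come from the WriteMovieFrame hook, audio samples (eventually) from the
+// S_TransferStereo16 hook.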
+var render: struct {
+ orig_WriteMovieFrame: *const @TypeOf(hook_WriteMovieFrame),
+ orig_S_TransferStereo16: *const @TypeOf(hook_S_TransferStereo16),
+
+ v: struct {
+ codec: *const c.AVCodec,
+ ctx: *c.AVCodecContext,
+ stream: *c.AVStream,
+ yuv_frame: *c.AVFrame,
+ sws: *c.SwsContext,
+ nextpts: u32,
+ },
+
+ a: struct {
+ codec: *const c.AVCodec,
+ ctx: *c.AVCodecContext,
+ swr: *c.SwrContext,
+ frame: *c.AVFrame,
+ pcm_frame: *c.AVFrame,
+ stream: *c.AVStream,
+ nextpts: u32,
+ frame_idx: u32,
+ },
+ format: *c.AVFormatContext,
+ pixels: []Bgr,
+
+ const Bgr = extern struct { b: u8, g: u8, r: u8 };
+
+ // NOTE: 'this' in this function is the CVideoMode_MaterialSystem instance,
+ // NOT the render struct!
+ fn hook_WriteMovieFrame(this: *CVideoMode_MaterialSystem,
+ _: *MovieInfo) callconv(.{ .x86_thiscall = .{} }) void {
+ std.log.debug("frame!", .{});
+ // 3: source engine IMAGE_FORMAT_BGR888
+ this.readScreenPixels(.{0, 0, config.width, config.height, render.pixels.ptr, 3});
+ const stride: c_int = config.width * @sizeOf(Bgr);
+ // convert to yuv
+ _ = c.sws_scale(render.v.sws, @ptrCast(&render.pixels.ptr), &stride, 0,
+ config.height, &render.v.yuv_frame.data, &render.v.yuv_frame.linesize);
+
+ render.v.yuv_frame.pts = render.v.nextpts; render.v.nextpts += 1;
+ render.v.yuv_frame.time_base = render.v.ctx.time_base;
+ // each frame lasts exactly one time_base unit (i.e. 1/framerate seconds)
+ render.v.yuv_frame.duration = 1;
+
+ fftest("send a frame for encoding",
+ c.avcodec_send_frame(render.v.ctx, render.v.yuv_frame)) catch return;
+
+ var pkt: ?*c.AVPacket = c.av_packet_alloc() orelse {
+ std.log.err("failed to alloc packet!", .{});
+ return;
+ };
+
+ var r: c_int = 0;
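+ // drain every packet the encoder has ready; AVERROR(EAGAIN) means it needs
+ // more input first and AVERROR_EOF means it has been fully flushed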
+ while (true) {
+ r = c.avcodec_receive_packet(render.v.ctx, pkt);
+ if (r == c.AVERROR(c.EAGAIN) or r == c.AVERROR_EOF)
+ break;
+ fftest("encode a frame", r) catch return;
+
+ pkt.?.stream_index = render.v.stream.index;
+ c.av_packet_rescale_ts(pkt, render.v.ctx.time_base, render.v.stream.time_base);
+ fftest("write frame to file", c.av_interleaved_write_frame(render.format, pkt)) catch return;
+ }
+ c.av_packet_unref(pkt);
+ c.av_packet_free(&pkt);
+ }
+
+
+ fn hook_S_TransferStereo16(p: *anyopaque, samples_ptr: [*]Sample, start: u32, end: u32) callconv(.c) void {
+ const samples = samples_ptr[0..(end - start)/2];
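+ // TODO: run these samples through a.swr and queue them into a.pcm_frame for
+ // encoding; audio capture isn't actually implemented yet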
+ for (samples) |_| {
+ //render.a.pcm_frame.data[0][]
+ }
+ render.orig_S_TransferStereo16(p, samples_ptr, start, end);
+ }
+
+ fn fftest(msg: []const u8, v: c_int) !void {
+ if (v != 0) {
+ var buf: [256:0]u8 = .{0} ** 256;
+ _ = c.av_strerror(v, &buf, 256);
+ std.log.err("failed to {s} ({s})", .{msg, buf[0..:0].ptr});
+ return error.FFmpegError;
+ }
+ }
+
+ pub fn init(this: *@This(), allocator: std.mem.Allocator) !void {
+ std.log.info("available video codecs:", .{});
+ var i: ?*anyopaque = null;
+ while (c.av_codec_iterate(&i)) |codec| {
+ if (codec.*.type == c.AVMEDIA_TYPE_VIDEO) {
+ std.log.info("\t{s} ", .{ codec.*.name.? });
+ }
+ }
+ // make sure the game knows we are rendering a video
+ api.movieinfo.name[0] = 'a';
+ // but we don't want it to do anything else
+ api.movieinfo.kind = 0;
+
+ var r: i32 = 0;
+ var format: ?*c.AVFormatContext = null;
+ r = c.avformat_alloc_output_context2(&format, null, null, "test.mp4");
+ this.format = format orelse {
+ std.log.err("couldn't create output context! ({})", .{r});
+ return error.FFmpegError;
+ };
+
+ // create video codec
+ this.v.codec = c.avcodec_find_encoder_by_name(config.video_codec) orelse {
+ std.log.err("could't find video encoder", .{});
+ return error.FFmpegError;
+ };
+ this.v.ctx = c.avcodec_alloc_context3(this.v.codec) orelse {
+ std.log.err("couldn't alloc video codec context", .{});
+ return error.FFmpegError;
+ };
+ this.v.ctx.width = config.width;
+ this.v.ctx.height = config.height;
+ this.v.ctx.pix_fmt = c.AV_PIX_FMT_YUV420P;
+ this.v.ctx.time_base = c.av_make_q(1, config.framerate);
+ this.v.ctx.framerate = c.av_make_q(config.framerate, 1);
+ this.v.ctx.gop_size = 2 * config.framerate;
+ this.v.ctx.bit_rate = 1000 * config.bitrate;
+ //this.v.ctx.rc_max_rate = this.v.ctx.bit_rate;
+ try fftest("set crf", c.av_opt_set_int(this.v.ctx.priv_data, "crf", 24, 0));
+ // TODO: extra encoder arguments
+ r = c.avcodec_open2(this.v.ctx, this.v.codec, null);
+ if (r < 0) {
+ std.log.err("failed to create video encoder ({})", .{ r });
+ return error.FFmpegError;
+ }
+
+ // stream
+ const stream = c.avformat_new_stream(this.format, this.v.codec);
+ if (stream == null) {
+ std.log.err("failed to create video stream", .{});
+ return error.FFmpegError;
+ }
+ this.v.stream = stream.?;
+
+ r = c.avcodec_parameters_from_context(this.v.stream.codecpar, this.v.ctx);
+ if (r < 0) {
+ std.log.err("failed to copy params from video context ({})", .{r});
+ return error.FFmpegError;
+ }
+ this.v.stream.time_base = this.v.ctx.time_base;
+ this.v.stream.avg_frame_rate = this.v.ctx.framerate;
+
+ // frame
+ this.v.yuv_frame = c.av_frame_alloc() orelse {
+ std.log.err("failed to alloc video frame", .{});
+ return error.FFmpegError;
+ };
+ this.v.yuv_frame.width = config.width;
+ this.v.yuv_frame.height = config.height;
+ this.v.yuv_frame.color_range = c.AVCOL_RANGE_JPEG;
+ this.v.yuv_frame.time_base = this.v.ctx.time_base;
+ this.v.yuv_frame.format = this.v.ctx.pix_fmt;
+
+ r = c.av_frame_get_buffer(this.v.yuv_frame, 0);
+ if (r < 0) {
+ std.log.err("failed to alloc frame buffer ({})", .{r});
+ return error.FFmpegError;
+ }
+
+ // sws
+ this.v.sws = c.sws_getContext(config.width, config.height,
+ c.AV_PIX_FMT_BGR24, config.width, config.height,
+ this.v.yuv_frame.format, 0, null, null, null) orelse {
+ std.log.err("failed to create swscontext", .{});
+ return error.FFmpegError;
+ };
+
+ // audio codec
+ const acodec = c.avcodec_find_encoder(c.AV_CODEC_ID_AAC);
+ if (acodec == null) {
+ std.log.err("couldn't find audio encoder", .{});
+ return error.FFmpegError;
+ }
+ this.a.codec = acodec;
+ this.a.ctx = c.avcodec_alloc_context3(this.a.codec) orelse {
+ std.log.err("couldn't create audio encoder context", .{});
+ return error.FFmpegError;
+ };
+ this.a.ctx.bit_rate = 256000;
+ this.a.ctx.sample_fmt = c.AV_SAMPLE_FMT_FLTP;
+ this.a.ctx.sample_rate = 44100;
+ this.a.ctx.profile = c.FF_PROFILE_AAC_MAIN;
+ this.a.ctx.time_base = c.av_make_q(1, 44100);
+
+ this.a.ctx.ch_layout.order = c.AV_CHANNEL_ORDER_NATIVE;
+ this.a.ctx.ch_layout.nb_channels = 2;
+ this.a.ctx.ch_layout.u.map = null; // not custom channel order
+ this.a.ctx.ch_layout.u.mask = c.AV_CH_LAYOUT_STEREO;
+ this.a.ctx.ch_layout.@"opaque" = null;
+
+ try fftest("open audio codec", c.avcodec_open2(this.a.ctx, this.a.codec, null));
+
+ // audio resampler
+ this.a.swr = c.swr_alloc() orelse {
+ std.log.err("failed to alloc swr context", .{});
+ return error.FFmpegError;
+ };
+
+ try fftest("set audio sample rate", c.av_opt_set_int(this.a.swr, "in_sample_rate", 44100, 0));
+ try fftest("set audio sample format", c.av_opt_set_sample_fmt(this.a.swr, "in_sample_fmt", c.AV_SAMPLE_FMT_S16P, 0));
+ try fftest("set audio channel layout", c.av_opt_set_chlayout(this.a.swr, "in_chlayout", &this.a.ctx.ch_layout, 0));
+ try fftest("set out sample rate", c.av_opt_set_int(this.a.swr, "out_sample_rate", this.a.ctx.sample_rate, 0));
+ try fftest("set out sample format", c.av_opt_set_sample_fmt(this.a.swr, "out_sample_fmt", this.a.ctx.sample_fmt, 0));
+ try fftest("set out channel layout", c.av_opt_set_chlayout(this.a.swr, "out_chlayout", &this.a.ctx.ch_layout, 0));
+
+ r = c.swr_init(this.a.swr);
+ if (r < 0) {
+ std.log.err("couldn't start audio resampler ({})", .{r});
+ return error.FFmpegError;
+ }
+
+ const frame = c.av_frame_alloc();
+ this.a.frame = frame orelse {
+ std.log.err("failed to alloc audio frame", .{});
+ return error.FFmpegError;
+ };
+ this.a.frame.nb_samples = this.a.ctx.frame_size;
+ this.a.frame.format = this.a.ctx.sample_fmt;
+ this.a.frame.ch_layout = this.a.ctx.ch_layout;
+ r = c.av_frame_get_buffer(this.a.frame, 0);
+ if (r < 0) {
+ std.log.err("couldn't alloc frame buffer", .{});
+ return error.FFmpegError;
+ }
+
+ std.log.info("Audio frame nb_samples: {}", .{this.a.frame.nb_samples});
+
+ const astream = c.avformat_new_stream(this.format, null);
+ if (astream == null) {
+ std.log.err("failed to create audio stream", .{});
+ return error.FFmpegError;
+ }
+ this.a.stream = astream.?;
+ try fftest("set stream parameters from ctx",
+ c.avcodec_parameters_from_context(this.a.stream.codecpar, this.a.ctx));
+ this.a.stream.time_base = this.a.ctx.time_base;
+
+ r = c.avcodec_open2(this.a.ctx, this.a.codec, null);
+ if (r < 0) {
+ std.log.err("couldn't open audio codec", .{});
+ return error.FFmpegError;
+ }
+
+ try fftest("open container file", c.avio_open(&this.format.*.pb, "test.mp4", c.AVIO_FLAG_WRITE));
+ try fftest("write header", c.avformat_write_header(this.format, null));
+
+ this.v.nextpts = 0;
+ this.a.nextpts = 0;
+ this.a.frame_idx = 0;
+ const width_big: usize = config.width;
+ const height_big: usize = config.height;
+ this.pixels = try allocator.alloc(Bgr, width_big * height_big);
+
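+ // NOTE: the WriteMovieFrame and S_TransferStereo16 hooks below are still
+ // commented out, so nothing is fed to the encoders yet.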
+ //this.orig_WriteMovieFrame = try hookman.hookVMT(
+ // api.videomode.vt,
+ // gamedata.CVideoMode_MaterialSystem.vtidx_WriteMovieFrame,
+ // &hook_WriteMovieFrame
+ //);
+ //this.orig_S_TransferStereo16 =
+ // try hookman.hookDetour(api.S_TransferStereo16, &hook_S_TransferStereo16);
+ }
+} = undefined;
+
+extern "kernel32" fn SetDllDirectoryA(lpPathName: [*]const u8) callconv(.winapi) c_int;
+
+var orig_Init: *const @TypeOf(hook_Init) = undefined;
+fn hook_Init(this: *CEngineVGui) callconv(.{ .x86_thiscall = .{} }) void {
+ orig_Init(this);
+ api.init() catch |err| {
+ std.log.err("failed to init api! {}", .{err});
+ std.process.exit(1);
+ };
+ render.init(gpa.allocator()) catch |err| {
+ std.log.err("failed to init render! {}", .{err});
+ std.process.exit(1);
+ };
+ api.engclient.clientCmd(.{"sv_cheats 1"});
+ api.engclient.clientCmd(.{"host_framerate 60"});
+ // _ = api.demoplayer.startPlayback(.{"test.dem", false});
+}
+
+var hooked_engine = false;
+var orig_LoadLibraryExA: *const @TypeOf(hook_LoadLibraryExA) = undefined;
+fn hook_LoadLibraryExA(filename: [*:0]const u8,
+ handle: w.HANDLE, flags: u32) callconv(.winapi) ?w.HMODULE {
+ const ret = orig_LoadLibraryExA(filename, handle, flags)
+ orelse return null; // we only want to log things that actually load
+ // get the base name of the module
+ var iter = std.mem.splitScalar(u8, std.mem.span(filename), '\\');
+ var shortname: []const u8 = undefined;
+ while (iter.next()) |seg| shortname = seg;
+ shortname = shortname[0 .. shortname.len - 4]; // cut off ".dll"
+ std.log.info("LoadLibrary: {s}", .{shortname});
+
+ // guard against double-hooking: the game tries to load engine.dll multiple
+ // times and we only want to hook it once
+ if (std.mem.eql(u8, shortname, "engine") and !hooked_engine) {
+ var eng = std.DynLib { .inner = .{ .dll = ret } };
+ const factory = eng.lookup(CreateInterfaceFn, "CreateInterface").?;
+ const engvgui: *CEngineVGui =
+ @ptrCast(factory(gamedata.CEngineVGui.iface_name, null));
+ orig_Init = hookman.hookVMT(engvgui.vt,
+ gamedata.CEngineVGui.vtidx_Init, &hook_Init) catch return ret;
+ // since we already have the engine dll base now, just grab the sound
+ // offsets we need
+ const base = @intFromPtr(ret);
+ api.S_TransferStereo16 =
+ @ptrFromInt(base + gamedata.off_S_TransferStereo16);
+ hooked_engine = true;
+ }
+
+ return ret;
+}
+
+var cmdline: [128:0]u8 = .{0} ** 128;
+fn hook_GetCommandLineA() callconv(.winapi) [*:0]const u8 {
+ return &cmdline;
+}
+
+var hookman: hook.HookManager = undefined;
+var config: Config = undefined;
+var gpa: std.heap.GeneralPurposeAllocator(.{}) = undefined;
+pub fn main() !void {
+ gpa = std.heap.GeneralPurposeAllocator(.{}).init;
+
+ // cd to the exe
+ var buf: [4096]u8 = undefined;
+ const goalcwd = try std.fs.selfExeDirPath(&buf);
+ std.log.info("cd to {s}", .{goalcwd});
+ try (try std.fs.openDirAbsolute(goalcwd, .{})).setAsCwd();
+
+ config = try Config.readFromFile("config.zon", gpa.allocator());
+ std.log.info("{}", .{config});
+
+ _ = try std.fmt.bufPrint(&cmdline,
+ "hl2.exe -game {s} -w {} -h {} -window -novid -console {s}",
+ .{ config.mod, config.width, config.height, config.extraargs });
+
+ if (SetDllDirectoryA("bin/") == 0) return error.SetDllDirectoryFailed;
+
+ hookman = try hook.HookManager.init(gpa.allocator());
+ defer _ = hookman.deinit();
+
+ // hook loadlibrary so we know when the game is fully loaded
+ orig_LoadLibraryExA =
+ try hookman.hookSymbol("kernel32", "LoadLibraryExA", &hook_LoadLibraryExA);
+ // easy way to pass the game stuff like resolution, etc
+ _ = try hookman.hookSymbol("kernel32", "GetCommandLineA", &hook_GetCommandLineA);
+
+ var launcher = try std.DynLib.open("bin/launcher.dll");
+ const launcherMain =
+ launcher.lookup(*const fn (?*anyopaque, ?*anyopaque) callconv(.c) c_int, "LauncherMain");
+ _ = launcherMain.?(null, null);
+}
diff --git a/src/root.zig b/src/root.zig
new file mode 100644
index 0000000..9afb8de
--- /dev/null
+++ b/src/root.zig
@@ -0,0 +1,24 @@
+//! By convention, root.zig is the root source file when making a library.
+const std = @import("std");
+
+pub fn bufferedPrint() !void {
+ // Stdout is for the actual output of your application, for example if you
+ // are implementing gzip, then only the compressed bytes should be sent to
+ // stdout, not any debugging messages.
+ const stdout_file = std.fs.File.stdout().deprecatedWriter();
+ // Buffering can improve performance significantly in print-heavy programs.
+ var bw = std.io.bufferedWriter(stdout_file);
+ const stdout = bw.writer();
+
+ try stdout.print("Run `zig build test` to run the tests.\n", .{});
+
+ try bw.flush(); // Don't forget to flush!
+}
+
+pub fn add(a: i32, b: i32) i32 {
+ return a + b;
+}
+
+test "basic add functionality" {
+ try std.testing.expect(add(3, 7) == 10);
+}