const std = @import("std");
const Column = @import("Column.zig").Column;
const c = @cImport({
    @cInclude("duckdb.h");
});

pub fn Result(comptime T: type) type {
    const column_count = switch (@typeInfo(T)) {
        .Struct => |v| v.fields.len,
        .Void => 0,
        else => @compileError("Expecting struct or void in query result type"),
    };

    return struct {
        _res: c.duckdb_result,
        _chunk: c.duckdb_data_chunk,
        _columns: [column_count]c.duckdb_vector,
        _validities: [column_count]?[*]u64,
        _data: [column_count]?*anyopaque,
        _current_row: usize,

        const Self = @This();

        pub fn init(result: c.duckdb_result) !Self {
            var self: Self = .{
                ._res = result,
                ._chunk = null,
                ._columns = undefined,
                ._validities = undefined,
                ._data = undefined,
                ._current_row = 0,
            };
            self.fetchDataChunk();
            if (column_count != 0 and column_count != self.getColumnCount()) {
                return error.QueryColumnCountCapture;
            }
            return self;
        }

        pub fn deinit(self: *Self) void {
            c.duckdb_destroy_result(&self._res);
            if (self._chunk != null) {
                c.duckdb_destroy_data_chunk(&self._chunk);
                self._chunk = null;
            }
        }

        /// There's no way to know how many rows there are in total, but we can
        /// know how many there are in the current chunk.
        fn getCurrentChunkSize(self: Self) usize {
            std.debug.assert(self._chunk != null);
            return c.duckdb_data_chunk_get_size(self._chunk);
        }

        pub fn getColumnCount(self: Self) usize {
            std.debug.assert(self._chunk != null);
            return c.duckdb_data_chunk_get_column_count(self._chunk);
        }

        /// This needs to be called repeatedly to obtain the next blocks of
        /// data. There's no way to know in advance how many rows each call
        /// will yield.
        fn fetchDataChunk(self: *Self) void {
            if (self._chunk != null) {
                c.duckdb_destroy_data_chunk(&self._chunk);
            }
            self._chunk = c.duckdb_fetch_chunk(self._res);
            if (self._chunk == null) {
                // No more data: don't touch the cached column pointers.
                return;
            }
            for (self._columns, 0..) |_, i| {
                const col = c.duckdb_data_chunk_get_vector(self._chunk, i);
                self._columns[i] = col;
                self._validities[i] = c.duckdb_vector_get_validity(col);
                self._data[i] = c.duckdb_vector_get_data(col);
            }
            self._current_row = 0;
        }

        pub fn exausted(self: Self) bool {
            // TODO: check exhaustion properly; for now we only know we are
            // done once duckdb_fetch_chunk has stopped returning chunks.
            return self._chunk == null;
        }

        /// We need some comptime magic to create the output structure from
        /// the T.
        pub fn next(self: *Self) !T {
            var result: T = undefined;
            const fields = comptime switch (@typeInfo(T)) {
                .Void => .{},
                else => |f| f.Struct.fields,
            };
            if (self._current_row == self.getCurrentChunkSize()) {
                self.fetchDataChunk();
            }
            inline for (fields, 0..) |field, i| {
                // TODO: check compatibility between the column type and
                // the struct provided as result container
                const column_type = c.duckdb_column_type(&self._res, i);

                // Check validity: a null validity mask means every row is valid;
                // otherwise each u64 entry of the mask covers 64 rows, one bit per row.
                var is_valid: bool = true;
                if (self._validities[i]) |v| {
                    const entry_idx: usize = self._current_row / 64;
                    const idx_in_entry: u6 = @intCast(@mod(self._current_row, 64));
                    const one: u64 = 1;
                    const num = v[entry_idx] & @shlExact(one, idx_in_entry);
                    is_valid = num != 0;
                }

                // Store this column's value for the current row
                if (is_valid) {
                    // Unwrap the output Optional
                    const t = switch (@typeInfo(field.type)) {
                        .Optional => |opt| opt.child,
                        else => field.type,
                    };
                    // Obtain and convert the column to something we can process
                    const column = Column.fromType(column_type, self._data[i].?);
                    // Convert to the Zig data type and store it in the output struct
                    @field(result, field.name) = try column.getAs(self._current_row, t);
                } else {
                    // Got a NULL from the DB
                    if (@typeInfo(field.type) != .Optional) {
                        // Cannot return it because the output field is not optional
                        return error.NullInNotOptional;
                    }
                    @field(result, field.name) = null;
                }
            }
            self._current_row += 1;
            return result;
        }

        // pub fn giveMeAll() {
        //     // TODO: know the chunk length and use it
        //     while (self._current_row < self._chunk_size) : (self._current_row += 1) {
        //     }
        // }
    };
}
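
// The test below is a minimal usage sketch, not part of the original file.
// It assumes DuckDB is linked into the test binary, that the query's column
// layout matches the `Row` struct, and that `Column.getAs` supports the `i32`
// conversion used here; the SQL text and the field names are illustrative only.
test "Result usage sketch" {
    var db: c.duckdb_database = undefined;
    var conn: c.duckdb_connection = undefined;
    if (c.duckdb_open(null, &db) == c.DuckDBError) return error.OpenFailed;
    defer c.duckdb_close(&db);
    if (c.duckdb_connect(db, &conn) == c.DuckDBError) return error.ConnectFailed;
    defer c.duckdb_disconnect(&conn);

    var res: c.duckdb_result = undefined;
    if (c.duckdb_query(conn, "SELECT 42 AS answer, NULL AS note", &res) == c.DuckDBError) {
        c.duckdb_destroy_result(&res);
        return error.QueryFailed;
    }

    // Two columns: a non-NULL integer and a NULL that must land in an optional
    // field. `Result` takes ownership of `res` and destroys it in `deinit`.
    const Row = struct { answer: i32, note: ?i32 };
    var rows = try Result(Row).init(res);
    defer rows.deinit();

    const row = try rows.next();
    try std.testing.expectEqual(@as(i32, 42), row.answer);
    try std.testing.expectEqual(@as(?i32, null), row.note);
}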