Got blackholio working

parent c64475b0a4
commit 9456d51538

6 changed files with 1115 additions and 366 deletions
build.zig
@@ -21,7 +21,7 @@ pub fn build(b: *std.Build) void {
     const optimize = b.standardOptimizeOption(.{});

     const lib = b.addExecutable(.{
-        .name = "stdb-zig-helloworld",
+        .name = "blackholio",
         .root_source_file = b.path("src/main.zig"),
         .target = target,
         .optimize = optimize,
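The only change in this hunk is the artifact name; the wasm target setup lives outside the diff. For context, a SpacetimeDB Zig module is typically built for wasm32-freestanding so that the publish step below can find zig-out/bin/blackholio.wasm. The following is a hedged sketch of what the surrounding build() presumably looks like; it is not code from this commit and the target/entry/rdynamic lines are assumptions:

    const std = @import("std");

    pub fn build(b: *std.Build) void {
        // Assumption: the module targets wasm32-freestanding so `spacetime publish`
        // can be handed zig-out/bin/blackholio.wasm.
        const target = b.resolveTargetQuery(.{ .cpu_arch = .wasm32, .os_tag = .freestanding });
        const optimize = b.standardOptimizeOption(.{});

        const lib = b.addExecutable(.{
            .name = "blackholio",
            .root_source_file = b.path("src/main.zig"),
            .target = target,
            .optimize = optimize,
        });
        lib.entry = .disabled; // assumption: no _start; reducers are the exported entry points
        lib.rdynamic = true;   // assumption: keep exported wasm symbols visible
        b.installArtifact(lib);
    }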
publish script
@@ -5,9 +5,9 @@ shift;

 if [[ "$func" == "publish" ]]; then
     zig build -freference-trace=100 || exit 1
-    spacetime logout
+    #spacetime logout
     spacetime login --server-issued-login local
-    spacetime publish -y --server local --bin-path=zig-out/bin/stdb-zig-helloworld.wasm
+    spacetime publish -y --server local --bin-path=zig-out/bin/blackholio.wasm blackholio
     DB_HASH=$(spacetime list 2>/dev/null | tail -1)
     spacetime logs $DB_HASH
     exit $?
801  src/main.zig
@@ -2,66 +2,268 @@ const std = @import("std");
 const spacetime = @import("spacetime.zig");
 comptime { _ = spacetime; }

+const START_PLAYER_MASS: u32 = 15;
+const START_PLAYER_SPEED: u32 = 10;
+const FOOD_MASS_MIN: u32 = 2;
+const FOOD_MASS_MAX: u32 = 4;
+const TARGET_FOOD_COUNT: usize = 600;
+const MINIMUM_SAFE_MASS_RATIO: f32 = 0.85;
+
+const MIN_MASS_TO_SPLIT: u32 = START_PLAYER_MASS * 2;
+const MAX_CIRCLES_PER_PLAYER: u32 = 16;
+const SPLIT_RECOMBINE_DELAY_SEC: f32 = 5.0;
+const SPLIT_GRAV_PULL_BEFORE_RECOMBINE_SEC: f32 = 2.0;
+const ALLOWED_SPLIT_CIRCLE_OVERLAP_PCT: f32 = 0.9;
+//1 == instantly separate circles. less means separation takes time
+const SELF_COLLISION_SPEED: f32 = 0.05;

 pub const std_options = std.Options{
     .log_level = .debug,
     .logFn = spacetime.logFn,
 };
+pub const spacespec = spacetime.Spec{
+    .tables = &.{
+        spacetime.Table{
+            .name = "config",
+            .schema = Config,
+            .attribs = .{
+                .access = .Public,
+                .primary_key = "id",
+            }
+        },
+        spacetime.Table{
+            .name = "entity",
+            .schema = Entity,
+            .attribs = .{
+                .access = .Public,
+                .primary_key = "entity_id",
+                .autoinc = &.{ "entity_id", },
+            }
+        },
+        spacetime.Table{
+            .name = "circle",
+            .schema = Circle,
+            .attribs = .{
+                .access = .Public,
+                .primary_key = "entity_id",
+                .autoinc = &.{ "entity_id", },
+                .indexes = &.{ .{ .name = "player_id", .layout = .BTree }, },
+            }
+        },
+        spacetime.Table{
+            .name = "player",
+            .schema = Player,
+            .attribs = .{
+                .access = .Public,
+                .primary_key = "identity",
+                .autoinc = &.{ "player_id", },
+                .unique = &.{ "player_id", },
+            }
+        },
+        spacetime.Table{
+            .name = "logged_out_player",
+            .schema = Player,
+            .attribs = .{
+                .access = .Public,
+                .primary_key = "identity",
+                .unique = &.{ "player_id", },
+            }
+        },
+        spacetime.Table{
+            .name = "food",
+            .schema = Food,
+            .attribs = .{
+                .access = .Public,
+                .primary_key = "entity_id",
+            }
+        },
+        spacetime.Table{
+            .name = "move_all_players_timer",
+            .schema = MoveAllPlayersTimer,
+            .attribs = .{
+                .primary_key = "scheduled_id",
+                .autoinc = &.{ "scheduled_id", },
+                .schedule = "move_all_players",
+            }
+        },
+        spacetime.Table{
+            .name = "spawn_food_timer",
+            .schema = SpawnFoodTimer,
+            .attribs = .{
+                .primary_key = "scheduled_id",
+                .autoinc = &.{ "scheduled_id" },
+                .schedule = "spawn_food",
+            }
+        },
+        spacetime.Table{
+            .name = "circle_decay_timer",
+            .schema = CircleDecayTimer,
+            .attribs = .{
+                .primary_key = "scheduled_id",
+                .autoinc = &.{ "scheduled_id" },
+                .schedule = "circle_decay",
+            }
+        },
+        spacetime.Table{
+            .name = "circle_recombine_timer",
+            .schema = CircleRecombineTimer,
+            .attribs = .{
+                .primary_key = "scheduled_id",
+                .autoinc = &.{ "scheduled_id" },
+                .schedule = "circle_recombine",
+            }
+        },
+        spacetime.Table{
+            .name = "consume_entity_timer",
+            .schema = ConsumeEntityTimer,
+            .attribs = .{
+                .primary_key = "scheduled_id",
+                .autoinc = &.{ "scheduled_id" },
+                .schedule = "consume_entity",
+            }
+        }
+    },
+    .reducers = &.{
+        spacetime.Reducer(.{
+            .name = "init",
+            .lifecycle = .Init,
+            .func = &init,
+        }),
+        spacetime.Reducer(.{
+            .name = "client_connected",
+            .lifecycle = .OnConnect,
+            .func = &connect,
+        }),
+        spacetime.Reducer(.{
+            .name = "client_disconnected",
+            .lifecycle = .OnDisconnect,
+            .func = &disconnect,
+        }),
+        spacetime.Reducer(.{
+            .name = "enter_game",
+            .params = &.{ "name" },
+            .func = &enter_game,
+        }),
+        spacetime.Reducer(.{
+            .name = "respawn",
+            .func = &respawn,
+        }),
+        spacetime.Reducer(.{
+            .name = "suicide",
+            .func = &suicide,
+        }),
+        spacetime.Reducer(.{
+            .name = "update_player_input",
+            .func = &update_player_input,
+            .params = &.{ "direction", },
+        }),
+        spacetime.Reducer(.{
+            .name = "move_all_players",
+            .func = &move_all_players,
+            .params = &.{ "_timer", },
+        }),
+        spacetime.Reducer(.{
+            .name = "consume_entity",
+            .func = &consume_entity,
+            .params = &.{ "request", },
+        }),
+        spacetime.Reducer(.{
+            .name = "player_split",
+            .func = &player_split,
+        }),
+        spacetime.Reducer(.{
+            .name = "spawn_food",
+            .func = &spawn_food,
+            .params = &.{ "_timer", },
+        }),
+        spacetime.Reducer(.{
+            .name = "circle_decay",
+            .func = &circle_decay,
+            .params = &.{ "_timer", },
+        }),
+        spacetime.Reducer(.{
+            .name = "circle_recombine",
+            .func = &circle_recombine,
+            .params = &.{ "_timer", },
+        })
+    },
+};
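The spec above only wires names to schema structs and reducer functions; each .func is an ordinary Zig function taking *spacetime.ReducerContext plus the declared params. A minimal sketch of the same pattern with a hypothetical one-row table (the names counter/Counter/bump are illustrative, not part of this commit):

    const std = @import("std");
    const spacetime = @import("spacetime.zig");

    const Counter = struct {
        id: u32,
        value: u64,
    };

    pub fn bump(ctx: *spacetime.ReducerContext) !void {
        // Hypothetical reducer: read the single counter row, bump it, write it back.
        var row = (try ctx.db.get("counter").col("id").find(.{ .id = 0 })).?;
        row.value += 1;
        try ctx.db.get("counter").col("id").update(row);
    }

    pub const spacespec = spacetime.Spec{
        .tables = &.{
            spacetime.Table{
                .name = "counter",
                .schema = Counter,
                .attribs = .{ .access = .Public, .primary_key = "id" },
            },
        },
        .reducers = &.{
            spacetime.Reducer(.{ .name = "bump", .func = &bump }),
        },
    };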
 pub const DbVector2 = struct {
     x: f32,
     y: f32,
+
+    pub fn sqr_magnitude(self: @This()) f32 {
+        return self.x * self.x + self.y * self.y;
+    }
+
+    pub fn magnitude(self: @This()) f32 {
+        return @sqrt(self.sqr_magnitude());
+    }
+
+    pub fn normalized(self: @This()) DbVector2 {
+        const length = self.magnitude();
+        return .{
+            .x = self.x / length,
+            .y = self.y / length,
+        };
+    }
+
+    pub fn scale(self: @This(), val: f32) DbVector2 {
+        return .{
+            .x = self.x * val,
+            .y = self.y * val,
+        };
+    }
+
+    pub fn add(self: @This(), other: DbVector2) DbVector2 {
+        return .{
+            .x = self.x + other.x,
+            .y = self.y + other.y,
+        };
+    }
+
+    pub fn add_to(self: *@This(), other: DbVector2) void {
+        self.x += other.x;
+        self.y += other.y;
+    }
+
+    pub fn sub(self: @This(), other: DbVector2) DbVector2 {
+        return .{
+            .x = self.x - other.x,
+            .y = self.y - other.y,
+        };
+    }
+
+    pub fn sub_from(self: *@This(), other: DbVector2) void {
+        self.x -= other.x;
+        self.y -= other.y;
+    }
 };
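The vector helpers added above are what update_player_input and move_all_players lean on later in this diff. A small self-contained sketch of the normalize-and-clamp pattern they enable (clampInput is an illustrative name, not part of the commit):

    fn clampInput(direction: DbVector2) struct { dir: DbVector2, speed: f32 } {
        // Mirrors update_player_input: store a unit-length direction and clamp
        // the requested magnitude into [0, 1] as the circle's speed.
        return .{
            .dir = direction.normalized(),
            .speed = std.math.clamp(direction.magnitude(), 0.0, 1.0),
        };
    }

    test "input is normalized and speed clamped" {
        const r = clampInput(.{ .x = 3.0, .y = 4.0 });
        try std.testing.expectApproxEqAbs(@as(f32, 0.6), r.dir.x, 0.001);
        try std.testing.expectApproxEqAbs(@as(f32, 1.0), r.speed, 0.001);
    }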
|
|
||||||
pub const config: spacetime.Table = .{ .schema = Config, .primary_key = "id", .access = .Public, };
|
|
||||||
pub const Config = struct {
|
pub const Config = struct {
|
||||||
//#[primary_key]
|
|
||||||
id: u32,
|
id: u32,
|
||||||
world_size: u64,
|
world_size: u64,
|
||||||
};
|
};
|
||||||
|
|
||||||
pub const entity: spacetime.Table = .{ .schema = Entity, .primary_key = "entity_id", .access = .Public };
|
|
||||||
pub const Entity = struct {
|
pub const Entity = struct {
|
||||||
//#[auto_inc]
|
|
||||||
//#[primary_key]
|
|
||||||
entity_id: u32,
|
entity_id: u32,
|
||||||
position: DbVector2,
|
position: DbVector2,
|
||||||
mass: u32,
|
mass: u32,
|
||||||
};
|
};
|
||||||
|
|
||||||
pub const circles: spacetime.Table = .{
|
|
||||||
.schema = Circle,
|
|
||||||
.primary_key = "entity_id",
|
|
||||||
.access = .Public,
|
|
||||||
.indexes = &.{ .{ .name = "player_id", .layout = .BTree } },
|
|
||||||
};
|
|
||||||
pub const Circle = struct {
|
pub const Circle = struct {
|
||||||
//#[auto_inc]
|
|
||||||
//#[primary_key]
|
|
||||||
entity_id: u32,
|
entity_id: u32,
|
||||||
//#[index(btree)]
|
|
||||||
player_id: u32,
|
player_id: u32,
|
||||||
direction: DbVector2,
|
direction: DbVector2,
|
||||||
speed: f32,
|
speed: f32,
|
||||||
last_split_time: spacetime.Timestamp,
|
last_split_time: spacetime.Timestamp,
|
||||||
};
|
};
|
||||||
|
|
||||||
pub const players: spacetime.Table = .{
|
|
||||||
.schema = Player,
|
|
||||||
.primary_key = "identity",
|
|
||||||
.access = .Public,
|
|
||||||
.unique = &.{ "player_id" },
|
|
||||||
.autoinc = &.{ "player_id" },
|
|
||||||
};
|
|
||||||
pub const logged_out_players: spacetime.Table = .{
|
|
||||||
.schema = Player,
|
|
||||||
.primary_key = "identity",
|
|
||||||
.unique = &.{ "player_id" }
|
|
||||||
};
|
|
||||||
pub const Player = struct {
|
pub const Player = struct {
|
||||||
//#[primary_key]
|
|
||||||
identity: spacetime.Identity,
|
identity: spacetime.Identity,
|
||||||
//#[unique]
|
|
||||||
//#[auto_inc]
|
|
||||||
player_id: u32,
|
player_id: u32,
|
||||||
name: []const u8,
|
name: []const u8,
|
||||||
|
|
||||||
|
|
@ -71,81 +273,67 @@ pub const Player = struct {
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
pub const food: spacetime.Table = .{ .schema = Food, .primary_key = "entity_id", .access = .Public };
|
|
||||||
pub const Food = struct {
|
pub const Food = struct {
|
||||||
//#[primary_key]
|
|
||||||
entity_id: u32,
|
entity_id: u32,
|
||||||
};
|
};
|
||||||
|
|
||||||
//#[spacetimedb::table(name = spawn_food_timer, scheduled(spawn_food))]
|
|
||||||
pub const spawn_food_timer: spacetime.Table = .{ .schema = SpawnFoodTimer, .primary_key = "scheduled_id" };
|
|
||||||
pub const SpawnFoodTimer = struct {
|
pub const SpawnFoodTimer = struct {
|
||||||
//#[primary_key]
|
|
||||||
//#[auto_inc]
|
|
||||||
scheduled_id: u64,
|
scheduled_id: u64,
|
||||||
scheduled_at: spacetime.ScheduleAt,
|
scheduled_at: spacetime.ScheduleAt,
|
||||||
};
|
};
|
||||||
|
|
||||||
//#[spacetimedb::table(name = circle_decay_timer, scheduled(circle_decay))]
|
|
||||||
pub const circle_decay_timer: spacetime.Table = .{ .schema = CircleDecayTimer, .primary_key = "scheduled_id" };
|
|
||||||
pub const CircleDecayTimer = struct {
|
pub const CircleDecayTimer = struct {
|
||||||
//#[primary_key]
|
|
||||||
//#[auto_inc]
|
|
||||||
scheduled_id: u64,
|
scheduled_id: u64,
|
||||||
scheduled_at: spacetime.ScheduleAt,
|
scheduled_at: spacetime.ScheduleAt,
|
||||||
};
|
};
|
||||||
|
|
||||||
//#[spacetimedb::table(name = circle_recombine_timer, scheduled(circle_recombine))]
|
|
||||||
pub const circle_recombine_timer: spacetime.Table = .{ .schema = CircleRecombineTimer, .primary_key = "scheduled_id" };
|
|
||||||
pub const CircleRecombineTimer = struct {
|
pub const CircleRecombineTimer = struct {
|
||||||
//#[primary_key]
|
|
||||||
//#[auto_inc]
|
|
||||||
scheduled_id: u64,
|
scheduled_id: u64,
|
||||||
scheduled_at: spacetime.ScheduleAt,
|
scheduled_at: spacetime.ScheduleAt,
|
||||||
player_id: u32,
|
player_id: u32,
|
||||||
};
|
};
|
||||||
|
|
||||||
pub const consume_entity_timer: spacetime.Table = .{ .schema = ConsumeEntityTimer, .primary_key = "scheduled_id" };
|
|
||||||
pub const ConsumeEntityTimer = struct {
|
pub const ConsumeEntityTimer = struct {
|
||||||
//#[primary_key]
|
|
||||||
//#[auto_inc]
|
|
||||||
scheduled_id: u64,
|
scheduled_id: u64,
|
||||||
scheduled_at: spacetime.ScheduleAt,
|
scheduled_at: spacetime.ScheduleAt,
|
||||||
consumed_entity_id: u32,
|
consumed_entity_id: u32,
|
||||||
consumer_entity_id: u32,
|
consumer_entity_id: u32,
|
||||||
};
|
};
|
||||||
|
|
||||||
-pub const Init: spacetime.Reducer = .{ .func_type = @TypeOf(InitReducer), .func = @ptrCast(&InitReducer), .lifecycle = .Init, };
-pub fn InitReducer(ctx: *spacetime.ReducerContext) !void {
+pub const MoveAllPlayersTimer = struct {
+    scheduled_id: u64,
+    scheduled_at: spacetime.ScheduleAt,
+};
+
+pub fn init(ctx: *spacetime.ReducerContext) !void {
     std.log.info("Initializing...", .{});
-    try ctx.db.get("config").insert(Config {
+    _ = try ctx.db.get("config").insert(Config {
         .id = 0,
         .world_size = 1000,
     });
-    try ctx.db.get("circle_decay_timer").insert(CircleDecayTimer {
+    _ = try ctx.db.get("circle_decay_timer").insert(CircleDecayTimer {
         .scheduled_id = 0,
         .scheduled_at = .{ .Interval = .{ .__time_duration_micros__ = 5 * std.time.us_per_s }},
     });
-    try ctx.db.get("spawn_food_timer").insert(SpawnFoodTimer {
+    _ = try ctx.db.get("spawn_food_timer").insert(SpawnFoodTimer {
         .scheduled_id = 0,
         .scheduled_at = .{ .Interval = .{ .__time_duration_micros__ = 500 * std.time.us_per_ms }}
     });
-    try ctx.db.get("move_all_players_timer").insert(MoveAllPlayersTimer {
+    _ = try ctx.db.get("move_all_players_timer").insert(MoveAllPlayersTimer {
         .scheduled_id = 0,
         .scheduled_at = .{ .Interval = .{ .__time_duration_micros__ = 50 * std.time.us_per_ms }}
     });
 }
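The timer rows inserted by init use raw microsecond intervals; the constants come straight from std.time, so the three ticks above work out to 5 s, 500 ms and 50 ms respectively. A comptime check of that arithmetic:

    comptime {
        std.debug.assert(5 * std.time.us_per_s == 5_000_000);  // circle_decay_timer
        std.debug.assert(500 * std.time.us_per_ms == 500_000); // spawn_food_timer
        std.debug.assert(50 * std.time.us_per_ms == 50_000);   // move_all_players_timer
    }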
|
|
||||||
pub const OnConnect = spacetime.Reducer{ .func_type = @TypeOf(OnConnectReducer), .func = @ptrCast(&OnConnectReducer), .lifecycle = .OnConnect, };
|
pub fn connect(ctx: *spacetime.ReducerContext) !void {
|
||||||
pub fn OnConnectReducer(ctx: *spacetime.ReducerContext) !void {
|
|
||||||
// Called everytime a new client connects
|
// Called everytime a new client connects
|
||||||
std.log.info("[OnConnect]", .{});
|
std.log.info("[OnConnect]", .{});
|
||||||
const nPlayer = try ctx.db.get("logged_out_players").col("identity").find(.{ .identity = ctx.sender });
|
const nPlayer = try ctx.db.get("logged_out_player").col("identity").find(.{ .identity = ctx.sender });
|
||||||
if (nPlayer) |player| {
|
if (nPlayer) |player| {
|
||||||
try ctx.db.get("players").insert(player.*);
|
_ = try ctx.db.get("player").insert(player);
|
||||||
try ctx.db.get("logged_out_players").col("identity").delete(.{ .identity = player.identity });
|
try ctx.db.get("logged_out_player").col("identity").delete(.{ .identity = player.identity });
|
||||||
} else {
|
} else {
|
||||||
try ctx.db.get("players").insert(Player {
|
_ = try ctx.db.get("player").insert(Player {
|
||||||
.identity = ctx.sender,
|
.identity = ctx.sender,
|
||||||
.player_id = 0,
|
.player_id = 0,
|
||||||
.name = "",
|
.name = "",
|
||||||
|
|
@ -153,62 +341,481 @@ pub fn OnConnectReducer(ctx: *spacetime.ReducerContext) !void {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub const OnDisconnect = spacetime.Reducer{ .func_type = @TypeOf(OnDisconnectReducer), .func = @ptrCast(&OnDisconnectReducer), .lifecycle = .OnDisconnect, };
|
pub fn disconnect(ctx: *spacetime.ReducerContext) !void {
|
||||||
pub fn OnDisconnectReducer(ctx: *spacetime.ReducerContext) !void {
|
|
||||||
// Called everytime a client disconnects
|
// Called everytime a client disconnects
|
||||||
std.log.info("[OnDisconnect]", .{});
|
std.log.info("[OnDisconnect]", .{});
|
||||||
const nPlayer = try ctx.db.get("players").col("identity").find(.{ .identity = ctx.sender});
|
const nPlayer = try ctx.db.get("player").col("identity").find(.{ .identity = ctx.sender});
|
||||||
if(nPlayer == null) {
|
if(nPlayer == null) {
|
||||||
std.log.err("Disconnecting player doesn't have a valid players row!",.{});
|
std.log.err("Disconnecting player doesn't have a valid players row!",.{});
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
const player = nPlayer.?;
|
const player = nPlayer.?;
|
||||||
//std.log.info("{?}", .{player});
|
_ = try ctx.db.get("logged_out_player").insert(player);
|
||||||
const player_id = player.player_id;
|
try ctx.db.get("player").col("identity").delete(.{ .identity = ctx.sender});
|
||||||
try ctx.db.get("logged_out_players").insert(player.*);
|
|
||||||
try ctx.db.get("players").col("identity").delete(.{ .identity = ctx.sender});
|
|
||||||
|
|
||||||
// Remove any circles from the arena
|
// Remove any circles from the arena
|
||||||
var iter = ctx.db.get("circles").col("player_id").filter(.{ .player_id = player_id });
|
var iter = try ctx.db.get("circle").col("player_id").filter(.{ .player_id = player.player_id });
|
||||||
//_ = player_id;
|
while (try iter.next()) |circle_val| {
|
||||||
_ = &iter;
|
try ctx.db.get("entity").col("entity_id").delete(.{ .entity_id = circle_val.entity_id, });
|
||||||
// std.log.info("blag", .{});
|
try ctx.db.get("circle").col("entity_id").delete(.{ .entity_id = circle_val.entity_id, });
|
||||||
// while (try iter.next()) |circle_val| {
|
}
|
||||||
// try ctx.db.get("entity").col("entity_id").delete(.{ .entity_id = circle_val.entity_id, });
|
|
||||||
// try ctx.db.get("circle").col("entity_id").delete(.{ .entity_id = circle_val.entity_id, });
|
|
||||||
// }
|
|
||||||
}
|
}
|
||||||
|
|
||||||
//#[spacetimedb::table(name = move_all_players_timer, scheduled(move_all_players))]
|
pub fn enter_game(ctx: *spacetime.ReducerContext, name: []const u8) !void {
|
||||||
pub const move_all_players_timer: spacetime.Table = .{
|
std.log.info("Creating player with name {s}", .{name});
|
||||||
.schema = MoveAllPlayersTimer,
|
var player: ?Player = try ctx.db.get("player").col("identity").find(.{ .identity = ctx.sender });
|
||||||
.primary_key = "scheduled_id",
|
const player_id = player.?.player_id;
|
||||||
.schedule_reducer = &move_all_players
|
player.?.name = name;
|
||||||
};
|
try ctx.db.get("player").col("identity").update(player.?);
|
||||||
pub const MoveAllPlayersTimer = struct {
|
_ = try spawn_player_initial_circle(ctx, player_id);
|
||||||
//#[primary_key]
|
}
|
||||||
//#[auto_inc]
|
|
||||||
scheduled_id: u64,
|
|
||||||
scheduled_at: spacetime.ScheduleAt,
|
|
||||||
};
|
|
||||||
|
|
||||||
pub const move_all_players = spacetime.Reducer{
|
fn gen_range(rng: *std.Random.DefaultPrng, min: f32, max: f32) f32 {
|
||||||
.func_type = @TypeOf(move_all_players_reducer),
|
return @floatCast(std.Random.float(rng.random(), f64) * (@as(f64, @floatCast(max)) - @as(f64, @floatCast(min))) + @as(f64, @floatCast(min)));
|
||||||
.func = @ptrCast(&move_all_players_reducer),
|
}
|
||||||
.params = &.{ "_timer" }
|
|
||||||
};
|
fn spawn_player_initial_circle(ctx: *spacetime.ReducerContext, player_id: u32) !Entity {
|
||||||
pub fn move_all_players_reducer(ctx: *spacetime.ReducerContext, _timer: MoveAllPlayersTimer) !void {
|
var rng = ctx.rng;
|
||||||
_ = ctx;
|
const world_size = (try ctx
|
||||||
|
.db.get("config").col("id")
|
||||||
|
.find(.{ .id = 0, })).?.world_size;
|
||||||
|
const player_start_radius = mass_to_radius(START_PLAYER_MASS);
|
||||||
|
const x = gen_range(&rng, player_start_radius, (@as(f32, @floatFromInt(world_size)) - player_start_radius));
|
||||||
|
const y = gen_range(&rng, player_start_radius, (@as(f32, @floatFromInt(world_size)) - player_start_radius));
|
||||||
|
return spawn_circle_at(
|
||||||
|
ctx,
|
||||||
|
player_id,
|
||||||
|
START_PLAYER_MASS,
|
||||||
|
DbVector2 { .x = x, .y = y },
|
||||||
|
ctx.timestamp,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
fn spawn_circle_at(
|
||||||
|
ctx: *spacetime.ReducerContext,
|
||||||
|
player_id: u32,
|
||||||
|
mass: u32,
|
||||||
|
position: DbVector2,
|
||||||
|
timestamp: spacetime.Timestamp,
|
||||||
|
) !Entity {
|
||||||
|
const entity = try ctx.db.get("entity").insert(.{
|
||||||
|
.entity_id = 0,
|
||||||
|
.position = position,
|
||||||
|
.mass = mass,
|
||||||
|
});
|
||||||
|
|
||||||
|
_ = try ctx.db.get("circle").insert(.{
|
||||||
|
.entity_id = entity.entity_id,
|
||||||
|
.player_id = player_id,
|
||||||
|
.direction = DbVector2 { .x = 0.0, .y = 1.0 },
|
||||||
|
.speed = 0.0,
|
||||||
|
.last_split_time = timestamp,
|
||||||
|
});
|
||||||
|
|
||||||
|
return entity;
|
||||||
|
}
|
||||||
|
|
||||||
|
//#[spacetimedb::reducer]
|
||||||
|
pub fn respawn(ctx: *spacetime.ReducerContext) !void {
|
||||||
|
const player = (try ctx
|
||||||
|
.db.get("player")
|
||||||
|
.col("identity")
|
||||||
|
.find(.{ .identity = ctx.sender})).?;
|
||||||
|
|
||||||
|
_ = try spawn_player_initial_circle(ctx, player.player_id);
|
||||||
|
}
|
||||||
|
|
||||||
|
//#[spacetimedb::reducer]
|
||||||
|
pub fn suicide(ctx: *spacetime.ReducerContext) !void {
|
||||||
|
const player = (try ctx
|
||||||
|
.db
|
||||||
|
.get("player")
|
||||||
|
.col("identity")
|
||||||
|
.find(.{ .identity = ctx.sender})).?;
|
||||||
|
|
||||||
|
var circles = try ctx.db.get("circle").col("player_id").filter(.{ .player_id = player.player_id});
|
||||||
|
|
||||||
|
while(try circles.next()) |circle| {
|
||||||
|
try destroy_entity(ctx, circle.entity_id);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
//#[spacetimedb::reducer]
|
||||||
|
pub fn update_player_input(ctx: *spacetime.ReducerContext, direction: DbVector2) !void {
|
||||||
|
std.log.info("player input updated!", .{});
|
||||||
|
const player = (try ctx
|
||||||
|
.db
|
||||||
|
.get("player")
|
||||||
|
.col("identity")
|
||||||
|
.find(.{ .identity = ctx.sender})).?;
|
||||||
|
var circles = try ctx.db.get("circle").col("player_id").filter(.{ .player_id = player.player_id});
|
||||||
|
while(try circles.next()) |circle| {
|
||||||
|
var copy_circle = circle;
|
||||||
|
copy_circle.direction = direction.normalized();
|
||||||
|
copy_circle.speed = std.math.clamp(direction.magnitude(), 0.0, 1.0);
|
||||||
|
try ctx.db.get("circle").col("entity_id").update(copy_circle);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn is_overlapping(a: *Entity, b: *Entity) bool {
|
||||||
|
const dx = a.position.x - b.position.x;
|
||||||
|
const dy = a.position.y - b.position.y;
|
||||||
|
const distance_sq = dx * dx + dy * dy;
|
||||||
|
|
||||||
|
const radius_a = mass_to_radius(a.mass);
|
||||||
|
const radius_b = mass_to_radius(b.mass);
|
||||||
|
|
||||||
|
// If the distance between the two circle centers is less than the
|
||||||
|
// maximum radius, then the center of the smaller circle is inside
|
||||||
|
// the larger circle. This gives some leeway for the circles to overlap
|
||||||
|
// before being eaten.
|
||||||
|
const max_radius = @max(radius_a, radius_b);
|
||||||
|
return distance_sq <= max_radius * max_radius;
|
||||||
|
}
|
||||||
|
|
||||||
|
fn mass_to_radius(mass: u32) f32 {
|
||||||
|
return @sqrt(@as(f32, @floatFromInt(mass)));
|
||||||
|
}
|
||||||
|
|
||||||
|
fn mass_to_max_move_speed(mass: u32) f32 {
|
||||||
|
return 2.0 * @as(f32, @floatFromInt(START_PLAYER_SPEED)) / (1.0 + @sqrt(@as(f32, @floatFromInt(mass)) / @as(f32, @floatFromInt(START_PLAYER_MASS))));
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn move_all_players(ctx: *spacetime.ReducerContext, _timer: MoveAllPlayersTimer) !void {
|
||||||
|
// TODO identity check
|
||||||
|
// let span = spacetimedb::log_stopwatch::LogStopwatch::new("tick");
|
||||||
|
//std.log.info("_timer: {}", .{ _timer.scheduled_id });
|
||||||
_ = _timer;
|
_ = _timer;
|
||||||
//std.log.info("Move Players!", .{});
|
const world_size = (try ctx
|
||||||
|
.db.get("config").col("id")
|
||||||
|
.find(.{ .id = 0 })).?.world_size;
|
||||||
|
|
||||||
|
var circle_directions = std.AutoHashMap(u32, DbVector2).init(ctx.db.allocator);
|
||||||
|
var circleIter = ctx.db.get("circle").iter();
|
||||||
|
while(try circleIter.next()) |circle| {
|
||||||
|
try circle_directions.put(circle.entity_id, circle.direction.scale(circle.speed));
|
||||||
|
}
|
||||||
|
|
||||||
|
var playerIter = ctx.db.get("player").iter();
|
||||||
|
while(try playerIter.next()) |player| {
|
||||||
|
var circles = std.ArrayList(Circle).init(ctx.db.allocator);
|
||||||
|
var circlesIter1 = try ctx.db.get("circle").col("player_id")
|
||||||
|
.filter(.{ .player_id = player.player_id});
|
||||||
|
while(try circlesIter1.next()) |circle| {
|
||||||
|
try circles.append(circle);
|
||||||
|
}
|
||||||
|
|
||||||
|
var player_entities = std.ArrayList(Entity).init(ctx.db.allocator);
|
||||||
|
for(circles.items) |c| {
|
||||||
|
try player_entities.append((try ctx.db.get("entity").col("entity_id").find(.{ .entity_id = c.entity_id})).?);
|
||||||
|
}
|
||||||
|
if(player_entities.items.len <= 1) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
const count = player_entities.items.len;
|
||||||
|
|
||||||
|
// Gravitate circles towards other circles before they recombine
|
||||||
|
for(0..count) |i| {
|
||||||
|
const circle_i = circles.items[i];
|
||||||
|
const time_since_split = ctx.timestamp
|
||||||
|
.DurationSince(circle_i.last_split_time)
|
||||||
|
.as_f32(.Seconds);
|
||||||
|
const time_before_recombining = @max(SPLIT_RECOMBINE_DELAY_SEC - time_since_split, 0.0);
|
||||||
|
if(time_before_recombining > SPLIT_GRAV_PULL_BEFORE_RECOMBINE_SEC) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
const entity_i = player_entities.items[i];
|
||||||
|
for (player_entities.items) |entity_j| {
|
||||||
|
if(entity_i.entity_id == entity_j.entity_id) continue;
|
||||||
|
var diff = entity_i.position.sub(entity_j.position);
|
||||||
|
var distance_sqr = diff.sqr_magnitude();
|
||||||
|
if(distance_sqr <= 0.0001) {
|
||||||
|
diff = DbVector2{ .x = 1.0, .y = 0.0 };
|
||||||
|
distance_sqr = 1.0;
|
||||||
|
}
|
||||||
|
const radius_sum = mass_to_radius(entity_i.mass) + mass_to_radius(entity_j.mass);
|
||||||
|
if(distance_sqr > radius_sum * radius_sum) {
|
||||||
|
const gravity_multiplier =
|
||||||
|
1.0 - time_before_recombining / SPLIT_GRAV_PULL_BEFORE_RECOMBINE_SEC;
|
||||||
|
const vec = diff.normalized()
|
||||||
|
.scale(radius_sum - @sqrt(distance_sqr))
|
||||||
|
.scale(gravity_multiplier)
|
||||||
|
.scale(0.05)
|
||||||
|
.scale( 1.0 / @as(f32, @floatFromInt(count)));
|
||||||
|
circle_directions.getPtr(entity_i.entity_id).?.add_to(vec.scale( 1.0 / 2.0));
|
||||||
|
circle_directions.getPtr(entity_j.entity_id).?.sub_from(vec.scale( 1.0 / 2.0));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Force circles apart
|
||||||
|
for(0..count) |i| {
|
||||||
|
const slice2 = player_entities.items[i+1..];
|
||||||
|
const entity_i = player_entities.items[i];
|
||||||
|
for (0..slice2.len) |j| {
|
||||||
|
const entity_j = slice2[j];
|
||||||
|
var diff = entity_i.position.sub(entity_j.position);
|
||||||
|
var distance_sqr = diff.sqr_magnitude();
|
||||||
|
if(distance_sqr <= 0.0001) {
|
||||||
|
diff = DbVector2{.x = 1.0, .y = 0.0};
|
||||||
|
distance_sqr = 1.0;
|
||||||
|
}
|
||||||
|
const radius_sum = mass_to_radius(entity_i.mass) + mass_to_radius(entity_j.mass);
|
||||||
|
const radius_sum_multiplied = radius_sum * ALLOWED_SPLIT_CIRCLE_OVERLAP_PCT;
|
||||||
|
if(distance_sqr < radius_sum_multiplied * radius_sum_multiplied) {
|
||||||
|
const vec = diff.normalized()
|
||||||
|
.scale(radius_sum - @sqrt(distance_sqr))
|
||||||
|
.scale(SELF_COLLISION_SPEED);
|
||||||
|
circle_directions.getPtr(entity_i.entity_id).?.add_to(vec.scale( 1.0 / 2.0));
|
||||||
|
circle_directions.getPtr(entity_j.entity_id).?.sub_from(vec.scale( 1.0 / 2.0));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
var circleIter2 = ctx.db.get("circle").iter();
|
||||||
|
while(try circleIter2.next()) |circle| {
|
||||||
|
const circle_entity_n = (ctx.db.get("entity").col("entity_id").find(.{ .entity_id = circle.entity_id }) catch {
|
||||||
|
continue;
|
||||||
|
});
|
||||||
|
var circle_entity = circle_entity_n.?;
|
||||||
|
const circle_radius = mass_to_radius(circle_entity.mass);
|
||||||
|
const direction = circle_directions.get(circle.entity_id).?;
|
||||||
|
const new_pos = circle_entity.position.add(direction.scale(mass_to_max_move_speed(circle_entity.mass)));
|
||||||
|
const min = circle_radius;
|
||||||
|
const max = @as(f32, @floatFromInt(world_size)) - circle_radius;
|
||||||
|
circle_entity.position.x = std.math.clamp(new_pos.x, min, max);
|
||||||
|
circle_entity.position.y = std.math.clamp(new_pos.y, min, max);
|
||||||
|
try ctx.db.get("entity").col("entity_id").update(circle_entity);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check collisions
|
||||||
|
var entities = std.AutoHashMap(u32, Entity).init(ctx.db.allocator);
|
||||||
|
var entitiesIter = ctx.db.get("entity").iter();
|
||||||
|
while(try entitiesIter.next()) |e| {
|
||||||
|
try entities.put(e.entity_id, e);
|
||||||
|
}
|
||||||
|
var circleIter3 = ctx.db.get("circle").iter();
|
||||||
|
while(try circleIter3.next()) |circle| {
|
||||||
|
// let span = spacetimedb::time_span::Span::start("collisions");
|
||||||
|
var circle_entity = entities.get(circle.entity_id).?;
|
||||||
|
_ = &circle_entity;
|
||||||
|
var entityIter = entities.iterator();
|
||||||
|
while (entityIter.next()) |other_entity| {
|
||||||
|
if(other_entity.value_ptr.entity_id == circle_entity.entity_id) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if(is_overlapping(&circle_entity, other_entity.value_ptr)) {
|
||||||
|
const other_circle_n = try ctx.db.get("circle").col("entity_id").find(.{ .entity_id = other_entity.value_ptr.entity_id });
|
||||||
|
if (other_circle_n) |other_circle| {
|
||||||
|
if(other_circle.player_id != circle.player_id) {
|
||||||
|
const mass_ratio = @as(f32, @floatFromInt(other_entity.value_ptr.mass)) / @as(f32, @floatFromInt(circle_entity.mass));
|
||||||
|
if(mass_ratio < MINIMUM_SAFE_MASS_RATIO) {
|
||||||
|
try schedule_consume_entity(
|
||||||
|
ctx,
|
||||||
|
circle_entity.entity_id,
|
||||||
|
other_entity.value_ptr.entity_id,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
try schedule_consume_entity(ctx, circle_entity.entity_id, other_entity.value_ptr.entity_id);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// span.end();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn schedule_consume_entity(ctx: *spacetime.ReducerContext, consumer_id: u32, consumed_id: u32) !void {
|
||||||
|
_ = try ctx.db.get("consume_entity_timer").insert(ConsumeEntityTimer{
|
||||||
|
.scheduled_id = 0,
|
||||||
|
.scheduled_at = .{ .Time = ctx.timestamp },
|
||||||
|
.consumer_entity_id = consumer_id,
|
||||||
|
.consumed_entity_id = consumed_id,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn consume_entity(ctx: *spacetime.ReducerContext, request: ConsumeEntityTimer) !void {
|
||||||
|
const consumed_entity_n = try ctx
|
||||||
|
.db.get("entity").col("entity_id")
|
||||||
|
.find(.{ .entity_id = request.consumed_entity_id});
|
||||||
|
const consumer_entity_n = try ctx
|
||||||
|
.db.get("entity").col("entity_id")
|
||||||
|
.find(.{ .entity_id = request.consumer_entity_id});
|
||||||
|
if(consumed_entity_n == null) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if(consumer_entity_n == null) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const consumed_entity = consumed_entity_n.?;
|
||||||
|
var consumer_entity = consumer_entity_n.?;
|
||||||
|
|
||||||
|
consumer_entity.mass += consumed_entity.mass;
|
||||||
|
try destroy_entity(ctx, consumed_entity.entity_id);
|
||||||
|
try ctx.db.get("entity").col("entity_id").update(consumer_entity);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn destroy_entity(ctx: *spacetime.ReducerContext, entity_id: u32) !void {
|
||||||
|
try ctx.db.get("food").col("entity_id").delete(.{ .entity_id = entity_id});
|
||||||
|
try ctx.db.get("circle").col("entity_id").delete(.{ .entity_id = entity_id});
|
||||||
|
try ctx.db.get("entity").col("entity_id").delete(.{ .entity_id = entity_id});
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn player_split(ctx: *spacetime.ReducerContext) !void {
|
||||||
|
const player = (try ctx
|
||||||
|
.db.get("player").col("identity")
|
||||||
|
.find(.{ .identity = ctx.sender})).?;
|
||||||
|
var circles = std.ArrayList(Circle).init(ctx.db.allocator);
|
||||||
|
var circlesIter = try ctx
|
||||||
|
.db
|
||||||
|
.get("circle")
|
||||||
|
.col("player_id")
|
||||||
|
.filter(.{ .player_id = player.player_id});
|
||||||
|
while(try circlesIter.next()) |circle| {
|
||||||
|
try circles.append(circle);
|
||||||
|
}
|
||||||
|
var circle_count = circles.items.len;
|
||||||
|
if(circle_count >= MAX_CIRCLES_PER_PLAYER) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
pub const say_hello = spacetime.Reducer{ .func_type = @TypeOf(say_hello_reducer), .func = @ptrCast(&say_hello_reducer)};
|
for(circles.items) |c| {
|
||||||
|
var circle = c;
|
||||||
|
var circle_entity = (try ctx
|
||||||
|
.db
|
||||||
|
.get("entity")
|
||||||
|
.col("entity_id")
|
||||||
|
.find(.{ .entity_id = circle.entity_id})).?;
|
||||||
|
if(circle_entity.mass >= MIN_MASS_TO_SPLIT * 2) {
|
||||||
|
const half_mass = @divTrunc(circle_entity.mass, 2);
|
||||||
|
_ = try spawn_circle_at(
|
||||||
|
ctx,
|
||||||
|
circle.player_id,
|
||||||
|
half_mass,
|
||||||
|
circle_entity.position.add(circle.direction),
|
||||||
|
ctx.timestamp,
|
||||||
|
);
|
||||||
|
circle_entity.mass -= half_mass;
|
||||||
|
circle.last_split_time = ctx.timestamp;
|
||||||
|
try ctx.db.get("circle").col("entity_id").update(circle);
|
||||||
|
try ctx.db.get("entity").col("entity_id").update(circle_entity);
|
||||||
|
circle_count += 1;
|
||||||
|
if (circle_count >= MAX_CIRCLES_PER_PLAYER) {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
pub fn say_hello_reducer(ctx: *spacetime.ReducerContext) !void {
|
_ = try ctx.db
|
||||||
_ = ctx;
|
.get("circle_recombine_timer")
|
||||||
std.log.info("Hello!", .{});
|
.insert(CircleRecombineTimer {
|
||||||
|
.scheduled_id = 0,
|
||||||
|
.scheduled_at = spacetime.ScheduleAt.durationSecs(ctx, SPLIT_RECOMBINE_DELAY_SEC),
|
||||||
|
.player_id = player.player_id,
|
||||||
|
});
|
||||||
|
|
||||||
|
std.log.warn("Player split!", .{});
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn spawn_food(ctx: *spacetime.ReducerContext, _: SpawnFoodTimer) !void {
|
||||||
|
if(try ctx.db.get("player").count() == 0) {
|
||||||
|
//Are there no players yet?
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const world_size = (try ctx
|
||||||
|
.db
|
||||||
|
.get("config")
|
||||||
|
.col("id")
|
||||||
|
.find(.{ .id = 0})).?
|
||||||
|
.world_size;
|
||||||
|
|
||||||
|
var rng = ctx.rng;
|
||||||
|
var food_count = try ctx.db.get("food").count();
|
||||||
|
while (food_count < TARGET_FOOD_COUNT) {
|
||||||
|
const food_mass = gen_range(&rng, FOOD_MASS_MIN, FOOD_MASS_MAX);
|
||||||
|
const food_radius = mass_to_radius(@intFromFloat(food_mass));
|
||||||
|
const x = gen_range(&rng, food_radius, @as(f32, @floatFromInt(world_size)) - food_radius);
|
||||||
|
const y = gen_range(&rng, food_radius, @as(f32, @floatFromInt(world_size)) - food_radius);
|
||||||
|
const entity = try ctx.db.get("entity").insert(Entity {
|
||||||
|
.entity_id = 0,
|
||||||
|
.position = DbVector2{ .x = x, .y = y },
|
||||||
|
.mass = @intFromFloat(food_mass),
|
||||||
|
});
|
||||||
|
_ = try ctx.db.get("food").insert(Food {
|
||||||
|
.entity_id = entity.entity_id,
|
||||||
|
});
|
||||||
|
food_count += 1;
|
||||||
|
std.log.info("Spawned food! {}", .{entity.entity_id});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn circle_decay(ctx: *spacetime.ReducerContext, _: CircleDecayTimer) !void {
|
||||||
|
var circleIter = ctx.db.get("circle").iter();
|
||||||
|
while(try circleIter.next()) |circle| {
|
||||||
|
var circle_entity = (try ctx
|
||||||
|
.db
|
||||||
|
.get("entity")
|
||||||
|
.col("entity_id")
|
||||||
|
.find(.{ .entity_id = circle.entity_id})).?;
|
||||||
|
if(circle_entity.mass <= START_PLAYER_MASS) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
circle_entity.mass = @intFromFloat((@as(f32, @floatFromInt(circle_entity.mass)) * 0.99));
|
||||||
|
try ctx.db.get("entity").col("entity_id").update(circle_entity);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn calculate_center_of_mass(entities: []const Entity) DbVector2 {
|
||||||
|
const total_mass: u32 = blk: {
|
||||||
|
var sum: u32 = 0;
|
||||||
|
for(entities) |entity| {
|
||||||
|
sum += entity.mass;
|
||||||
|
}
|
||||||
|
break :blk sum;
|
||||||
|
};
|
||||||
|
//entities.iter().map(|e| e.position * e.mass as f32).sum();
|
||||||
|
const center_of_mass: DbVector2 = blk: {
|
||||||
|
var sum: DbVector2 = 0;
|
||||||
|
for(entities) |entity| {
|
||||||
|
sum.x += entity.position.x * @as(f32, @floatFromInt(entity.mass));
|
||||||
|
sum.y += entity.position.y * @as(f32, @floatFromInt(entity.mass));
|
||||||
|
}
|
||||||
|
break :blk sum;
|
||||||
|
};
|
||||||
|
return center_of_mass / @as(f32, @floatFromInt(total_mass));
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn circle_recombine(ctx: *spacetime.ReducerContext, timer: CircleRecombineTimer) !void {
|
||||||
|
var circles = std.ArrayList(Circle).init(ctx.db.allocator);
|
||||||
|
var circlesIter = try ctx
|
||||||
|
.db
|
||||||
|
.get("circle")
|
||||||
|
.col("player_id")
|
||||||
|
.filter(.{ .player_id = timer.player_id });
|
||||||
|
while(try circlesIter.next()) |circle| {
|
||||||
|
try circles.append(circle);
|
||||||
|
}
|
||||||
|
var recombining_entities = std.ArrayList(Entity).init(ctx.db.allocator);
|
||||||
|
for(circles.items) |circle| {
|
||||||
|
if(@as(f32, @floatFromInt(ctx.timestamp.__timestamp_micros_since_unix_epoch__ - circle.last_split_time.__timestamp_micros_since_unix_epoch__)) >= SPLIT_RECOMBINE_DELAY_SEC) {
|
||||||
|
const entity = (try ctx.db
|
||||||
|
.get("entity").col("entity_id")
|
||||||
|
.find(.{ .entity_id = circle.entity_id })).?;
|
||||||
|
try recombining_entities.append(entity);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if(recombining_entities.items.len <= 1) {
|
||||||
|
return; //No circles to recombine
|
||||||
|
}
|
||||||
|
|
||||||
|
const base_entity_id = recombining_entities.items[0].entity_id;
|
||||||
|
for(1..recombining_entities.items.len) |i| {
|
||||||
|
try schedule_consume_entity(ctx, base_entity_id, recombining_entities.items[i].entity_id);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
|
||||||
|
|
@@ -73,16 +73,42 @@ pub const Identity = struct {
 };

 pub const Timestamp = struct {
-    __timestamp_micros_since_unix_epoch__: i64
+    __timestamp_micros_since_unix_epoch__: i64,
+
+    pub fn DurationSince(self: @This(), other: @This()) TimeDuration {
+        return .{
+            .__time_duration_micros__ = other.__timestamp_micros_since_unix_epoch__ - self.__timestamp_micros_since_unix_epoch__,
+        };
+    }
+};
+
+pub const TimeUnit = enum {
+    Seconds,
 };

 pub const TimeDuration = struct {
-    __time_duration_micros__: i64
+    __time_duration_micros__: i64,
+
+    pub fn as_f32(self: @This(), unit: TimeUnit) f32 {
+        return switch(unit) {
+            .Seconds => @as(f32, @floatFromInt(self.__time_duration_micros__)) / std.time.us_per_s,
+        };
+    }
 };

 pub const ScheduleAt = union(enum){
     Interval: TimeDuration,
     Time: Timestamp,
+
+    pub fn durationSecs(ctx: *ReducerContext, secs: f32) ScheduleAt {
+        return .{
+            .Time = .{
+                .__timestamp_micros_since_unix_epoch__ =
+                    ctx.timestamp.__timestamp_micros_since_unix_epoch__ +
+                    @as(i64, @intFromFloat(secs * std.time.us_per_s)),
+            }
+        };
+    }
 };

 pub const ConnectionId = struct {
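These additions are what main.zig uses for the split/recombine timing: DurationSince plus as_f32(.Seconds) to measure elapsed seconds, and ScheduleAt.durationSecs to schedule a one-shot timer. Two small sketches of that usage (the wrapper function names here are illustrative):

    fn secondsSinceSplit(ctx: *ReducerContext, last_split_time: Timestamp) f32 {
        // Same expression move_all_players uses for its recombine-delay check.
        return ctx.timestamp.DurationSince(last_split_time).as_f32(.Seconds);
    }

    fn recombineAt(ctx: *ReducerContext, delay_secs: f32) ScheduleAt {
        // Same call player_split uses when inserting a CircleRecombineTimer row.
        return ScheduleAt.durationSecs(ctx, delay_secs);
    }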
@ -124,6 +150,9 @@ pub extern "spacetime_10.0" fn datastore_index_scan_range_bsatn( index_id: Index
|
||||||
pub extern "spacetime_10.0" fn row_iter_bsatn_close(iter: RowIter) u16;
|
pub extern "spacetime_10.0" fn row_iter_bsatn_close(iter: RowIter) u16;
|
||||||
|
|
||||||
pub extern "spacetime_10.0" fn datastore_delete_by_index_scan_range_bsatn(index_id: IndexId, prefix_ptr: [*c]const u8, prefix_len: usize, prefix_elems: ColId, rstart_ptr: [*c]const u8, rstart_len: usize, rend_ptr: [*c]const u8, rend_len: usize, out: [*c]u32) u16;
|
pub extern "spacetime_10.0" fn datastore_delete_by_index_scan_range_bsatn(index_id: IndexId, prefix_ptr: [*c]const u8, prefix_len: usize, prefix_elems: ColId, rstart_ptr: [*c]const u8, rstart_len: usize, rend_ptr: [*c]const u8, rend_len: usize, out: [*c]u32) u16;
|
||||||
|
pub extern "spacetime_10.0" fn datastore_update_bsatn(table_id: TableId, index_id: IndexId, row_ptr: [*c]u8, row_len_ptr: [*c]usize) u16;
|
||||||
|
|
||||||
|
pub extern "spacetime_10.0" fn datastore_table_row_count(table_id: TableId, out: [*c]u64) u16;
|
||||||
|
|
||||||
pub fn retMap(errVal: i17) !SpacetimeValue {
|
pub fn retMap(errVal: i17) !SpacetimeValue {
|
||||||
return switch(errVal) {
|
return switch(errVal) {
|
||||||
|
|
@ -239,16 +268,14 @@ pub fn readArg(allocator: std.mem.Allocator, args: BytesSource, comptime t: type
|
||||||
const tagType = std.meta.Tag(t);
|
const tagType = std.meta.Tag(t);
|
||||||
const intType = u8;
|
const intType = u8;
|
||||||
const tag: tagType = @enumFromInt(try readArg(allocator, args, intType));
|
const tag: tagType = @enumFromInt(try readArg(allocator, args, intType));
|
||||||
var temp: t = undefined;//@unionInit(t, @tagName(tag), undefined);
|
|
||||||
switch(tag) {
|
switch(tag) {
|
||||||
inline else => |tag_field| {
|
inline else => |tag_field| {
|
||||||
|
var temp: t = @unionInit(t, @tagName(tag_field), undefined);
|
||||||
const field = std.meta.fields(t)[@intFromEnum(tag_field)];
|
const field = std.meta.fields(t)[@intFromEnum(tag_field)];
|
||||||
@field(temp, field.name) = (try readArg(allocator, args, field.type));
|
@field(temp, field.name) = (try readArg(allocator, args, field.type));
|
||||||
|
|
||||||
}
|
|
||||||
}
|
|
||||||
//@field(temp, field.name) = try readArg(allocator, args, @TypeOf(field));
|
|
||||||
return temp;
|
return temp;
|
||||||
|
}
|
||||||
|
}
|
||||||
},
|
},
|
||||||
else => {
|
else => {
|
||||||
@compileLog(t);
|
@compileLog(t);
|
||||||
|
|
@ -320,7 +347,7 @@ const StructImpl = struct {
|
||||||
fields: []const StructFieldImpl,
|
fields: []const StructFieldImpl,
|
||||||
};
|
};
|
||||||
|
|
||||||
pub fn addStructImpl(structImpls: *[]const StructImpl, layout: anytype) u32 {
|
pub fn addStructImpl(comptime structImpls: *[]const StructImpl, layout: anytype) u32 {
|
||||||
const name = blk: {
|
const name = blk: {
|
||||||
var temp: []const u8 = @typeName(layout);
|
var temp: []const u8 = @typeName(layout);
|
||||||
if(std.mem.lastIndexOf(u8, temp, ".")) |idx|
|
if(std.mem.lastIndexOf(u8, temp, ".")) |idx|
|
||||||
|
|
@ -330,6 +357,7 @@ pub fn addStructImpl(structImpls: *[]const StructImpl, layout: anytype) u32 {
|
||||||
|
|
||||||
//FIXME: Search for existing structImpl of provided layout. I think the current might work, but I don't trust it.
|
//FIXME: Search for existing structImpl of provided layout. I think the current might work, but I don't trust it.
|
||||||
inline for(structImpls.*, 0..) |structImpl, i| {
|
inline for(structImpls.*, 0..) |structImpl, i| {
|
||||||
|
@setEvalBranchQuota(structImpl.name.len * 100);
|
||||||
if(std.mem.eql(u8, structImpl.name, name)) {
|
if(std.mem.eql(u8, structImpl.name, name)) {
|
||||||
return i;
|
return i;
|
||||||
}
|
}
|
||||||
|
|
@ -399,6 +427,7 @@ pub fn getStructImplOrType(structImpls: []const StructImpl, layout: type) Algebr
|
||||||
break :blk temp;
|
break :blk temp;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@setEvalBranchQuota(structImpls.len * 100);
|
||||||
inline for(structImpls, 0..) |structImpl, i| {
|
inline for(structImpls, 0..) |structImpl, i| {
|
||||||
if(std.mem.eql(u8, structImpl.name, name)) {
|
if(std.mem.eql(u8, structImpl.name, name)) {
|
||||||
return .{
|
return .{
|
||||||
|
|
@ -412,179 +441,7 @@ pub fn getStructImplOrType(structImpls: []const StructImpl, layout: type) Algebr
|
||||||
return zigTypeToSpacetimeType(layout);
|
return zigTypeToSpacetimeType(layout);
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn compile(comptime moduleTables : []const Table, comptime moduleReducers : []const Reducer) !RawModuleDefV9 {
|
pub fn callReducer(comptime mdef: []const SpecReducer, comptime id: usize, args: anytype) ReducerError!void {
|
||||||
var def : RawModuleDefV9 = undefined;
|
|
||||||
_ = &def;
|
|
||||||
|
|
||||||
var tableDefs: []const RawTableDefV9 = &[_]RawTableDefV9{};
|
|
||||||
var reducerDefs: []const RawReducerDefV9 = &[_]RawReducerDefV9{};
|
|
||||||
|
|
||||||
var raw_types: []const AlgebraicType = &[_]AlgebraicType{};
|
|
||||||
var types: []const RawTypeDefV9 = &[_]RawTypeDefV9{};
|
|
||||||
|
|
||||||
var structDecls: []const StructImpl = &[_]StructImpl{};
|
|
||||||
|
|
||||||
inline for(moduleTables) |table| {
|
|
||||||
const table_name: []const u8 = table.name.?;
|
|
||||||
const table_type: TableType = table.type;
|
|
||||||
const table_access: TableAccess = table.access;
|
|
||||||
const product_type_ref: AlgebraicTypeRef = AlgebraicTypeRef{
|
|
||||||
.inner = addStructImpl(&structDecls, table.schema),
|
|
||||||
};
|
|
||||||
const primary_key: []const u16 = blk: {
|
|
||||||
if(table.primary_key) |key| {
|
|
||||||
break :blk &[_]u16{ std.meta.fieldIndex(table.schema, key).?, };
|
|
||||||
}
|
|
||||||
break :blk &[_]u16{};
|
|
||||||
};
|
|
||||||
|
|
||||||
var indexes: []const RawIndexDefV9 = &[_]RawIndexDefV9{};
|
|
||||||
if(table.primary_key) |key| {
|
|
||||||
indexes = indexes ++ &[_]RawIndexDefV9{
|
|
||||||
RawIndexDefV9{
|
|
||||||
.name = null,
|
|
||||||
.accessor_name = key,
|
|
||||||
.algorithm = .{
|
|
||||||
.BTree = &.{ 0 }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
if(table.indexes) |_indexes| {
|
|
||||||
inline for(_indexes) |index| {
|
|
||||||
|
|
||||||
const fieldIndex = std.meta.fieldIndex(table.schema, index.name).?;
|
|
||||||
|
|
||||||
const indexAlgo: RawIndexAlgorithm = blk: {
|
|
||||||
switch(index.layout) {
|
|
||||||
.BTree => break :blk .{ .BTree = &.{ fieldIndex } },
|
|
||||||
.Hash => break :blk .{ .Hash = &.{ fieldIndex } },
|
|
||||||
.Direct => break :blk .{ .Direct = fieldIndex },
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
indexes = indexes ++ &[_]RawIndexDefV9{
|
|
||||||
RawIndexDefV9{
|
|
||||||
.name = null,
|
|
||||||
.accessor_name = index.name,
|
|
||||||
.algorithm = indexAlgo
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
var constraints: []const RawConstraintDefV9 = &[_]RawConstraintDefV9{};
|
|
||||||
if(table.primary_key) |_| {
|
|
||||||
constraints = constraints ++ &[_]RawConstraintDefV9{
|
|
||||||
RawConstraintDefV9{
|
|
||||||
.name = null,
|
|
||||||
.data = .{ .unique = .{ .Columns = &.{ primary_key[0] } } },
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
const schedule: ?RawScheduleDefV9 = schedule_blk: {
|
|
||||||
if(table.schedule_reducer == null) break :schedule_blk null;
|
|
||||||
const column = column_blk: for(std.meta.fields(table.schema), 0..) |field, i| {
|
|
||||||
if(field.type == ScheduleAt) break :column_blk i;
|
|
||||||
};
|
|
||||||
const resolvedReducer = blk: for(moduleReducers) |reducer| {
|
|
||||||
if(reducer.func == table.schedule_reducer.?.func)
|
|
||||||
break :blk reducer;
|
|
||||||
};
|
|
||||||
break :schedule_blk RawScheduleDefV9{
|
|
||||||
.name = table_name ++ "_sched",
|
|
||||||
.reducer_name = resolvedReducer.name.?,
|
|
||||||
.scheduled_at_column = column,
|
|
||||||
};
|
|
||||||
};
|
|
||||||
|
|
||||||
tableDefs = tableDefs ++ &[_]RawTableDefV9{
|
|
||||||
.{
|
|
||||||
.name = table_name,
|
|
||||||
.product_type_ref = product_type_ref,
|
|
||||||
.primary_key = primary_key,
|
|
||||||
.indexes = indexes,
|
|
||||||
.constraints = constraints,
|
|
||||||
.sequences = &[_]RawSequenceDefV9{},
|
|
||||||
.schedule = schedule,
|
|
||||||
.table_type = table_type,
|
|
||||||
.table_access = table_access,
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
inline for(structDecls) |structDecl| {
|
|
||||||
var product_elements: []const ProductTypeElement = &[_]ProductTypeElement{};
|
|
||||||
|
|
||||||
inline for(structDecl.fields) |field|
|
|
||||||
{
|
|
||||||
product_elements = product_elements ++ &[_]ProductTypeElement{
|
|
||||||
.{
|
|
||||||
.name = field.name,
|
|
||||||
.algebraic_type = field.type,
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
raw_types = raw_types ++ &[_]AlgebraicType{
|
|
||||||
.{
|
|
||||||
.Product = .{
|
|
||||||
.elements = product_elements,
|
|
||||||
}
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
|
||||||
types = types ++ &[_]RawTypeDefV9{
|
|
||||||
.{
|
|
||||||
.name = .{
|
|
||||||
.scope = &[_][]u8{},
|
|
||||||
.name = structDecl.name
|
|
||||||
},
|
|
||||||
.ty = .{ .inner = raw_types.len-1, },
|
|
||||||
.custom_ordering = true,
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
inline for(moduleReducers) |reducer| {
|
|
||||||
const name: []const u8 = reducer.name.?;
|
|
||||||
const lifecycle: Lifecycle = reducer.lifecycle;
|
|
||||||
|
|
||||||
var params: []const ProductTypeElement = &[_]ProductTypeElement{};
|
|
||||||
const param_names = reducer.params;
|
|
||||||
|
|
||||||
for(@typeInfo(reducer.func_type).@"fn".params[1..], param_names) |param, param_name| {
|
|
||||||
params = params ++ &[_]ProductTypeElement{
|
|
||||||
.{
|
|
||||||
.name = param_name,
|
|
||||||
.algebraic_type = getStructImplOrType(structDecls, param.type.?),
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
reducerDefs = reducerDefs ++ &[_]RawReducerDefV9{
|
|
||||||
.{
|
|
||||||
.name = name,
|
|
||||||
.params = .{ .elements = params },
|
|
||||||
.lifecycle = lifecycle,
|
|
||||||
},
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
return .{
|
|
||||||
.typespace = .{
|
|
||||||
.types = raw_types,
|
|
||||||
},
|
|
||||||
.tables = tableDefs,
|
|
||||||
.reducers = reducerDefs,
|
|
||||||
.types = types,
|
|
||||||
.misc_exports = &[_]RawMiscModuleExportV9{},
|
|
||||||
.row_level_security = &[_]RawRowLevelSecurityDefV9{},
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn callReducer(comptime mdef: []const Reducer, id: usize, args: anytype) ReducerError!void {
|
|
||||||
inline for(mdef, 0..) |field, i| {
|
inline for(mdef, 0..) |field, i| {
|
||||||
if(id == i) {
|
if(id == i) {
|
||||||
const func = field.func_type;
|
const func = field.func_type;
|
||||||
|
|
@ -593,7 +450,7 @@ pub fn callReducer(comptime mdef: []const Reducer, id: usize, args: anytype) Red
|
||||||
return @call(.auto, func_val, args);
|
return @call(.auto, func_val, args);
|
||||||
}
|
}
|
||||||
|
|
||||||
const name: []const u8 = field.name.?;
|
const name: []const u8 = field.name;
|
||||||
std.log.err("invalid number of args passed to {s}, expected {} got {}", .{name, @typeInfo(func).@"fn".params.len, std.meta.fields(@TypeOf(args)).len});
|
std.log.err("invalid number of args passed to {s}, expected {} got {}", .{name, @typeInfo(func).@"fn".params.len, std.meta.fields(@TypeOf(args)).len});
|
||||||
@panic("invalid number of args passed to func");
|
@panic("invalid number of args passed to func");
|
||||||
}
|
}
|
||||||
|
|
@ -601,8 +458,7 @@ pub fn callReducer(comptime mdef: []const Reducer, id: usize, args: anytype) Red
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn PrintModule(data: anytype) void {
|
pub fn PrintModule(data: anytype) void {
|
||||||
var buf: [64]u8 = undefined;
|
std.log.debug("\"{s}\": {{", .{@typeName(@TypeOf(data))});
|
||||||
std.log.debug(std.fmt.bufPrint(&buf, "\"{s}\": {{", .{@typeName(@TypeOf(data))}) catch "<Error>");
|
|
||||||
switch(@TypeOf(data)) {
|
switch(@TypeOf(data)) {
|
||||||
RawModuleDefV9 => {
|
RawModuleDefV9 => {
|
||||||
PrintModule(data.typespace);
|
PrintModule(data.typespace);
|
||||||
|
|
@ -652,30 +508,43 @@ pub fn PrintModule(data: anytype) void {
|
||||||
PrintModule(data.scope);
|
PrintModule(data.scope);
|
||||||
PrintModule(data.name);
|
PrintModule(data.name);
|
||||||
},
|
},
|
||||||
|
[]const RawReducerDefV9 => {
|
||||||
|
for(data) |elem| {
|
||||||
|
PrintModule(elem);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
RawReducerDefV9 => {
|
||||||
|
PrintModule(data.lifecycle);
|
||||||
|
PrintModule(data.name);
|
||||||
|
PrintModule(data.params);
|
||||||
|
},
|
||||||
|
Lifecycle => {
|
||||||
|
std.log.debug("\"{any}\"", .{data});
|
||||||
|
},
|
||||||
[][]const u8 => {
|
[][]const u8 => {
|
||||||
for(data) |elem| {
|
for(data) |elem| {
|
||||||
PrintModule(elem);
|
PrintModule(elem);
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
[]const u8 => {
|
[]const u8 => {
|
||||||
std.log.debug(std.fmt.bufPrint(&buf, "\"{s}\"", .{data}) catch "<Error>");
|
std.log.debug("\"{s}\"", .{data});
|
||||||
},
|
},
|
||||||
u32 => {
|
u32 => {
|
||||||
std.log.debug(std.fmt.bufPrint(&buf, "{}", .{data}) catch "<Error>");
|
std.log.debug("{}", .{data});
|
||||||
},
|
},
|
||||||
else => {
|
else => {
|
||||||
std.log.debug("\"...\"");
|
std.log.debug("\"...\"", .{});
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
std.log.debug("},");
|
std.log.debug("}},", .{});
|
||||||
}
|
}
|
||||||
|
|
||||||
pub const Param = struct {
|
pub const Param = struct {
|
||||||
name: []const u8,
|
name: []const u8,
|
||||||
};
|
};
|
||||||
|
|
||||||
pub const Reducer = struct {
|
pub const SpecReducer = struct {
|
||||||
name: ?[]const u8 = null,
|
name: []const u8,
|
||||||
lifecycle: Lifecycle = .None,
|
lifecycle: Lifecycle = .None,
|
||||||
params: []const [:0]const u8 = &.{},
|
params: []const [:0]const u8 = &.{},
|
||||||
param_types: ?[]type = null,
|
param_types: ?[]type = null,
|
||||||
|
|
@@ -683,65 +552,249 @@ pub const Reducer = struct {
     func: *const fn()void,
 };

+pub fn Reducer(data: anytype) SpecReducer {
+    return .{
+        .name = data.name,
+        .lifecycle = if(@hasField(@TypeOf(data), "lifecycle")) data.lifecycle else .None,
+        .params = if(@hasField(@TypeOf(data), "params")) data.params else &.{},
+        .func = @ptrCast(data.func),
+        .func_type = @TypeOf(data.func.*)
+    };
+}

 pub const Index = struct {
     name: []const u8,
     layout: std.meta.Tag(RawIndexAlgorithm),
 };

-pub const Table = struct {
-    name: ?[]const u8 = null,
-    schema: type,
+pub const TableAttribs = struct {
     type: TableType = .User,
     access: TableAccess = .Private,
     primary_key: ?[]const u8 = null,
-    schedule_reducer: ?*const Reducer = null,
+    schedule: ?[]const u8 = null,
     indexes: ?[]const Index = null,
     unique: ?[]const []const u8 = null,
-    autoinc: ?[]const []const u8 = null,
+    autoinc: ?[]const [:0]const u8 = null,
 };

-pub const reducers: []const Reducer = blk: {
-    var temp: []const Reducer = &.{};
+pub const Table = struct {
+    name: []const u8,
+    schema: type,
+    attribs: TableAttribs = .{},
+};
+
+pub const Spec = struct {
+    tables: []const Table,
+    reducers: []const SpecReducer,
+    includes: []const Spec = &.{},
+};
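With `SpecReducer`, `Reducer()`, `TableAttribs`, `Table`, and `Spec` in place, a module now describes its whole schema as one comptime value that the binding discovers through `globalSpec` further down. A rough sketch of what a consuming root file might declare under this API (the `Message` schema, the "message" table, and the `say_hello` reducer are placeholders for illustration, not part of this commit):

const std = @import("std");
const spacetime = @import("spacetime.zig");

// Placeholder row type for the example table.
const Message = struct {
    id: u64,
    text: []const u8,
};

// Placeholder reducer; every reducer receives *ReducerContext first.
fn say_hello(ctx: *spacetime.ReducerContext) void {
    _ = ctx;
    std.log.info("hello", .{});
}

pub const spec = spacetime.Spec{
    .tables = &.{
        spacetime.Table{
            .name = "message",
            .schema = Message,
            .attribs = .{ .access = .Public, .primary_key = "id", .autoinc = &.{"id"} },
        },
    },
    .reducers = &.{
        spacetime.Reducer(.{ .name = "say_hello", .func = &say_hello }),
    },
};

Any public root declaration whose type is `Spec` is picked up; the declaration's name itself is not significant.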
+pub fn SpecBuilder(comptime spec: Spec) RawModuleDefV9 {
+    comptime {
+        //var moduleDef: RawModuleDefV9 = undefined;
+        var tableDefs: []const RawTableDefV9 = &[_]RawTableDefV9{};
+        var reducerDefs: []const RawReducerDefV9 = &[_]RawReducerDefV9{};
+
+        var raw_types: []const AlgebraicType = &[_]AlgebraicType{};
+        var types: []const RawTypeDefV9 = &[_]RawTypeDefV9{};
+
+        var structDecls: []const StructImpl = &[_]StructImpl{};
+
+        for(spec.tables) |table| {
+            const table_name: []const u8 = table.name;
+            const table_type: TableType = table.attribs.type;
+            const table_access: TableAccess = table.attribs.access;
+            const product_type_ref: AlgebraicTypeRef = AlgebraicTypeRef{
+                .inner = addStructImpl(&structDecls, table.schema),
+            };
+            const primary_key: []const u16 = blk: {
+                if(table.attribs.primary_key) |key| {
+                    const fieldIdx = std.meta.fieldIndex(table.schema, key);
+                    if(fieldIdx == null) {
+                        @compileLog(table.schema, key);
+                        @compileError("Primary Key `" ++ table_name ++ "." ++ key ++ "` does not exist in table schema `"++@typeName(table.schema)++"`!");
+                    }
+                    break :blk &[_]u16{ fieldIdx.?, };
+                }
+                break :blk &[_]u16{};
+            };
+
+            var indexes: []const RawIndexDefV9 = &[_]RawIndexDefV9{};
+            if(table.attribs.primary_key) |key| {
+                indexes = indexes ++ &[_]RawIndexDefV9{
+                    RawIndexDefV9{
+                        .name = null,
+                        .accessor_name = key,
+                        .algorithm = .{
+                            .BTree = &.{ 0 }
+                        }
+                    }
+                };
+            }
+            if(table.attribs.indexes) |_indexes| {
+                for(_indexes) |index| {
+
+                    const fieldIndex = std.meta.fieldIndex(table.schema, index.name).?;
+
+                    const indexAlgo: RawIndexAlgorithm = blk: {
+                        switch(index.layout) {
+                            .BTree => break :blk .{ .BTree = &.{ fieldIndex } },
+                            .Hash => break :blk .{ .Hash = &.{ fieldIndex } },
+                            .Direct => break :blk .{ .Direct = fieldIndex },
+                        }
+                    };
+
+                    indexes = indexes ++ &[_]RawIndexDefV9{
+                        RawIndexDefV9{
+                            .name = null,
+                            .accessor_name = index.name,
+                            .algorithm = indexAlgo
+                        }
+                    };
+                }
+            }
+
+            var constraints: []const RawConstraintDefV9 = &[_]RawConstraintDefV9{};
+            if(table.attribs.primary_key) |_| {
+                constraints = constraints ++ &[_]RawConstraintDefV9{
+                    RawConstraintDefV9{
+                        .name = null,
+                        .data = .{ .unique = .{ .Columns = &.{ primary_key[0] } } },
+                    }
+                };
+            }
+
+            const schedule: ?RawScheduleDefV9 = schedule_blk: {
+                if(table.attribs.schedule == null) break :schedule_blk null;
+                const column = column_blk: for(std.meta.fields(table.schema), 0..) |field, i| {
+                    if(field.type == ScheduleAt) break :column_blk i;
+                };
+                const resolvedReducer = blk: {
+                    for(spec.reducers) |reducer| {
+                        if(std.mem.eql(u8, reducer.name, table.attribs.schedule.?))
+                            break :blk reducer;
+                    }
+                    @compileError("Reducer of name `"++table.attribs.schedule.?++"` does not exist!");
+                };
+                break :schedule_blk RawScheduleDefV9{
+                    .name = table_name ++ "_sched",
+                    .reducer_name = resolvedReducer.name,
+                    .scheduled_at_column = column,
+                };
+            };
+
+            var sequences: []const RawSequenceDefV9 = &[_]RawSequenceDefV9{};
+            if(table.attribs.autoinc) |autoincs| {
+                for(autoincs) |autoinc| {
+                    sequences = sequences ++ &[_]RawSequenceDefV9{
+                        RawSequenceDefV9{
+                            .name = table_name ++ "_" ++ autoinc ++ "_seq",
+                            .column = std.meta.fieldIndex(table.schema, autoinc).?,
+                            .start = null,
+                            .min_value = null,
+                            .max_value = null,
+                            .increment = 1,
+                        }
+                    };
+                }
+            }
+
+            tableDefs = tableDefs ++ &[1]RawTableDefV9{
+                .{
+                    .name = table_name,
+                    .product_type_ref = product_type_ref,
+                    .primary_key = primary_key,
+                    .indexes = indexes,
+                    .constraints = constraints,
+                    .sequences = sequences,
+                    .schedule = schedule,
+                    .table_type = table_type,
+                    .table_access = table_access,
+                }
+            };
+        }
+
+        @setEvalBranchQuota(structDecls.len * 100);
+        for(structDecls) |structDecl| {
+            var product_elements: []const ProductTypeElement = &[_]ProductTypeElement{};
+
+            for(structDecl.fields) |field|
+            {
+                product_elements = product_elements ++ &[_]ProductTypeElement{
+                    .{
+                        .name = field.name,
+                        .algebraic_type = field.type,
+                    }
+                };
+            }
+
+            raw_types = raw_types ++ &[_]AlgebraicType{
+                .{
+                    .Product = .{
+                        .elements = product_elements,
+                    }
+                },
+            };
+
+            types = types ++ &[_]RawTypeDefV9{
+                .{
+                    .name = .{
+                        .scope = &[_][]u8{},
+                        .name = structDecl.name
+                    },
+                    .ty = .{ .inner = raw_types.len-1, },
+                    .custom_ordering = true,
+                }
+            };
+        }
+
+        for(spec.reducers) |reducer| {
+            const name: []const u8 = reducer.name;
+            const lifecycle: Lifecycle = reducer.lifecycle;
+
+            var params: []const ProductTypeElement = &[_]ProductTypeElement{};
+            const param_names = reducer.params;
+
+            for(@typeInfo(reducer.func_type).@"fn".params[1..], param_names) |param, param_name| {
+                params = params ++ &[_]ProductTypeElement{
+                    .{
+                        .name = param_name,
+                        .algebraic_type = getStructImplOrType(structDecls, param.type.?),
+                    }
+                };
+            }
+
+            reducerDefs = reducerDefs ++ &[_]RawReducerDefV9{
+                .{
+                    .name = name,
+                    .params = .{ .elements = params },
+                    .lifecycle = lifecycle,
+                },
+            };
+        }
+
+        return .{
+            .typespace = .{
+                .types = raw_types,
+            },
+            .tables = tableDefs,
+            .reducers = reducerDefs,
+            .types = types,
+            .misc_exports = &[_]RawMiscModuleExportV9{},
+            .row_level_security = &[_]RawRowLevelSecurityDefV9{},
+        };
+    }
+}
+
+pub const globalSpec: Spec = blk: {
     const root = @import("root");
     for(@typeInfo(root).@"struct".decls) |decl| {
         const field = @field(root, decl.name);
-        if(@TypeOf(@field(root, decl.name)) == Reducer) {
-            temp = temp ++ &[_]Reducer{
-                Reducer{
-                    .name = field.name orelse decl.name,
-                    .lifecycle = field.lifecycle,
-                    .params = field.params,
-                    .func = field.func,
-                    .func_type = field.func_type,
-                }
-            };
+        if(@TypeOf(field) == Spec) {
+            break :blk field;
         }
     }
-    break :blk temp;
-};
-
-pub const tables: []const Table = blk: {
-    var temp: []const Table = &.{};
-    const root = @import("root");
-    for(@typeInfo(root).@"struct".decls) |decl| {
-        const field = @field(root, decl.name);
-        if(@TypeOf(@field(root, decl.name)) == Table) {
-            temp = temp ++ &[_]Table{
-                Table{
-                    .type = field.type,
-                    .access = field.access,
-                    .schema = field.schema,
-                    .name = field.name orelse decl.name,
-                    .primary_key = field.primary_key,
-                    .schedule_reducer = field.schedule_reducer,
-                    .indexes = field.indexes,
-                    .autoinc = field.autoinc,
-                    .unique = field.unique,
-                }
-            };
-        }
-    }
-    break :blk temp;
+    @compileError("No spacetime spec found in root file!");
 };

 pub export fn __describe_module__(description: BytesSink) void {
@@ -751,13 +804,7 @@ pub export fn __describe_module__(description: BytesSink) void {
     var moduleDefBytes = std.ArrayList(u8).init(allocator);
     defer moduleDefBytes.deinit();

-    const compiledModule = comptime compile(tables, reducers) catch |err| {
-        var buf: [1024]u8 = undefined;
-        const fmterr = std.fmt.bufPrint(&buf, "Error: {}", .{err}) catch {
-            @compileError("ERROR2: No Space Left! Expand error buffer size!");
-        };
-        @compileError(fmterr);
-    };
+    const compiledModule = comptime SpecBuilder(globalSpec);

     //PrintModule(compiledModule);

@@ -794,6 +841,18 @@ pub export fn __call_reducer__(
         },
     };

+    const spec: Spec = blk: {
+        const root = @import("root");
+        inline for(@typeInfo(root).@"struct".decls) |decl| {
+            const field = @field(root, decl.name);
+            if(@TypeOf(field) == Spec) {
+                break :blk field;
+            }
+        }
+    };
+
+    const reducers = spec.reducers;
+
     inline for(reducers, 0..) |reducer, i| {
         if(id == i) {
             const func = reducer.func_type;
@@ -849,7 +908,7 @@ pub export fn __call_reducer__(
             }

             callReducer(reducers, i, constructedArg) catch |errRet| {
-                std.log.debug("{s}", .{@errorName(errRet)});
+                std.log.err("{s}", .{@errorName(errRet)});
                 if (@errorReturnTrace()) |trace| {
                     std.debug.dumpStackTrace(trace.*);
                 }
@@ -153,9 +153,18 @@ fn serialize_raw_constraint_def_v9(array: *std.ArrayList(u8), val: RawConstraint
 }

 fn serialize_raw_sequence_def_v9(array: *std.ArrayList(u8), val: RawSequenceDefV9) !void {
-    _ = array;
-    _ = val;
-    unreachable;
+    try array.appendSlice(&[_]u8{ @intFromBool(val.name == null) });
+    if(val.name) |name| {
+        try array.appendSlice(&std.mem.toBytes(@as(u32, @intCast(name.len))));
+        try array.appendSlice(name);
+    }
+    try array.appendSlice(&std.mem.toBytes(@as(u16, @intCast(val.column))));
+    try array.appendSlice(&[_]u8{ @intFromBool(val.start == null) });
+    try array.appendSlice(&[_]u8{ @intFromBool(val.min_value == null) });
+    if(val.min_value != null) undefined;
+    try array.appendSlice(&[_]u8{ @intFromBool(val.max_value == null) });
+    if(val.max_value != null) undefined;
+    try array.appendSlice(&std.mem.toBytes(@as(i128, @intCast(val.increment))));
 }

 fn serialize_raw_schedule_def_v9(array: *std.ArrayList(u8), val: RawScheduleDefV9) !void {
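The rewritten sequence serializer follows the binding's option encoding: a one-byte tag written with `@intFromBool(value == null)` (0 means a value follows, 1 means null), then the value's raw bytes. A minimal stand-alone helper in the same style, for illustration only and not part of the diff:

const std = @import("std");

// Encode an optional u32 with a leading presence tag, mirroring the
// pattern used by serialize_raw_sequence_def_v9 above.
fn writeOptionalU32(array: *std.ArrayList(u8), maybe: ?u32) !void {
    try array.appendSlice(&[_]u8{ @intFromBool(maybe == null) });
    if (maybe) |v| {
        try array.appendSlice(&std.mem.toBytes(v));
    }
}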
@@ -285,10 +285,10 @@ pub fn UnionDeserializer(union_type: type) fn(allocator: std.mem.Allocator, *[]c
     }.deserialize;
 }

-pub fn StructDeserializer(struct_type: type) fn(allocator: std.mem.Allocator, *[]u8) std.mem.Allocator.Error!*struct_type {
+pub fn StructDeserializer(struct_type: type) fn(allocator: std.mem.Allocator, *[]u8) std.mem.Allocator.Error!struct_type {
     return struct {
-        pub fn deserialize(allocator: std.mem.Allocator, data: *[]u8) std.mem.Allocator.Error!*struct_type {
-            const ret = try allocator.create(struct_type);
+        pub fn deserialize(allocator: std.mem.Allocator, data: *[]u8) std.mem.Allocator.Error!struct_type {
+            var ret: struct_type = undefined;
             var offset_mem = data.*;
             const fields = std.meta.fields(struct_type);
             inline for(fields) |field| {
@@ -296,22 +296,22 @@ pub fn StructDeserializer(struct_type: type) fn(allocator: std.mem.Allocator, *[
                 []const u8 => {
                     const len = std.mem.bytesAsValue(u32, offset_mem[0..4]).*;
                     const str = try allocator.dupe(u8, offset_mem[4..(4+len)]);
-                    @field(ret.*, field.name) = str;
+                    @field(ret, field.name) = str;
                     offset_mem = offset_mem[4+len ..];
                 },
                 i8, u8, i16, u16, i32, u32,
                 i64, u64, i128, u128, i256, u256,
                 f32, f64 => {
-                    std.log.debug("field_type: {} (offset_mem.len: {})", .{field.type, offset_mem.len});
-                    @field(ret.*, field.name) = std.mem.bytesAsValue(field.type, offset_mem[0..@sizeOf(field.type)]).*;
+                    //std.log.debug("field_type: {} (offset_mem.len: {})", .{field.type, offset_mem.len});
+                    @field(ret, field.name) = std.mem.bytesAsValue(field.type, offset_mem[0..@sizeOf(field.type)]).*;
                     offset_mem = offset_mem[@sizeOf(field.type)..];
                 },
                 else => blk: {
                     if(@typeInfo(field.type) == .@"struct") {
-                        @field(ret.*, field.name) = (try StructDeserializer(field.type)(allocator, &offset_mem)).*;
+                        @field(ret, field.name) = try StructDeserializer(field.type)(allocator, &offset_mem);
                         break :blk;
                     } else if(@typeInfo(field.type) == .@"union") {
-                        @field(ret.*, field.name) = (try UnionDeserializer(field.type)(allocator, &offset_mem)).*;
+                        @field(ret, field.name) = try UnionDeserializer(field.type)(allocator, &offset_mem);
                         break :blk;
                     }
                     @compileLog(field.type);
@@ -320,7 +320,7 @@ pub fn StructDeserializer(struct_type: type) fn(allocator: std.mem.Allocator, *[
                 }
             }
             data.* = offset_mem;
-            std.log.debug("StructDeserializer Ended!", .{});
+            //std.log.debug("StructDeserializer Ended!", .{});
             return ret;
         }
     }.deserialize;
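`StructDeserializer` now returns the row by value instead of an allocator-owned pointer; only slice fields still borrow from the allocator. A self-contained sketch of the new call shape (the `Pair` type and the hand-built byte buffer are made up for the example, and a little-endian target, such as wasm, is assumed):

const std = @import("std");
const spacetime = @import("spacetime.zig");

// Hypothetical two-field schema used only for this round-trip demo.
const Pair = struct { a: u32, b: u64 };

fn demo(allocator: std.mem.Allocator) !void {
    // Build the raw field bytes in declaration order, as the deserializer expects.
    var raw = [_]u8{0} ** (@sizeOf(u32) + @sizeOf(u64));
    std.mem.writeInt(u32, raw[0..4], 7, .little);
    std.mem.writeInt(u64, raw[4..12], 9, .little);

    var bytes: []u8 = raw[0..];
    const pair: Pair = try spacetime.StructDeserializer(Pair)(allocator, &bytes);
    std.debug.assert(pair.a == 7 and pair.b == 9);
    std.debug.assert(bytes.len == 0); // the slice was advanced past the consumed row
}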
@@ -341,11 +341,11 @@ pub fn Iter(struct_type: type) type {
     return struct {
         allocator: std.mem.Allocator,
         handle: spacetime.RowIter,
-        buffer: [0x20_000]u8 = undefined,
+        buffer: [0x5_000]u8 = undefined,
         contents: ?[]u8 = null,
         last_ret: SpacetimeValue = .OK,

-        pub fn next(self: *@This()) spacetime.ReducerError!?*struct_type {
+        pub fn next(self: *@This()) spacetime.ReducerError!?struct_type {
             var buffer_len: usize = undefined;
             var ret: spacetime.SpacetimeValue = self.last_ret;
             if(self.contents == null or self.contents.?.len == 0) {
@@ -367,10 +367,10 @@ pub fn Iter(struct_type: type) type {
                 return null;
             }

-            return StructDeserializer(struct_type)(self.allocator, &(self.contents.?));
+            return try StructDeserializer(struct_type)(self.allocator, &(self.contents.?));
         }

-        pub fn one_or_null(self: *@This()) ?*struct_type {
+        pub fn one_or_null(self: *@This()) ?struct_type {
             defer self.close();
             return self.next() catch null;
         }
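Since `next` now also yields rows by value, scanning a table iterator reads as a plain optional-unwrapping loop. An illustrative sketch, assuming the usual `std`/`spacetime.zig` imports; the "entity" table name and the `allocator` are placeholders:

// Build a table handle and walk every row; Iter exposes close(), which
// one_or_null() also uses, so calling it when done is assumed to be the cleanup path.
var it = (spacetime.Table2ORM("entity"){ .allocator = allocator }).iter();
while (try it.next()) |row| {
    std.log.debug("{any}", .{row});
}
it.close();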
@@ -385,8 +385,8 @@ pub fn Iter(struct_type: type) type {

 pub fn Column2ORM(comptime table_name: []const u8, comptime column_name: [:0]const u8) type {
     const table = blk: {
-        for(spacetime.tables) |table| {
-            if(std.mem.eql(u8, table_name, table.name.?)) {
+        for(spacetime.globalSpec.tables) |table| {
+            if(std.mem.eql(u8, table_name, table.name)) {
                 break :blk table;
             }
         }
@@ -420,7 +420,8 @@ pub fn Column2ORM(comptime table_name: []const u8, comptime column_name: [:0]con
             const temp_name: []const u8 = comptime table_name ++ "_" ++ column_name ++ "_idx_btree";
             var id = spacetime.IndexId{ ._inner = std.math.maxInt(u32)};
             const err = try spacetime.retMap(spacetime.index_id_from_name(temp_name.ptr, temp_name.len, &id));
-            std.log.debug("index_id_from_name({}): {x}", .{err, id._inner});
+            _ = err;
+            //std.log.debug("index_id_from_name({}): {x}", .{err, id._inner});

             const nVal: struct{ bounds: BoundVariant, val: wrapped_type } = .{
                 .bounds = .Inclusive,
@@ -454,7 +455,7 @@ pub fn Column2ORM(comptime table_name: []const u8, comptime column_name: [:0]con
             };
         }

-        pub fn find(self: @This(), val: wrapped_type) !?*struct_type {
+        pub fn find(self: @This(), val: wrapped_type) !?struct_type {
             var iter = try self.filter(val);
             return iter.one_or_null();
         }
@@ -490,29 +491,92 @@ pub fn Column2ORM(comptime table_name: []const u8, comptime column_name: [:0]con
                 &deleted_fields
             );
         }

+        pub fn update(self: @This(), val: struct_type) !void {
+            var table_id: TableId = undefined;
+            _ = spacetime.table_id_from_name(table_name.ptr, table_name.len, &table_id);
+
+            const temp_name: []const u8 = table_name ++ "_" ++ column_name ++ "_idx_btree";
+            var index_id = spacetime.IndexId{ ._inner = std.math.maxInt(u32) };
+            _ = spacetime.index_id_from_name(temp_name.ptr, temp_name.len, &index_id);
+
+            const size: usize = getStructSize(val);
+            const mem = try self.allocator.alloc(u8, size);
+            var offset_mem = mem;
+            defer self.allocator.free(mem);
+            getStructData(val, &offset_mem);
+
+            const data = mem[0..size];
+            var data_len = data.len;
+            _ = spacetime.datastore_update_bsatn(
+                table_id,
+                index_id,
+                data.ptr,
+                &data_len
+            );
+        }
     };
 }

+pub fn AutoIncStruct(base: type, autoincs: []const [:0]const u8) type {
+    return @Type(.{
+        .@"struct" = std.builtin.Type.Struct{
+            .backing_integer = null,
+            .decls = &.{},
+            .is_tuple = false,
+            .layout = .auto,
+            .fields = blk: {
+                var fields: []const std.builtin.Type.StructField = &.{};
+                for(autoincs) |autoinc| {
+                    const member_type = utils.getMemberDefaultType(base, autoinc);
+                    fields = fields ++ &[_]std.builtin.Type.StructField{
+                        std.builtin.Type.StructField{
+                            .is_comptime = false,
+                            .name = autoinc,
+                            .default_value_ptr = null,
+                            .type = member_type,
+                            .alignment = 0,
+                        }
+                    };
+                }
+
+                break :blk fields;
+            }
+        }
+    });
+}

 pub fn Table2ORM(comptime table_name: []const u8) type {
     const table = blk: {
-        for(spacetime.tables) |table| {
-            if(std.mem.eql(u8, table_name, table.name.?)) {
+        for(spacetime.globalSpec.tables) |table| {
+            if(std.mem.eql(u8, table_name, table.name)) {
                 break :blk table;
             }
         }
+        @compileError("Table " ++ table_name ++ " not found!");
     };
     const struct_type = table.schema;

+    const autoinc_return_type = AutoIncStruct(struct_type, table.attribs.autoinc orelse &.{});
+
     return struct {
         allocator: std.mem.Allocator,

-        pub fn insert(self: @This(), data: struct_type) !void {
+        pub fn insert(self: @This(), data: struct_type) !struct_type {
             var id: TableId = undefined;
             _ = spacetime.table_id_from_name(table_name.ptr, table_name.len, &id);
-            const raw_data = try StructSerializer(struct_type)(self.allocator, data);
+            var raw_data = try StructSerializer(struct_type)(self.allocator, data);
             defer self.allocator.free(raw_data);
             var raw_data_len: usize = raw_data.len;
             _ = spacetime.datastore_insert_bsatn(id, raw_data.ptr, &raw_data_len);
+
+            var data_copy = data;
+            const out = try StructDeserializer(autoinc_return_type)(self.allocator, &raw_data);
+            inline for(std.meta.fields(autoinc_return_type)) |field| {
+                @field(data_copy, field.name) = @field(out, field.name);
+            }
+
+            return data_copy;
         }

         pub fn iter(self: @This()) Iter(struct_type) {
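Taken together with the by-value iterators, the ORM layer can now round-trip a row: `insert` hands back a copy with the host-assigned autoinc columns filled in, `find` fetches a row through an indexed column, and `update` rewrites it via that column's BTree index. A hedged usage sketch; the table name, column name, schema fields, and allocator choice are all placeholders, and the `.allocator` field on the `Column2ORM` handle is inferred from its use of `self.allocator` above:

const std = @import("std");
const spacetime = @import("spacetime.zig");

const allocator = std.heap.wasm_allocator; // placeholder; use whatever the module already uses

fn touch_entity() !void {
    const entities = spacetime.Table2ORM("entity"){ .allocator = allocator };
    const by_id = spacetime.Column2ORM("entity", "entity_id"){ .allocator = allocator };

    // insert() now returns the row with autoinc columns populated by the host.
    const inserted = try entities.insert(.{ .entity_id = 0, .mass = 15 });

    // find() returns the row by value; change it and write it back through update().
    if (try by_id.find(inserted.entity_id)) |row| {
        var updated = row;
        updated.mass += 1;
        try by_id.update(updated);
    }

    std.log.debug("entity rows: {}", .{try entities.count()});
}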
@@ -531,6 +595,15 @@ pub fn Table2ORM(comptime table_name: []const u8) type {
                 .allocator = self.allocator,
             };
         }

+        pub fn count(self: @This()) !u64 {
+            _ = self;
+            var id: TableId = undefined;
+            _ = spacetime.table_id_from_name(table_name.ptr, table_name.len, &id);
+            var val: u64 = undefined;
+            _ = try spacetime.retMap(spacetime.datastore_table_row_count(id, &val));
+            return val;
+        }
     };
 }
@@ -549,6 +622,7 @@ pub const ReducerContext = struct {
     timestamp: spacetime.Timestamp,
     connection_id: spacetime.ConnectionId,
     db: Local,
+    rng: std.Random.DefaultPrng = std.Random.DefaultPrng.init(0),
 };

 pub const ReducerFn = fn(*ReducerContext) void;
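The new `rng` field gives every reducer a ready-made PRNG on its context, seeded with 0 by default so runs stay reproducible unless the module reseeds it. A possible call site, assuming the usual `std`/`spacetime.zig` imports; the reducer and the value range are illustrative only:

fn spawn_food(ctx: *spacetime.ReducerContext) void {
    // DefaultPrng.random() hands back a std.Random view over ctx.rng.
    const mass = ctx.rng.random().intRangeAtMost(u32, 2, 4);
    std.log.debug("spawning food with mass {}", .{mass});
}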