#include "buildingplan.h"
#include "buildingtypekey.h"
#include "defaultitemfilters.h"
#include "plannedbuilding.h"

#include "Debug.h"
#include "LuaTools.h"
#include "PluginManager.h"

#include "modules/World.h"

#include "df/item.h"
#include "df/job_item.h"
#include "df/world.h"

using std::map;
using std::string;
using std::unordered_map;
using std::vector;

using namespace DFHack;

DFHACK_PLUGIN("buildingplan");
DFHACK_PLUGIN_IS_ENABLED(is_enabled);

REQUIRE_GLOBAL(world);

namespace DFHack {
    DBG_DECLARE(buildingplan, status, DebugCategory::LINFO);
    DBG_DECLARE(buildingplan, cycle, DebugCategory::LINFO);
}

static const string CONFIG_KEY = string(plugin_name) + "/config";
const string FILTER_CONFIG_KEY = string(plugin_name) + "/filter";
const string BLD_CONFIG_KEY = string(plugin_name) + "/building";

int get_config_val(PersistentDataItem &c, int index) {
    if (!c.isValid())
        return -1;
    return c.ival(index);
}
bool get_config_bool(PersistentDataItem &c, int index) {
    return get_config_val(c, index) == 1;
}
void set_config_val(PersistentDataItem &c, int index, int value) {
    if (c.isValid())
        c.ival(index) = value;
}
void set_config_bool(PersistentDataItem &c, int index, bool value) {
    set_config_val(c, index, value ? 1 : 0);
}

static PersistentDataItem config;
// for use in counting available materials for the UI
static unordered_map<BuildingTypeKey, vector<const df::job_item *>, BuildingTypeKeyHash> job_item_cache;
static unordered_map<BuildingTypeKey, HeatSafety, BuildingTypeKeyHash> cur_heat_safety;
static unordered_map<BuildingTypeKey, DefaultItemFilters, BuildingTypeKeyHash> cur_item_filters;
// building id -> PlannedBuilding
static unordered_map<int32_t, PlannedBuilding> planned_buildings;
// vector id -> filter bucket -> queue of (building id, job_item index)
static Tasks tasks;

// note that this just removes the PlannedBuilding. the tasks will get dropped
// as we discover them in the tasks queues and they fail to be found in planned_buildings.
// this "lazy" task cleaning algorithm works because there is no way to
// re-register a building once it has been removed -- if it has been booted out of
// planned_buildings, then it has either been built or destroyed. therefore there is
// no chance of duplicate tasks getting added to the tasks queues.
void PlannedBuilding::remove(color_ostream &out) {
    DEBUG(status,out).print("removing persistent data for building %d\n", id);
    World::DeletePersistentData(bld_config);
    if (planned_buildings.count(id) > 0)
        planned_buildings.erase(id);
}

static const int32_t CYCLE_TICKS = 600; // twice per game day
static int32_t cycle_timestamp = 0; // world->frame_counter at last cycle

static bool call_buildingplan_lua(color_ostream *out, const char *fn_name,
        int nargs = 0, int nres = 0,
        Lua::LuaLambda && args_lambda = Lua::DEFAULT_LUA_LAMBDA,
        Lua::LuaLambda && res_lambda = Lua::DEFAULT_LUA_LAMBDA) {
    DEBUG(status).print("calling buildingplan lua function: '%s'\n", fn_name);

    CoreSuspender guard;

    auto L = Lua::Core::State;
    Lua::StackUnwinder top(L);

    if (!out)
        out = &Core::getInstance().getConsole();

    return Lua::CallLuaModuleFunction(*out, L, "plugins.buildingplan", fn_name,
            nargs, nres,
            std::forward<Lua::LuaLambda&&>(args_lambda),
            std::forward<Lua::LuaLambda&&>(res_lambda));
}

static int get_num_filters(color_ostream &out, BuildingTypeKey key) {
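    // ask the Lua layer how many item filters (job_items) the given building type requires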
    int num_filters = 0;
    if (!call_buildingplan_lua(&out, "get_num_filters", 3, 1,
            [&](lua_State *L) {
                Lua::Push(L, std::get<0>(key));
                Lua::Push(L, std::get<1>(key));
                Lua::Push(L, std::get<2>(key));
            },
            [&](lua_State *L) {
                num_filters = lua_tonumber(L, -1);
            })) {
        return 0;
    }
    return num_filters;
}

static const vector<const df::job_item *> & get_job_items(color_ostream &out, BuildingTypeKey key) {
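    // return the cached job_item templates for this building type, fetching them
    // from the Lua layer on first use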
    if (job_item_cache.count(key))
        return job_item_cache[key];
    const int num_filters = get_num_filters(out, key);
    auto &jitems = job_item_cache[key];
    for (int index = 0; index < num_filters; ++index) {
        bool failed = false;
        if (!call_buildingplan_lua(&out, "get_job_item", 4, 1,
                [&](lua_State *L) {
                    Lua::Push(L, std::get<0>(key));
                    Lua::Push(L, std::get<1>(key));
                    Lua::Push(L, std::get<2>(key));
                    Lua::Push(L, index+1);
                },
                [&](lua_State *L) {
                    df::job_item *jitem = Lua::GetDFObject<df::job_item>(L, -1);
                    DEBUG(status,out).print("retrieving job_item for (%d, %d, %d) index=%d: %p\n",
                        std::get<0>(key), std::get<1>(key), std::get<2>(key), index, jitem);
                    if (!jitem)
                        failed = true;
                    else
                        jitems.emplace_back(jitem);
                }) || failed) {
            jitems.clear();
            break;
        }
    }
    return jitems;
}

static HeatSafety get_heat_safety_filter(const BuildingTypeKey &key) {
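    // default to HEAT_SAFETY_ANY when no heat safety preference has been set for this building type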
    if (cur_heat_safety.count(key))
        return cur_heat_safety.at(key);
    return HEAT_SAFETY_ANY;
}

static DefaultItemFilters & get_item_filters(color_ostream &out, const BuildingTypeKey &key) {
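    // return the item filters for this building type, creating defaults from the
    // job_item templates on first access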
    if (cur_item_filters.count(key))
        return cur_item_filters.at(key);
    cur_item_filters.emplace(key, DefaultItemFilters(out, key, get_job_items(out, key)));
    return cur_item_filters.at(key);
}

static command_result do_command(color_ostream &out, vector<string> &parameters);
void buildingplan_cycle(color_ostream &out, Tasks &tasks,
        unordered_map<int32_t, PlannedBuilding> &planned_buildings);

static bool registerPlannedBuilding(color_ostream &out, PlannedBuilding & pb);

DFhackCExport command_result plugin_init(color_ostream &out, std::vector <PluginCommand> &commands) {
    DEBUG(status,out).print("initializing %s\n", plugin_name);

    // provide a configuration interface for the plugin
    commands.push_back(PluginCommand(
        plugin_name,
        "Plan building placement before you have materials.",
        do_command));

    return CR_OK;
}

DFhackCExport command_result plugin_enable(color_ostream &out, bool enable) {
    if (enable != is_enabled) {
        is_enabled = enable;
        DEBUG(status,out).print("%s from the API; persisting\n",
                is_enabled ? "enabled" : "disabled");
    } else {
        DEBUG(status,out).print("%s from the API, but already %s; no action\n",
                is_enabled ? "enabled" : "disabled",
                is_enabled ? "enabled" : "disabled");
    }
    return CR_OK;
}

DFhackCExport command_result plugin_shutdown (color_ostream &out) {
    DEBUG(status,out).print("shutting down %s\n", plugin_name);

    return CR_OK;
}

static void validate_config(color_ostream &out, bool verbose = false) {
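    // if every construction material category has been disabled, reset them to the defaults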
    if (get_config_bool(config, CONFIG_BLOCKS)
            || get_config_bool(config, CONFIG_BOULDERS)
            || get_config_bool(config, CONFIG_LOGS)
            || get_config_bool(config, CONFIG_BARS))
        return;

    if (verbose)
        out.printerr("all construction materials disabled; resetting config\n");

    set_config_bool(config, CONFIG_BLOCKS, true);
    set_config_bool(config, CONFIG_BOULDERS, true);
    set_config_bool(config, CONFIG_LOGS, true);
    set_config_bool(config, CONFIG_BARS, false);
}

static void clear_state(color_ostream &out) {
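    // drop all cached state; used when a world is unloaded and before persisted state is reloaded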
    call_buildingplan_lua(&out, "signal_reset");
    call_buildingplan_lua(&out, "reload_cursors");
    planned_buildings.clear();
    tasks.clear();
    cur_heat_safety.clear();
    cur_item_filters.clear();
    for (auto &entry : job_item_cache ) {
        for (auto &jitem : entry.second) {
            delete jitem;
        }
    }
    job_item_cache.clear();
}

DFhackCExport command_result plugin_load_data (color_ostream &out) {
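    // reload plugin config and rebuild planned building state from this save's persistent data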
    cycle_timestamp = 0;
    config = World::GetPersistentData(CONFIG_KEY);

    if (!config.isValid()) {
        DEBUG(status,out).print("no config found in this save; initializing\n");
        config = World::AddPersistentData(CONFIG_KEY);
    }
    validate_config(out);

    DEBUG(status,out).print("loading persisted state\n");
    clear_state(out);

    vector<PersistentDataItem> filter_configs;
    World::GetPersistentData(&filter_configs, FILTER_CONFIG_KEY);
    for (auto &cfg : filter_configs) {
        BuildingTypeKey key = DefaultItemFilters::getKey(cfg);
        cur_item_filters.emplace(key, DefaultItemFilters(out, cfg, get_job_items(out, key)));
    }

    vector<PersistentDataItem> building_configs;
    World::GetPersistentData(&building_configs, BLD_CONFIG_KEY);
    const size_t num_building_configs = building_configs.size();
    for (size_t idx = 0; idx < num_building_configs; ++idx) {
        PlannedBuilding pb(out, building_configs[idx]);
        df::building *bld = df::building::find(pb.id);
        if (!bld) {
            INFO(status).print("building %d no longer exists; skipping\n", pb.id);
            pb.remove(out);
            continue;
        }
        BuildingTypeKey key(bld->getType(), bld->getSubtype(), bld->getCustomType());
        if (pb.item_filters.size() != get_item_filters(out, key).getItemFilters().size()) {
            WARN(status).print("loaded state for building %d doesn't match world\n", pb.id);
            pb.remove(out);
            continue;
        }
        registerPlannedBuilding(out, pb);
    }

    return CR_OK;
}

DFhackCExport command_result plugin_onstatechange(color_ostream &out, state_change_event event) {
    if (event == SC_WORLD_UNLOADED) {
        DEBUG(status,out).print("world unloaded; clearing state for %s\n", plugin_name);
        clear_state(out);
    }
    return CR_OK;
}

static bool cycle_requested = false;

static void do_cycle(color_ostream &out) {
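    // run a material-matching pass (buildingplan_cycle) over the queued tasks, then
    // let the Lua layer know that state may have changed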
    // mark that we have recently run
    cycle_timestamp = world->frame_counter;
    cycle_requested = false;

    buildingplan_cycle(out, tasks, planned_buildings);
    call_buildingplan_lua(&out, "signal_reset");
}

DFhackCExport command_result plugin_onupdate(color_ostream &out) {
    if (!Core::getInstance().isWorldLoaded())
        return CR_OK;

    if (is_enabled &&
            (cycle_requested || world->frame_counter - cycle_timestamp >= CYCLE_TICKS))
        do_cycle(out);
    return CR_OK;
}

static command_result do_command(color_ostream &out, vector<string> &parameters) {
    CoreSuspender suspend;

    if (!Core::getInstance().isWorldLoaded()) {
        out.printerr("Cannot configure %s without a loaded world.\n", plugin_name);
        return CR_FAILURE;
    }

    bool show_help = false;
    if (!call_buildingplan_lua(&out, "parse_commandline", parameters.size(), 1,
            [&](lua_State *L) {
                for (const string &param : parameters)
                    Lua::Push(L, param);
            },
            [&](lua_State *L) {
                show_help = !lua_toboolean(L, -1);
            })) {
        return CR_FAILURE;
    }

    return show_help ? CR_WRONG_USAGE : CR_OK;
}

/////////////////////////////////////////////////////
// Lua API
// core will already be suspended when coming in through here
//

static string getBucket(const df::job_item & ji) {
    std::ostringstream ser;

    // pull out and serialize only known relevant fields. if we miss a few, then
    // the filter bucket will be slightly less specific than it could be, but
    // that's probably ok. we'll just end up bucketing slightly different items
    // together. this is only a problem if the different filter at the front of
    // the queue doesn't match any available items and blocks filters behind it
    // that could be matched.
    ser << ji.item_type << ':' << ji.item_subtype << ':' << ji.mat_type << ':'
        << ji.mat_index << ':' << ji.flags1.whole << ':' << ji.flags2.whole
        << ':' << ji.flags3.whole << ':' << ji.flags4 << ':' << ji.flags5 << ':'
        << ji.metal_ore << ':' << ji.has_tool_use;

    return ser.str();
}

// get a list of item vectors that we should search for matches
vector<df::job_item_vector_id> getVectorIds(color_ostream &out, const df::job_item *job_item) {
    std::vector<df::job_item_vector_id> ret;

    // if the filter already has the vector_id set to something specific, use it
    if (job_item->vector_id > df::job_item_vector_id::IN_PLAY)
    {
        DEBUG(status,out).print("using vector_id from job_item: %s\n",
            ENUM_KEY_STR(job_item_vector_id, job_item->vector_id).c_str());
        ret.push_back(job_item->vector_id);
        return ret;
    }

    // if the filter is for building material, refer to our global settings for
    // which vectors to search
    if (job_item->flags2.bits.building_material)
    {
        if (get_config_bool(config, CONFIG_BLOCKS))
            ret.push_back(df::job_item_vector_id::BLOCKS);
        if (get_config_bool(config, CONFIG_BOULDERS))
            ret.push_back(df::job_item_vector_id::BOULDER);
        if (get_config_bool(config, CONFIG_LOGS))
            ret.push_back(df::job_item_vector_id::WOOD);
        if (get_config_bool(config, CONFIG_BARS))
            ret.push_back(df::job_item_vector_id::BAR);
    }

    // fall back to IN_PLAY if no other vector was appropriate
    if (ret.empty())
        ret.push_back(df::job_item_vector_id::IN_PLAY);
    return ret;
}

static bool registerPlannedBuilding(color_ostream &out, PlannedBuilding & pb) {
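    // enqueue tasks for each of the building's job_items (or finalize the building
    // immediately if its items are already attached) and suspend its construction job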
    df::building * bld = pb.getBuildingIfValidOrRemoveIfNot(out);
    if (!bld)
        return false;

    if (bld->jobs.size() != 1) {
        DEBUG(status,out).print("unexpected number of jobs: want 1, got %zu\n", bld->jobs.size());
        return false;
    }

    auto job_items = bld->jobs[0]->job_items;
    if (isJobReady(out, job_items)) {
        // all items are already attached
        finalizeBuilding(out, bld);
        return true;
    }

    int num_job_items = job_items.size();
    int32_t id = bld->id;
    for (int job_item_idx = 0; job_item_idx < num_job_items; ++job_item_idx) {
        auto job_item = job_items[job_item_idx];
        auto bucket = getBucket(*job_item);

        // if there are multiple vector_ids, schedule duplicate tasks. after
        // the correct number of items are matched, the extras will get popped
        // as invalid
        for (auto vector_id : pb.vector_ids[job_item_idx]) {
            for (int item_num = 0; item_num < job_item->quantity; ++item_num) {
                tasks[vector_id][bucket].emplace_back(id, job_item_idx);
                DEBUG(status,out).print("added task: %s/%s/%d,%d; "
                    "%zu vector(s), %zu filter bucket(s), %zu task(s) in bucket",
                    ENUM_KEY_STR(job_item_vector_id, vector_id).c_str(),
                    bucket.c_str(), id, job_item_idx, tasks.size(),
                    tasks[vector_id].size(), tasks[vector_id][bucket].size());
            }
        }
    }

    // suspend jobs
    for (auto job : bld->jobs)
        job->flags.bits.suspend = true;

    // add the planned building to our register
    planned_buildings.emplace(bld->id, pb);

    return true;
}

static string get_desc_string(color_ostream &out, df::job_item *jitem,
        const vector<df::job_item_vector_id> &vec_ids) {
    vector<string> descs;
    for (auto &vec_id : vec_ids) {
        df::job_item jitem_copy = *jitem;
        jitem_copy.vector_id = vec_id;
        call_buildingplan_lua(&out, "get_desc", 1, 1,
            [&](lua_State *L) { Lua::Push(L, &jitem_copy); },
            [&](lua_State *L) {
                descs.emplace_back(lua_tostring(L, -1)); });
    }
    return join_strings(" or ", descs);
}

static void printStatus(color_ostream &out) {
    DEBUG(status,out).print("entering buildingplan_printStatus\n");
    out.print("buildingplan is %s\n\n", is_enabled ? "enabled" : "disabled");
    out.print("Current settings:\n");
    out.print(" use blocks: %s\n", get_config_bool(config, CONFIG_BLOCKS) ? "yes" : "no");
    out.print(" use boulders: %s\n", get_config_bool(config, CONFIG_BOULDERS) ? "yes" : "no");
    out.print(" use logs: %s\n", get_config_bool(config, CONFIG_LOGS) ? "yes" : "no");
    out.print(" use bars: %s\n", get_config_bool(config, CONFIG_BARS) ? "yes" : "no");
    out.print("\n");

    map<string, int32_t> counts;
    int32_t total = 0;
    for (auto &entry : planned_buildings) {
        auto &pb = entry.second;
        auto bld = pb.getBuildingIfValidOrRemoveIfNot(out);
        if (!bld || bld->jobs.size() != 1)
            continue;
        auto &job_items = bld->jobs[0]->job_items;
        if (job_items.size() != pb.vector_ids.size())
            continue;
        int job_item_idx = 0;
        for (auto &vec_ids : pb.vector_ids) {
            auto &jitem = job_items[job_item_idx++];
            int32_t quantity = jitem->quantity;
            if (quantity) {
                counts[get_desc_string(out, jitem, vec_ids)] += quantity;
                total += quantity;
            }
        }
    }

    if (planned_buildings.size()) {
        out.print("Waiting for %d item(s) to be produced for %zd building(s):\n",
                total, planned_buildings.size());
        for (auto &count : counts)
            out.print(" %3d %s%s\n", count.second, count.first.c_str(), count.second == 1 ? "" : "s");
    } else {
        out.print("Currently no planned buildings\n");
    }
    out.print("\n");
}

static bool setSetting(color_ostream &out, string name, bool value) {
    DEBUG(status,out).print("entering setSetting (%s -> %s)\n", name.c_str(), value ? "true" : "false");
    if (name == "blocks")
        set_config_bool(config, CONFIG_BLOCKS, value);
    else if (name == "boulders")
        set_config_bool(config, CONFIG_BOULDERS, value);
    else if (name == "logs")
        set_config_bool(config, CONFIG_LOGS, value);
    else if (name == "bars")
        set_config_bool(config, CONFIG_BARS, value);
    else {
        out.printerr("unrecognized setting: '%s'\n", name.c_str());
        return false;
    }

    validate_config(out, true);
    call_buildingplan_lua(&out, "signal_reset");
    return true;
}

static bool isPlannableBuilding(color_ostream &out, df::building_type type, int16_t subtype, int32_t custom) {
    DEBUG(status,out).print("entering isPlannableBuilding\n");
    return get_num_filters(out, BuildingTypeKey(type, subtype, custom)) >= 1;
}

static bool isPlannedBuilding(color_ostream &out, df::building *bld) {
    TRACE(status,out).print("entering isPlannedBuilding\n");
    return bld && planned_buildings.count(bld->id);
}

static bool addPlannedBuilding(color_ostream &out, df::building *bld) {
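    // create a PlannedBuilding for the given building, using the current filter
    // settings for its type, and register it for material matching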
    DEBUG(status,out).print("entering addPlannedBuilding\n");
    if (!bld || planned_buildings.count(bld->id)
            || !isPlannableBuilding(out, bld->getType(), bld->getSubtype(),
                bld->getCustomType()))
        return false;
    BuildingTypeKey key(bld->getType(), bld->getSubtype(), bld->getCustomType());
    PlannedBuilding pb(out, bld, get_heat_safety_filter(key), get_item_filters(out, key).getItemFilters());
    return registerPlannedBuilding(out, pb);
}

static void doCycle(color_ostream &out) {
    DEBUG(status,out).print("entering doCycle\n");
    do_cycle(out);
}

static void scheduleCycle(color_ostream &out) {
    DEBUG(status,out).print("entering scheduleCycle\n");
    cycle_requested = true;
}

static int scanAvailableItems(color_ostream &out, df::building_type type, int16_t subtype,
        int32_t custom, int index, vector<int> *item_ids = NULL) {
    DEBUG(status,out).print(
        "entering scanAvailableItems building_type=%d subtype=%d custom=%d index=%d\n",
        type, subtype, custom, index);
    BuildingTypeKey key(type, subtype, custom);
    HeatSafety heat = get_heat_safety_filter(key);
    auto &job_items = get_job_items(out, key);
    if (index < 0 || job_items.size() <= (size_t)index)
        return 0;
    auto &item_filters = get_item_filters(out, key).getItemFilters();

    auto &jitem = job_items[index];
    auto vector_ids = getVectorIds(out, jitem);

    int count = 0;
    for (auto vector_id : vector_ids) {
        auto other_id = ENUM_ATTR(job_item_vector_id, other, vector_id);
        for (auto &item : df::global::world->items.other[other_id]) {
            if (itemPassesScreen(item) && matchesFilters(item, jitem, heat, item_filters[index])) {
                if (item_ids)
                    item_ids->emplace_back(item->id);
                ++count;
            }
        }
    }

    DEBUG(status,out).print("found matches %d\n", count);
    return count;
}

static int getAvailableItems(lua_State *L) {
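    // Lua entry point: pushes a list of the ids of items that currently match the
    // filter at the given index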
    color_ostream *out = Lua::GetOutput(L);
    if (!out)
        out = &Core::getInstance().getConsole();
    df::building_type type = (df::building_type)luaL_checkint(L, 1);
    int16_t subtype = luaL_checkint(L, 2);
    int32_t custom = luaL_checkint(L, 3);
    int index = luaL_checkint(L, 4);
    DEBUG(status,*out).print(
        "entering getAvailableItems building_type=%d subtype=%d custom=%d index=%d\n",
        type, subtype, custom, index);
    vector<int> item_ids;
    scanAvailableItems(*out, type, subtype, custom, index, &item_ids);
    Lua::PushVector(L, item_ids);
    return 1;
}

static int countAvailableItems(color_ostream &out, df::building_type type, int16_t subtype, int32_t custom, int index) {
    DEBUG(status,out).print(
        "entering countAvailableItems building_type=%d subtype=%d custom=%d index=%d\n",
        type, subtype, custom, index);
    return scanAvailableItems(out, type, subtype, custom, index);
}

static bool hasFilter(color_ostream &out, df::building_type type, int16_t subtype, int32_t custom, int index) {
    TRACE(status,out).print("entering hasFilter\n");
    BuildingTypeKey key(type, subtype, custom);
    auto &filters = get_item_filters(out, key);
    for (auto &filter : filters.getItemFilters()) {
        if (!filter.isEmpty())
            return true;
    }
    return false;
}

static void setMaterialFilter(color_ostream &out, df::building_type type, int16_t subtype, int32_t custom, int index, string filter) {
    DEBUG(status,out).print("entering setMaterialFilter\n");
    call_buildingplan_lua(&out, "signal_reset");
}

static int getMaterialFilter(lua_State *L) {
    color_ostream *out = Lua::GetOutput(L);
    if (!out)
        out = &Core::getInstance().getConsole();
    df::building_type type = (df::building_type)luaL_checkint(L, 1);
    int16_t subtype = luaL_checkint(L, 2);
    int32_t custom = luaL_checkint(L, 3);
    int index = luaL_checkint(L, 4);
    DEBUG(status,*out).print(
        "entering getMaterialFilter building_type=%d subtype=%d custom=%d index=%d\n",
        type, subtype, custom, index);
    vector<string> filter;
    Lua::PushVector(L, filter);
    return 1;
}

static void setHeatSafetyFilter(color_ostream &out, df::building_type type, int16_t subtype, int32_t custom, int heat) {
    DEBUG(status,out).print("entering setHeatSafetyFilter\n");
    BuildingTypeKey key(type, subtype, custom);
    if (heat == HEAT_SAFETY_FIRE || heat == HEAT_SAFETY_MAGMA)
        cur_heat_safety[key] = (HeatSafety)heat;
    else
        cur_heat_safety.erase(key);
    call_buildingplan_lua(&out, "signal_reset");
}

static int getHeatSafetyFilter(lua_State *L) {
    color_ostream *out = Lua::GetOutput(L);
    if (!out)
        out = &Core::getInstance().getConsole();
    df::building_type type = (df::building_type)luaL_checkint(L, 1);
    int16_t subtype = luaL_checkint(L, 2);
    int32_t custom = luaL_checkint(L, 3);
    DEBUG(status,*out).print(
        "entering getHeatSafetyFilter building_type=%d subtype=%d custom=%d\n",
        type, subtype, custom);
    BuildingTypeKey key(type, subtype, custom);
    HeatSafety heat = get_heat_safety_filter(key);
    Lua::Push(L, heat);
    return 1;
}

static bool validate_pb(color_ostream &out, df::building *bld, int index) {
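    // verify that the building is still tracked as planned and that the job_item index is in range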
    if (!isPlannedBuilding(out, bld) || bld->jobs.size() != 1)
        return false;

    auto &job_items = bld->jobs[0]->job_items;
    if ((int)job_items.size() <= index)
        return false;

    PlannedBuilding &pb = planned_buildings.at(bld->id);
    if ((int)pb.vector_ids.size() <= index)
        return false;

    return true;
}

static string getDescString(color_ostream &out, df::building *bld, int index) {
    DEBUG(status,out).print("entering getDescString\n");
    if (!validate_pb(out, bld, index))
        return "";

    PlannedBuilding &pb = planned_buildings.at(bld->id);
    auto &jitem = bld->jobs[0]->job_items[index];
    return get_desc_string(out, jitem, pb.vector_ids[index]);
}

static int getQueuePosition(color_ostream &out, df::building *bld, int index) {
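    // report how far back in the task queues this job_item currently sits
    // (1-based; 0 means it is not queued or needs no more items)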
    DEBUG(status,out).print("entering getQueuePosition\n");
    if (!validate_pb(out, bld, index))
        return 0;

    PlannedBuilding &pb = planned_buildings.at(bld->id);
    auto &job_item = bld->jobs[0]->job_items[index];

    if (job_item->quantity <= 0)
        return 0;

    int min_pos = -1;
    for (auto &vec_id : pb.vector_ids[index]) {
        if (!tasks.count(vec_id))
            continue;
        auto &buckets = tasks.at(vec_id);
        string bucket_id = getBucket(*job_item);
        if (!buckets.count(bucket_id))
            continue;
        int bucket_pos = -1;
        for (auto &task : buckets.at(bucket_id)) {
            ++bucket_pos;
            if (bld->id == task.first && index == task.second)
                break;
        }
        if (bucket_pos++ >= 0)
            min_pos = min_pos < 0 ? bucket_pos : std::min(min_pos, bucket_pos);
    }

    return min_pos < 0 ? 0 : min_pos;
}

static void makeTopPriority(color_ostream &out, df::building *bld) {
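    // move this building's tasks to the front of every bucket queue they appear in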
    DEBUG(status,out).print("entering makeTopPriority\n");
    if (!validate_pb(out, bld, 0))
        return;

    PlannedBuilding &pb = planned_buildings.at(bld->id);
    auto &job_items = bld->jobs[0]->job_items;

    for (int index = 0; index < (int)job_items.size(); ++index) {
        for (auto &vec_id : pb.vector_ids[index]) {
            if (!tasks.count(vec_id))
                continue;
            auto &buckets = tasks.at(vec_id);
            string bucket_id = getBucket(*job_items[index]);
            if (!buckets.count(bucket_id))
                continue;
            auto &bucket = buckets.at(bucket_id);
            for (auto taskit = bucket.begin(); taskit != bucket.end(); ++taskit) {
                if (bld->id == taskit->first && index == taskit->second) {
                    auto task_bld_id = taskit->first;
                    auto task_job_item_idx = taskit->second;
                    bucket.erase(taskit);
                    bucket.emplace_front(task_bld_id, task_job_item_idx);
                    break;
                }
            }
        }
    }
}

DFHACK_PLUGIN_LUA_FUNCTIONS {
    DFHACK_LUA_FUNCTION(printStatus),
    DFHACK_LUA_FUNCTION(setSetting),
    DFHACK_LUA_FUNCTION(isPlannableBuilding),
    DFHACK_LUA_FUNCTION(isPlannedBuilding),
    DFHACK_LUA_FUNCTION(addPlannedBuilding),
    DFHACK_LUA_FUNCTION(doCycle),
    DFHACK_LUA_FUNCTION(scheduleCycle),
    DFHACK_LUA_FUNCTION(countAvailableItems),
    DFHACK_LUA_FUNCTION(hasFilter),
    DFHACK_LUA_FUNCTION(setMaterialFilter),
    DFHACK_LUA_FUNCTION(setHeatSafetyFilter),
    DFHACK_LUA_FUNCTION(getDescString),
    DFHACK_LUA_FUNCTION(getQueuePosition),
    DFHACK_LUA_FUNCTION(makeTopPriority),
    DFHACK_LUA_END
};

DFHACK_PLUGIN_LUA_COMMANDS {
    DFHACK_LUA_COMMAND(getAvailableItems),
    DFHACK_LUA_COMMAND(getMaterialFilter),
    DFHACK_LUA_COMMAND(getHeatSafetyFilter),
    DFHACK_LUA_END
};