workflow: Account for job postings correctly and fix existing issues

Without removing postings correctly, a job that workflow had suspended
multiple times could end up with multiple active postings, which caused
crashes when DF assigned more than one worker to the same job.

This adds a new command, fix-job-postings, which runs automatically
when a world is loaded and fixes:
- Multiple job postings that point to the same job
- Job postings that point to a job where posting_index == -1
  (i.e. jobs that should have no posting assigned)

Fixes #741
develop
lethosor 2015-11-25 20:33:13 -05:00
parent df4fb70be2
commit 2aeac718cc
5 changed files with 100 additions and 3 deletions

@@ -1464,6 +1464,7 @@ static const LuaWrapper::FunctionReg dfhack_job_module[] = {
    WRAPM(Job,isSuitableMaterial),
    WRAPM(Job,getName),
    WRAPM(Job,linkIntoWorld),
    WRAPM(Job,removePostings),
    WRAPN(is_equal, jobEqual),
    WRAPN(is_item_equal, jobItemEqual),
    { NULL, NULL }

@@ -72,6 +72,11 @@ namespace DFHack
    DFHACK_EXPORT bool linkIntoWorld(df::job *job, bool new_id = true);
    // Flag this job's posting as "dead" and set its posting_index to -1
    // If remove_all is true, flag all postings pointing to this job
    // Returns true if any postings were removed
    DFHACK_EXPORT bool removePostings(df::job *job, bool remove_all = false);
    // lists jobs with ids >= *id_var, and sets *id_var = *job_next_id;
    DFHACK_EXPORT bool listNewlyCreated(std::vector<df::job*> *pvec, int *id_var);
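For context, a minimal sketch of how plugin code might call the new function. suspend_and_unpost is a hypothetical helper, not part of this change; only Job::removePostings and its remove_all flag come from the declaration above.

#include "modules/Job.h"
#include "df/job.h"

// Hypothetical helper: suspend a job and kill any postings that point at it,
// so DF does not hand the job to another worker in the meantime.
static void suspend_and_unpost(df::job *job)
{
    job->flags.bits.suspend = true;
    // remove_all = true flags every posting for this job as dead
    DFHack::Job::removePostings(job, true);
}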

@@ -377,6 +377,34 @@ bool DFHack::Job::linkIntoWorld(df::job *job, bool new_id)
    }
}
bool DFHack::Job::removePostings(df::job *job, bool remove_all)
{
    using df::global::world;
    CHECK_NULL_POINTER(job);
    bool removed = false;
    if (!remove_all)
    {
        if (job->posting_index >= 0 && job->posting_index < world->job_postings.size())
        {
            world->job_postings[job->posting_index]->flags.bits.dead = true;
            removed = true;
        }
    }
    else
    {
        for (auto it = world->job_postings.begin(); it != world->job_postings.end(); ++it)
        {
            if ((**it).job == job)
            {
                (**it).flags.bits.dead = true;
                removed = true;
            }
        }
    }
    job->posting_index = -1;
    return removed;
}
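Flagging postings as dead rather than erasing them from world->job_postings presumably keeps the vector from shifting, which would invalidate the posting_index values stored on other jobs.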
bool DFHack::Job::listNewlyCreated(std::vector<df::job*> *pvec, int *id_var)
{
    using df::global::world;

@@ -1 +1 @@
-Subproject commit 378a580f7e333607a64a301d598e3885954a5d9d
+Subproject commit bb950d99da543f314f3dd9babf1446c8331ff99d

@@ -59,10 +59,13 @@ REQUIRE_GLOBAL(job_next_id);
/* Plugin registration */
static command_result workflow_cmd(color_ostream &out, vector <string> & parameters);
static command_result fix_job_postings_cmd(color_ostream &out, vector<string> &parameters);
static void init_state(color_ostream &out);
static void cleanup_state(color_ostream &out);
static int fix_job_postings(color_ostream *out = NULL, bool dry_run = false);
DFhackCExport command_result plugin_init (color_ostream &out, std::vector <PluginCommand> &commands)
{
    if (!world || !ui)
@@ -142,6 +145,13 @@ DFhackCExport command_result plugin_init (color_ostream &out, std::vector <PluginCommand> &commands)
            " Maintain 10-100 locally-made crafts of exceptional quality.\n"
            )
        );
        commands.push_back(PluginCommand(
            "fix-job-postings",
            "Fix broken job postings caused by certain versions of workflow",
            fix_job_postings_cmd, false,
            "fix-job-postings: Fix job postings\n"
            "fix-job-postings dry|[any argument]: Dry run only (avoid making changes)\n"
        ));
    }
    init_state(out);
@@ -162,6 +172,9 @@ DFhackCExport command_result plugin_onstatechange(color_ostream &out, state_chan
    case SC_MAP_LOADED:
        cleanup_state(out);
        init_state(out);
        out << "workflow: checking for existing job issues" << endl;
        if (fix_job_postings(&out))
            out << "workflow: fixed job issues" << endl;
        break;
    case SC_MAP_UNLOADED:
        cleanup_state(out);
@@ -173,6 +186,14 @@ DFhackCExport command_result plugin_onstatechange(color_ostream &out, state_chan
    return CR_OK;
}
command_result fix_job_postings_cmd(color_ostream &out, vector<string> &parameters)
{
    // Any argument (e.g. "dry") requests a dry run
    bool dry = parameters.size();
    int fixed = fix_job_postings(&out, dry);
    out << fixed << " job issue(s) " << (dry ? "detected but not fixed" : "fixed") << endl;
    return CR_OK;
}
/******************************
 * JOB STATE TRACKING STRUCTS *
 ******************************/
@@ -274,7 +295,7 @@ public:
    {
        if (world->frame_counter >= resume_time && actual_job->flags.bits.suspend)
        {
-            actual_job->unk_v4020_1 = -1;
+            Job::removePostings(actual_job, true);
            actual_job->flags.bits.suspend = false;
        }
    }
@@ -287,7 +308,7 @@ public:
        if (!actual_job->flags.bits.suspend)
        {
            actual_job->flags.bits.suspend = true;
-            actual_job->unk_v4020_1 = -1;
+            Job::removePostings(actual_job, true);
        }
    }
@@ -406,6 +427,41 @@ public:
    }
};
static int fix_job_postings (color_ostream *out, bool dry_run)
{
    int count = 0;
    df::job_list_link *link = &world->job_list;
    while (link)
    {
        df::job *job = link->item;
        if (job)
        {
            // A job should have at most one live posting, and only if its
            // posting_index has actually been assigned (i.e. is not -1)
            bool needs_posting = (job->posting_index >= 0);
            bool found_posting = false;
            for (auto it = world->job_postings.begin(); it != world->job_postings.end(); ++it)
            {
                df::world::T_job_postings *posting = *it;
                if (posting->job == job && !posting->flags.bits.dead)
                {
                    if (!found_posting && needs_posting)
                        found_posting = true;
                    else
                    {
                        ++count;
                        if (out)
                            *out << "Found extra job posting: Job " << job->id << ": "
                                << Job::getName(job) << endl;
                        if (!dry_run)
                            posting->flags.bits.dead = true;
                    }
                }
            }
        }
        link = link->next;
    }
    return count;
}
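To state the rule this loop enforces: a job may keep at most one live posting, and only if its posting_index is set (>= 0); every other live posting pointing at it is counted and, outside a dry run, flagged dead. A self-contained sketch of the same check, using simplified stand-in types (MockJob and MockPosting are illustrative assumptions, not the real df structures):

#include <iostream>
#include <vector>

struct MockJob     { int id; int posting_index; };
struct MockPosting { MockJob *job; bool dead; };

// Same rule as fix_job_postings(): keep at most one live posting per job,
// and only when the job expects one (posting_index >= 0).
static int fix_mock_postings(std::vector<MockJob> &jobs,
                             std::vector<MockPosting> &postings, bool dry_run)
{
    int count = 0;
    for (auto &job : jobs)
    {
        bool needs_posting = (job.posting_index >= 0);
        bool found_posting = false;
        for (auto &posting : postings)
        {
            if (posting.job != &job || posting.dead)
                continue;
            if (!found_posting && needs_posting)
                found_posting = true;   // the first live posting is legitimate
            else
            {
                ++count;                // duplicate or unexpected posting
                if (!dry_run)
                    posting.dead = true;
            }
        }
    }
    return count;
}

int main()
{
    std::vector<MockJob> jobs = { {1, 0}, {2, -1} };
    // Job 1 has one extra posting; job 2 should have none but has one.
    std::vector<MockPosting> postings = {
        { &jobs[0], false }, { &jobs[0], false }, { &jobs[1], false }
    };
    std::cout << fix_mock_postings(jobs, postings, true)
              << " job issue(s) detected\n";  // prints "2 job issue(s) detected"
    return 0;
}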
/******************************
 * GLOBAL VARIABLES *
 ******************************/
@@ -1603,6 +1659,12 @@ static int getCountHistory(lua_State *L)
    return 1;
}
// Lua: fixJobPostings(dry_run) -> number of job posting issues found
static int fixJobPostings(lua_State *L)
{
    bool dry = lua_toboolean(L, 1);
    lua_pushinteger(L, fix_job_postings(NULL, dry));
    return 1;
}
DFHACK_PLUGIN_LUA_FUNCTIONS {
    DFHACK_LUA_FUNCTION(deleteConstraint),
@@ -1614,6 +1676,7 @@
DFHACK_PLUGIN_LUA_COMMANDS {
    DFHACK_LUA_COMMAND(findConstraint),
    DFHACK_LUA_COMMAND(setConstraint),
    DFHACK_LUA_COMMAND(getCountHistory),
    DFHACK_LUA_COMMAND(fixJobPostings),
    DFHACK_LUA_END
};