Merge pull request #3017 from myk002/myk_buildingplan_backwards

[buildingplan] account for reverse ordering in job_items vector
Branch: develop
Myk 2023-03-13 14:01:58 -07:00 committed by GitHub
commit 20960c5d5e
5 changed files with 54 additions and 29 deletions
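The diffs below all revolve around one detail: Dwarf Fortress stores a building job's job_items vector in reverse order relative to buildingplan's item_filters and vector_ids, so filter index i corresponds to job_items index n-1-i. A minimal sketch of that mapping, for orientation only (the helper name is illustrative and not part of the patch, which inlines the arithmetic as rev_jitem_index, rev_index, and rev_filter_idx):

    #include <cstddef>

    // Illustrative only: filter-order index i <-> job_items index (n - 1 - i).
    static inline int reversed_index(int index, std::size_t num_job_items) {
        return static_cast<int>(num_job_items) - (index + 1);
    }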

@@ -36,6 +36,7 @@ changelog.txt uses a syntax similar to RST, with a few special sequences:
 ## New Plugins
 ## Fixes
+-@ `buildingplan`: items are now attached correctly to screw pumps and other multi-item buildings
 ## Misc Improvements
 -@ `buildingplan`: can now filter by clay materials

@@ -372,7 +372,7 @@ static command_result do_command(color_ostream &out, vector<string> &parameters)
 //
 static string getBucket(const df::job_item & ji, const PlannedBuilding & pb, int idx) {
-    if (idx < 0 || (size_t)idx < pb.item_filters.size())
+    if (idx < 0 || (size_t)idx >= pb.item_filters.size())
         return "INVALID";
     std::ostringstream ser;
@@ -428,7 +428,7 @@ vector<df::job_item_vector_id> getVectorIds(color_ostream &out, const df::job_it
         return ret;
     }
-    // if the filer is for building material, refer to our global settings for
+    // if the filter is for building material, refer to our global settings for
     // which vectors to search
     if (job_item->flags2.bits.building_material)
     {
@@ -465,10 +465,11 @@ static bool registerPlannedBuilding(color_ostream &out, PlannedBuilding & pb) {
        return true;
    }

-    int num_job_items = job_items.size();
+    int num_job_items = (int)job_items.size();
    int32_t id = bld->id;
    for (int job_item_idx = 0; job_item_idx < num_job_items; ++job_item_idx) {
-        auto job_item = job_items[job_item_idx];
+        int rev_jitem_index = num_job_items - (job_item_idx+1);
+        auto job_item = job_items[rev_jitem_index];
        auto bucket = getBucket(*job_item, pb, job_item_idx);
        // if there are multiple vector_ids, schedule duplicate tasks. after
@@ -476,11 +477,11 @@ static bool registerPlannedBuilding(color_ostream &out, PlannedBuilding & pb) {
        // as invalid
        for (auto vector_id : pb.vector_ids[job_item_idx]) {
            for (int item_num = 0; item_num < job_item->quantity; ++item_num) {
-                tasks[vector_id][bucket].emplace_back(id, job_item_idx);
+                tasks[vector_id][bucket].emplace_back(id, rev_jitem_index);
                DEBUG(status,out).print("added task: %s/%s/%d,%d; "
                    "%zu vector(s), %zu filter bucket(s), %zu task(s) in bucket",
                    ENUM_KEY_STR(job_item_vector_id, vector_id).c_str(),
-                    bucket.c_str(), id, job_item_idx, tasks.size(),
+                    bucket.c_str(), id, rev_jitem_index, tasks.size(),
                    tasks[vector_id].size(), tasks[vector_id][bucket].size());
            }
        }
@@ -531,17 +532,19 @@ static void printStatus(color_ostream &out) {
        if (!bld || bld->jobs.size() != 1)
            continue;
        auto &job_items = bld->jobs[0]->job_items;
-        if (job_items.size() != pb.vector_ids.size())
+        const size_t num_job_items = job_items.size();
+        if (num_job_items != pb.vector_ids.size())
            continue;
        ++bld_count;
        int job_item_idx = 0;
        for (auto &vec_ids : pb.vector_ids) {
-            auto &jitem = job_items[job_item_idx++];
+            auto &jitem = job_items[num_job_items - (job_item_idx+1)];
            int32_t quantity = jitem->quantity;
            if (quantity) {
                counts[get_desc_string(out, jitem, vec_ids)] += quantity;
                total += quantity;
            }
+            ++job_item_idx;
        }
    }
@@ -692,11 +695,9 @@ static bool hasFilter(color_ostream &out, df::building_type type, int16_t subtyp
    TRACE(status,out).print("entering hasFilter\n");
    BuildingTypeKey key(type, subtype, custom);
    auto &filters = get_item_filters(out, key);
-    for (auto &filter : filters.getItemFilters()) {
-        if (!filter.isEmpty())
-            return true;
-    }
-    return false;
+    if (index < 0 || filters.getItemFilters().size() <= (size_t)index)
+        return false;
+    return !filters.getItemFilters()[index].isEmpty();
 }

 static void clearFilter(color_ostream &out, df::building_type type, int16_t subtype, int32_t custom, int index) {
@@ -972,10 +973,13 @@ static bool validate_pb(color_ostream &out, df::building *bld, int index) {
 static string getDescString(color_ostream &out, df::building *bld, int index) {
    DEBUG(status,out).print("entering getDescString\n");
    if (!validate_pb(out, bld, index))
-        return 0;
+        return "INVALID";

    PlannedBuilding &pb = planned_buildings.at(bld->id);
-    auto &jitem = bld->jobs[0]->job_items[index];
+    auto & jitems = bld->jobs[0]->job_items;
+    const size_t num_job_items = jitems.size();
+    int rev_index = num_job_items - (index + 1);
+    auto &jitem = jitems[rev_index];

    return int_to_string(jitem->quantity) + " " + get_desc_string(out, jitem, pb.vector_ids[index]);
 }
@@ -985,7 +989,10 @@ static int getQueuePosition(color_ostream &out, df::building *bld, int index) {
        return 0;

    PlannedBuilding &pb = planned_buildings.at(bld->id);
-    auto &job_item = bld->jobs[0]->job_items[index];
+    auto & jitems = bld->jobs[0]->job_items;
+    const size_t num_job_items = jitems.size();
+    int rev_index = num_job_items - (index + 1);
+    auto &job_item = jitems[rev_index];
    if (job_item->quantity <= 0)
        return 0;
@@ -1001,7 +1008,7 @@ static int getQueuePosition(color_ostream &out, df::building *bld, int index) {
    int bucket_pos = -1;
    for (auto &task : buckets.at(bucket_id)) {
        ++bucket_pos;
-        if (bld->id == task.first && index == task.second)
+        if (bld->id == task.first && rev_index == task.second)
            break;
    }
    if (bucket_pos++ >= 0)
@@ -1018,18 +1025,20 @@ static void makeTopPriority(color_ostream &out, df::building *bld) {
    PlannedBuilding &pb = planned_buildings.at(bld->id);
    auto &job_items = bld->jobs[0]->job_items;
+    const int num_job_items = (int)job_items.size();

-    for (int index = 0; index < (int)job_items.size(); ++index) {
+    for (int index = 0; index < num_job_items; ++index) {
+        int rev_index = num_job_items - (index + 1);
        for (auto &vec_id : pb.vector_ids[index]) {
            if (!tasks.count(vec_id))
                continue;
            auto &buckets = tasks.at(vec_id);
-            string bucket_id = getBucket(*job_items[index], pb, index);
+            string bucket_id = getBucket(*job_items[rev_index], pb, index);
            if (!buckets.count(bucket_id))
                continue;
            auto &bucket = buckets.at(bucket_id);
            for (auto taskit = bucket.begin(); taskit != bucket.end(); ++taskit) {
-                if (bld->id == taskit->first && index == taskit->second) {
+                if (bld->id == taskit->first && rev_index == taskit->second) {
                    auto task_bld_id = taskit->first;
                    auto task_job_item_idx = taskit->second;
                    bucket.erase(taskit);

@@ -190,6 +190,8 @@ static void doVector(color_ostream &out, df::job_item_vector_id vector_id,
        if (!itemPassesScreen(item))
            continue;
        for (auto bucket_it = buckets.begin(); bucket_it != buckets.end(); ) {
+            TRACE(cycle,out).print("scanning bucket: %s/%s\n",
+                ENUM_KEY_STR(job_item_vector_id, vector_id).c_str(), bucket_it->first.c_str());
            auto & task_queue = bucket_it->second;
            auto bld = popInvalidTasks(out, task_queue, planned_buildings);
            if (!bld) {
@@ -203,11 +205,14 @@ static void doVector(color_ostream &out, df::job_item_vector_id vector_id,
            auto & task = task_queue.front();
            auto id = task.first;
            auto job = bld->jobs[0];
+            auto & jitems = job->job_items;
+            const size_t num_filters = jitems.size();
            auto filter_idx = task.second;
+            const int rev_filter_idx = num_filters - (filter_idx+1);
            auto &pb = planned_buildings.at(id);
            if (isAccessibleFrom(out, item, job)
-                    && matchesFilters(item, job->job_items[filter_idx], pb.heat_safety,
-                        pb.item_filters[filter_idx])
+                    && matchesFilters(item, jitems[filter_idx], pb.heat_safety,
+                        pb.item_filters[rev_filter_idx])
                    && Job::attachJobItem(job, item,
                        df::job_item_ref::Hauled, filter_idx))
            {
@@ -226,9 +231,9 @@ static void doVector(color_ostream &out, df::job_item_vector_id vector_id,
                // keep quantity aligned with the actual number of remaining
                // items so if buildingplan is turned off, the building will
                // be completed with the correct number of items.
-                --job->job_items[filter_idx]->quantity;
+                --jitems[filter_idx]->quantity;
                task_queue.pop_front();
                if (isJobReady(out, jitems)) {
                    finalizeBuilding(out, bld);
                    planned_buildings.at(id).remove(out);
                }
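Note the round trip in the cycle code above: registerPlannedBuilding now queues tasks keyed by the job_items index, and doVector applies the same n-1-i arithmetic to get back to the item_filters/vector_ids index. That works because the mapping is its own inverse. A small self-contained check of that property, using only standard C++ with illustrative names:

    #include <cassert>
    #include <cstddef>

    int main() {
        const std::size_t num_filters = 3;  // e.g. a multi-item building
        for (int filter_idx = 0; filter_idx < (int)num_filters; ++filter_idx) {
            // forward: filter order -> job_items order (as stored in tasks)
            int rev = (int)num_filters - (filter_idx + 1);
            // backward: job_items order -> filter order (for item_filters)
            int back = (int)num_filters - (rev + 1);
            assert(back == filter_idx);     // the mapping is an involution
        }
        return 0;
    }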

@@ -6,6 +6,7 @@
 namespace DFHack {
    DBG_EXTERN(buildingplan, status);
+    DBG_EXTERN(buildingplan, cycle);
 }

 using std::set;
using std::set; using std::set;
@@ -153,11 +154,19 @@ bool ItemFilter::matches(DFHack::MaterialInfo &material) const {
 }

 bool ItemFilter::matches(df::item *item) const {
-    if (item->getQuality() < min_quality || item->getQuality() > max_quality)
+    if (item->getQuality() < min_quality || item->getQuality() > max_quality) {
+        TRACE(cycle).print("item outside of quality range (%d not between %d and %d)\n",
+            item->getQuality(), min_quality, max_quality);
        return false;
+    }

-    if (decorated_only && !item->hasImprovements())
+    if (decorated_only && !item->hasImprovements()) {
+        TRACE(cycle).print("item needs improvements and doesn't have any\n");
        return false;
+    }
+
+    if (!mat_mask.whole)
+        return true;

    auto imattype = item->getActualMaterial();
    auto imatindex = item->getActualMaterialIndex();

@@ -26,9 +26,10 @@ static vector<vector<df::job_item_vector_id>> get_vector_ids(color_ostream &out,
    if (!bld || bld->jobs.size() != 1)
        return ret;

-    auto &job = bld->jobs[0];
-    for (auto &jitem : job->job_items) {
-        ret.emplace_back(getVectorIds(out, jitem));
+    auto &jitems = bld->jobs[0]->job_items;
+    int num_job_items = (int)jitems.size();
+    for (int jitem_idx = num_job_items - 1; jitem_idx >= 0; --jitem_idx) {
+        ret.emplace_back(getVectorIds(out, jitems[jitem_idx]));
    }
    return ret;
 }