@@ -12,12 +12,14 @@
 #include "modules/Gui.h"
 #include "modules/Job.h"
+#include "LuaTools.h"
 #include "uicommon.h"
 #include "buildingplan-planner.h"
 #include "buildingplan-lib.h"
 static const std::string planned_building_persistence_key_v1 = "buildingplan/constraints";
+static const std::string planned_building_persistence_key_v2 = "buildingplan/constraints2";
 /*
  * ItemFilter
@@ -37,24 +39,24 @@ void ItemFilter::clear()
     materials.clear();
 }
-bool ItemFilter::deserialize(PersistentDataItem &config)
+bool ItemFilter::deserialize(std::string ser)
 {
     clear();
     std::vector<std::string> tokens;
-    split_string(&tokens, config.val(), "/");
+    split_string(&tokens, ser, "/");
-    if (tokens.size() != 2)
+    if (tokens.size() != 5)
     {
-        debug("invalid ItemFilter serialization: '%s'", config.val().c_str());
+        debug("invalid ItemFilter serialization: '%s'", ser.c_str());
         return false;
     }
     if (!deserializeMaterialMask(tokens[0]) || !deserializeMaterials(tokens[1]))
         return false;
-    setMinQuality(config.ival(2) - 1);
+    setMinQuality(atoi(tokens[2].c_str()));
-    setMaxQuality(config.ival(4) - 1);
+    setMaxQuality(atoi(tokens[3].c_str()));
-    decorated_only = config.ival(3) - 1;
+    decorated_only = static_cast<bool>(atoi(tokens[4].c_str()));
     return true;
 }
@@ -91,7 +93,8 @@ bool ItemFilter::deserializeMaterials(std::string ser)
     return true;
 }
-void ItemFilter::serialize(PersistentDataItem &config) const
+// format: mat,mask,elements/materials,list/minq/maxq/decorated
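+// e.g. a filter allowing wood or stone, one specific material, quality range 0
+// (ordinary) through 5 (masterwork), and no decoration requirement might
+// serialize to something like "wood,stone/INORGANIC:MARBLE/0/5/0" (illustrative
+// values only; exact tokens come from bitfield_to_string() and MaterialInfo::getToken())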
std : : string ItemFilter : : serialize ( ) const
{
{
std : : ostringstream ser ;
std : : ostringstream ser ;
ser < < bitfield_to_string ( mat_mask , " , " ) < < " / " ;
ser < < bitfield_to_string ( mat_mask , " , " ) < < " / " ;
@ -101,10 +104,10 @@ void ItemFilter::serialize(PersistentDataItem &config) const
for ( size_t i = 1 ; i < materials . size ( ) ; + + i )
for ( size_t i = 1 ; i < materials . size ( ) ; + + i )
ser < < " , " < < materials [ i ] . getToken ( ) ;
ser < < " , " < < materials [ i ] . getToken ( ) ;
}
}
config. val ( ) = ser . str ( ) ;
ser < < " / " < < static_cast < int > ( min_quality ) ;
config. ival ( 2 ) = min_quality + 1 ;
ser < < " / " < < static_cast < int > ( max_quality ) ;
config. ival ( 4 ) = max_quality + 1 ;
ser < < " / " < < static_cast < int > ( decorated_only ) ;
config . ival ( 3 ) = static_cast < int > ( decorated_only ) + 1 ;
return ser . str ( ) ;
}
}
void ItemFilter : : clearMaterialMask ( )
void ItemFilter : : clearMaterialMask ( )
@@ -230,20 +233,59 @@ bool ItemFilter::matches(df::item *item) const
  * PlannedBuilding
  */
-static std::vector<ItemFilter> deserializeFilters(PersistentDataItem &config)
+// format: itemfilterser|itemfilterser|...
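+// e.g. two filters serialize as "<filter one>|<filter two>"; a single filter is
+// just that filter's own serialization with no separator (illustrative note)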
+static std::string serializeFilters(const std::vector<ItemFilter> &filters)
+{
+    std::ostringstream ser;
+    if (!filters.empty())
+    {
+        ser << filters[0].serialize();
+        for (size_t i = 1; i < filters.size(); ++i)
+            ser << "|" << filters[i].serialize();
+    }
+    return ser.str();
+}
+static std::vector<ItemFilter> deserializeFilters(std::string ser)
 {
-    // simplified implementation while we can assume there is only one filter
+    std::vector<std::string> isers;
+    split_string(&isers, ser, "|");
     std::vector<ItemFilter> ret;
-    ItemFilter itemFilter;
+    for (auto &iser : isers)
-    itemFilter.deserialize(config);
+    {
-    ret.push_back(itemFilter);
+        ItemFilter filter;
+        if (filter.deserialize(iser))
+            ret.push_back(filter);
+    }
     return ret;
 }
 static size_t getNumFilters(BuildingTypeKey key)
 {
-    // TODO: get num filters in Lua when we handle all building types
+    auto L = Lua::Core::State;
-    return 1;
+    color_ostream_proxy out(Core::getInstance().getConsole());
+    Lua::StackUnwinder top(L);
+    if (!lua_checkstack(L, 4) || !Lua::PushModulePublic(
+            out, L, "plugins.buildingplan", "get_num_filters"))
+    {
+        debug("failed to push the lua method on the stack");
+        return 0;
+    }
+    Lua::Push(L, std::get<0>(key));
+    Lua::Push(L, std::get<1>(key));
+    Lua::Push(L, std::get<2>(key));
+    if (!Lua::SafeCall(out, L, 3, 1))
+    {
+        debug("lua call failed");
+        return 0;
+    }
+    int num_filters = lua_tonumber(L, -1);
+    lua_pop(L, 1);
+    return num_filters;
+}
 PlannedBuilding::PlannedBuilding(df::building *building, const std::vector<ItemFilter> &filters)
@@ -251,92 +293,27 @@ PlannedBuilding::PlannedBuilding(df::building *building, const std::vector<ItemFilter> &filters)
       building_id(building->id),
       filters(filters)
 {
-    config = DFHack::World::AddPersistentData(planned_building_persistence_key_v1);
+    config = DFHack::World::AddPersistentData(planned_building_persistence_key_v2);
-    config.ival(1) = building_id;
+    config.ival(0) = building_id;
-    // assume all filter vectors are length 1 for now
+    config.val() = serializeFilters(filters);
-    filters[0].serialize(config);
 }
 PlannedBuilding::PlannedBuilding(PersistentDataItem &config)
     : config(config),
-      building(df::building::find(config.ival(1))),
+      building(df::building::find(config.ival(0))),
-      building_id(config.ival(1)),
+      building_id(config.ival(0)),
-      filters(deserializeFilters(config))
+      filters(deserializeFilters(config.val()))
-{ }
-bool PlannedBuilding::assignClosestItem(std::vector<df::item*> *items_vector)
 {
-    decltype(items_vector->begin()) closest_item;
+    if (building)
-    int32_t closest_distance = -1;
-    for (auto item_iter = items_vector->begin(); item_iter != items_vector->end(); item_iter++)
     {
-        auto item = *item_iter;
+        if (filters.size() !=
-        if (!filters[0].matches(item))
+                getNumFilters(toBuildingTypeKey(building)))
-            continue;
-        auto pos = item->pos;
-        auto distance = abs(pos.x - building->centerx) +
-            abs(pos.y - building->centery) +
-            abs(pos.z - building->z) * 50;
-        if (closest_distance > -1 && distance >= closest_distance)
-            continue;
-        closest_distance = distance;
-        closest_item = item_iter;
-    }
-    if (closest_distance > -1 && assignItem(*closest_item))
-    {
-        debug("Item assigned");
-        items_vector->erase(closest_item);
-        remove();
-        return true;
-    }
-    return false;
-}
-void delete_item_fn(df::job_item *x) { delete x; }
-bool PlannedBuilding::assignItem(df::item *item)
-{
-    auto ref = df::allocate<df::general_ref_building_holderst>();
-    if (!ref)
-    {
+        {
-        Core::printerr("Could not allocate general_ref_building_holderst\n");
+            debug("invalid ItemFilter vector serialization: '%s'",
-        return false;
+                  config.val().c_str());
+            building = NULL;
-    }
+        }
-    ref->building_id = building->id;
-    if (building->jobs.size() != 1)
-        return false;
-    auto job = building->jobs[0];
-    for_each_(job->job_items, delete_item_fn);
-    job->job_items.clear();
-    job->flags.bits.suspend = false;
-    bool rough = false;
-    Job::attachJobItem(job, item, df::job_item_ref::Hauled);
-    if (item->getType() == item_type::BOULDER)
-        rough = true;
-    building->mat_type = item->getMaterial();
-    building->mat_index = item->getMaterialIndex();
-    job->mat_type = building->mat_type;
-    job->mat_index = building->mat_index;
-    if (building->needsDesign())
-    {
-        auto act = (df::building_actual*)building;
-        act->design = new df::building_design();
-        act->design->flags.bits.rough = rough;
-    }
+    }
-    return true;
-}
+}
 // Ensure the building still exists and is in a valid state. It can disappear
@@ -361,6 +338,8 @@ df::building * PlannedBuilding::getBuilding()
 const std::vector<ItemFilter> & PlannedBuilding::getFilters() const
 {
+    // if we want to be able to dynamically change the filters, we'll need to
+    // re-bucket the tasks in Planner.
     return filters;
 }
@ -412,45 +391,112 @@ std::size_t BuildingTypeKeyHash::operator() (const BuildingTypeKey & key) const
* Planner
* Planner
*/
*/
void Planner : : initialize ( )
// convert v1 persistent data into v2 format
{
// we can remove this conversion code once v2 has been live for a while
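+// roughly: a v1 record keeps the building id in ival(1), the quality bounds and
+// decorated flag in ival(2)..ival(4), and "mask/materials" in val(); a v2 record
+// keeps the id in ival(0) and the whole filter list in val(), e.g. something
+// like "wood,stone/INORGANIC:MARBLE/0/5/0" for a single filter (illustrative values)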
-#define add_building_type(btype, itype) \
+void migrateV1ToV2()
-    item_for_building_type[df::building_type::btype] = df::item_type::itype; \
+{
-    available_item_vectors[df::item_type::itype] = std::vector<df::item*>(); \
+    std::vector<PersistentDataItem> configs;
-    is_relevant_item_type[df::item_type::itype] = true; \
+    DFHack::World::GetPersistentData(&configs, planned_building_persistence_key_v1);
+    if (configs.empty())
-    FOR_ENUM_ITEMS(item_type, it)
+        return;
-        is_relevant_item_type[it] = false;
+    debug("migrating %zu persisted configs to new format", configs.size());
-    add_building_type(Armorstand, ARMORSTAND);
+    for (auto config : configs)
-    add_building_type(Bed, BED);
+    {
-    add_building_type(Chair, CHAIR);
+        df::building *bld = df::building::find(config.ival(1));
-    add_building_type(Coffin, COFFIN);
+        if (!bld)
-    add_building_type(Door, DOOR);
+        {
-    add_building_type(Floodgate, FLOODGATE);
+            debug("building no longer exists; removing config");
-    add_building_type(Hatch, HATCH_COVER);
+            DFHack::World::DeletePersistentData(config);
-    add_building_type(GrateWall, GRATE);
+            continue;
-    add_building_type(GrateFloor, GRATE);
+        }
-    add_building_type(BarsVertical, BAR);
-    add_building_type(BarsFloor, BAR);
+        if (bld->getBuildStage() != 0 || bld->jobs.size() != 1
-    add_building_type(Cabinet, CABINET);
+            || bld->jobs[0]->job_items.size() != 1)
-    add_building_type(Box, BOX);
+        {
-    // skip kennels, farm plot
+            debug("building in invalid state; removing config");
-    add_building_type(Weaponrack, WEAPONRACK);
+            DFHack::World::DeletePersistentData(config);
-    add_building_type(Statue, STATUE);
+            continue;
-    add_building_type(Slab, SLAB);
+        }
-    add_building_type(Table, TABLE);
-    // skip roads ... furnaces
+        // fix up the building so we can set the material properties later
-    add_building_type(WindowGlass, WINDOW);
+        bld->mat_type = -1;
-    // skip gem window ... support
+        bld->mat_index = -1;
-    add_building_type(AnimalTrap, ANIMALTRAP);
-    add_building_type(Chain, CHAIN);
+        // the v1 filters are not initialized correctly and will match any item.
-    add_building_type(Cage, CAGE);
+        // we need to fix them up a bit.
-    // skip archery target
+        auto filter = bld->jobs[0]->job_items[0];
-    add_building_type(TractionBench, TRACTION_BENCH);
+        df::item_type type;
-    // skip nest box, hive (tools)
+        switch (bld->getType())
+        {
-#undef add_building_type
+        case df::building_type::Armorstand: type = df::item_type::ARMORSTAND; break;
+        case df::building_type::Bed: type = df::item_type::BED; break;
+        case df::building_type::Chair: type = df::item_type::CHAIR; break;
+        case df::building_type::Coffin: type = df::item_type::COFFIN; break;
+        case df::building_type::Door: type = df::item_type::DOOR; break;
+        case df::building_type::Floodgate: type = df::item_type::FLOODGATE; break;
+        case df::building_type::Hatch: type = df::item_type::HATCH_COVER; break;
+        case df::building_type::GrateWall: type = df::item_type::GRATE; break;
+        case df::building_type::GrateFloor: type = df::item_type::GRATE; break;
+        case df::building_type::BarsVertical: type = df::item_type::BAR; break;
+        case df::building_type::BarsFloor: type = df::item_type::BAR; break;
+        case df::building_type::Cabinet: type = df::item_type::CABINET; break;
+        case df::building_type::Box: type = df::item_type::BOX; break;
+        case df::building_type::Weaponrack: type = df::item_type::WEAPONRACK; break;
+        case df::building_type::Statue: type = df::item_type::STATUE; break;
+        case df::building_type::Slab: type = df::item_type::SLAB; break;
+        case df::building_type::Table: type = df::item_type::TABLE; break;
+        case df::building_type::WindowGlass: type = df::item_type::WINDOW; break;
+        case df::building_type::AnimalTrap: type = df::item_type::ANIMALTRAP; break;
+        case df::building_type::Chain: type = df::item_type::CHAIN; break;
+        case df::building_type::Cage: type = df::item_type::CAGE; break;
+        case df::building_type::TractionBench: type = df::item_type::TRACTION_BENCH; break;
+        default:
+            debug("building has unhandled type; removing config");
+            DFHack::World::DeletePersistentData(config);
+            continue;
+        }
+        filter->item_type = type;
+        filter->item_subtype = -1;
+        filter->mat_type = -1;
+        filter->mat_index = -1;
+        filter->flags1.whole = 0;
+        filter->flags2.whole = 0;
+        filter->flags2.bits.allow_artifact = true;
+        filter->flags3.whole = 0;
+        filter->flags4 = 0;
+        filter->flags5 = 0;
+        filter->metal_ore = -1;
+        filter->min_dimension = -1;
+        filter->has_tool_use = df::tool_uses::NONE;
+        filter->quantity = 1;
+        std::vector<std::string> tokens;
+        split_string(&tokens, config.val(), "/");
+        if (tokens.size() != 2)
+        {
+            debug("invalid v1 format; removing config");
+            DFHack::World::DeletePersistentData(config);
+            continue;
+        }
+        ItemFilter item_filter;
+        item_filter.deserializeMaterialMask(tokens[0]);
+        item_filter.deserializeMaterials(tokens[1]);
+        item_filter.setMinQuality(config.ival(2) - 1);
+        item_filter.setMaxQuality(config.ival(4) - 1);
+        if (config.ival(3) - 1)
+            item_filter.toggleDecoratedOnly();
+        // create the v2 record
+        std::vector<ItemFilter> item_filters;
+        item_filters.push_back(item_filter);
+        PlannedBuilding pb(bld, item_filters);
+        // remove the v1 record
+        DFHack::World::DeletePersistentData(config);
+        debug("v1 record successfully migrated");
+    }
 }
 void Planner::reset()
@@ -458,9 +504,12 @@ void Planner::reset()
     debug("resetting Planner state");
     default_item_filters.clear();
     planned_buildings.clear();
+    tasks.clear();
+    migrateV1ToV2();
     std::vector<PersistentDataItem> items;
-    DFHack::World::GetPersistentData(&items, planned_building_persistence_key_v1);
+    DFHack::World::GetPersistentData(&items, planned_building_persistence_key_v2);
     debug("found data for %zu planned buildings", items.size());
     for (auto i = items.begin(); i != items.end(); i++)
@@ -472,7 +521,8 @@ void Planner::reset()
             continue;
         }
-        planned_buildings.push_back(pb);
+        if (registerTasks(pb))
+            planned_buildings.insert(std::make_pair(pb.getBuilding()->id, pb));
     }
 }
@@ -487,19 +537,19 @@ void Planner::addPlannedBuilding(df::building *bld)
     }
     // protect against multiple registrations
-    if (getPlannedBuilding(bld))
+    if (planned_buildings.count(bld->id) != 0)
     {
-        debug("building already registered");
+        debug("failed to add building: already registered");
         return;
     }
     PlannedBuilding pb(bld, item_filters);
-    if (pb.isValid())
+    if (pb.isValid() && registerTasks(pb))
     {
         for (auto job : bld->jobs)
             job->flags.bits.suspend = true;
-        planned_buildings.push_back(pb);
+        planned_buildings.insert(std::make_pair(bld->id, pb));
     }
     else
     {
@@ -507,19 +557,107 @@ void Planner::addPlannedBuilding(df::building *bld)
     }
 }
-PlannedBuilding * Planner::getPlannedBuilding(df::building *bld)
+static std::string getBucket(const df::job_item & ji,
+                             const std::vector<ItemFilter> & item_filters)
+{
+    std::ostringstream ser;
+    // pull out and serialize only known relevant fields. if we miss a few, then
+    // the filter bucket will be slightly less specific than it could be, but
+    // that's probably ok. we'll just end up bucketing slightly different items
+    // together. this is only a problem if the different filter at the front of
+    // the queue doesn't match any available items and blocks filters behind it
+    // that could be matched.
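+    // the resulting key is just the job_item fields joined with ':' followed by
+    // the serialized filters, e.g. something like
+    // "<item_type>:<subtype>:<mat_type>:<mat_index>:<flags>:...:<filter ser>"
+    // (illustrative; see the fields streamed below)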
+    ser << ji.item_type << ':' << ji.item_subtype << ':' << ji.mat_type << ':'
+        << ji.mat_index << ':' << ji.flags1.whole << ':' << ji.flags2.whole
+        << ':' << ji.flags3.whole << ':' << ji.flags4 << ':' << ji.flags5 << ':'
+        << ji.metal_ore << ':' << ji.has_tool_use;
+    for (auto & item_filter : item_filters)
+    {
+        ser << ':' << item_filter.serialize();
+    }
+    return ser.str();
+}
+bool Planner::registerTasks(PlannedBuilding & pb)
 {
-    for (auto &pb : planned_buildings)
+    df::building * bld = pb.getBuilding();
+    if (bld->jobs.size() != 1)
+    {
+        debug("unexpected number of jobs: want 1, got %zu", bld->jobs.size());
+        return false;
+    }
+    auto job_items = bld->jobs[0]->job_items;
+    int num_job_items = job_items.size();
+    if (num_job_items < 1)
+    {
+        debug("unexpected number of job items: want >0, got %d", num_job_items);
+        return false;
+    }
+    for (int job_item_idx = 0; job_item_idx < num_job_items; ++job_item_idx)
+    {
+        auto vector_id = df::job_item_vector_id::IN_PLAY;
+        auto job_item = job_items[job_item_idx];
+        if (job_item->vector_id)
+            vector_id = job_item->vector_id;
+        auto bucket = getBucket(*job_item, pb.getFilters());
+        for (int item_num = 0; item_num < job_item->quantity; ++item_num)
         {
-        if (pb.getBuilding() == bld)
+            int32_t id = bld->id;
-            return &pb;
+            tasks[vector_id][bucket].push(std::make_pair(id, job_item_idx));
+            debug("added task: %s/%s/%d,%d; "
+                  "%zu vectors, %zu buckets, %zu tasks in bucket",
+                  ENUM_KEY_STR(job_item_vector_id, vector_id).c_str(),
+                  bucket.c_str(), id, job_item_idx, tasks.size(),
+                  tasks[vector_id].size(), tasks[vector_id][bucket].size());
         }
+    }
+    return true;
+}
+PlannedBuilding * Planner::getPlannedBuilding(df::building *bld)
+{
+    if (!bld || planned_buildings.count(bld->id) == 0)
         return NULL;
+    return &planned_buildings.at(bld->id);
 }
 bool Planner::isPlannableBuilding(BuildingTypeKey key)
 {
-    return item_for_building_type.count(std::get<0>(key)) > 0;
+    if (getNumFilters(key) == 0)
+        return false;
+    // restrict supported types to be the same as the previous implementation
+    switch (std::get<0>(key))
+    {
+    case df::enums::building_type::Armorstand:
+    case df::enums::building_type::Bed:
+    case df::enums::building_type::Chair:
+    case df::enums::building_type::Coffin:
+    case df::enums::building_type::Door:
+    case df::enums::building_type::Floodgate:
+    case df::enums::building_type::Hatch:
+    case df::enums::building_type::GrateWall:
+    case df::enums::building_type::GrateFloor:
+    case df::enums::building_type::BarsVertical:
+    case df::enums::building_type::BarsFloor:
+    case df::enums::building_type::Cabinet:
+    case df::enums::building_type::Box:
+    case df::enums::building_type::Weaponrack:
+    case df::enums::building_type::Statue:
+    case df::enums::building_type::Slab:
+    case df::enums::building_type::Table:
+    case df::enums::building_type::WindowGlass:
+    case df::enums::building_type::AnimalTrap:
+    case df::enums::building_type::Chain:
+    case df::enums::building_type::Cage:
+    case df::enums::building_type::TractionBench:
+        return true;
+    default:
+        return false;
+    }
 }
 Planner::ItemFiltersWrapper Planner::getItemFilters(BuildingTypeKey key)
@@ -535,84 +673,250 @@ Planner::ItemFiltersWrapper Planner::getItemFilters(BuildingTypeKey key)
     return ItemFiltersWrapper(default_item_filters[key]);
 }
-void Planner::doCycle()
+// precompute a bitmask with bad item flags
+struct BadFlags
 {
-    debug("Running Cycle");
+    uint32_t whole;
-    if (planned_buildings.size() == 0)
-        return;
-    debug("Planned count: %zu", planned_buildings.size());
-    gather_available_items();
+    BadFlags()
-    for (auto building_iter = planned_buildings.begin(); building_iter != planned_buildings.end();)
-    {
-        if (building_iter->isValid())
-        {
+    {
-            auto type = building_iter->getBuilding()->getType();
+        df::item_flags flags;
-            debug("Trying to allocate %s", enum_item_key_str(type));
+        #define F(x) flags.bits.x = true;
+        F(dump); F(forbid); F(garbage_collect);
+        F(hostile); F(on_fire); F(rotten); F(trader);
+        F(in_building); F(construction); F(in_job);
+        F(owned); F(in_chest); F(removed); F(encased);
+        #undef F
+        whole = flags.whole;
+    }
+};
+static bool itemPassesScreen(df::item * item)
+{
+    static BadFlags bad_flags;
+    return !(item->flags.whole & bad_flags.whole)
+        && !item->isAssignedToStockpile()
+        // TODO: make this configurable
+        && !(item->getType() == df::item_type::BOX && item->isBag());
+}
+static bool matchesFilters(df::item * item,
+                           df::job_item * job_item,
+                           const ItemFilter & item_filter)
+{
+    if (job_item->item_type > -1 && job_item->item_type != item->getType())
+        return false;
+    if (job_item->item_subtype > -1 &&
+        job_item->item_subtype != item->getSubtype())
+        return false;
-            auto required_item_type = item_for_building_type[type];
+    if (job_item->has_tool_use > df::tool_uses::NONE
-            auto items_vector = &available_item_vectors[required_item_type];
+        && !item->hasToolUse(job_item->has_tool_use))
-            if (items_vector->size() == 0 || !building_iter->assignClosestItem(items_vector))
+        return false;
+    return DFHack::Job::isSuitableItem(
+            job_item, item->getType(), item->getSubtype())
+        && DFHack::Job::isSuitableMaterial(
+            job_item, item->getMaterial(), item->getMaterialIndex())
+        && item_filter.matches(item);
+}
+// note that this just removes the PlannedBuilding. the tasks will get dropped
+// as we discover them in the tasks queues and they fail their isValid() check.
+// this "lazy" task cleaning algorithm works because there is no way to
+// re-register a building once it has been removed -- if it fails isValid()
+// then it has either been built or destroyed. therefore there is no chance of
+// duplicate tasks getting added to the tasks queues.
+void Planner::unregisterBuilding(int32_t id)
+{
+    if (planned_buildings.count(id) > 0)
-            {
+    {
-                debug("Unable to allocate an item");
+        planned_buildings.at(id).remove();
-                ++building_iter;
+        planned_buildings.erase(id);
-                continue;
-            }
-        }
-        debug("Removing building plan");
-        building_iter->remove();
-        building_iter = planned_buildings.erase(building_iter);
-    }
+    }
-}
+}
-void Planner::gather_available_items()
+static bool isJobReady(df::job * job)
 {
-    debug("Gather available items");
+    int needed_items = 0;
-    for (auto iter = available_item_vectors.begin(); iter != available_item_vectors.end(); iter++)
+    for (auto job_item : job->job_items) { needed_items += job_item->quantity; }
+    if (needed_items)
     {
-        iter->second.clear();
+        debug("building needs %d more item(s)", needed_items);
+        return false;
     }
+    return true;
+}
-    // Precompute a bitmask with the bad flags
+static bool job_item_idx_lt(df::job_item_ref *a, df::job_item_ref *b)
-    df::item_flags bad_flags;
+{
-    bad_flags.whole = 0;
+    // we want the items in the opposite order of the filters
+    return a->job_item_idx > b->job_item_idx;
+}
-#define F(x) bad_flags.bits.x = true;
+// this function does not remove the job_items since their quantity fields are
-    F(dump); F(forbid); F(garbage_collect);
+// now all at 0, so there is no risk of having extra items attached. we don't
-    F(hostile); F(on_fire); F(rotten); F(trader);
+// remove them to keep the "finalize with buildingplan active" path as similar
-    F(in_building); F(construction); F(artifact);
+// as possible to the "finalize with buildingplan disabled" path.
-#undef F
+static void finalizeBuilding(df::building * bld)
+{
+    debug("finalizing building %d", bld->id);
+    auto job = bld->jobs[0];
-    std::vector<df::item*> &items = df::global::world->items.other[df::items_other_id::IN_PLAY];
+    // sort the items so they get added to the structure in the correct order
+    std::sort(job->items.begin(), job->items.end(), job_item_idx_lt);
-    for (size_t i = 0; i < items.size(); i++)
+    // derive the material properties of the building and job from the first
+    // applicable item, though if any boulders are involved, it makes the whole
+    // structure "rough".
+    bool rough = false;
+    for (auto attached_item : job->items)
+    {
+        df::item *item = attached_item->item;
+        rough = rough || item->getType() == item_type::BOULDER;
+        if (bld->mat_type == -1)
+        {
+            bld->mat_type = item->getMaterial();
+            job->mat_type = bld->mat_type;
+        }
+        if (bld->mat_index == -1)
-    {
+        {
-        df::item *item = items[i];
+            bld->mat_index = item->getMaterialIndex();
+            job->mat_index = bld->mat_index;
+        }
+    }
-        if (item->flags.whole & bad_flags.whole)
+    if (bld->needsDesign())
-            continue;
+    {
+        auto act = (df::building_actual *)bld;
+        if (!act->design)
+            act->design = new df::building_design();
+        act->design->flags.bits.rough = rough;
+    }
-        df::item_type itype = item->getType();
+    // we're good to go!
-        if (!is_relevant_item_type[itype])
+    job->flags.bits.suspend = false;
-            continue;
+    Job::checkBuildingsNow();
+}
-        if (itype == df::item_type::BOX && item->isBag())
+void Planner::popInvalidTasks(std::queue<std::pair<int32_t, int>> & task_queue)
-            continue; //Skip bags
+{
+    while (!task_queue.empty())
+    {
+        auto & task = task_queue.front();
+        auto id = task.first;
+        if (planned_buildings.count(id) > 0)
+        {
+            PlannedBuilding & pb = planned_buildings.at(id);
+            if (pb.isValid() &&
+                pb.getBuilding()->jobs[0]->job_items[task.second]->quantity)
+            {
+                break;
+            }
+        }
+        debug("discarding invalid task: bld=%d, job_item_idx=%d",
+              id, task.second);
+        task_queue.pop();
+        unregisterBuilding(id);
+    }
+}
-        if (item->flags.bits.artifact)
+void Planner::doCycle()
+{
+    debug("running cycle for %zu registered buildings",
+          planned_buildings.size());
+    for (auto it = tasks.begin(); it != tasks.end();)
+    {
+        auto & buckets = it->second;
+        auto other_id = ENUM_ATTR(job_item_vector_id, other, it->first);
+        auto item_vector = df::global::world->items.other[other_id];
+        debug("matching %zu items in vector %s against %zu buckets",
+              item_vector.size(),
+              ENUM_KEY_STR(job_item_vector_id, it->first).c_str(),
+              buckets.size());
+        for (auto item_it = item_vector.rbegin();
+             item_it != item_vector.rend();
+             ++item_it)
+        {
+            auto item = *item_it;
+            if (!itemPassesScreen(item))
-            continue;
+                continue;
+            for (auto bucket_it = buckets.begin(); bucket_it != buckets.end();)
-        if (item->flags.bits.in_job ||
-            item->isAssignedToStockpile() ||
-            item->flags.bits.owned ||
-            item->flags.bits.in_chest)
-        {
+            {
+                auto & task_queue = bucket_it->second;
+                popInvalidTasks(task_queue);
+                if (task_queue.empty())
+                {
+                    debug("removing empty bucket: %s/%s; %zu buckets left",
+                          ENUM_KEY_STR(job_item_vector_id, it->first).c_str(),
+                          bucket_it->first.c_str(),
+                          buckets.size() - 1);
+                    bucket_it = buckets.erase(bucket_it);
-            continue;
+                    continue;
-        }
+                }
+                auto & task = task_queue.front();
-        available_item_vectors[itype].push_back(item);
+                auto id = task.first;
+                auto & pb = planned_buildings.at(id);
+                auto building = pb.getBuilding();
+                auto job = building->jobs[0];
+                auto filter_idx = task.second;
+                if (matchesFilters(item, job->job_items[filter_idx],
+                        pb.getFilters()[filter_idx])
+                    && DFHack::Job::attachJobItem(job, item,
+                            df::job_item_ref::Hauled, filter_idx))
+                {
+                    MaterialInfo material;
+                    material.decode(item);
+                    ItemTypeInfo item_type;
+                    item_type.decode(item);
+                    debug("attached %s %s to filter %d for %s(%d): %s/%s",
+                          material.toString().c_str(),
+                          item_type.toString().c_str(),
+                          filter_idx,
+                          ENUM_KEY_STR(building_type, building->getType()).c_str(),
+                          id,
+                          ENUM_KEY_STR(job_item_vector_id, it->first).c_str(),
+                          bucket_it->first.c_str());
+                    // keep quantity aligned with the actual number of remaining
+                    // items so if buildingplan is turned off, the building will
+                    // be completed with the correct number of items.
+                    --job->job_items[filter_idx]->quantity;
+                    task_queue.pop();
+                    if (isJobReady(job))
+                    {
+                        finalizeBuilding(building);
+                        unregisterBuilding(id);
+                    }
+                    if (task_queue.empty())
+                    {
+                        debug(
+                            "removing empty item bucket: %s/%s; %zu remaining",
+                            ENUM_KEY_STR(job_item_vector_id, it->first).c_str(),
+                            bucket_it->first.c_str(),
+                            buckets.size() - 1);
+                        buckets.erase(bucket_it);
+                    }
+                    // we found a home for this item; no need to look further
+                    break;
+                }
+                ++bucket_it;
+            }
+            if (buckets.empty())
+                break;
+        }
+        if (buckets.empty())
+        {
+            debug("removing empty vector: %s; %zu vectors left",
+                  ENUM_KEY_STR(job_item_vector_id, it->first).c_str(),
+                  tasks.size() - 1);
+            it = tasks.erase(it);
+        }
+        else
+            ++it;
     }
+    debug("cycle done; %zu registered buildings left",
+          planned_buildings.size());
 }
 Planner planner;