Merge remote-tracking branch 'upstream/develop' into blueprint_in_blueprints

develop
Myk Taylor 2020-07-16 09:24:52 -07:00
commit bb91fdcf8c
17 changed files with 163 additions and 26 deletions

1
.gitignore vendored

@ -17,6 +17,7 @@ build/VC2010
# Sphinx generated documentation # Sphinx generated documentation
docs/_* docs/_*
docs/html/ docs/html/
docs/pdf/
# in-place build # in-place build
build/Makefile build/Makefile

@ -359,3 +359,5 @@ latex_documents = [
(master_doc, 'DFHack.tex', 'DFHack Documentation', (master_doc, 'DFHack.tex', 'DFHack Documentation',
'The DFHack Team', 'manual'), 'The DFHack Team', 'manual'),
] ]
latex_toplevel_sectioning = 'part'

@ -236,9 +236,9 @@ Running Sphinx manually
----------------------- -----------------------
You can also build the documentation without going through CMake, which may be You can also build the documentation without going through CMake, which may be
faster. There is a ``docs/build.sh`` script available for Linux and macOS that faster. There is a ``docs/build.sh`` script provided for Linux and macOS that
will run essentially the same command that CMake runs - see the script for will run essentially the same command that CMake runs - see the script for
options. additional options.
To build the documentation with default options, run the following command from To build the documentation with default options, run the following command from
the root DFHack folder:: the root DFHack folder::
@ -248,6 +248,19 @@ the root DFHack folder::
Sphinx has many options to enable clean builds, parallel builds, logging, and Sphinx has many options to enable clean builds, parallel builds, logging, and
more - run ``sphinx-build --help`` for details. more - run ``sphinx-build --help`` for details.
Building a PDF version
----------------------
ReadTheDocs automatically builds a PDF version of the documentation (available
under the "Downloads" section when clicking on the release selector). If you
want to build a PDF version locally, you will need ``pdflatex``, which is part
of a TeX distribution. The following command will then build a PDF, located in
``docs/pdf/latex/DFHack.pdf``, with default options::
sphinx-build -M latexpdf . ./docs/pdf
There is a ``docs/build-pdf.sh`` script provided for Linux and macOS that runs
this command for convenience - see the script for additional options.
.. _build-changelog: .. _build-changelog:

@ -158,7 +158,9 @@ that don't fit any of the other reference types. Such
references can only appear as a value of a pointer field, references can only appear as a value of a pointer field,
or as a result of calling the ``_field()`` method. or as a result of calling the ``_field()`` method.
They behave as structs with one field ``value`` of the right type. They behave as structs with a ``value`` field of the right type. If the
object's XML definition has a ``ref-target`` attribute, they will also have
a read-only ``ref_target`` field set to the corresponding type object.
To make working with numeric buffers easier, they also allow To make working with numeric buffers easier, they also allow
numeric indices. Note that other than excluding negative values numeric indices. Note that other than excluding negative values

@ -0,0 +1,23 @@
#!/bin/sh

# Builds a PDF version of the DFHack documentation via Sphinx's latexpdf
# builder (requires pdflatex from a TeX distribution).
#
# usage:
#   ./build-pdf.sh
#   SPHINX=/path/to/sphinx-build ./build-pdf.sh
#   JOBS=3 ./build-pdf.sh ...
# all command-line arguments are passed directly to sphinx-build - run
# ``sphinx-build --help`` for a list, or see
# https://www.sphinx-doc.org/en/master/man/sphinx-build.html

# Run from the repository root so the relative doc paths below resolve.
# Quote the command substitution so paths containing spaces work, and
# bail out if either cd fails rather than building in the wrong place.
cd "$(dirname "$0")" || exit 1
cd .. || exit 1

# Allow overriding the sphinx-build executable and the parallel job count
# through environment variables, with sensible defaults.
sphinx=${SPHINX:-sphinx-build}
JOBS=${JOBS:-2}

"$sphinx" -M latexpdf . ./docs/pdf -w ./docs/_sphinx-warnings.txt -j "$JOBS" "$@"

@ -1,9 +1,12 @@
#!/bin/sh #!/bin/sh
# usage: # usage:
# ./build.sh # ./build.sh
# ./build.sh sphinx-executable # SPHINX=/path/to/sphinx-build ./build.sh
# JOBS=3 ./build.sh ... # JOBS=3 ./build.sh ...
# all command-line arguments are passed directly to sphinx-build - run
# ``sphinx-build --help`` for a list, or see
# https://www.sphinx-doc.org/en/master/man/sphinx-build.html
cd $(dirname "$0") cd $(dirname "$0")
cd .. cd ..
@ -17,4 +20,4 @@ if [ -z "$JOBS" ]; then
JOBS=2 JOBS=2
fi fi
"$sphinx" -a -E -b html . ./docs/html -w ./docs/_sphinx-warnings.txt -j "$JOBS" "$@" "$sphinx" -a -b html . ./docs/html -w ./docs/_sphinx-warnings.txt -j "$JOBS" "$@"

@ -37,6 +37,7 @@ changelog.txt uses a syntax similar to RST, with a few special sequences:
- Fixed a segfault when attempting to start a headless session with a graphical PRINT_MODE setting - Fixed a segfault when attempting to start a headless session with a graphical PRINT_MODE setting
- Fixed an issue with the macOS launcher failing to un-quarantine some files - Fixed an issue with the macOS launcher failing to un-quarantine some files
- `labormanager`: fixed handling of new jobs in 0.47 - `labormanager`: fixed handling of new jobs in 0.47
- `embark-assistant`: fixed a couple of incursion handling bugs.
- Fixed ``Units::isEggLayer``, ``Units::isGrazer``, ``Units::isMilkable``, ``Units::isTrainableHunting``, ``Units::isTrainableWar``, and ``Units::isTamable`` ignoring the unit's caste - Fixed ``Units::isEggLayer``, ``Units::isGrazer``, ``Units::isMilkable``, ``Units::isTrainableHunting``, ``Units::isTrainableWar``, and ``Units::isTamable`` ignoring the unit's caste
- `RemoteFortressReader`: fixed a couple crashes that could result from decoding invalid enum items (``site_realization_building_type`` and ``improvement_type``) - `RemoteFortressReader`: fixed a couple crashes that could result from decoding invalid enum items (``site_realization_building_type`` and ``improvement_type``)
@ -45,6 +46,9 @@ changelog.txt uses a syntax similar to RST, with a few special sequences:
- `blueprint`: now automatically creates folder trees when organizing blueprints into subfolders (e.g. ``blueprint 30 30 1 rooms/dining dig`` will create the file ``blueprints/rooms/dining-dig.csv``); previously it would fail if the ``blueprints/rooms/`` directory didn't already exist - `blueprint`: now automatically creates folder trees when organizing blueprints into subfolders (e.g. ``blueprint 30 30 1 rooms/dining dig`` will create the file ``blueprints/rooms/dining-dig.csv``); previously it would fail if the ``blueprints/rooms/`` directory didn't already exist
- `confirm`: added a confirmation dialog for convicting dwarves of crimes - `confirm`: added a confirmation dialog for convicting dwarves of crimes
## Lua
- Added a ``ref_target`` field to primitive field references, corresponding to the ``ref-target`` XML attribute
## Ruby ## Ruby
- Updated ``item_find`` and ``building_find`` to use centralized logic that works on more screens - Updated ``item_find`` and ``building_find`` to use centralized logic that works on more screens

@ -56,3 +56,8 @@ div.sphinxsidebar h3.logo-name a {
div.body { div.body {
min-width: unset; min-width: unset;
} }
div.body li > p {
margin-top: 0;
margin-bottom: 0;
}

@ -536,6 +536,7 @@ static void field_reference(lua_State *state, const struct_field_info *field, vo
case struct_field_info::PRIMITIVE: case struct_field_info::PRIMITIVE:
case struct_field_info::SUBSTRUCT: case struct_field_info::SUBSTRUCT:
push_object_internal(state, field->type, ptr); push_object_internal(state, field->type, ptr);
get_object_ref_header(state, -1)->field_info = field;
return; return;
case struct_field_info::POINTER: case struct_field_info::POINTER:
@ -706,6 +707,17 @@ static type_identity *find_primitive_field(lua_State *state, int field, const ch
*/ */
static int meta_primitive_index(lua_State *state) static int meta_primitive_index(lua_State *state)
{ {
const char *attr = lua_tostring(state, -1);
if (strcmp(attr, "ref_target") == 0) {
const struct_field_info *field_info = get_object_ref_header(state, 1)->field_info;
if (field_info && field_info->extra && field_info->extra->ref_target) {
LookupInTable(state, field_info->extra->ref_target, &DFHACK_TYPEID_TABLE_TOKEN);
} else {
lua_pushnil(state);
}
return 1;
}
uint8_t *ptr = get_object_addr(state, 1, 2, "read"); uint8_t *ptr = get_object_addr(state, 1, 2, "read");
auto type = find_primitive_field(state, 2, "read", &ptr); auto type = find_primitive_field(state, 2, "read", &ptr);
if (!type) if (!type)
@ -1304,6 +1316,8 @@ static void MakePrimitiveMetatable(lua_State *state, type_identity *type)
{ {
EnableMetaField(state, base+2, "value", type); EnableMetaField(state, base+2, "value", type);
AssociateId(state, base+3, 1, "value"); AssociateId(state, base+3, 1, "value");
EnableMetaField(state, base+2, "ref_target", NULL);
} }
// Add the iteration metamethods // Add the iteration metamethods

@ -170,18 +170,24 @@ void LuaWrapper::push_object_ref(lua_State *state, void *ptr)
// stack: [metatable] // stack: [metatable]
auto ref = (DFRefHeader*)lua_newuserdata(state, sizeof(DFRefHeader)); auto ref = (DFRefHeader*)lua_newuserdata(state, sizeof(DFRefHeader));
ref->ptr = ptr; ref->ptr = ptr;
ref->field_info = NULL;
lua_swap(state); lua_swap(state);
lua_setmetatable(state, -2); lua_setmetatable(state, -2);
// stack: [userdata] // stack: [userdata]
} }
void *LuaWrapper::get_object_ref(lua_State *state, int val_index) DFRefHeader *LuaWrapper::get_object_ref_header(lua_State *state, int val_index)
{ {
assert(!lua_islightuserdata(state, val_index)); assert(!lua_islightuserdata(state, val_index));
auto ref = (DFRefHeader*)lua_touserdata(state, val_index); auto ref = (DFRefHeader*)lua_touserdata(state, val_index);
return ref->ptr; return ref;
}
void *LuaWrapper::get_object_ref(lua_State *state, int val_index)
{
return get_object_ref_header(state, val_index)->ptr;
} }
/** /**

@ -126,6 +126,7 @@ namespace LuaWrapper {
*/ */
struct DFRefHeader { struct DFRefHeader {
void *ptr; void *ptr;
const struct_field_info *field_info;
}; };
/** /**
@ -133,15 +134,7 @@ namespace LuaWrapper {
*/ */
void push_object_ref(lua_State *state, void *ptr); void push_object_ref(lua_State *state, void *ptr);
DFHACK_EXPORT void *get_object_ref(lua_State *state, int val_index); DFHACK_EXPORT void *get_object_ref(lua_State *state, int val_index);
DFHACK_EXPORT DFRefHeader *get_object_ref_header(lua_State *state, int val_index);
/*
* The system might be extended to carry some simple
* objects inline inside the reference buffer.
*/
inline bool is_self_contained(DFRefHeader *ptr) {
void **pp = &ptr->ptr;
return **(void****)pp == (pp + 1);
}
/** /**
* Report an error while accessing a field (index = field name). * Report an error while accessing a field (index = field name).

@ -1 +1 @@
Subproject commit b9028b0bb9ad40d3ad4dc3f10934bb61aa16629b Subproject commit 2c21bf2503751fae87cfd62e9608003b5f0dc1c5

@ -119,6 +119,8 @@ namespace embark_assist {
df::world_region_type region_type[16][16]; // Required for incursion override detection. We could store only the df::world_region_type region_type[16][16]; // Required for incursion override detection. We could store only the
// edges, but storing it for every tile allows for a unified fetching // edges, but storing it for every tile allows for a unified fetching
// logic. // logic.
int8_t north_row_biome_x[16]; // "biome_x" data cached for the northern row for access from the north.
int8_t west_column_biome_y[16]; // "biome_y" data cached for the western column for access from the west.
}; };
struct geo_datum { struct geo_datum {

@ -1470,6 +1470,8 @@ void embark_assist::survey::survey_mid_level_tile(embark_assist::defs::geo_data
tile->north_corner_selection[i] = world_data->region_details[0]->edges.biome_corner[i][0]; tile->north_corner_selection[i] = world_data->region_details[0]->edges.biome_corner[i][0];
tile->west_corner_selection[i] = world_data->region_details[0]->edges.biome_corner[0][i]; tile->west_corner_selection[i] = world_data->region_details[0]->edges.biome_corner[0][i];
tile->north_row_biome_x[i] = world_data->region_details[0]->edges.biome_x[i][0];
tile->west_column_biome_y[i] = world_data->region_details[0]->edges.biome_y[0][i];
} }
for (uint8_t i = 0; i < 16; i++) { for (uint8_t i = 0; i < 16; i++) {
@ -1601,7 +1603,7 @@ df::world_region_type embark_assist::survey::region_type_of(embark_assist::defs:
int16_t effective_y = y; int16_t effective_y = y;
int8_t effective_i = i; int8_t effective_i = i;
int8_t effective_k = k; int8_t effective_k = k;
adjust_coordinates(&effective_x, &effective_y, &effective_i, &effective_i); adjust_coordinates(&effective_x, &effective_y, &effective_i, &effective_k);
if (effective_x < 0 || if (effective_x < 0 ||
effective_x >= world_data->world_width || effective_x >= world_data->world_width ||
@ -1657,7 +1659,7 @@ uint8_t embark_assist::survey::translate_corner(embark_assist::defs::world_tile
effective_k = k + 1; effective_k = k + 1;
} }
adjust_coordinates(&effective_x, &effective_y, &effective_i, &effective_i); adjust_coordinates(&effective_x, &effective_y, &effective_i, &effective_k);
if (effective_x == world_data->world_width) { if (effective_x == world_data->world_width) {
if (effective_y == world_data->world_height) { // Only the SE corner of the SE most tile of the world can reference this. if (effective_y == world_data->world_height) { // Only the SE corner of the SE most tile of the world can reference this.
@ -1919,7 +1921,17 @@ uint8_t embark_assist::survey::translate_ns_edge(embark_assist::defs::world_tile
north_region_type = embark_assist::survey::region_type_of(survey_results, x, y, i, k - 1); north_region_type = embark_assist::survey::region_type_of(survey_results, x, y, i, k - 1);
} }
else { else {
effective_edge = world_data->region_details[0]->edges.biome_x[i][k + 1]; if (k < 15) { // We're still within the same world tile
effective_edge = world_data->region_details[0]->edges.biome_x[i][k + 1];
}
else { // Getting the data from the world tile to the south
if (y + 1 == world_data->world_height) {
return 4; // There's nothing to the south, so we fall back on our own tile.
}
effective_edge = survey_results->at(x).at(y + 1).north_row_biome_x[i];
}
north_region_type = embark_assist::survey::region_type_of(survey_results, x, y, i, k); north_region_type = embark_assist::survey::region_type_of(survey_results, x, y, i, k);
south_region_type = embark_assist::survey::region_type_of(survey_results, x, y, i, k + 1); south_region_type = embark_assist::survey::region_type_of(survey_results, x, y, i, k + 1);
} }
@ -1993,7 +2005,16 @@ uint8_t embark_assist::survey::translate_ew_edge(embark_assist::defs::world_tile
west_region_type = embark_assist::survey::region_type_of(survey_results, x, y, i - 1, k); west_region_type = embark_assist::survey::region_type_of(survey_results, x, y, i - 1, k);
} }
else { else {
effective_edge = world_data->region_details[0]->edges.biome_y[i + 1][k]; if (i < 15) { // We're still within the same world tile
effective_edge = world_data->region_details[0]->edges.biome_y[i + 1][k];
}
else { // Getting the data from the world tile to the east
if (x + 1 == world_data->world_width) {
return 4; // There's nothing to the east, so we fall back on our own tile.
}
effective_edge = survey_results->at(x + 1).at(y).west_column_biome_y[k];
}
west_region_type = embark_assist::survey::region_type_of(survey_results, x, y, i, k); west_region_type = embark_assist::survey::region_type_of(survey_results, x, y, i, k);
east_region_type = embark_assist::survey::region_type_of(survey_results, x, y, i + 1, k); east_region_type = embark_assist::survey::region_type_of(survey_results, x, y, i + 1, k);
} }
@ -2424,7 +2445,7 @@ void embark_assist::survey::survey_embark(embark_assist::defs::mid_level_tiles *
} }
else { else {
process_embark_incursion_mid_level_tile process_embark_incursion_mid_level_tile
(translate_ns_edge(survey_results, (translate_ew_edge(survey_results,
true, true,
x, x,
y, y,
@ -2481,7 +2502,7 @@ void embark_assist::survey::survey_embark(embark_assist::defs::mid_level_tiles *
} }
else { else {
process_embark_incursion_mid_level_tile process_embark_incursion_mid_level_tile
(translate_ns_edge(survey_results, (translate_ew_edge(survey_results,
false, false,
x, x,
y, y,

@ -1 +1 @@
Subproject commit e8de92efb73d5ef4d0b52df000d60d3350f07a37 Subproject commit af749e7086739a058cd5095a6ee1a60d7e795a7c

@ -69,6 +69,19 @@ function expect.error(func, ...)
return true return true
end end
end end
-- Asserts that calling func(...) raises an error, and optionally checks the
-- error value: when matcher is a string it is treated as a Lua pattern that
-- the error message must match; otherwise it is called as a predicate that
-- must return a truthy value for the error.
function expect.error_match(func, matcher, ...)
    local succeeded, err = pcall(func, ...)
    if succeeded then
        return false, 'no error raised by function call'
    end
    if type(matcher) == 'string' then
        if tostring(err):match(matcher) then
            return true
        end
        return false, ('error "%s" did not match "%s"'):format(err, matcher)
    end
    if matcher(err) then
        return true
    end
    return false, ('error "%s" did not satisfy matcher'):format(err)
end
function expect.pairs_contains(table, key, comment) function expect.pairs_contains(table, key, comment)
for k, v in pairs(table) do for k, v in pairs(table) do
if k == key then if k == key then

@ -0,0 +1,35 @@
-- A field declared with a ref-target XML attribute should expose the
-- corresponding type object through its ref_target property.
function test.get()
    local check = function(unit)
        local field_ref = unit:_field('hist_figure_id')
        expect.eq(field_ref.ref_target, df.historical_figure)
    end
    dfhack.with_temp_object(df.unit:new(), check)
end
-- A primitive field with no ref-target attribute should report a nil
-- ref_target rather than raising an error.
function test.get_nil()
    local check = function(coord)
        expect.nil_(coord:_field('x').ref_target)
    end
    dfhack.with_temp_object(df.coord:new(), check)
end
-- Reading ref_target on a non-primitive (struct) field should raise a
-- "not found" error, since only primitive references carry it.
function test.get_non_primitive()
    local check = function(unit)
        local read_ref_target = function()
            return unit:_field('status').ref_target
        end
        expect.error_match(read_ref_target, 'not found')
    end
    dfhack.with_temp_object(df.unit:new(), check)
end
-- ref_target is read-only: assigning to it on a primitive field should
-- raise a "builtin property or method" error.
function test.set()
    local check = function(unit)
        local write_ref_target = function()
            unit:_field('hist_figure_id').ref_target = df.coord
        end
        expect.error_match(write_ref_target, 'builtin property or method')
    end
    dfhack.with_temp_object(df.unit:new(), check)
end
-- Assigning ref_target on a non-primitive (struct) field should raise a
-- "not found" error, as the property does not exist there at all.
function test.set_non_primitive()
    local check = function(unit)
        local write_ref_target = function()
            unit:_field('status').ref_target = df.coord
        end
        expect.error_match(write_ref_target, 'not found')
    end
    dfhack.with_temp_object(df.unit:new(), check)
end