Merge pull request #3622 from myk002/myk_test_test

first attempt to run tests on windows
Merged into: develop
Myk committed 2023-08-03 23:05:46 -07:00 (via GitHub)
commit 3599403f21
16 changed files with 271 additions and 168 deletions

@@ -35,6 +35,9 @@ on:
       extras:
         type: boolean
         default: false
+      tests:
+        type: boolean
+        default: false
       gcc-ver:
         type: string
         default: "10"
@@ -112,6 +115,7 @@ jobs:
             -DBUILD_SIZECHECK:BOOL=${{ inputs.extras }} \
             -DBUILD_SKELETON:BOOL=${{ inputs.extras }} \
             -DBUILD_DOCS:BOOL=${{ inputs.docs }} \
+            -DBUILD_TESTS:BOOL=${{ inputs.tests }} \
             -DINSTALL_DATA_FILES:BOOL=${{ inputs.common-files }} \
             -DINSTALL_SCRIPTS:BOOL=${{ inputs.common-files }}
       - name: Build DFHack

@@ -5,6 +5,8 @@ on:
     inputs:
       dfhack_ref:
         type: string
+      scripts_ref:
+        type: string
       structures_ref:
         type: string
       artifact-name:
@@ -21,6 +23,15 @@ on:
       common-files:
         type: boolean
         default: true
+      docs:
+        type: boolean
+        default: false
+      stonesense:
+        type: boolean
+        default: false
+      tests:
+        type: boolean
+        default: false
       launchdf:
         type: boolean
         default: false
@@ -40,6 +51,13 @@ jobs:
           ref: ${{ inputs.dfhack_ref }}
           submodules: true
           fetch-depth: 0
+      - name: Clone scripts
+        if: inputs.scripts_ref
+        uses: actions/checkout@v3
+        with:
+          repository: 'DFHack/scripts'
+          ref: ${{ inputs.scripts_ref }}
+          path: scripts
       - name: Clone structures
         if: inputs.structures_ref
         uses: actions/checkout@v3
@@ -65,14 +83,14 @@ jobs:
             win-msvc
       - name: Cross-compile
         env:
-          CMAKE_EXTRA_ARGS: '-DBUILD_STONESENSE:BOOL=1 -DBUILD_DOCS:BOOL=${{ inputs.common-files }} -DINSTALL_DATA_FILES:BOOL=${{ inputs.common-files }} -DINSTALL_SCRIPTS:BOOL=${{ inputs.common-files }} -DBUILD_DFLAUNCH:BOOL=${{ inputs.launchdf }}'
+          CMAKE_EXTRA_ARGS: '-DBUILD_STONESENSE:BOOL=${{ inputs.stonesense }} -DBUILD_DOCS:BOOL=${{ inputs.docs }} -DINSTALL_DATA_FILES:BOOL=${{ inputs.common-files }} -DINSTALL_SCRIPTS:BOOL=${{ inputs.common-files }} -DBUILD_DFLAUNCH:BOOL=${{ inputs.launchdf }} -DBUILD_TESTS:BOOL=${{ inputs.tests }}'
         run: |
           cd build
           bash -x build-win64-from-linux.sh
       - name: Trim cache
         run: |
           cd build
-          ccache -d win64-cross/ccache --max-size 200M
+          ccache -d win64-cross/ccache --max-size 150M
           ccache -d win64-cross/ccache --cleanup
           ccache -d win64-cross/ccache --show-stats --verbose
       - name: Save ccache

@@ -25,6 +25,8 @@ jobs:
       artifact-name: dfhack-win64-build
       append-date-and-hash: true
       cache-id: release
+      stonesense: true
+      docs: true
     secrets: inherit

   docs:

@@ -20,6 +20,8 @@ jobs:
       dfhack_ref: ${{ github.event.inputs && github.event.inputs.ref || github.event.ref }}
       cache-id: release
       cache-readonly: true
+      stonesense: true
+      docs: true
       launchdf: true
     secrets: inherit

@@ -43,6 +43,7 @@ jobs:
       cache-id: release
       cache-readonly: true
       common-files: false
+      stonesense: true
       launchdf: true
     secrets: inherit

@@ -11,6 +11,17 @@ on:
       type: string

 jobs:
+  build-windows:
+    name: Windows MSVC
+    uses: ./.github/workflows/build-windows.yml
+    with:
+      dfhack_ref: ${{ inputs.dfhack_ref }}
+      scripts_ref: ${{ inputs.scripts_ref }}
+      structures_ref: ${{ inputs.structures_ref }}
+      artifact-name: test-msvc
+      cache-id: test
+      tests: true
+
   build-linux:
     name: Linux gcc-${{ matrix.gcc }}
     uses: ./.github/workflows/build-linux.yml
@@ -22,6 +33,7 @@ jobs:
       cache-id: test
       stonesense: ${{ matrix.plugins == 'all' }}
       extras: ${{ matrix.plugins == 'all' }}
+      tests: true
       gcc-ver: ${{ matrix.gcc }}
     secrets: inherit
     strategy:
@@ -33,51 +45,78 @@ jobs:
           - gcc: 12
             plugins: "all"

-  test-linux:
-    name: Test (Linux, GCC ${{ matrix.gcc }}, ${{ matrix.plugins }} plugins)
-    needs: build-linux
-    runs-on: ubuntu-latest
+  test-windows:
+    name: Test (${{ matrix.os }}, ${{ matrix.compiler }}, ${{ matrix.plugins }} plugins)
+    needs:
+      - build-windows
+      - build-linux
+    runs-on: ${{ matrix.os }}-latest
     strategy:
       fail-fast: false
       matrix:
         include:
-          - gcc: 10
+          - os: windows
+            compiler: msvc
             plugins: "default"
-          - gcc: 12
-            plugins: "all"
+          # TODO: uncomment once we have a linux build we can download from bay12
+          # - os: ubuntu
+          #   compiler: gcc-10
+          #   gcc: 10
+          #   plugins: "default"
+          # - os: ubuntu
+          #   compiler: gcc-10
+          #   gcc: 12
+          #   plugins: "all"
     steps:
-      - name: Download artifact
+      - name: Set env (windows)
+        if: matrix.os == 'windows'
+        run: echo "DF_FOLDER=DF" >> $env:GITHUB_ENV
+      - name: Set env (posix)
+        if: matrix.os != 'windows'
+        run: echo "DF_FOLDER=DF" >> $GITHUB_ENV
+      - name: Clone DFHack
+        uses: actions/checkout@v3
+        with:
+          repository: 'DFHack/dfhack'
+          ref: ${{ inputs.dfhack_ref }}
+      - name: Detect DF version (windows)
+        if: matrix.os == 'windows'
+        run: echo DF_VERSION="$(sh ci/get-df-version.sh)" >> $env:GITHUB_ENV
+      - name: Detect DF version (posix)
+        if: matrix.os != 'windows'
+        run: echo DF_VERSION="$(sh ci/get-df-version.sh)" >> $GITHUB_ENV
+      - name: Fetch DF cache
+        id: restore-df
+        uses: actions/cache/restore@v3
+        with:
+          path: ${{ env.DF_FOLDER }}
+          key: df-${{ matrix.os }}-${{ env.DF_VERSION }}-${{ hashFiles('ci/download-df.sh') }}
+      - name: Download DF
+        if: steps.restore-df.outputs.cache-hit != 'true'
+        run: sh ci/download-df.sh ${{ env.DF_FOLDER }} ${{ matrix.os }} ${{ env.DF_VERSION }}
+      - name: Save DF cache
+        if: steps.restore-df.outputs.cache-hit != 'true'
+        uses: actions/cache/save@v3
+        with:
+          path: ${{ env.DF_FOLDER }}
+          key: df-${{ matrix.os }}-${{ env.DF_VERSION }}-${{ hashFiles('ci/download-df.sh') }}
+      - name: Download DFHack
         uses: actions/download-artifact@v3
         with:
-          name: test-gcc-${{ matrix.gcc }}
-      # - name: Fetch DF cache
-      #   uses: actions/cache@v3
-      #   with:
-      #     path: ~/DF
-      #     key: df-${{ hashFiles('ci/download-df.sh') }}
-      # - name: Download DF
-      #   run: |
-      #     sh ci/download-df.sh
-      # - name: Run lua tests
-      #   id: run_tests_lua
-      #   run: |
-      #     export TERM=dumb
-      #     status=0
-      #     script -qe -c "python ci/run-tests.py --headless --keep-status \"$DF_FOLDER\"" || status=$((status + 1))
-      #     python ci/check-rpc.py "$DF_FOLDER/dfhack-rpc.txt" || status=$((status + 2))
-      #     mkdir -p artifacts
-      #     cp "$DF_FOLDER"/test*.json "$DF_FOLDER"/*.log artifacts || status=$((status + 4))
-      #     exit $status
-      # - name: Upload test artifacts
-      #   uses: actions/upload-artifact@v3
-      #   if: (success() || failure()) && steps.run_tests.outcome != 'skipped'
-      #   continue-on-error: true
-      #   with:
-      #     name: test-artifacts-${{ matrix.gcc }}
-      #     path: artifacts
-      # - name: Clean up DF folder
-      #   # prevent DFHack-generated files from ending up in the cache
-      #   # (download-df.sh also removes them, this is just to save cache space)
-      #   if: success() || failure()
-      #   run: |
-      #     rm -rf "$DF_FOLDER"
+          name: test-${{ matrix.compiler }}
+          path: ${{ env.DF_FOLDER }}
+      - name: Run lua tests
+        timeout-minutes: 10
+        run: python ci/run-tests.py --keep-status "${{ env.DF_FOLDER }}"
+      - name: Check RPC interface
+        run: python ci/check-rpc.py "${{ env.DF_FOLDER }}/dfhack-rpc.txt"
+      - name: Upload test artifacts
+        uses: actions/upload-artifact@v3
+        if: always()
+        continue-on-error: true
+        with:
+          name: test-artifacts-msvc
+          path: |
            ${{ env.DF_FOLDER }}/dfhack-rpc.txt
            ${{ env.DF_FOLDER }}/test*.json
            ${{ env.DF_FOLDER }}/*.log

@@ -1,8 +1,10 @@
 #!/usr/bin/env python3
 import glob
+import itertools
 import sys

 actual = {'': {}}
+SEP = ('=' * 80)

 with open(sys.argv[1]) as f:
     plugin_name = ''
@@ -26,7 +28,7 @@ for p in glob.iglob('library/proto/*.proto'):
             parts = line.split(' ')
             expected[''][parts[2]] = (parts[4], parts[6])

-for p in glob.iglob('plugins/proto/*.proto'):
+for p in itertools.chain(glob.iglob('plugins/proto/*.proto'), glob.iglob('plugins/*/proto/*.proto')):
     plugin_name = ''
     with open(p) as f:
         for line in f:
@@ -53,6 +55,7 @@ for plugin_name in actual:
     methods = actual[plugin_name]
     if plugin_name not in expected:
+        print(SEP)
         print('Missing documentation for plugin proto files: ' + plugin_name)
         print('Add the following lines:')
         print('// Plugin: ' + plugin_name)
@@ -73,12 +76,14 @@ for plugin_name in actual:
             missing.append('// RPC ' + m + ' : ' + io[0] + ' -> ' + io[1])

     if len(missing) > 0:
+        print(SEP)
         print('Incomplete documentation for ' + ('core' if plugin_name == '' else 'plugin "' + plugin_name + '"') + ' proto files. Add the following lines:')
         for m in missing:
             print(m)
         error_count += 1

     if len(wrong) > 0:
+        print(SEP)
         print('Incorrect documentation for ' + ('core' if plugin_name == '' else 'plugin "' + plugin_name + '"') + ' proto files. Replace the following comments:')
         for m in wrong:
             print(m)
@@ -88,6 +93,7 @@ for plugin_name in expected:
     methods = expected[plugin_name]
     if plugin_name not in actual:
+        print(SEP)
         print('Incorrect documentation for plugin proto files: ' + plugin_name)
         print('The following methods are documented, but the plugin does not provide any RPC methods:')
         for m in methods:
@@ -102,6 +108,7 @@ for plugin_name in expected:
             missing.append('// RPC ' + m + ' : ' + io[0] + ' -> ' + io[1])

     if len(missing) > 0:
+        print(SEP)
         print('Incorrect documentation for ' + ('core' if plugin_name == '' else 'plugin "' + plugin_name + '"') + ' proto files. Remove the following lines:')
         for m in missing:
             print(m)

@@ -1,52 +1,50 @@
 #!/bin/sh
+DF_FOLDER=$1
+OS_TARGET=$2
+DF_VERSION=$3

 set -e

-df_tardest="df.tar.bz2"
-save_tardest="test_save.tgz"
-
-cd "$(dirname "$0")"
-echo "DF_VERSION: $DF_VERSION"
-echo "DF_FOLDER: $DF_FOLDER"
-mkdir -p "$DF_FOLDER"
-# back out of df_linux
-cd "$DF_FOLDER/.."
-
-if ! test -f "$df_tardest"; then
-    minor=$(echo "$DF_VERSION" | cut -d. -f2)
-    patch=$(echo "$DF_VERSION" | cut -d. -f3)
-    echo "Downloading DF $DF_VERSION"
-    while read url; do
-        echo "Attempting download: ${url}"
-        if wget -v "$url" -O "$df_tardest"; then
-            break
-        fi
-    done <<URLS
-https://www.bay12games.com/dwarves/df_${minor}_${patch}_linux.tar.bz2
-https://files.dfhack.org/DF/${minor}.${patch}/df_${minor}_${patch}_linux.tar.bz2
-URLS
-    echo $df_tardest
-    if ! test -f "$df_tardest"; then
-        echo "DF failed to download: $df_tardest not found"
-        exit 1
-    fi
-    echo "Downloading test save"
-    #test_save_url="https://files.dfhack.org/DF/0.${minor}.${patch}/test_save.tgz"
-    test_save_url="https://drive.google.com/uc?export=download&id=1XvYngl-DFONiZ9SD9OC4B2Ooecu8rPFz"
-    if ! wget -v "$test_save_url" -O "$save_tardest"; then
-        echo "failed to download test save"
-        exit 1
-    fi
-    echo $save_tardest
-fi
-
-rm -rf df_linux
-mkdir -p df_linux/save
+minor=$(echo "$DF_VERSION" | cut -d. -f1)
+patch=$(echo "$DF_VERSION" | cut -d. -f2)
+df_url="https://www.bay12games.com/dwarves/df_${minor}_${patch}"
+
+if test "$OS_TARGET" = "windows"; then
+    WGET="C:/msys64/usr/bin/wget.exe"
+    df_url="${df_url}_win_s.zip"
+    df_archive_name="df.zip"
+    df_extract_cmd="unzip -d ${DF_FOLDER}"
+elif test "$OS_TARGET" = "ubuntu"; then
+    WGET=wget
+    df_url="${df_url}_linux.tar.bz2"
+    df_archive_name="df.tar.bz2"
+    df_extract_cmd="tar -x -j --strip-components=1 -f"
+else
+    echo "Unhandled OS target: ${OS_TARGET}"
+    exit 1
+fi
+
+if ! $WGET -v "$df_url" -O "$df_archive_name"; then
+    echo "Failed to download DF from $df_url"
+    exit 1
+fi
+
+save_url="https://dffd.bay12games.com/download.php?id=15434&f=dreamfort.7z"
+save_archive_name="test_save.7z"
+save_extract_cmd="7z x -oDF/save"
+
+if ! $WGET -v "$save_url" -O "$save_archive_name"; then
+    echo "Failed to download test save from $save_url"
+    exit 1
+fi

 echo Extracting
-tar xf "$df_tardest" --strip-components=1 -C df_linux
-tar xf "$save_tardest" -C df_linux/save
+$df_extract_cmd "$df_archive_name"
+$save_extract_cmd "$save_archive_name"
+mv DF/save/* DF/save/region1
 echo Done
 ls -l
+md5sum "$df_archive_name" "$save_archive_name"
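For reference, the rewritten script now takes its parameters positionally (target folder, OS target, DF version) instead of reading them from the environment. A rough usage sketch follows; the version string is a placeholder, not taken from this commit, and the Windows target assumes MSYS2's wget plus 7z are installed:

    # usage: download-df.sh <target folder> <os target> <df version>
    sh ci/download-df.sh DF windows 50.09   # would fetch df_50_09_win_s.zip
    sh ci/download-df.sh DF ubuntu 50.09    # would fetch df_50_09_linux.tar.bz2
    # either way the bundled test save is unpacked to DF/save/region1, which matches
    # the default save_dir the test harness loads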

@@ -55,7 +55,13 @@ if os.path.exists(test_status_file):
     os.remove(test_status_file)

 print('Backing up init.txt to init.txt.orig')
-init_txt_path = 'data/init/init.txt'
+default_init_txt_path = 'data/init/init_default.txt'
+prefs_path = 'prefs'
+init_txt_path = 'prefs/init.txt'
+if not os.path.exists(init_txt_path):
+    os.makedirs(prefs_path, exist_ok=True)
+    shutil.copyfile(default_init_txt_path, init_txt_path)
 shutil.copyfile(init_txt_path, init_txt_path + '.orig')
 with open(init_txt_path) as f:
     init_contents = f.read()
@@ -72,11 +78,7 @@ init_path = 'dfhack-config/init'
 if not os.path.isdir('hack/init'):
     # we're on an old branch that still reads init files from the root dir
     init_path = '.'
-try:
-    os.mkdir(init_path)
-except OSError as error:
-    # ignore already exists errors
-    pass
+os.makedirs(init_path, exist_ok=True)
 test_init_file = os.path.join(init_path, 'dfhackzzz_test.init') # Core sorts these alphabetically
 with open(test_init_file, 'w') as f:
     f.write('''

@@ -1,11 +1,12 @@
 -- DFHack developer test harness
 --@ module = true

-local expect = require 'test_util.expect'
-local json = require 'json'
-local mock = require 'test_util.mock'
-local script = require 'gui.script'
-local utils = require 'utils'
+local expect = require('test_util.expect')
+local helpdb = require('helpdb')
+local json = require('json')
+local mock = require('test_util.mock')
+local script = require('gui.script')
+local utils = require('utils')

 local help_text =
 [====[
@@ -13,17 +14,24 @@ local help_text =
 test
 ====

-Run DFHack tests.
+Tags: dev
+Command: "test"

-Usage:
+Run DFHack regression tests.
+
+Discover DFHack functionality that has broken due to recent changes in DF or DFHack.
+
+Usage
+-----

     test [<options>] [<done_command>]

 If a done_command is specified, it will be run after the tests complete.

-Options:
+Options
+-------

+    -h, --help      display this help message and exit.
     -d, --test_dir  specifies which directory to look in for tests. defaults to
                     the "hack/scripts/test" folder in your DF installation.
     -m, --modes     only run tests in the given comma separated list of modes.
@@ -36,9 +44,11 @@ Options:
                     a "fortress" mode test is run. if not specified, defaults to
                     'region1'.
     -t, --tests     only run tests that match one of the comma separated list of
-                    patterns. if not specified, no tests are filtered.
+                    patterns. if not specified, no tests are filtered and all tests
+                    are run.

-Modes:
+Modes
+-----

 none   the test can be run on any screen
 title  the test must be run on the DF title screen. note that if the game
@@ -47,7 +57,8 @@ Modes:
        currently on the title screen, the save specified by the save_dir
        parameter will be loaded.

-Examples:
+Examples
+--------

 test      runs all tests
 test -r   runs all tests that haven't been run before
@@ -352,18 +363,33 @@ local function load_tests(file, tests)
     if not code then
         dfhack.printerr('Failed to load file: ' .. tostring(err))
         return false
-    else
-        dfhack.internal.IN_TEST = true
-        local ok, err = dfhack.pcall(code)
-        dfhack.internal.IN_TEST = false
-        if not ok then
-            dfhack.printerr('Error when running file: ' .. tostring(err))
-            return false
-        else
-            if not MODES[env.config.mode] then
-                dfhack.printerr('Invalid config.mode: ' .. tostring(env.config.mode))
-                return false
-            end
+    end
+    dfhack.internal.IN_TEST = true
+    local ok, err = dfhack.pcall(code)
+    dfhack.internal.IN_TEST = false
+    if not ok then
+        dfhack.printerr('Error when running file: ' .. tostring(err))
+        return false
+    end
+    if not MODES[env.config.mode] then
+        dfhack.printerr('Invalid config.mode: ' .. tostring(env.config.mode))
+        return false
+    end
+    if not env.config.targets then
+        dfhack.printerr('Skipping tests for unspecified target in ' .. file)
+        return true -- TODO: change to false once existing tests have targets specified
+    end
+    local targets = type(env.config.targets) == 'table' and env.config.targets or {env.config.targets}
+    for _,target in ipairs(targets) do
+        if target == 'core' then goto continue end
+        if type(target) ~= 'string' or not helpdb.is_entry(target) or
+            helpdb.get_entry_tags(target).unavailable
+        then
+            dfhack.printerr('Skipping tests for unavailable target: ' .. target)
+            return true
+        end
+        ::continue::
+    end
     for name, test_func in pairs(env.test) do
         if env.config.wrapper then
             local fn = test_func
@@ -378,8 +404,6 @@ local function load_tests(file, tests)
         test_data.name = test_data.full_name:gsub('test/', ''):gsub('.lua', '')
         table.insert(tests, test_data)
     end
-        end
-    end
     return true
 end
@@ -575,7 +599,7 @@ local function dump_df_state()
         enabler = {
             fps = df.global.enabler.fps,
             gfps = df.global.enabler.gfps,
-            fullscreen = df.global.enabler.fullscreen,
+            fullscreen_state = df.global.enabler.fullscreen_state.whole,
         },
         gps = {
             dimx = df.global.gps.dimx,

@@ -169,7 +169,7 @@ if(BUILD_SUPPORTED)
     #dfhack_plugin(trackstop trackstop.cpp)
     #dfhack_plugin(tubefill tubefill.cpp)
     #add_subdirectory(tweak)
-    #dfhack_plugin(workflow workflow.cpp LINK_LIBRARIES lua)
+    dfhack_plugin(workflow workflow.cpp LINK_LIBRARIES lua)
     dfhack_plugin(work-now work-now.cpp)
     dfhack_plugin(xlsxreader xlsxreader.cpp LINK_LIBRARIES lua xlsxio_read_STATIC zip expat)
     dfhack_plugin(zone zone.cpp LINK_LIBRARIES lua)

@@ -5,7 +5,7 @@ package isoworldremote;

 option optimize_for = LITE_RUNTIME;

-// Plugin: isoworldremote
+// DISABLED Plugin: isoworldremote

 enum BasicMaterial {
     AIR = 0;
@@ -54,7 +54,7 @@ message EmbarkTile {
     optional bool is_valid = 7;
 }

-// RPC GetEmbarkTile : TileRequest -> EmbarkTile
+// DISABLED RPC GetEmbarkTile : TileRequest -> EmbarkTile
 message TileRequest {
     optional int32 want_x = 1;
     optional int32 want_y = 2;
@@ -64,7 +64,7 @@ message MapRequest {
     optional string save_folder = 1;
 }

-// RPC GetEmbarkInfo : MapRequest -> MapReply
+// DISABLED RPC GetEmbarkInfo : MapRequest -> MapReply
 message MapReply {
     required bool available = 1;
     optional int32 region_x = 2;
@@ -75,7 +75,7 @@ message MapReply {
     optional int32 current_season = 7;
 }

-// RPC GetRawNames : MapRequest -> RawNames
+// DISABLED RPC GetRawNames : MapRequest -> RawNames
 message RawNames {
     required bool available = 1;
     repeated string inorganic = 2;

@@ -4,9 +4,9 @@ package dfproto;

 option optimize_for = LITE_RUNTIME;

-// Plugin: rename
+// DISABLED Plugin: rename

-// RPC RenameSquad : RenameSquadIn -> EmptyMessage
+// DISABLED RPC RenameSquad : RenameSquadIn -> EmptyMessage
 message RenameSquadIn {
     required int32 squad_id = 1;
@@ -14,7 +14,7 @@ message RenameSquadIn {
     optional string alias = 3;
 }

-// RPC RenameUnit : RenameUnitIn -> EmptyMessage
+// DISABLED RPC RenameUnit : RenameUnitIn -> EmptyMessage
 message RenameUnitIn {
     required int32 unit_id = 1;
@@ -22,7 +22,7 @@ message RenameUnitIn {
     optional string profession = 3;
 }

-// RPC RenameBuilding : RenameBuildingIn -> EmptyMessage
+// DISABLED RPC RenameBuilding : RenameBuildingIn -> EmptyMessage
 message RenameBuildingIn {
     required int32 building_id = 1;

@@ -1,3 +1,5 @@
+config.targets = 'core'
+
 local function clean_path(p)
     -- todo: replace with dfhack.filesystem call?
     return p:gsub('\\', '/'):gsub('//', '/'):gsub('/$', '')

@@ -1,3 +1,5 @@
+config.targets = 'core'
+
 function test.toSearchNormalized()
     expect.eq(dfhack.toSearchNormalized(''), '')
     expect.eq(dfhack.toSearchNormalized('abcd'), 'abcd')

@@ -1,3 +1,5 @@
+config.targets = 'core'
+
 function test.internal_in_test()
     expect.true_(dfhack.internal.IN_TEST)
 end