Merge pull request #3622 from myk002/myk_test_test

first attempt to run tests on windows
develop
Myk 2023-08-03 23:05:46 -07:00 committed by GitHub
commit 3599403f21
16 changed files with 271 additions and 168 deletions

@ -35,6 +35,9 @@ on:
extras:
type: boolean
default: false
tests:
type: boolean
default: false
gcc-ver:
type: string
default: "10"
@ -112,6 +115,7 @@ jobs:
-DBUILD_SIZECHECK:BOOL=${{ inputs.extras }} \
-DBUILD_SKELETON:BOOL=${{ inputs.extras }} \
-DBUILD_DOCS:BOOL=${{ inputs.docs }} \
-DBUILD_TESTS:BOOL=${{ inputs.tests }} \
-DINSTALL_DATA_FILES:BOOL=${{ inputs.common-files }} \
-DINSTALL_SCRIPTS:BOOL=${{ inputs.common-files }}
- name: Build DFHack

@ -5,6 +5,8 @@ on:
inputs:
dfhack_ref:
type: string
scripts_ref:
type: string
structures_ref:
type: string
artifact-name:
@ -21,6 +23,15 @@ on:
common-files:
type: boolean
default: true
docs:
type: boolean
default: false
stonesense:
type: boolean
default: false
tests:
type: boolean
default: false
launchdf:
type: boolean
default: false
@ -40,6 +51,13 @@ jobs:
ref: ${{ inputs.dfhack_ref }}
submodules: true
fetch-depth: 0
- name: Clone scripts
if: inputs.scripts_ref
uses: actions/checkout@v3
with:
repository: 'DFHack/scripts'
ref: ${{ inputs.scripts_ref }}
path: scripts
- name: Clone structures
if: inputs.structures_ref
uses: actions/checkout@v3
@ -65,14 +83,14 @@ jobs:
win-msvc
- name: Cross-compile
env:
CMAKE_EXTRA_ARGS: '-DBUILD_STONESENSE:BOOL=1 -DBUILD_DOCS:BOOL=${{ inputs.common-files }} -DINSTALL_DATA_FILES:BOOL=${{ inputs.common-files }} -DINSTALL_SCRIPTS:BOOL=${{ inputs.common-files }} -DBUILD_DFLAUNCH:BOOL=${{ inputs.launchdf }}'
CMAKE_EXTRA_ARGS: '-DBUILD_STONESENSE:BOOL=${{ inputs.stonesense }} -DBUILD_DOCS:BOOL=${{ inputs.docs }} -DINSTALL_DATA_FILES:BOOL=${{ inputs.common-files }} -DINSTALL_SCRIPTS:BOOL=${{ inputs.common-files }} -DBUILD_DFLAUNCH:BOOL=${{ inputs.launchdf }} -DBUILD_TESTS:BOOL=${{ inputs.tests }}'
run: |
cd build
bash -x build-win64-from-linux.sh
- name: Trim cache
run: |
cd build
ccache -d win64-cross/ccache --max-size 200M
ccache -d win64-cross/ccache --max-size 150M
ccache -d win64-cross/ccache --cleanup
ccache -d win64-cross/ccache --show-stats --verbose
- name: Save ccache

@ -25,6 +25,8 @@ jobs:
artifact-name: dfhack-win64-build
append-date-and-hash: true
cache-id: release
stonesense: true
docs: true
secrets: inherit
docs:

@ -20,6 +20,8 @@ jobs:
dfhack_ref: ${{ github.event.inputs && github.event.inputs.ref || github.event.ref }}
cache-id: release
cache-readonly: true
stonesense: true
docs: true
launchdf: true
secrets: inherit

@ -43,6 +43,7 @@ jobs:
cache-id: release
cache-readonly: true
common-files: false
stonesense: true
launchdf: true
secrets: inherit

@ -11,6 +11,17 @@ on:
type: string
jobs:
build-windows:
name: Windows MSVC
uses: ./.github/workflows/build-windows.yml
with:
dfhack_ref: ${{ inputs.dfhack_ref }}
scripts_ref: ${{ inputs.scripts_ref }}
structures_ref: ${{ inputs.structures_ref }}
artifact-name: test-msvc
cache-id: test
tests: true
build-linux:
name: Linux gcc-${{ matrix.gcc }}
uses: ./.github/workflows/build-linux.yml
@ -22,6 +33,7 @@ jobs:
cache-id: test
stonesense: ${{ matrix.plugins == 'all' }}
extras: ${{ matrix.plugins == 'all' }}
tests: true
gcc-ver: ${{ matrix.gcc }}
secrets: inherit
strategy:
@ -33,51 +45,78 @@ jobs:
- gcc: 12
plugins: "all"
test-linux:
name: Test (Linux, GCC ${{ matrix.gcc }}, ${{ matrix.plugins }} plugins)
needs: build-linux
runs-on: ubuntu-latest
test-windows:
name: Test (${{ matrix.os }}, ${{ matrix.compiler }}, ${{ matrix.plugins }} plugins)
needs:
- build-windows
- build-linux
runs-on: ${{ matrix.os }}-latest
strategy:
fail-fast: false
matrix:
include:
- gcc: 10
- os: windows
compiler: msvc
plugins: "default"
- gcc: 12
plugins: "all"
# TODO: uncomment once we have a linux build we can download from bay12
# - os: ubuntu
# compiler: gcc-10
# gcc: 10
# plugins: "default"
# - os: ubuntu
# compiler: gcc-10
# gcc: 12
# plugins: "all"
steps:
- name: Download artifact
- name: Set env (windows)
if: matrix.os == 'windows'
run: echo "DF_FOLDER=DF" >> $env:GITHUB_ENV
- name: Set env (posix)
if: matrix.os != 'windows'
run: echo "DF_FOLDER=DF" >> $GITHUB_ENV
- name: Clone DFHack
uses: actions/checkout@v3
with:
repository: 'DFHack/dfhack'
ref: ${{ inputs.dfhack_ref }}
- name: Detect DF version (windows)
if: matrix.os == 'windows'
run: echo DF_VERSION="$(sh ci/get-df-version.sh)" >> $env:GITHUB_ENV
- name: Detect DF version (posix)
if: matrix.os != 'windows'
run: echo DF_VERSION="$(sh ci/get-df-version.sh)" >> $GITHUB_ENV
- name: Fetch DF cache
id: restore-df
uses: actions/cache/restore@v3
with:
path: ${{ env.DF_FOLDER }}
key: df-${{ matrix.os }}-${{ env.DF_VERSION }}-${{ hashFiles('ci/download-df.sh') }}
- name: Download DF
if: steps.restore-df.outputs.cache-hit != 'true'
run: sh ci/download-df.sh ${{ env.DF_FOLDER }} ${{ matrix.os }} ${{ env.DF_VERSION }}
- name: Save DF cache
if: steps.restore-df.outputs.cache-hit != 'true'
uses: actions/cache/save@v3
with:
path: ${{ env.DF_FOLDER }}
key: df-${{ matrix.os }}-${{ env.DF_VERSION }}-${{ hashFiles('ci/download-df.sh') }}
- name: Download DFHack
uses: actions/download-artifact@v3
with:
name: test-gcc-${{ matrix.gcc }}
# - name: Fetch DF cache
# uses: actions/cache@v3
# with:
# path: ~/DF
# key: df-${{ hashFiles('ci/download-df.sh') }}
# - name: Download DF
# run: |
# sh ci/download-df.sh
# - name: Run lua tests
# id: run_tests_lua
# run: |
# export TERM=dumb
# status=0
# script -qe -c "python ci/run-tests.py --headless --keep-status \"$DF_FOLDER\"" || status=$((status + 1))
# python ci/check-rpc.py "$DF_FOLDER/dfhack-rpc.txt" || status=$((status + 2))
# mkdir -p artifacts
# cp "$DF_FOLDER"/test*.json "$DF_FOLDER"/*.log artifacts || status=$((status + 4))
# exit $status
# - name: Upload test artifacts
# uses: actions/upload-artifact@v3
# if: (success() || failure()) && steps.run_tests.outcome != 'skipped'
# continue-on-error: true
# with:
# name: test-artifacts-${{ matrix.gcc }}
# path: artifacts
# - name: Clean up DF folder
# # prevent DFHack-generated files from ending up in the cache
# # (download-df.sh also removes them, this is just to save cache space)
# if: success() || failure()
# run: |
# rm -rf "$DF_FOLDER"
name: test-${{ matrix.compiler }}
path: ${{ env.DF_FOLDER }}
- name: Run lua tests
timeout-minutes: 10
run: python ci/run-tests.py --keep-status "${{ env.DF_FOLDER }}"
- name: Check RPC interface
run: python ci/check-rpc.py "${{ env.DF_FOLDER }}/dfhack-rpc.txt"
- name: Upload test artifacts
uses: actions/upload-artifact@v3
if: always()
continue-on-error: true
with:
name: test-artifacts-msvc
path: |
${{ env.DF_FOLDER }}/dfhack-rpc.txt
${{ env.DF_FOLDER }}/test*.json
${{ env.DF_FOLDER }}/*.log

@ -1,8 +1,10 @@
#!/usr/bin/env python3
import glob
import itertools
import sys
actual = {'': {}}
SEP = ('=' * 80)
with open(sys.argv[1]) as f:
plugin_name = ''
@ -26,7 +28,7 @@ for p in glob.iglob('library/proto/*.proto'):
parts = line.split(' ')
expected[''][parts[2]] = (parts[4], parts[6])
for p in glob.iglob('plugins/proto/*.proto'):
for p in itertools.chain(glob.iglob('plugins/proto/*.proto'), glob.iglob('plugins/*/proto/*.proto')):
plugin_name = ''
with open(p) as f:
for line in f:
@ -53,6 +55,7 @@ for plugin_name in actual:
methods = actual[plugin_name]
if plugin_name not in expected:
print(SEP)
print('Missing documentation for plugin proto files: ' + plugin_name)
print('Add the following lines:')
print('// Plugin: ' + plugin_name)
@ -73,12 +76,14 @@ for plugin_name in actual:
missing.append('// RPC ' + m + ' : ' + io[0] + ' -> ' + io[1])
if len(missing) > 0:
print(SEP)
print('Incomplete documentation for ' + ('core' if plugin_name == '' else 'plugin "' + plugin_name + '"') + ' proto files. Add the following lines:')
for m in missing:
print(m)
error_count += 1
if len(wrong) > 0:
print(SEP)
print('Incorrect documentation for ' + ('core' if plugin_name == '' else 'plugin "' + plugin_name + '"') + ' proto files. Replace the following comments:')
for m in wrong:
print(m)
@ -88,6 +93,7 @@ for plugin_name in expected:
methods = expected[plugin_name]
if plugin_name not in actual:
print(SEP)
print('Incorrect documentation for plugin proto files: ' + plugin_name)
print('The following methods are documented, but the plugin does not provide any RPC methods:')
for m in methods:
@ -102,6 +108,7 @@ for plugin_name in expected:
missing.append('// RPC ' + m + ' : ' + io[0] + ' -> ' + io[1])
if len(missing) > 0:
print(SEP)
print('Incorrect documentation for ' + ('core' if plugin_name == '' else 'plugin "' + plugin_name + '"') + ' proto files. Remove the following lines:')
for m in missing:
print(m)

@ -1,52 +1,50 @@
#!/bin/sh
DF_FOLDER=$1
OS_TARGET=$2
DF_VERSION=$3
set -e
df_tardest="df.tar.bz2"
save_tardest="test_save.tgz"
cd "$(dirname "$0")"
echo "DF_VERSION: $DF_VERSION"
echo "DF_FOLDER: $DF_FOLDER"
mkdir -p "$DF_FOLDER"
# back out of df_linux
cd "$DF_FOLDER/.."
if ! test -f "$df_tardest"; then
minor=$(echo "$DF_VERSION" | cut -d. -f2)
patch=$(echo "$DF_VERSION" | cut -d. -f3)
echo "Downloading DF $DF_VERSION"
while read url; do
echo "Attempting download: ${url}"
if wget -v "$url" -O "$df_tardest"; then
break
fi
done <<URLS
https://www.bay12games.com/dwarves/df_${minor}_${patch}_linux.tar.bz2
https://files.dfhack.org/DF/${minor}.${patch}/df_${minor}_${patch}_linux.tar.bz2
URLS
echo $df_tardest
if ! test -f "$df_tardest"; then
echo "DF failed to download: $df_tardest not found"
exit 1
fi
echo "Downloading test save"
#test_save_url="https://files.dfhack.org/DF/0.${minor}.${patch}/test_save.tgz"
test_save_url="https://drive.google.com/uc?export=download&id=1XvYngl-DFONiZ9SD9OC4B2Ooecu8rPFz"
if ! wget -v "$test_save_url" -O "$save_tardest"; then
echo "failed to download test save"
exit 1
fi
echo $save_tardest
minor=$(echo "$DF_VERSION" | cut -d. -f1)
patch=$(echo "$DF_VERSION" | cut -d. -f2)
df_url="https://www.bay12games.com/dwarves/df_${minor}_${patch}"
if test "$OS_TARGET" = "windows"; then
WGET="C:/msys64/usr/bin/wget.exe"
df_url="${df_url}_win_s.zip"
df_archive_name="df.zip"
df_extract_cmd="unzip -d ${DF_FOLDER}"
elif test "$OS_TARGET" = "ubuntu"; then
WGET=wget
df_url="${df_url}_linux.tar.bz2"
df_archive_name="df.tar.bz2"
df_extract_cmd="tar -x -j --strip-components=1 -f"
else
echo "Unhandled OS target: ${OS_TARGET}"
exit 1
fi
if ! $WGET -v "$df_url" -O "$df_archive_name"; then
echo "Failed to download DF from $df_url"
exit 1
fi
rm -rf df_linux
mkdir -p df_linux/save
save_url="https://dffd.bay12games.com/download.php?id=15434&f=dreamfort.7z"
save_archive_name="test_save.7z"
save_extract_cmd="7z x -oDF/save"
if ! $WGET -v "$save_url" -O "$save_archive_name"; then
echo "Failed to download test save from $save_url"
exit 1
fi
echo Extracting
tar xf "$df_tardest" --strip-components=1 -C df_linux
tar xf "$save_tardest" -C df_linux/save
$df_extract_cmd "$df_archive_name"
$save_extract_cmd "$save_archive_name"
mv DF/save/* DF/save/region1
echo Done
ls -l
md5sum "$df_archive_name" "$save_archive_name"

@ -55,7 +55,13 @@ if os.path.exists(test_status_file):
os.remove(test_status_file)
print('Backing up init.txt to init.txt.orig')
init_txt_path = 'data/init/init.txt'
default_init_txt_path = 'data/init/init_default.txt'
prefs_path = 'prefs'
init_txt_path = 'prefs/init.txt'
if not os.path.exists(init_txt_path):
os.makedirs(prefs_path, exist_ok=True)
shutil.copyfile(default_init_txt_path, init_txt_path)
shutil.copyfile(init_txt_path, init_txt_path + '.orig')
with open(init_txt_path) as f:
init_contents = f.read()
@ -72,11 +78,7 @@ init_path = 'dfhack-config/init'
if not os.path.isdir('hack/init'):
# we're on an old branch that still reads init files from the root dir
init_path = '.'
try:
os.mkdir(init_path)
except OSError as error:
# ignore already exists errors
pass
os.makedirs(init_path, exist_ok=True)
test_init_file = os.path.join(init_path, 'dfhackzzz_test.init') # Core sorts these alphabetically
with open(test_init_file, 'w') as f:
f.write('''

@ -1,11 +1,12 @@
-- DFHack developer test harness
--@ module = true
local expect = require 'test_util.expect'
local json = require 'json'
local mock = require 'test_util.mock'
local script = require 'gui.script'
local utils = require 'utils'
local expect = require('test_util.expect')
local helpdb = require('helpdb')
local json = require('json')
local mock = require('test_util.mock')
local script = require('gui.script')
local utils = require('utils')
local help_text =
[====[
@ -13,49 +14,59 @@ local help_text =
test
====
Run DFHack tests.
Tags: dev
Usage:
Command: "test"
Run DFHack regression tests.
Discover DFHack functionality that has broken due to recent changes in DF or DFHack.
Usage
-----
test [<options>] [<done_command>]
If a done_command is specified, it will be run after the tests complete.
Options:
-h, --help display this help message and exit.
-d, --test_dir specifies which directory to look in for tests. defaults to
the "hack/scripts/test" folder in your DF installation.
-m, --modes only run tests in the given comma separated list of modes.
see the next section for a list of valid modes. if not
specified, the tests are not filtered by modes.
-r, --resume skip tests that have already been run. remove the
test_status.json file to reset the record.
-s, --save_dir the save folder to load for "fortress" mode tests. this
save is only loaded if a fort is not already loaded when
a "fortress" mode test is run. if not specified, defaults to
'region1'.
-t, --tests only run tests that match one of the comma separated list of
patterns. if not specified, no tests are filtered.
Modes:
none the test can be run on any screen
title the test must be run on the DF title screen. note that if the game
has a map loaded, "title" mode tests cannot be run
fortress the test must be run while a map is loaded. if the game is
currently on the title screen, the save specified by the save_dir
parameter will be loaded.
Examples:
test runs all tests
test -r runs all tests that haven't been run before
test -m none runs tests that don't need the game to be in a
specific mode
test -t quickfort runs quickfort tests
test -d /path/to/dfhack-scripts/repo/test
runs tests in your dev scripts repo
Options
-------
-d, --test_dir specifies which directory to look in for tests. defaults to
the "hack/scripts/test" folder in your DF installation.
-m, --modes only run tests in the given comma separated list of modes.
see the next section for a list of valid modes. if not
specified, the tests are not filtered by modes.
-r, --resume skip tests that have already been run. remove the
test_status.json file to reset the record.
-s, --save_dir the save folder to load for "fortress" mode tests. this
save is only loaded if a fort is not already loaded when
a "fortress" mode test is run. if not specified, defaults to
'region1'.
-t, --tests only run tests that match one of the comma separated list of
patterns. if not specified, no tests are filtered and all tests
are run.
Modes
-----
none the test can be run on any screen
title the test must be run on the DF title screen. note that if the game
has a map loaded, "title" mode tests cannot be run
fortress the test must be run while a map is loaded. if the game is
currently on the title screen, the save specified by the save_dir
parameter will be loaded.
Examples
--------
test runs all tests
test -r runs all tests that haven't been run before
test -m none runs tests that don't need the game to be in a
specific mode
test -t quickfort runs quickfort tests
test -d /path/to/dfhack-scripts/repo/test
runs tests in your dev scripts repo
Default values for the options may be set in a file named test_config.json in
your DF folder. Options with comma-separated values should be written as json
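For reference, a minimal sketch of what such a test_config.json could contain, assuming the keys mirror the long option names listed under Options above (that mapping is an assumption, not spelled out in this diff) and that comma-separated values become json arrays:
{
    "_comment": "hypothetical example; key names are assumed, not confirmed by this diff",
    "test_dir": "/path/to/dfhack-scripts/repo/test",
    "modes": ["none", "title"],
    "tests": ["quickfort", "gui"],
    "save_dir": "region1"
}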
@ -352,33 +363,46 @@ local function load_tests(file, tests)
if not code then
dfhack.printerr('Failed to load file: ' .. tostring(err))
return false
else
dfhack.internal.IN_TEST = true
local ok, err = dfhack.pcall(code)
dfhack.internal.IN_TEST = false
if not ok then
dfhack.printerr('Error when running file: ' .. tostring(err))
return false
else
if not MODES[env.config.mode] then
dfhack.printerr('Invalid config.mode: ' .. tostring(env.config.mode))
return false
end
for name, test_func in pairs(env.test) do
if env.config.wrapper then
local fn = test_func
test_func = function() env.config.wrapper(fn) end
end
local test_data = {
full_name = short_filename .. ':' .. name,
func = test_func,
private = env_private,
config = env.config,
}
test_data.name = test_data.full_name:gsub('test/', ''):gsub('.lua', '')
table.insert(tests, test_data)
end
end
dfhack.internal.IN_TEST = true
local ok, err = dfhack.pcall(code)
dfhack.internal.IN_TEST = false
if not ok then
dfhack.printerr('Error when running file: ' .. tostring(err))
return false
end
if not MODES[env.config.mode] then
dfhack.printerr('Invalid config.mode: ' .. tostring(env.config.mode))
return false
end
if not env.config.targets then
dfhack.printerr('Skipping tests for unspecified target in ' .. file)
return true -- TODO: change to false once existing tests have targets specified
end
local targets = type(env.config.targets) == 'table' and env.config.targets or {env.config.targets}
for _,target in ipairs(targets) do
if target == 'core' then goto continue end
if type(target) ~= 'string' or not helpdb.is_entry(target) or
helpdb.get_entry_tags(target).unavailable
then
dfhack.printerr('Skipping tests for unavailable target: ' .. target)
return true
end
::continue::
end
for name, test_func in pairs(env.test) do
if env.config.wrapper then
local fn = test_func
test_func = function() env.config.wrapper(fn) end
end
local test_data = {
full_name = short_filename .. ':' .. name,
func = test_func,
private = env_private,
config = env.config,
}
test_data.name = test_data.full_name:gsub('test/', ''):gsub('.lua', '')
table.insert(tests, test_data)
end
return true
end
@ -575,7 +599,7 @@ local function dump_df_state()
enabler = {
fps = df.global.enabler.fps,
gfps = df.global.enabler.gfps,
fullscreen = df.global.enabler.fullscreen,
fullscreen_state = df.global.enabler.fullscreen_state.whole,
},
gps = {
dimx = df.global.gps.dimx,

@ -169,7 +169,7 @@ if(BUILD_SUPPORTED)
#dfhack_plugin(trackstop trackstop.cpp)
#dfhack_plugin(tubefill tubefill.cpp)
#add_subdirectory(tweak)
#dfhack_plugin(workflow workflow.cpp LINK_LIBRARIES lua)
dfhack_plugin(workflow workflow.cpp LINK_LIBRARIES lua)
dfhack_plugin(work-now work-now.cpp)
dfhack_plugin(xlsxreader xlsxreader.cpp LINK_LIBRARIES lua xlsxio_read_STATIC zip expat)
dfhack_plugin(zone zone.cpp LINK_LIBRARIES lua)

@ -5,7 +5,7 @@ package isoworldremote;
option optimize_for = LITE_RUNTIME;
// Plugin: isoworldremote
// DISABLED Plugin: isoworldremote
enum BasicMaterial {
AIR = 0;
@ -54,7 +54,7 @@ message EmbarkTile {
optional bool is_valid = 7;
}
// RPC GetEmbarkTile : TileRequest -> EmbarkTile
// DISABLED RPC GetEmbarkTile : TileRequest -> EmbarkTile
message TileRequest {
optional int32 want_x = 1;
optional int32 want_y = 2;
@ -64,7 +64,7 @@ message MapRequest {
optional string save_folder = 1;
}
// RPC GetEmbarkInfo : MapRequest -> MapReply
// DISABLED RPC GetEmbarkInfo : MapRequest -> MapReply
message MapReply {
required bool available = 1;
optional int32 region_x = 2;
@ -75,7 +75,7 @@ message MapReply {
optional int32 current_season = 7;
}
// RPC GetRawNames : MapRequest -> RawNames
// DISABLED RPC GetRawNames : MapRequest -> RawNames
message RawNames {
required bool available = 1;
repeated string inorganic = 2;

@ -4,9 +4,9 @@ package dfproto;
option optimize_for = LITE_RUNTIME;
// Plugin: rename
// DISABLED Plugin: rename
// RPC RenameSquad : RenameSquadIn -> EmptyMessage
// DISABLED RPC RenameSquad : RenameSquadIn -> EmptyMessage
message RenameSquadIn {
required int32 squad_id = 1;
@ -14,7 +14,7 @@ message RenameSquadIn {
optional string alias = 3;
}
// RPC RenameUnit : RenameUnitIn -> EmptyMessage
// DISABLED RPC RenameUnit : RenameUnitIn -> EmptyMessage
message RenameUnitIn {
required int32 unit_id = 1;
@ -22,7 +22,7 @@ message RenameUnitIn {
optional string profession = 3;
}
// RPC RenameBuilding : RenameBuildingIn -> EmptyMessage
// DISABLED RPC RenameBuilding : RenameBuildingIn -> EmptyMessage
message RenameBuildingIn {
required int32 building_id = 1;

@ -1,3 +1,5 @@
config.targets = 'core'
local function clean_path(p)
-- todo: replace with dfhack.filesystem call?
return p:gsub('\\', '/'):gsub('//', '/'):gsub('/$', '')

@ -1,3 +1,5 @@
config.targets = 'core'
function test.toSearchNormalized()
expect.eq(dfhack.toSearchNormalized(''), '')
expect.eq(dfhack.toSearchNormalized('abcd'), 'abcd')

@ -1,3 +1,5 @@
config.targets = 'core'
function test.internal_in_test()
expect.true_(dfhack.internal.IN_TEST)
end