Merge remote-tracking branch 'lethosor/ci-scripts-cleanup' into develop
commit fbe0319515
@@ -0,0 +1,70 @@
#!/usr/bin/env python3
""" Overly-complicated script to check formatting/sorting in Authors.rst """

import os, re, sys

def main():
    success = [True]
    def error(line, msg, **kwargs):
        info = ''
        for k in kwargs:
            info += ' %s %s:' % (k, kwargs[k])
        print('line %i:%s %s' % (line, info, msg))
        if os.environ.get('GITHUB_ACTIONS'):
            print('::error file=docs/Authors.rst,line=%i::%s %s' % (line, info.lstrip(), msg))
        success[0] = False
    with open('docs/Authors.rst', 'rb') as f:
        lines = list(map(lambda line: line.decode('utf8').replace('\n', ''), f.readlines()))

    if lines[1].startswith('='):
        if len(lines[0]) != len(lines[1]):
            error(2, 'Length of header does not match underline')
        if lines[1].replace('=', ''):
            error(2, 'Invalid header')

    first_div_index = list(filter(lambda pair: pair[1].startswith('==='), enumerate(lines[2:])))[0][0] + 2
    first_div = lines[first_div_index]
    div_indices = []
    for i, line in enumerate(lines[first_div_index:]):
        line_number = i + first_div_index + 1
        if '\t' in line:
            error(line_number, 'contains tabs')
        if line.startswith('==='):
            div_indices.append(i + first_div_index)
            if not re.match(r'^=+( =+)+$', line):
                error(line_number, 'bad table divider')
            if line != lines[first_div_index]:
                error(line_number, 'malformed table divider')
    if len(div_indices) < 3:
        error(len(lines), 'missing table divider(s)')
    for i in div_indices[3:]:
        error(i + 1, 'extra table divider')

    col_ranges = []
    i = 0
    while True:
        j = first_div.find(' ', i)
        col_ranges.append(slice(i, j if j > 0 else None))
        if j == -1:
            break
        i = j + 1

    for i, line in enumerate(lines[div_indices[1] + 1:div_indices[2]]):
        line_number = i + div_indices[1] + 2
        for c, col in enumerate(col_ranges):
            cell = line[col]
            if cell.startswith(' '):
                error(line_number, 'text does not start in correct location', column=c+1)
            # check for text extending into next column if this isn't the last column
            if col.stop is not None and col.stop < len(line) and line[col.stop] != ' ':
                error(line_number, 'text extends into next column', column=c+1)
        if i > 0:
            prev_line = lines[div_indices[1] + i]
            if line.lower()[col_ranges[0]] < prev_line.lower()[col_ranges[0]]:
                error(line_number, 'not sorted: should come before line %i ("%s" before "%s")' %
                      (line_number - 1, line[col_ranges[0]].rstrip(' '), prev_line[col_ranges[0]].rstrip(' ')))

    return success[0]

if __name__ == '__main__':
    sys.exit(int(not main()))
@@ -0,0 +1,110 @@
#!/usr/bin/env python3
import glob
import sys

actual = {'': {}}

with open(sys.argv[1]) as f:
    plugin_name = ''
    for line in f:
        line = line.rstrip()
        if line.startswith('// Plugin: '):
            plugin_name = line.split(' ')[2]
            if plugin_name not in actual:
                actual[plugin_name] = {}
        elif line.startswith('// RPC '):
            parts = line.split(' ')
            actual[plugin_name][parts[2]] = (parts[4], parts[6])

expected = {'': {}}

for p in glob.iglob('library/proto/*.proto'):
    with open(p) as f:
        for line in f:
            line = line.rstrip()
            if line.startswith('// RPC '):
                parts = line.split(' ')
                expected[''][parts[2]] = (parts[4], parts[6])

for p in glob.iglob('plugins/proto/*.proto'):
    plugin_name = ''
    with open(p) as f:
        for line in f:
            line = line.rstrip()
            if line.startswith('// Plugin: '):
                plugin_name = line.split(' ')[2]
                if plugin_name not in expected:
                    expected[plugin_name] = {}
                break

    if plugin_name == '':
        continue

    with open(p) as f:
        for line in f:
            line = line.rstrip()
            if line.startswith('// RPC '):
                parts = line.split(' ')
                expected[plugin_name][parts[2]] = (parts[4], parts[6])

error_count = 0

for plugin_name in actual:
    methods = actual[plugin_name]

    if plugin_name not in expected:
        print('Missing documentation for plugin proto files: ' + plugin_name)
        print('Add the following lines:')
        print('// Plugin: ' + plugin_name)
        error_count += 1
        for m in methods:
            io = methods[m]
            print('// RPC ' + m + ' : ' + io[0] + ' -> ' + io[1])
            error_count += 1
    else:
        missing = []
        wrong = []
        for m in methods:
            io = methods[m]
            if m in expected[plugin_name]:
                if expected[plugin_name][m] != io:
                    wrong.append('// RPC ' + m + ' : ' + io[0] + ' -> ' + io[1])
            else:
                missing.append('// RPC ' + m + ' : ' + io[0] + ' -> ' + io[1])

        if len(missing) > 0:
            print('Incomplete documentation for ' + ('core' if plugin_name == '' else 'plugin "' + plugin_name + '"') + ' proto files. Add the following lines:')
            for m in missing:
                print(m)
            error_count += 1

        if len(wrong) > 0:
            print('Incorrect documentation for ' + ('core' if plugin_name == '' else 'plugin "' + plugin_name + '"') + ' proto files. Replace the following comments:')
            for m in wrong:
                print(m)
            error_count += 1

for plugin_name in expected:
    methods = expected[plugin_name]

    if plugin_name not in actual:
        print('Incorrect documentation for plugin proto files: ' + plugin_name)
        print('The following methods are documented, but the plugin does not provide any RPC methods:')
        for m in methods:
            io = methods[m]
            print('// RPC ' + m + ' : ' + io[0] + ' -> ' + io[1])
        error_count += 1
    else:
        missing = []
        for m in methods:
            io = methods[m]
            if m not in actual[plugin_name]:
                missing.append('// RPC ' + m + ' : ' + io[0] + ' -> ' + io[1])

        if len(missing) > 0:
            print('Incorrect documentation for ' + ('core' if plugin_name == '' else 'plugin "' + plugin_name + '"') + ' proto files. Remove the following lines:')
            for m in missing:
                print(m)
            error_count += 1

sys.exit(min(100, error_count))
@@ -0,0 +1,56 @@
#!/bin/sh

set -e

tardest="df.tar.bz2"

selfmd5=$(openssl md5 < "$0")
echo $selfmd5

cd "$(dirname "$0")"
echo "DF_VERSION: $DF_VERSION"
echo "DF_FOLDER: $DF_FOLDER"
mkdir -p "$DF_FOLDER"
# back out of df_linux
cd "$DF_FOLDER/.."

if [ -f receipt ]; then
    if [ "$selfmd5" != "$(cat receipt)" ]; then
        echo "download-df.sh changed; removing DF"
        rm receipt
    else
        echo "Already downloaded $DF_VERSION"
    fi
fi

if [ ! -f receipt ]; then
    rm -f "$tardest"
    minor=$(echo "$DF_VERSION" | cut -d. -f2)
    patch=$(echo "$DF_VERSION" | cut -d. -f3)
    url="http://www.bay12games.com/dwarves/df_${minor}_${patch}_linux.tar.bz2"
    echo Downloading
    while read url; do
        echo "Attempting download: ${url}"
        if wget -v "$url" -O "$tardest"; then
            break
        fi
    done <<URLS
https://www.bay12games.com/dwarves/df_${minor}_${patch}_linux.tar.bz2
https://files.dfhack.org/DF/0.${minor}.${patch}/df_${minor}_${patch}_linux.tar.bz2
URLS
    echo $tardest
    if ! test -f "$tardest"; then
        echo "DF failed to download: $tardest not found"
        exit 1
    fi
fi

rm -rf df_linux
mkdir df_linux

echo Extracting
tar xf "$tardest" --strip-components=1 -C df_linux
echo Done

echo "$selfmd5" > receipt
ls
@@ -0,0 +1,4 @@
#!/bin/sh
cd "$(dirname "$0")"
cd ..
grep -i 'set(DF_VERSION' CMakeLists.txt | perl -ne 'print "$&\n" if /[\d\.]+/'
@@ -0,0 +1,32 @@
# Files that lint.py should check

*.bash
*.bat
*.c
*.cc
*.cmake
*.cpp
*.css
*.gitignore
*.h
*.hh
*.hpp
*.in
*.inc
*.init
*.init-example
*.js
*.lua
*.manifest
*.md
*.mm
*.pl
*.proto
*.py
*.rb
*.rst
*.sh
*.txt
*.vbs
*.yaml
*.yml
@@ -0,0 +1,22 @@
# Files that lint.py should ignore

.git/*

# Old files exempt from checks for now
plugins/isoworld/*.txt
plugins/raw/*.txt
plugins/stonesense/*.txt

# Generated files
*.pb.h
build*/*
docs/_*
docs/html/*
docs/pdf/*
library/include/df/*

# Dependencies that we don't control
depends/*
plugins/isoworld/agui/*
plugins/isoworld/allegro/*
plugins/stonesense/allegro/*
@@ -0,0 +1,197 @@
#!/usr/bin/env python3
import argparse
import fnmatch
import re
import os
import subprocess
import sys

DFHACK_ROOT = os.path.normpath(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

def load_pattern_files(paths):
    patterns = []
    for p in paths:
        with open(p) as f:
            for line in f.readlines():
                line = line.strip()
                if line and not line.startswith('#'):
                    patterns.append(line)
    return patterns

def valid_file(rel_path, check_patterns, ignore_patterns):
    return (
        any(fnmatch.fnmatch(rel_path, pattern) for pattern in check_patterns)
        and not any(fnmatch.fnmatch(rel_path, pattern) for pattern in ignore_patterns)
    )

success = True
def error(msg=None):
    global success
    success = False
    if msg:
        sys.stderr.write(msg + '\n')

def format_lines(lines, total):
    if len(lines) == total - 1:
        return 'entire file'
    if not len(lines):
        # should never happen
        return 'nowhere'
    if len(lines) == 1:
        return 'line %i' % lines[0]
    s = 'lines '
    range_start = range_end = lines[0]
    for i, line in enumerate(lines):
        if line > range_end + 1:
            if range_start == range_end:
                s += ('%i, ' % range_end)
            else:
                s += ('%i-%i, ' % (range_start, range_end))
            range_start = range_end = line
            if i == len(lines) - 1:
                s += ('%i' % line)
        else:
            range_end = line
            if i == len(lines) - 1:
                s += ('%i-%i, ' % (range_start, range_end))
    return s.rstrip(' ').rstrip(',')

class LinterError(Exception):
    def __init__(self, message, lines, total_lines):
        self.message = message
        self.lines = lines
        self.total_lines = total_lines

    def __str__(self):
        return '%s: %s' % (self.message, format_lines(self.lines, self.total_lines))

    def github_actions_workflow_command(self, filename):
        first_line = self.lines[0] if self.lines else 1
        return '::error file=%s,line=%i::%s' % (filename, first_line, self)

class Linter(object):
    ignore = False
    def check(self, lines):
        failures = []
        for i, line in enumerate(lines):
            if not self.check_line(line):
                failures.append(i + 1)
        if len(failures):
            raise LinterError(self.msg, failures, len(lines))

    def fix(self, lines):
        for i in range(len(lines)):
            lines[i] = self.fix_line(lines[i])


class NewlineLinter(Linter):
    msg = 'Contains DOS-style newlines'
    # git supports newline conversion. Catch in CI, ignore on Windows.
    ignore = os.linesep != '\n' and not os.environ.get('CI')
    def check_line(self, line):
        return '\r' not in line
    def fix_line(self, line):
        return line.replace('\r', '')

class TrailingWhitespaceLinter(Linter):
    msg = 'Contains trailing whitespace'
    def check_line(self, line):
        line = line.replace('\r', '').replace('\n', '')
        return not line.strip() or line == line.rstrip('\t ')
    def fix_line(self, line):
        return line.rstrip('\t ')

class TabLinter(Linter):
    msg = 'Contains tabs'
    def check_line(self, line):
        return '\t' not in line
    def fix_line(self, line):
        return line.replace('\t', ' ')

linters = [cls() for cls in Linter.__subclasses__() if not cls.ignore]

def walk_all(root_path):
    for cur, dirnames, filenames in os.walk(root_path):
        for filename in filenames:
            full_path = os.path.join(cur, filename)
            yield full_path

def walk_git_files(root_path):
    p = subprocess.Popen(['git', '-C', root_path, 'ls-files', root_path], stdout=subprocess.PIPE)
    for line in p.stdout.readlines():
        path = line.decode('utf-8').strip()
        full_path = os.path.join(root_path, path)
        yield full_path
    if p.wait() != 0:
        raise RuntimeError('git exited with %r' % p.returncode)

def main(args):
    root_path = os.path.abspath(args.path)
    if not os.path.exists(args.path):
        print('Nonexistent path: %s' % root_path)
        sys.exit(2)

    check_patterns = load_pattern_files(args.check_patterns)
    ignore_patterns = load_pattern_files(args.ignore_patterns)

    walk_iter = walk_all
    if args.git_only:
        walk_iter = walk_git_files

    for full_path in walk_iter(root_path):
        rel_path = full_path.replace(root_path, '').replace('\\', '/').lstrip('/')
        if not valid_file(rel_path, check_patterns, ignore_patterns):
            continue
        if args.verbose:
            print('Checking:', rel_path)
        lines = []
        with open(full_path, 'rb') as f:
            lines = f.read().split(b'\n')
        for i, line in enumerate(lines):
            try:
                lines[i] = line.decode('utf-8')
            except UnicodeDecodeError:
                msg_params = (rel_path, i + 1, 'Invalid UTF-8 (other errors will be ignored)')
                error('%s:%i: %s' % msg_params)
                if args.github_actions:
                    print('::error file=%s,line=%i::%s' % msg_params)
                lines[i] = ''
        for linter in linters:
            try:
                linter.check(lines)
            except LinterError as e:
                error('%s: %s' % (rel_path, e))
                if args.github_actions:
                    print(e.github_actions_workflow_command(rel_path))
                if args.fix:
                    linter.fix(lines)
                    contents = '\n'.join(lines)
                    with open(full_path, 'wb') as f:
                        f.write(contents.encode('utf-8'))

    if success:
        print('All linters completed successfully')
        sys.exit(0)
    else:
        sys.exit(1)

if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('path', nargs='?', default='.',
        help='Path to scan (default: current directory)')
    parser.add_argument('--fix', action='store_true',
        help='Attempt to modify files in-place to fix identified issues')
    parser.add_argument('--git-only', action='store_true',
        help='Only check files tracked by git')
    parser.add_argument('--github-actions', action='store_true',
        help='Enable GitHub Actions workflow command output')
    parser.add_argument('-v', '--verbose', action='store_true',
        help='Log files as they are checked')
    parser.add_argument('--check-patterns', action='append',
        default=[os.path.join(DFHACK_ROOT, 'ci', 'lint-check.txt')],
        help='File(s) containing filename patterns to check')
    parser.add_argument('--ignore-patterns', action='append',
        default=[os.path.join(DFHACK_ROOT, 'ci', 'lint-ignore.txt')],
        help='File(s) containing filename patterns to ignore')
    args = parser.parse_args()
    main(args)
@@ -0,0 +1,127 @@
#!/usr/bin/env python3
import argparse
import enum
import json
import os
import re
import shutil
import subprocess
import sys

parser = argparse.ArgumentParser()
parser.add_argument('df_folder', help='DF base folder')
parser.add_argument('--headless', action='store_true',
    help='Run without opening DF window (requires non-Windows)')
parser.add_argument('--keep-status', action='store_true',
    help='Do not delete final status file')
parser.add_argument('--no-quit', action='store_true',
    help='Do not quit DF when done')
parser.add_argument('--test-dir', '--test-folder',
    help='Base test folder (default: df_folder/test)')
parser.add_argument('-t', '--test', dest='tests', nargs='+',
    help='Test(s) to run (Lua patterns accepted)')
args = parser.parse_args()

if (not sys.stdin.isatty() or not sys.stdout.isatty() or not sys.stderr.isatty()) and not args.headless:
    print('WARN: no TTY detected, enabling headless mode')
    args.headless = True

if args.test_dir is not None:
    args.test_dir = os.path.normpath(os.path.join(os.getcwd(), args.test_dir))
    if not os.path.isdir(args.test_dir):
        print('ERROR: invalid test folder: %r' % args.test_dir)

MAX_TRIES = 5

dfhack = 'Dwarf Fortress.exe' if sys.platform == 'win32' else './dfhack'
test_status_file = 'test_status.json'

class TestStatus(enum.Enum):
    PENDING = 'pending'
    PASSED = 'passed'
    FAILED = 'failed'

def get_test_status():
    if os.path.isfile(test_status_file):
        with open(test_status_file) as f:
            return {k: TestStatus(v) for k, v in json.load(f).items()}

def change_setting(content, setting, value):
    return '[' + setting + ':' + value + ']\n' + re.sub(
        r'\[' + setting + r':.+?\]', '(overridden)', content, flags=re.IGNORECASE)

os.chdir(args.df_folder)
if os.path.exists(test_status_file):
    os.remove(test_status_file)

print('Backing up init.txt to init.txt.orig')
init_txt_path = 'data/init/init.txt'
shutil.copyfile(init_txt_path, init_txt_path + '.orig')
with open(init_txt_path) as f:
    init_contents = f.read()
init_contents = change_setting(init_contents, 'INTRO', 'NO')
init_contents = change_setting(init_contents, 'SOUND', 'NO')
init_contents = change_setting(init_contents, 'WINDOWED', 'YES')
init_contents = change_setting(init_contents, 'WINDOWEDX', '80')
init_contents = change_setting(init_contents, 'WINDOWEDY', '25')
init_contents = change_setting(init_contents, 'FPS', 'YES')
if args.headless:
    init_contents = change_setting(init_contents, 'PRINT_MODE', 'TEXT')

test_init_file = 'dfhackzzz_test.init'  # Core sorts these alphabetically
with open(test_init_file, 'w') as f:
    f.write('''
devel/dump-rpc dfhack-rpc.txt
:lua dfhack.internal.addScriptPath(dfhack.getHackPath())
test --resume --modes=none,title "lua scr.breakdown_level=df.interface_breakdown_types.%s"
''' % ('NONE' if args.no_quit else 'QUIT'))

test_config_file = 'test_config.json'
with open(test_config_file, 'w') as f:
    json.dump({
        'test_dir': args.test_dir,
        'tests': args.tests,
    }, f)

try:
    with open(init_txt_path, 'w') as f:
        f.write(init_contents)

    tries = 0
    while True:
        status = get_test_status()
        if status is not None:
            if all(s != TestStatus.PENDING for s in status.values()):
                print('Done!')
                sys.exit(int(any(s != TestStatus.PASSED for s in status.values())))
        elif tries > 0:
            print('ERROR: Could not read status file')
            sys.exit(2)

        tries += 1
        print('Starting DF: #%i' % (tries))
        if tries > MAX_TRIES:
            print('ERROR: Too many tries - aborting')
            sys.exit(1)

        if args.headless:
            os.environ['DFHACK_HEADLESS'] = '1'
            os.environ['DFHACK_DISABLE_CONSOLE'] = '1'

        process = subprocess.Popen([dfhack],
            stdin=subprocess.PIPE if args.headless else sys.stdin,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE)
        _, err = process.communicate()
        if err:
            print('WARN: DF produced stderr: ' + repr(err[:5000]))
        if process.returncode != 0:
            print('ERROR: DF exited with ' + repr(process.returncode))
finally:
    print('\nRestoring original init.txt')
    shutil.copyfile(init_txt_path + '.orig', init_txt_path)
    if os.path.isfile(test_init_file):
        os.remove(test_init_file)
    if not args.keep_status and os.path.isfile(test_status_file):
        os.remove(test_status_file)
    print('Cleanup done')
@@ -0,0 +1,94 @@
#!/usr/bin/env python3
import os
from os.path import basename, dirname, join, splitext
import sys

SCRIPT_PATH = sys.argv[1] if len(sys.argv) > 1 else 'scripts'
IS_GITHUB_ACTIONS = bool(os.environ.get('GITHUB_ACTIONS'))

def expected_cmd(path):
    """Get the command from the name of a script."""
    dname, fname = basename(dirname(path)), splitext(basename(path))[0]
    if dname in ('devel', 'fix', 'gui', 'modtools'):
        return dname + '/' + fname
    return fname


def check_ls(fname, line):
    """Check length & existence of leading comment for "ls" builtin command."""
    line = line.strip()
    comment = '--' if fname.endswith('.lua') else '#'
    if '[====[' in line or not line.startswith(comment):
        print_error('missing leading comment (requred for `ls`)', fname)
        return 1
    return 0


def print_error(message, filename, line=None):
    if not isinstance(line, int):
        line = 1
    print('Error: %s:%i: %s' % (filename, line, message))
    if IS_GITHUB_ACTIONS:
        print('::error file=%s,line=%i::%s' % (filename, line, message))


def check_file(fname):
    errors, doclines = 0, []
    tok1, tok2 = ('=begin', '=end') if fname.endswith('.rb') else \
        ('[====[', ']====]')
    doc_start_line = None
    with open(fname, errors='ignore') as f:
        lines = f.readlines()
        if not lines:
            print_error('empty file', fname)
            return 1
        errors += check_ls(fname, lines[0])
        for i, l in enumerate(lines):
            if doclines or l.strip().endswith(tok1):
                if not doclines:
                    doc_start_line = i + 1
                doclines.append(l.rstrip())
                if l.startswith(tok2):
                    break
        else:
            if doclines:
                print_error('docs start but do not end', fname, doc_start_line)
            else:
                print_error('no documentation found', fname)
            return 1

    if not doclines:
        print_error('missing or malformed documentation', fname)
        return 1

    title, underline = [d for d in doclines
                        if d and '=begin' not in d and '[====[' not in d][:2]
    title_line = doc_start_line + doclines.index(title)
    expected_underline = '=' * len(title)
    if underline != expected_underline:
        print_error('title/underline mismatch: expected {!r}, got {!r}'.format(
            expected_underline, underline),
            fname, title_line + 1)
        errors += 1
    if title != expected_cmd(fname):
        print_error('expected script title {!r}, got {!r}'.format(
            expected_cmd(fname), title),
            fname, title_line)
        errors += 1
    return errors


def main():
    """Check that all DFHack scripts include documentation"""
    err = 0
    exclude = set(['internal', 'test'])
    for root, dirs, files in os.walk(SCRIPT_PATH, topdown=True):
        dirs[:] = [d for d in dirs if d not in exclude]
        for f in files:
            if f[-3:] in {'.rb', 'lua'}:
                err += check_file(join(root, f))
    return err


if __name__ == '__main__':
    sys.exit(min(100, main()))
@@ -0,0 +1,64 @@
#!/usr/bin/env python3
import argparse
import os
import subprocess
import sys


def print_stderr(stderr, args):
    if not args.github_actions:
        sys.stderr.write(stderr + '\n')
        return

    for line in stderr.split('\n'):
        print(line)
        parts = list(map(str.strip, line.split(':')))
        # e.g. luac prints "luac:" in front of messages, so find the first part
        # containing the actual filename
        for i in range(len(parts) - 1):
            if parts[i].endswith('.' + args.ext) and parts[i + 1].isdigit():
                print('::error file=%s,line=%s::%s' % (parts[i], parts[i + 1], ':'.join(parts[i + 2:])))
                break


def main(args):
    root_path = os.path.abspath(args.path)
    cmd = args.cmd.split(' ')
    if not os.path.exists(root_path):
        print('Nonexistent path: %s' % root_path)
        sys.exit(2)
    err = False
    for cur, dirnames, filenames in os.walk(root_path):
        parts = cur.replace('\\', '/').split('/')
        if '.git' in parts or 'depends' in parts:
            continue
        for filename in filenames:
            if not filename.endswith('.' + args.ext):
                continue
            full_path = os.path.join(cur, filename)
            try:
                p = subprocess.Popen(cmd + [full_path], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                _, stderr = p.communicate()
                stderr = stderr.decode('utf-8', errors='ignore')
                if stderr:
                    print_stderr(stderr, args)
                if p.returncode != 0:
                    err = True
            except subprocess.CalledProcessError:
                err = True
            except IOError:
                if not err:
                    print('Warning: cannot check %s script syntax' % args.ext)
                err = True
    sys.exit(int(err))


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--path', default='.', help='Root directory')
    parser.add_argument('--ext', help='Script extension', required=True)
    parser.add_argument('--cmd', help='Command', required=True)
    parser.add_argument('--github-actions', action='store_true',
        help='Enable GitHub Actions workflow command output')
    args = parser.parse_args()
    main(args)
@@ -1 +1 @@
Subproject commit d81cb598e7aa179bc85440d3ba06ef6ec119815f
Subproject commit 296ba91c2d7e3b011895df17caa032e90a27e186
@@ -1,42 +0,0 @@
#!/usr/bin/env python3
import argparse, os, sys, time

parser = argparse.ArgumentParser()
parser.add_argument('-n', '--dry-run', action='store_true', help='Display commands without running them')
args = parser.parse_args()

red = '\x1b[31m\x1b[1m'
green = '\x1b[32m\x1b[1m'
reset = '\x1b(B\x1b[m'
if os.environ.get('TRAVIS', '') == 'true':
    print('This script cannot be used in a travis build')
    sys.exit(1)
os.chdir(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
commands = []
with open('.travis.yml') as f:
    lines = list(f.readlines())
    script_found = False
    for line in lines:
        if line.startswith('script:'):
            script_found = True
        elif script_found:
            if line.startswith('- '):
                if line.startswith('- python '):
                    commands.append(line[2:].rstrip('\r\n'))
            else:
                break
ret = 0
for cmd in commands:
    print('$ %s' % cmd)
    if args.dry_run:
        continue
    start = time.time()
    code = os.system(cmd)
    end = time.time()
    if code != 0:
        ret = 1
    print('\n%sThe command "%s" exited with %i.%s [%.3f secs]' %
          (green if code == 0 else red, cmd, code, reset, end - start))

print('\nDone. Your build exited with %i.' % ret)
sys.exit(ret)
@@ -1,70 +1,14 @@
#!/usr/bin/env python3

import os
import subprocess
import sys

script_name = os.path.basename(__file__)
new_script_path = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'ci', script_name)

sys.stderr.write('\nNote: travis/{script_name} is deprecated. Use ci/{script_name} instead.\n\n'.format(script_name=script_name))
sys.stderr.flush()

p = subprocess.run([sys.executable, new_script_path] + sys.argv[1:])
sys.exit(p.returncode)
@@ -0,0 +1,14 @@
#!/usr/bin/env python3

import os
import subprocess
import sys

script_name = os.path.basename(__file__)
new_script_path = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'ci', script_name)

sys.stderr.write('\nNote: travis/{script_name} is deprecated. Use ci/{script_name} instead.\n\n'.format(script_name=script_name))
sys.stderr.flush()

p = subprocess.run([sys.executable, new_script_path] + sys.argv[1:])
sys.exit(p.returncode)
@@ -1,110 +1,14 @@
#!/usr/bin/env python3

import os
import subprocess
import sys

script_name = os.path.basename(__file__)
new_script_path = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'ci', script_name)

sys.stderr.write('\nNote: travis/{script_name} is deprecated. Use ci/{script_name} instead.\n\n'.format(script_name=script_name))
sys.stderr.flush()

p = subprocess.run([sys.executable, new_script_path] + sys.argv[1:])
sys.exit(p.returncode)
@@ -1,56 +1,9 @@
#!/bin/sh

script_name="$(basename "$0")"
new_script_path="$(dirname "$0")/../ci/${script_name}"

printf >&2 "\nNote: travis/%s is deprecated. Use ci/%s instead.\n\n" "${script_name}" "${script_name}"

"${new_script_path}" "$@"
exit $?
@@ -1,4 +1,9 @@
#!/bin/sh

script_name="$(basename "$0")"
new_script_path="$(dirname "$0")/../ci/${script_name}"

printf >&2 "\nNote: travis/%s is deprecated. Use ci/%s instead.\n\n" "${script_name}" "${script_name}"

"${new_script_path}" "$@"
exit $?
@@ -1,155 +1,14 @@
#!/usr/bin/env python3

import os
import subprocess
import sys

script_name = os.path.basename(__file__)
new_script_path = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'ci', script_name)

sys.stderr.write('\nNote: travis/{script_name} is deprecated. Use ci/{script_name} instead.\n\n'.format(script_name=script_name))
sys.stderr.flush()

p = subprocess.run([sys.executable, new_script_path] + sys.argv[1:])
sys.exit(p.returncode)
@@ -1,127 +1,14 @@
#!/usr/bin/env python3

import os
import subprocess
import sys

script_name = os.path.basename(__file__)
new_script_path = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'ci', script_name)

sys.stderr.write('\nNote: travis/{script_name} is deprecated. Use ci/{script_name} instead.\n\n'.format(script_name=script_name))
sys.stderr.flush()

p = subprocess.run([sys.executable, new_script_path] + sys.argv[1:])
sys.exit(p.returncode)
@@ -1,94 +1,14 @@
#!/usr/bin/env python3

import os
import subprocess
import sys

script_name = os.path.basename(__file__)
new_script_path = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'ci', script_name)

sys.stderr.write('\nNote: travis/{script_name} is deprecated. Use ci/{script_name} instead.\n\n'.format(script_name=script_name))
sys.stderr.flush()

p = subprocess.run([sys.executable, new_script_path] + sys.argv[1:])
sys.exit(p.returncode)
@@ -1,64 +1,14 @@
#!/usr/bin/env python3

import os
import subprocess
import sys

script_name = os.path.basename(__file__)
new_script_path = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'ci', script_name)

sys.stderr.write('\nNote: travis/{script_name} is deprecated. Use ci/{script_name} instead.\n\n'.format(script_name=script_name))
sys.stderr.flush()

p = subprocess.run([sys.executable, new_script_path] + sys.argv[1:])
sys.exit(p.returncode)