Expose to_search_normalized to Lua, add test, and use in ListColumn (most useful for the `stocks` plugin)

develop
lethosor 2020-10-10 01:40:00 -04:00
parent 8d85261cdc
commit 9c8098b4f0
No known key found for this signature in database
GPG Key ID: 76A269552F4F58C1
5 changed files with 27 additions and 7 deletions

@ -881,14 +881,23 @@ can be omitted.
Convert a string from DF's CP437 encoding to the correct encoding for the Convert a string from DF's CP437 encoding to the correct encoding for the
DFHack console. DFHack console.
.. warning::
When printing CP437-encoded text to the console (for example, names returned
from ``dfhack.TranslateName()``), use ``print(dfhack.df2console(text))`` to
ensure proper display on all platforms.
* ``dfhack.utf2df(string)`` * ``dfhack.utf2df(string)``
Convert a string from UTF-8 to DF's CP437 encoding. Convert a string from UTF-8 to DF's CP437 encoding.
**Note:** When printing CP437-encoded text to the console (for example, names * ``dfhack.toSearchNormalized(string)``
returned from TranslateName()), use ``print(dfhack.df2console(text)`` to ensure
proper display on all platforms.
Replaces non-ASCII alphabetic characters in a CP437-encoded string with their
nearest ASCII equivalents, if possible, and returns a CP437-encoded string.
Note that the returned string may be longer than the input string. For
example, ``ä`` is replaced with ``a``, and ``æ`` is replaced with ``ae``.
Gui module Gui module
---------- ----------

@ -47,6 +47,7 @@ changelog.txt uses a syntax similar to RST, with a few special sequences:
- `createitem`: added an ``inspect`` subcommand to print the item and material tokens of existing items, which can be used to create additional matching items - `createitem`: added an ``inspect`` subcommand to print the item and material tokens of existing items, which can be used to create additional matching items
- `embark-assistant`: added support for searching for taller waterfalls (up to 50 z-levels tall) - `embark-assistant`: added support for searching for taller waterfalls (up to 50 z-levels tall)
- `search`: added support for searching for names containing non-ASCII characters using their ASCII equivalents - `search`: added support for searching for names containing non-ASCII characters using their ASCII equivalents
- `stocks`: added support for searching for items containing non-ASCII characters using their ASCII equivalents
- `zone`: added an ``enumnick`` subcommand to assign enumerated nicknames (e.g. "Hen 1", "Hen 2"...) - `zone`: added an ``enumnick`` subcommand to assign enumerated nicknames (e.g. "Hen 1", "Hen 2"...)
- `zone`: added slaughter indication to ``uinfo`` output - `zone`: added slaughter indication to ``uinfo`` output

@ -1417,6 +1417,7 @@ static bool isMapLoaded() { return Core::getInstance().isMapLoaded(); }
static std::string df2utf(std::string s) { return DF2UTF(s); } static std::string df2utf(std::string s) { return DF2UTF(s); }
static std::string utf2df(std::string s) { return UTF2DF(s); } static std::string utf2df(std::string s) { return UTF2DF(s); }
static std::string df2console(color_ostream &out, std::string s) { return DF2CONSOLE(out, s); } static std::string df2console(color_ostream &out, std::string s) { return DF2CONSOLE(out, s); }
// Lua binding shim: exposes to_search_normalized() as dfhack.toSearchNormalized.
// Per the docs added in this commit, it replaces non-ASCII alphabetic characters
// in a CP437-encoded string with their nearest ASCII equivalents (e.g. "ä" -> "a",
// "æ" -> "ae"), so the result may be longer than the input.
static std::string toSearchNormalized(std::string s) { return to_search_normalized(s); }
#define WRAP_VERSION_FUNC(name, function) WRAPN(name, DFHack::Version::function) #define WRAP_VERSION_FUNC(name, function) WRAPN(name, DFHack::Version::function)
@ -1434,6 +1435,7 @@ static const LuaWrapper::FunctionReg dfhack_module[] = {
WRAP(df2utf), WRAP(df2utf),
WRAP(utf2df), WRAP(utf2df),
WRAP(df2console), WRAP(df2console),
WRAP(toSearchNormalized),
WRAP_VERSION_FUNC(getDFHackVersion, dfhack_version), WRAP_VERSION_FUNC(getDFHackVersion, dfhack_version),
WRAP_VERSION_FUNC(getDFHackRelease, dfhack_release), WRAP_VERSION_FUNC(getDFHackRelease, dfhack_release),
WRAP_VERSION_FUNC(getDFHackBuildID, dfhack_build_id), WRAP_VERSION_FUNC(getDFHackBuildID, dfhack_build_id),

@ -139,14 +139,14 @@ public:
virtual void tokenizeSearch (vector<string> *dest, const string search) virtual void tokenizeSearch (vector<string> *dest, const string search)
{ {
if (!search.empty()) if (!search.empty())
split_string(dest, search, " "); split_string(dest, to_search_normalized(search), " ");
} }
virtual bool showEntry(const ListEntry<T> *entry, const vector<string> &search_tokens) virtual bool showEntry(const ListEntry<T> *entry, const vector<string> &search_tokens)
{ {
if (!search_tokens.empty()) if (!search_tokens.empty())
{ {
string item_string = toLower(entry->text); string item_string = to_search_normalized(entry->text);
for (auto si = search_tokens.begin(); si != search_tokens.end(); si++) for (auto si = search_tokens.begin(); si != search_tokens.end(); si++)
{ {
if (!si->empty() && item_string.find(*si) == string::npos && if (!si->empty() && item_string.find(*si) == string::npos &&
@ -164,9 +164,9 @@ public:
ListEntry<T> *prev_selected = (getDisplayListSize() > 0) ? display_list[highlighted_index] : NULL; ListEntry<T> *prev_selected = (getDisplayListSize() > 0) ? display_list[highlighted_index] : NULL;
display_list.clear(); display_list.clear();
search_string = toLower(search_string); search_string = to_search_normalized(search_string);
vector<string> search_tokens; vector<string> search_tokens;
tokenizeSearch(&search_tokens, search_string); tokenizeSearch(&search_tokens, to_search_normalized(search_string));
for (size_t i = 0; i < list.size(); i++) for (size_t i = 0; i < list.size(); i++)
{ {

@ -0,0 +1,8 @@
function test.toSearchNormalized()
    -- {raw input, expected normalized output}; non-ASCII inputs are converted
    -- from UTF-8 to CP437 first, since toSearchNormalized operates on CP437.
    local cases = {
        {'', ''},
        {'abcd', 'abcd'},
        {'ABCD', 'abcd'},
        {dfhack.utf2df('áçèîöü'), 'aceiou'},
        {dfhack.utf2df('ÄÇÉÖÜÿ'), 'aceouy'},
        {dfhack.utf2df('æÆ'), 'aeae'},
    }
    for _, case in ipairs(cases) do
        expect.eq(dfhack.toSearchNormalized(case[1]), case[2])
    end
end