Auto-format code files

Daniel Wolf 2024-12-09 08:25:51 +01:00
parent b365c4c1d5
commit 9d3782a08b
142 changed files with 2557 additions and 2220 deletions

.clang-format (new file, 26 lines)

@@ -0,0 +1,26 @@
# Config file for clang-format, a C/C++/... code formatter.
BasedOnStyle: Chromium
# TODO: Uncomment once clang-format 20 is out
# BreakBinaryOperations: RespectPrecedence
BreakConstructorInitializers: AfterColon
AccessModifierOffset: -4
AlignAfterOpenBracket: BlockIndent
AlignOperands: DontAlign
AllowAllParametersOfDeclarationOnNextLine: true
AllowShortCaseLabelsOnASingleLine: true
AllowShortFunctionsOnASingleLine: Empty
AllowShortIfStatementsOnASingleLine: WithoutElse
BinPackArguments: false
BreakBeforeBinaryOperators: NonAssignment
BreakStringLiterals: false
ColumnLimit: 100
CompactNamespaces: true
IncludeBlocks: Regroup
IndentWidth: 4
InsertNewlineAtEOF: true
LineEnding: LF
PackConstructorInitializers: Never
SeparateDefinitionBlocks: Always
SortIncludes: CaseInsensitive
SpacesBeforeTrailingComments: 1
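
With this file at the repository root, clang-format picks the settings up automatically. A minimal sketch of a manual run, assuming clang-format 19+ is on PATH and using a placeholder file name:

    # report (but do not fix) formatting violations; --Werror makes any finding fail the run
    clang-format --dry-run --Werror src/example.cpp

    # rewrite the file in place
    clang-format -i src/example.cpp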

.editorconfig (new file, 14 lines)

@@ -0,0 +1,14 @@
# Config file for generic text editors.
root = true
[*]
charset = utf-8
end_of_line = lf
indent_style = space
indent_size = 4
insert_final_newline = true
trim_trailing_whitespace = true
[*.{js,ts,yaml,yml}]
indent_size = 2

.gersemirc (new file, 4 lines)

@@ -0,0 +1,4 @@
# Config file for gersemi, a CMake code formatter.
line_length: 100
warn_about_unknown_commands: false
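
These are the same gersemi settings the `format` task in dodo.py relies on. A hedged sketch of the equivalent manual invocation, assuming gersemi is installed (see requirements.txt):

    # verify CMake formatting without modifying files
    gersemi --check CMakeLists.txt

    # reformat in place
    gersemi -i CMakeLists.txt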

.gitignore (vendored, 7 changed lines)

@@ -1,3 +1,8 @@
 .vs/
+.vscode/
+*.user
 build/
-*.user
+venv/
+__pycache__
+.doit.db.*

.prettierrc.yml (new file, 11 lines)

@@ -0,0 +1,11 @@
# Config file for Prettier, a JavaScript/TypeScript code formatter.
tabWidth: 2
printWidth: 100
singleQuote: true
arrowParens: avoid
overrides:
  - files: '*.jsx' # Adobe JSX, not React
    options:
      trailingComma: none
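
dodo.py runs Prettier through Deno rather than npm. A rough sketch of the same invocation for a single file, assuming Deno is installed and using a placeholder file name:

    # check formatting (CI-style)
    deno run -A npm:prettier@3.4.2 --check --log-level warn example.jsx

    # apply formatting
    deno run -A npm:prettier@3.4.2 --write --log-level warn example.jsx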

.ruff.toml (new file, 7 lines)

@@ -0,0 +1,7 @@
# Config file for Ruff, a Python code formatter.
line-length = 100
[format]
quote-style = "single"
skip-magic-trailing-comma = true
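
Ruff discovers this file automatically when run from the repository. A minimal sketch, mirroring the flags used by the dodo.py task:

    # check Python formatting without changing files
    ruff format --check dodo.py

    # apply formatting in place
    ruff format dodo.py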

CMakeLists.txt

@@ -13,14 +13,11 @@ add_subdirectory("extras/MagixVegas")
 add_subdirectory("extras/EsotericSoftwareSpine")

 # Install misc. files
-install(
-    FILES README.adoc LICENSE.md CHANGELOG.md
-    DESTINATION .
-)
+install(FILES README.adoc LICENSE.md CHANGELOG.md DESTINATION .)

 # Configure CPack
 function(get_short_system_name variable)
-    if ("${CMAKE_SYSTEM_NAME}" STREQUAL "Darwin")
+    if("${CMAKE_SYSTEM_NAME}" STREQUAL "Darwin")
         set(${variable} "macOS" PARENT_SCOPE)
     else()
         set(${variable} "${CMAKE_SYSTEM_NAME}" PARENT_SCOPE)

dodo.py (new file, 117 lines)

@@ -0,0 +1,117 @@
"""Collection of tasks. Run using `doit <task>`."""

from pathlib import Path
import subprocess
from functools import cache
from gitignore_parser import parse_gitignore
from typing import Dict, Optional, List
from enum import Enum

root_dir = Path(__file__).parent
rhubarb_dir = root_dir / 'rhubarb'


def task_format():
    """Format source files"""
    files_by_formatters = get_files_by_formatters()
    for formatter, files in files_by_formatters.items():
        yield {
            'name': formatter.value,
            'actions': [(format, [files, formatter])],
            'file_dep': files,
        }


def task_check_formatted():
    """Fails unless source files are formatted"""
    files_by_formatters = get_files_by_formatters()
    for formatter, files in files_by_formatters.items():
        yield {
            'basename': 'check-formatted',
            'name': formatter.value,
            'actions': [(format, [files, formatter], {'check_only': True})],
        }


class Formatter(Enum):
    """A source code formatter."""

    CLANG_FORMAT = 'clang-format'
    GERSEMI = 'gersemi'
    PRETTIER = 'prettier'
    RUFF = 'ruff'


def format(files: List[Path], formatter: Formatter, *, check_only: bool = False):
    match formatter:
        case Formatter.CLANG_FORMAT:
            subprocess.run(
                ['clang-format', '--dry-run' if check_only else '-i', '--Werror', *files],
                check=True,
            )
        case Formatter.GERSEMI:
            subprocess.run(['gersemi', '--check' if check_only else '-i', *files], check=True)
        case Formatter.PRETTIER:
            subprocess.run(
                [
                    *['deno', 'run', '-A', 'npm:prettier@3.4.2'],
                    *['--check' if check_only else '--write', '--log-level', 'warn', *files],
                ],
                check=True,
            )
        case Formatter.RUFF:
            subprocess.run(
                ['ruff', '--quiet', 'format', *(['--check'] if check_only else []), *files],
                check=True,
            )
        case _:
            raise ValueError(f'Unknown formatter: {formatter}')


@cache
def get_files_by_formatters() -> Dict[Formatter, List[Path]]:
    """Returns a dict with all formattable code files grouped by formatter."""
    is_gitignored = parse_gitignore(root_dir / '.gitignore')

    def is_hidden(path: Path):
        return path.name.startswith('.')

    def is_third_party(path: Path):
        return path.is_relative_to(rhubarb_dir / 'lib') or path.name == 'gradle'

    result = {formatter: [] for formatter in Formatter}

    def visit(dir: Path):
        for path in dir.iterdir():
            if is_gitignored(path) or is_hidden(path) or is_third_party(path):
                continue
            if path.is_file():
                formatter = get_formatter(path)
                if formatter is not None:
                    result[formatter].append(path)
            else:
                visit(path)

    visit(root_dir)
    return result


def get_formatter(path: Path) -> Optional[Formatter]:
    """Returns the formatter to use for the given code file, if any."""
    match path.suffix.lower():
        case '.c' | '.cpp' | '.h':
            return Formatter.CLANG_FORMAT
        case '.cmake':
            return Formatter.GERSEMI
        case _ if path.name.lower() == 'cmakelists.txt':
            return Formatter.GERSEMI
        case '.js' | '.jsx' | '.ts':
            return Formatter.PRETTIER
        case '.py':
            return Formatter.RUFF
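
A hedged usage sketch for the tasks above, assuming the tools from requirements.txt (plus Deno) are on PATH:

    # format every file group with its formatter
    doit format

    # run only one formatter sub-task, e.g. clang-format
    doit format:clang-format

    # fail (useful for CI) if any file is not formatted
    doit check-formatted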

extras/AdobeAfterEffects/CMakeLists.txt

@@ -1,11 +1,5 @@
 cmake_minimum_required(VERSION 3.2)

-set(afterEffectsFiles
-    "Rhubarb Lip Sync.jsx"
-    "README.adoc"
-)
+set(afterEffectsFiles "Rhubarb Lip Sync.jsx" "README.adoc")

-install(
-    FILES ${afterEffectsFiles}
-    DESTINATION "extras/AdobeAfterEffects"
-)
+install(FILES ${afterEffectsFiles} DESTINATION "extras/AdobeAfterEffects")

extras/AdobeAfterEffects/Rhubarb Lip Sync.jsx

@@ -1,4 +1,5 @@
+// prettier-ignore
 (function polyfill() {
   // Polyfill for Object.assign
   "function"!=typeof Object.assign&&(Object.assign=function(a,b){"use strict";if(null==a)throw new TypeError("Cannot convert undefined or null to object");for(var c=Object(a),d=1;d<arguments.length;d++){var e=arguments[d];if(null!=e)for(var f in e)Object.prototype.hasOwnProperty.call(e,f)&&(c[f]=e[f])}return c});
@@ -34,16 +35,16 @@
   // Polyfill for JSON
   "object"!=typeof JSON&&(JSON={}),function(){"use strict";function f(a){return a<10?"0"+a:a}function this_value(){return this.valueOf()}function quote(a){return rx_escapable.lastIndex=0,rx_escapable.test(a)?'"'+a.replace(rx_escapable,function(a){var b=meta[a];return"string"==typeof b?b:"\\u"+("0000"+a.charCodeAt(0).toString(16)).slice(-4)})+'"':'"'+a+'"'}function str(a,b){var c,d,e,f,h,g=gap,i=b[a];switch(i&&"object"==typeof i&&"function"==typeof i.toJSON&&(i=i.toJSON(a)),"function"==typeof rep&&(i=rep.call(b,a,i)),typeof i){case"string":return quote(i);case"number":return isFinite(i)?String(i):"null";case"boolean":case"null":return String(i);case"object":if(!i)return"null";if(gap+=indent,h=[],"[object Array]"===Object.prototype.toString.apply(i)){for(f=i.length,c=0;c<f;c+=1)h[c]=str(c,i)||"null";return e=0===h.length?"[]":gap?"[\n"+gap+h.join(",\n"+gap)+"\n"+g+"]":"["+h.join(",")+"]",gap=g,e}if(rep&&"object"==typeof rep)for(f=rep.length,c=0;c<f;c+=1)"string"==typeof rep[c]&&(d=rep[c],(e=str(d,i))&&h.push(quote(d)+(gap?": ":":")+e));else for(d in i)Object.prototype.hasOwnProperty.call(i,d)&&(e=str(d,i))&&h.push(quote(d)+(gap?": ":":")+e);return e=0===h.length?"{}":gap?"{\n"+gap+h.join(",\n"+gap)+"\n"+g+"}":"{"+h.join(",")+"}",gap=g,e}}var rx_one=/^[\],:{}\s]*$/,rx_two=/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g,rx_three=/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g,rx_four=/(?:^|:|,)(?:\s*\[)+/g,rx_escapable=/[\\"\u0000-\u001f\u007f-\u009f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g,rx_dangerous=/[\u0000\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g;"function"!=typeof Date.prototype.toJSON&&(Date.prototype.toJSON=function(){return isFinite(this.valueOf())?this.getUTCFullYear()+"-"+f(this.getUTCMonth()+1)+"-"+f(this.getUTCDate())+"T"+f(this.getUTCHours())+":"+f(this.getUTCMinutes())+":"+f(this.getUTCSeconds())+"Z":null},Boolean.prototype.toJSON=this_value,Number.prototype.toJSON=this_value,String.prototype.toJSON=this_value);var gap,indent,meta,rep;"function"!=typeof JSON.stringify&&(meta={"\b":"\\b","\t":"\\t","\n":"\\n","\f":"\\f","\r":"\\r",'"':'\\"',"\\":"\\\\"},JSON.stringify=function(a,b,c){var d;if(gap="",indent="","number"==typeof c)for(d=0;d<c;d+=1)indent+=" ";else"string"==typeof c&&(indent=c);if(rep=b,b&&"function"!=typeof b&&("object"!=typeof b||"number"!=typeof b.length))throw new Error("JSON.stringify");return str("",{"":a})}),"function"!=typeof JSON.parse&&(JSON.parse=function(text,reviver){function walk(a,b){var c,d,e=a[b];if(e&&"object"==typeof e)for(c in e)Object.prototype.hasOwnProperty.call(e,c)&&(d=walk(e,c),void 0!==d?e[c]=d:delete e[c]);return reviver.call(a,b,e)}var j;if(text=String(text),rx_dangerous.lastIndex=0,rx_dangerous.test(text)&&(text=text.replace(rx_dangerous,function(a){return"\\u"+("0000"+a.charCodeAt(0).toString(16)).slice(-4)})),rx_one.test(text.replace(rx_two,"@").replace(rx_three,"]").replace(rx_four,"")))return j=eval("("+text+")"),"function"==typeof reviver?walk({"":j},""):j;throw new SyntaxError("JSON.parse")})}();
 })();

 function last(array) {
   return array[array.length - 1];
 }

 function createGuid() {
-  return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function(c) {
-    var r = Math.random() * 16 | 0;
-    var v = c == 'x' ? r : (r & 0x3 | 0x8);
+  return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function (c) {
+    var r = (Math.random() * 16) | 0;
+    var v = c == 'x' ? r : (r & 0x3) | 0x8;
     return v.toString(16);
   });
 }
@@ -99,10 +100,10 @@ function isFrameVisible(compItem, frameNumber) {
   if (!compItem) return false;
   var time = frameToTime(frameNumber + epsilon, compItem);
-  var videoLayers = toArrayBase1(compItem.layers).filter(function(layer) {
+  var videoLayers = toArrayBase1(compItem.layers).filter(function (layer) {
     return layer.hasVideo;
   });
-  var result = videoLayers.find(function(layer) {
+  var result = videoLayers.find(function (layer) {
     return layer.activeAtTime(time);
   });
   return Boolean(result);
@@ -119,12 +120,16 @@ function readTextFile(fileOrPath) {
     if (file.error) throw new Error('Error reading file "' + filePath + '": ' + file.error);
   }
   try {
-    file.open('r'); check();
-    file.encoding = 'UTF-8'; check();
-    var result = file.read(); check();
+    file.open('r');
+    check();
+    file.encoding = 'UTF-8';
+    check();
+    var result = file.read();
+    check();
     return result;
   } finally {
-    file.close(); check();
+    file.close();
+    check();
   }
 }
@@ -135,11 +140,15 @@ function writeTextFile(fileOrPath, text) {
     if (file.error) throw new Error('Error writing file "' + filePath + '": ' + file.error);
   }
   try {
-    file.open('w'); check();
-    file.encoding = 'UTF-8'; check();
-    file.write(text); check();
+    file.open('w');
+    check();
+    file.encoding = 'UTF-8';
+    check();
+    file.write(text);
+    check();
   } finally {
-    file.close(); check();
+    file.close();
+    check();
   }
 }
@@ -163,9 +172,7 @@ var osIsWindows = (system.osName || $.os).match(/windows/i);
 // Depending on the operating system, the syntax for escaping command-line arguments differs.
 function cliEscape(argument) {
-  return osIsWindows
-    ? '"' + argument + '"'
-    : "'" + argument.replace(/'/g, "'\\''") + "'";
+  return osIsWindows ? '"' + argument + '"' : "'" + argument.replace(/'/g, "'\\''") + "'";
 }

 function exec(command) {
@@ -180,7 +187,8 @@ function execInWindow(command) {
   // execute a command, then close the Terminal window.
   // If you know a better solution, let me know!
   var escapedCommand = command.replace(/"/g, '\\"');
-  var appleScript = '\
+  var appleScript =
+    '\
     tell application "Terminal" \
       -- Quit terminal \
       -- Yes, that\'s undesirable if there was an open window before. \
@@ -189,7 +197,9 @@ function execInWindow(command) {
       -- Open terminal \
      activate \
       -- Run command in new tab \
-      set newTab to do script ("' + escapedCommand + '") \
+      set newTab to do script ("' +
+    escapedCommand +
+    '") \
       -- Wait until command is done \
       tell newTab \
         repeat while busy \
@@ -220,20 +230,37 @@ function createResourceString(tree) {
 // Object containing functions to create control description trees.
 // For instance, `controls.StaticText({ text: 'Hello world' })`
 // returns `{ __type__: StaticText, text: 'Hello world' }`.
-var controlFunctions = (function() {
+var controlFunctions = (function () {
   var controlTypes = [
     // Strangely, 'dialog' and 'palette' need to start with a lower-case character
-    ['Dialog', 'dialog'], ['Palette', 'palette'],
-    'Panel', 'Group', 'TabbedPanel', 'Tab', 'Button', 'IconButton', 'Image', 'StaticText',
-    'EditText', 'Checkbox', 'RadioButton', 'Progressbar', 'Slider', 'Scrollbar', 'ListBox',
-    'DropDownList', 'TreeView', 'ListItem', 'FlashPlayer'
+    ['Dialog', 'dialog'],
+    ['Palette', 'palette'],
+    'Panel',
+    'Group',
+    'TabbedPanel',
+    'Tab',
+    'Button',
+    'IconButton',
+    'Image',
+    'StaticText',
+    'EditText',
+    'Checkbox',
+    'RadioButton',
+    'Progressbar',
+    'Slider',
+    'Scrollbar',
+    'ListBox',
+    'DropDownList',
+    'TreeView',
+    'ListItem',
+    'FlashPlayer'
   ];
   var result = {};
-  controlTypes.forEach(function(type){
+  controlTypes.forEach(function (type) {
     var isArray = Array.isArray(type);
     var key = isArray ? type[0] : type;
     var value = isArray ? type[1] : type;
-    result[key] = function(options) {
+    result[key] = function (options) {
       return Object.assign({ __type__: value }, options);
     };
   });
@@ -257,7 +284,7 @@ function getItemPath(item) {
 // Selects the item within an item control whose text matches the specified text.
 // If no such item exists, selects the first item, if present.
 function selectByTextOrFirst(itemControl, text) {
-  var targetItem = toArray(itemControl.items).find(function(item) {
+  var targetItem = toArray(itemControl.items).find(function (item) {
     return item.text === text;
   });
   if (!targetItem && itemControl.items.length) {
@@ -269,7 +296,7 @@ function selectByTextOrFirst(itemControl, text) {
 }

 function getAudioFileProjectItems() {
-  var result = toArrayBase1(app.project.items).filter(function(item) {
+  var result = toArrayBase1(app.project.items).filter(function (item) {
     var isAudioFootage = item instanceof FootageItem && item.hasAudio && !item.hasVideo;
     return isAudioFootage;
   });
@@ -283,9 +310,10 @@ var basicMouthShapeNames = mouthShapeNames.slice(0, basicMouthShapeCount);
 var extendedMouthShapeNames = mouthShapeNames.slice(basicMouthShapeCount);

 function getMouthCompHelpTip() {
-  var result = 'A composition containing the mouth shapes, one drawing per frame. They must be '
-    + 'arranged as follows:\n';
-  mouthShapeNames.forEach(function(mouthShapeName, i) {
+  var result =
+    'A composition containing the mouth shapes, one drawing per frame. They must be ' +
+    'arranged as follows:\n';
+  mouthShapeNames.forEach(function (mouthShapeName, i) {
     var isOptional = i >= basicMouthShapeCount;
     result += '\n00:' + pad(i, 2) + '\t' + mouthShapeName + (isOptional ? ' (optional)' : '');
   });
@@ -294,7 +322,7 @@ function getMouthCompHelpTip() {
 function createExtendedShapeCheckboxes() {
   var result = {};
-  extendedMouthShapeNames.forEach(function(shapeName) {
+  extendedMouthShapeNames.forEach(function (shapeName) {
     result[shapeName.toLowerCase()] = controlFunctions.Checkbox({
       text: shapeName,
       helpTip: 'Controls whether to use the optional ' + shapeName + ' shape.'
@@ -320,9 +348,10 @@ function createDialogWindow() {
         active: true
       }),
       value: DropDownList({
-        helpTip: 'An audio file containing recorded dialog.\n'
-          + 'This field shows all audio files that exist in '
-          + 'your After Effects project.'
+        helpTip:
+          'An audio file containing recorded dialog.\n' +
+          'This field shows all audio files that exist in ' +
+          'your After Effects project.'
       })
     }),
     recognizer: Group({
@@ -337,8 +366,9 @@ function createDialogWindow() {
         properties: { multiline: true },
         characters: 60,
         minimumSize: [0, 100],
-        helpTip: 'For better animation results, you can specify the text of '
-          + 'the recording here. This field is optional.'
+        helpTip:
+          'For better animation results, you can specify the text of ' +
+          'the recording here. This field is optional.'
       })
     }),
     mouthComp: Group({
@@ -354,8 +384,9 @@ function createDialogWindow() {
     targetFolder: Group({
       label: StaticText({ text: 'Target folder:' }),
       value: DropDownList({
-        helpTip: 'The project folder in which to create the animation '
-          + 'composition. The composition will be named like the audio file.'
+        helpTip:
+          'The project folder in which to create the animation ' +
+          'composition. The composition will be named like the audio file.'
       })
     }),
     frameRate: Group({
@@ -366,8 +397,9 @@ function createDialogWindow() {
       }),
       auto: Checkbox({
         text: 'From mouth composition',
-        helpTip: 'If checked, the animation will use the same frame rate as '
-          + 'the mouth composition.'
+        helpTip:
+          'If checked, the animation will use the same frame rate as ' +
+          'the mouth composition.'
       })
     })
   }),
@@ -400,13 +432,13 @@ function createDialogWindow() {
     animateButton: window.buttons.animate,
     cancelButton: window.buttons.cancel
   };
-  extendedMouthShapeNames.forEach(function(shapeName) {
+  extendedMouthShapeNames.forEach(function (shapeName) {
     controls['mouthShape' + shapeName] =
       window.settings.extendedMouthShapes[shapeName.toLowerCase()];
   });

   // Add audio file options
-  getAudioFileProjectItems().forEach(function(projectItem) {
+  getAudioFileProjectItems().forEach(function (projectItem) {
     var listItem = controls.audioFile.add('item', getItemPath(projectItem));
     listItem.projectItem = projectItem;
   });
@@ -416,7 +448,7 @@ function createDialogWindow() {
     { text: 'PocketSphinx (use for English recordings)', value: 'pocketSphinx' },
     { text: 'Phonetic (use for non-English recordings)', value: 'phonetic' }
   ];
-  recognizerOptions.forEach(function(option) {
+  recognizerOptions.forEach(function (option) {
     var listItem = controls.recognizer.add('item', option.text);
     listItem.value = option.value;
   });
@@ -425,7 +457,7 @@ function createDialogWindow() {
   var comps = toArrayBase1(app.project.items).filter(function (item) {
     return item instanceof CompItem;
   });
-  comps.forEach(function(projectItem) {
+  comps.forEach(function (projectItem) {
     var listItem = controls.mouthComp.add('item', getItemPath(projectItem));
     listItem.projectItem = projectItem;
   });
@@ -435,7 +467,7 @@ function createDialogWindow() {
     return item instanceof FolderItem;
   });
   projectFolders.unshift(app.project.rootFolder);
-  projectFolders.forEach(function(projectFolder) {
+  projectFolders.forEach(function (projectFolder) {
     var listItem = controls.targetFolder.add('item', getItemPath(projectFolder));
     listItem.projectItem = projectFolder;
   });
@@ -446,26 +478,29 @@ function createDialogWindow() {
   controls.dialogText.text = settings.dialogText || '';
   selectByTextOrFirst(controls.recognizer, settings.recognizer);
   selectByTextOrFirst(controls.mouthComp, settings.mouthComp);
-  extendedMouthShapeNames.forEach(function(shapeName) {
-    controls['mouthShape' + shapeName].value =
-      (settings.extendedMouthShapes || {})[shapeName.toLowerCase()];
+  extendedMouthShapeNames.forEach(function (shapeName) {
+    controls['mouthShape' + shapeName].value = (settings.extendedMouthShapes || {})[
+      shapeName.toLowerCase()
+    ];
   });
   selectByTextOrFirst(controls.targetFolder, settings.targetFolder);
   controls.frameRate.text = settings.frameRate || '';
   controls.autoFrameRate.value = settings.autoFrameRate;

   // Align controls
-  window.onShow = function() {
+  window.onShow = function () {
     // Give uniform width to all labels
     var groups = toArray(window.settings.children);
-    var labelWidths = groups.map(function(group) { return group.children[0].size.width; });
+    var labelWidths = groups.map(function (group) {
+      return group.children[0].size.width;
+    });
     var maxLabelWidth = Math.max.apply(Math, labelWidths);
     groups.forEach(function (group) {
       group.children[0].size.width = maxLabelWidth;
     });

     // Give uniform width to inputs
-    var valueWidths = groups.map(function(group) {
+    var valueWidths = groups.map(function (group) {
       return last(group.children).bounds.right - group.children[1].bounds.left;
     });
     var maxValueWidth = Math.max.apply(Math, valueWidths);
@@ -512,7 +547,7 @@ function createDialogWindow() {
     frameRate: Number(controls.frameRate.text),
     autoFrameRate: controls.autoFrameRate.value
   };
-  extendedMouthShapeNames.forEach(function(shapeName) {
+  extendedMouthShapeNames.forEach(function (shapeName) {
     settings.extendedMouthShapes[shapeName.toLowerCase()] =
       controls['mouthShape' + shapeName].value;
   });
@@ -541,18 +576,24 @@ function createDialogWindow() {
       var shapeName = mouthShapeNames[i];
       var required = i < basicMouthShapeCount || controls['mouthShape' + shapeName].value;
       if (required && !isFrameVisible(comp, i)) {
-        return 'The mouth comp does not seem to contain an image for shape '
-          + shapeName + ' at frame ' + i + '.';
+        return (
+          'The mouth comp does not seem to contain an image for shape ' +
+          shapeName +
+          ' at frame ' +
+          i +
+          '.'
+        );
       }
     }
     if (!comp.preserveNestedFrameRate) {
       var fix = Window.confirm(
-        'The setting "Preserve frame rate when nested or in render queue" is not active '
-          + 'for the mouth composition. This can result in incorrect animation.\n\n'
-          + 'Activate this setting now?',
+        'The setting "Preserve frame rate when nested or in render queue" is not active ' +
+          'for the mouth composition. This can result in incorrect animation.\n\n' +
+          'Activate this setting now?',
         false,
-        'Fix composition setting?');
+        'Fix composition setting?'
+      );
       if (fix) {
         app.beginUndoGroup(appName + ': Mouth composition setting');
         comp.preserveNestedFrameRate = true;
@@ -567,10 +608,14 @@ function createDialogWindow() {
     var match = version.match(/Rhubarb Lip Sync version ((\d+)\.(\d+).(\d+)(-[0-9A-Za-z-.]+)?)/);
     if (!match) {
       var instructions = osIsWindows
-        ? 'Make sure your PATH environment variable contains the ' + appName + ' '
-          + 'application directory.'
-        : 'Make sure you have created this file as a symbolic link to the ' + appName + ' '
-          + 'executable (rhubarb).';
+        ? 'Make sure your PATH environment variable contains the ' +
+          appName +
+          ' ' +
+          'application directory.'
+        : 'Make sure you have created this file as a symbolic link to the ' +
+          appName +
+          ' ' +
+          'executable (rhubarb).';
       return 'Cannot find executable file "' + rhubarbPath + '". \n' + instructions;
     }
     var versionString = match[1];
@@ -579,15 +624,32 @@ function createDialogWindow() {
     var requiredMajor = 1;
     var minRequiredMinor = 9;
     if (major != requiredMajor || minor < minRequiredMinor) {
-      return 'This script requires ' + appName + ' ' + requiredMajor + '.' + minRequiredMinor
-        + '.0 or a later ' + requiredMajor + '.x version. '
-        + 'Your installed version is ' + versionString + ', which is not compatible.';
+      return (
+        'This script requires ' +
+        appName +
+        ' ' +
+        requiredMajor +
+        '.' +
+        minRequiredMinor +
+        '.0 or a later ' +
+        requiredMajor +
+        '.x version. ' +
+        'Your installed version is ' +
+        versionString +
+        ', which is not compatible.'
+      );
     }
   }

-  function generateMouthCues(audioFileFootage, recognizer, dialogText, mouthComp, extendedMouthShapeNames,
-    targetProjectFolder, frameRate)
-  {
+  function generateMouthCues(
+    audioFileFootage,
+    recognizer,
+    dialogText,
+    mouthComp,
+    extendedMouthShapeNames,
+    targetProjectFolder,
+    frameRate
+  ) {
     var basePath = Folder.temp.fsName + '/' + createGuid();
     var dialogFile = new File(basePath + '.txt');
     var logFile = new File(basePath + '.log');
@@ -597,15 +659,16 @@ function createDialogWindow() {
     writeTextFile(dialogFile, dialogText);

     // Create command line
-    var commandLine = rhubarbPath
-      + ' --dialogFile ' + cliEscape(dialogFile.fsName)
-      + ' --recognizer ' + recognizer
-      + ' --exportFormat json'
-      + ' --extendedShapes ' + cliEscape(extendedMouthShapeNames.join(''))
-      + ' --logFile ' + cliEscape(logFile.fsName)
-      + ' --logLevel fatal'
-      + ' --output ' + cliEscape(jsonFile.fsName)
-      + ' ' + cliEscape(audioFileFootage.file.fsName);
+    var commandLine =
+      rhubarbPath +
+      (' --dialogFile ' + cliEscape(dialogFile.fsName)) +
+      (' --recognizer ' + recognizer) +
+      ' --exportFormat json' +
+      (' --extendedShapes ' + cliEscape(extendedMouthShapeNames.join(''))) +
+      (' --logFile ' + cliEscape(logFile.fsName)) +
+      ' --logLevel fatal' +
+      (' --output ' + cliEscape(jsonFile.fsName)) +
+      (' ' + cliEscape(audioFileFootage.file.fsName));

     // Run Rhubarb
     execInWindow(commandLine);
@@ -635,9 +698,13 @@ function createDialogWindow() {
     }
   }

-  function animateMouthCues(mouthCues, audioFileFootage, mouthComp, targetProjectFolder,
-    frameRate)
-  {
+  function animateMouthCues(
+    mouthCues,
+    audioFileFootage,
+    mouthComp,
+    targetProjectFolder,
+    frameRate
+  ) {
     // Find an unconflicting comp name
     // ... strip extension, if present
     var baseName = audioFileFootage.name.match(/^(.*?)(\..*)?$/i)[1];
@@ -645,14 +712,24 @@ function createDialogWindow() {
     // ... add numeric suffix, if needed
     var existingItems = toArrayBase1(targetProjectFolder.items);
     var counter = 1;
-    while (existingItems.some(function(item) { return item.name === compName; })) {
+    while (
+      existingItems.some(function (item) {
+        return item.name === compName;
+      })
+    ) {
       counter++;
       compName = baseName + ' ' + counter;
     }

     // Create new comp
-    var comp = targetProjectFolder.items.addComp(compName, mouthComp.width, mouthComp.height,
-      mouthComp.pixelAspect, audioFileFootage.duration, frameRate);
+    var comp = targetProjectFolder.items.addComp(
+      compName,
+      mouthComp.width,
+      mouthComp.height,
+      mouthComp.pixelAspect,
+      audioFileFootage.duration,
+      frameRate
+    );

     // Show new comp
     comp.openInViewer();
@@ -669,7 +746,7 @@ function createDialogWindow() {
     var timeRemap = mouthLayer['Time Remap'];
     // Enabling time remapping automatically adds two keys. Remove the second.
     timeRemap.removeKey(2);
-    mouthCues.mouthCues.forEach(function(mouthCue) {
+    mouthCues.mouthCues.forEach(function (mouthCue) {
       // Round down keyframe time. In animation, earlier is better than later.
       // Set keyframe time to *just before* the exact frame to prevent rounding errors
       var frame = Math.floor(timeToFrame(mouthCue.start, comp));
@@ -684,16 +761,28 @@ function createDialogWindow() {
     }
   }

-  function animate(audioFileFootage, recognizer, dialogText, mouthComp, extendedMouthShapeNames,
-    targetProjectFolder, frameRate)
-  {
+  function animate(
+    audioFileFootage,
+    recognizer,
+    dialogText,
+    mouthComp,
+    extendedMouthShapeNames,
+    targetProjectFolder,
+    frameRate
+  ) {
     try {
-      var mouthCues = generateMouthCues(audioFileFootage, recognizer, dialogText, mouthComp,
-        extendedMouthShapeNames, targetProjectFolder, frameRate);
+      var mouthCues = generateMouthCues(
+        audioFileFootage,
+        recognizer,
+        dialogText,
+        mouthComp,
+        extendedMouthShapeNames,
+        targetProjectFolder,
+        frameRate
+      );

       app.beginUndoGroup(appName + ': Animation');
-      animateMouthCues(mouthCues, audioFileFootage, mouthComp, targetProjectFolder,
-        frameRate);
+      animateMouthCues(mouthCues, audioFileFootage, mouthComp, targetProjectFolder, frameRate);
       app.endUndoGroup();
     } catch (e) {
       Window.alert(e.message, appName, true);
@@ -707,7 +796,7 @@ function createDialogWindow() {
   controls.recognizer.onChange = update;
   controls.dialogText.onChanging = update;
   controls.mouthComp.onChange = update;
-  extendedMouthShapeNames.forEach(function(shapeName) {
+  extendedMouthShapeNames.forEach(function (shapeName) {
     controls['mouthShape' + shapeName].onClick = update;
   });
   controls.targetFolder.onChange = update;
@@ -715,7 +804,7 @@ function createDialogWindow() {
   controls.autoFrameRate.onClick = update;

   // Handle animation
-  controls.animateButton.onClick = function() {
+  controls.animateButton.onClick = function () {
     var validationError = validate();
     if (typeof validationError === 'string') {
       if (validationError) {
@@ -728,7 +817,7 @@ function createDialogWindow() {
       controls.recognizer.selection.value,
       controls.dialogText.text || '',
       controls.mouthComp.selection.projectItem,
-      extendedMouthShapeNames.filter(function(shapeName) {
+      extendedMouthShapeNames.filter(function (shapeName) {
        return controls['mouthShape' + shapeName].value;
       }),
       controls.targetFolder.selection.projectItem,
@@ -738,7 +827,7 @@ function createDialogWindow() {
   };

   // Handle cancelation
-  controls.cancelButton.onClick = function() {
+  controls.cancelButton.onClick = function () {
     window.close();
   };
@@ -747,9 +836,12 @@
 function checkPreconditions() {
   if (!canWriteFiles()) {
-    Window.alert('This script requires file system access.\n\n'
-      + 'Please enable Preferences > General > Allow Scripts to Write Files and Access Network.',
-      appName, true);
+    Window.alert(
+      'This script requires file system access.\n\n' +
+        'Please enable Preferences > General > Allow Scripts to Write Files and Access Network.',
+      appName,
+      true
+    );
     return false;
   }
   return true;

extras/EsotericSoftwareSpine/CMakeLists.txt

@@ -1,18 +1,13 @@
 cmake_minimum_required(VERSION 3.2)

 add_custom_target(
-    rhubarbForSpine ALL
+    rhubarbForSpine
+    ALL
     "./gradlew" "build"
     WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}"
     COMMENT "Building Rhubarb for Spine through Gradle."
 )

-install(
-    DIRECTORY "build/libs/"
-    DESTINATION "extras/EsotericSoftwareSpine"
-)
+install(DIRECTORY "build/libs/" DESTINATION "extras/EsotericSoftwareSpine")

-install(
-    FILES README.adoc
-    DESTINATION "extras/EsotericSoftwareSpine"
-)
+install(FILES README.adoc DESTINATION "extras/EsotericSoftwareSpine")

extras/MagixVegas/CMakeLists.txt

@@ -8,7 +8,4 @@ set(vegasFiles
     "README.adoc"
 )

-install(
-    FILES ${vegasFiles}
-    DESTINATION "extras/MagixVegas"
-)
+install(FILES ${vegasFiles} DESTINATION "extras/MagixVegas")

requirements.txt (new file, 5 lines)

@@ -0,0 +1,5 @@
doit==0.36.0
clang-format==19.1.5
gersemi==0.17.1
gitignore_parser==0.1.11
ruff==0.8.3
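
A typical setup sketch: install these pinned tools into a virtual environment, then run the doit tasks. Note that Deno, which dodo.py uses to run Prettier, is not covered by this file.

    python -m pip install -r requirements.txt
    doit format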

rhubarb/CMakeLists.txt

@@ -10,7 +10,7 @@ set(CMAKE_CXX_STANDARD 17)
 set(CMAKE_CXX_STANDARD_REQUIRED ON)

 # Enable POSIX threads
-if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU" OR "${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang")
+if("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU" OR "${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang")
     set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -pthread")
 endif()
@@ -59,30 +59,31 @@ include_directories(SYSTEM ${Boost_INCLUDE_DIRS})
 link_libraries(${Boost_LIBRARIES}) # Just about every project needs Boost

 # ... C++ Format
-FILE(GLOB cppFormatFiles "lib/cppformat/*.cc")
+file(GLOB cppFormatFiles "lib/cppformat/*.cc")
 add_library(cppFormat ${cppFormatFiles})
 target_include_directories(cppFormat SYSTEM PUBLIC "lib/cppformat")
 target_compile_options(cppFormat PRIVATE ${disableWarningsFlags})
 set_target_properties(cppFormat PROPERTIES FOLDER lib)

 # ... sphinxbase
-FILE(GLOB_RECURSE sphinxbaseFiles "lib/sphinxbase-rev13216/src/libsphinxbase/*.c")
+file(GLOB_RECURSE sphinxbaseFiles "lib/sphinxbase-rev13216/src/libsphinxbase/*.c")
 add_library(sphinxbase ${sphinxbaseFiles})
-target_include_directories(sphinxbase SYSTEM PUBLIC
-    "lib/sphinxbase-rev13216/include"
-    "lib/sphinxbase-rev13216/src"
-    "lib/sphinx_config"
+target_include_directories(
+    sphinxbase
+    SYSTEM
+    PUBLIC "lib/sphinxbase-rev13216/include" "lib/sphinxbase-rev13216/src" "lib/sphinx_config"
 )
 target_compile_options(sphinxbase PRIVATE ${disableWarningsFlags})
 target_compile_definitions(sphinxbase PUBLIC __SPHINXBASE_EXPORT_H__=1 SPHINXBASE_EXPORT=) # Compile as static lib
 set_target_properties(sphinxbase PROPERTIES FOLDER lib)

 # ... PocketSphinx
-FILE(GLOB pocketSphinxFiles "lib/pocketsphinx-rev13216/src/libpocketsphinx/*.c")
+file(GLOB pocketSphinxFiles "lib/pocketsphinx-rev13216/src/libpocketsphinx/*.c")
 add_library(pocketSphinx ${pocketSphinxFiles})
-target_include_directories(pocketSphinx SYSTEM PUBLIC
-    "lib/pocketsphinx-rev13216/include"
-    "lib/pocketsphinx-rev13216/src/libpocketsphinx"
+target_include_directories(
+    pocketSphinx
+    SYSTEM
+    PUBLIC "lib/pocketsphinx-rev13216/include" "lib/pocketsphinx-rev13216/src/libpocketsphinx"
 )
 target_link_libraries(pocketSphinx sphinxbase)
 target_compile_options(pocketSphinx PRIVATE ${disableWarningsFlags})
@@ -129,10 +130,10 @@ set(webRtcFiles
 add_library(webRtc ${webRtcFiles})
 target_include_directories(webRtc SYSTEM PUBLIC "lib/webrtc-8d2248ff")
 target_compile_options(webRtc PRIVATE ${disableWarningsFlags})
-if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU")
+if("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU")
     target_compile_options(webRtc PRIVATE -pthread -lpthread)
 endif()
-if (NOT WIN32)
+if(NOT WIN32)
     target_compile_definitions(webRtc PRIVATE WEBRTC_POSIX)
 endif()
 set_target_properties(webRtc PROPERTIES FOLDER lib)
@@ -203,34 +204,26 @@ set(fliteFiles
     lib/flite-1.4/src/utils/cst_val_user.c
 )
 add_library(flite ${fliteFiles})
-target_include_directories(flite SYSTEM PUBLIC
-    "lib/flite-1.4/include"
-    "lib/flite-1.4"
-)
+target_include_directories(flite SYSTEM PUBLIC "lib/flite-1.4/include" "lib/flite-1.4")
 target_compile_options(flite PRIVATE ${disableWarningsFlags})
 set_target_properties(flite PROPERTIES FOLDER lib)

 # ... UTF8-CPP
-add_library(utfcpp
-    lib/header-only.c
-    lib/utfcpp-2.3.5/source/utf8.h
-)
+add_library(utfcpp lib/header-only.c lib/utfcpp-2.3.5/source/utf8.h)
 target_include_directories(utfcpp SYSTEM PUBLIC "lib/utfcpp-2.3.5/source")
 target_compile_options(utfcpp PRIVATE ${disableWarningsFlags})
 set_target_properties(utfcpp PROPERTIES FOLDER lib)

 # ... utf8proc
-add_library(utf8proc
-    lib/utf8proc-2.2.0/utf8proc.c
-    lib/utf8proc-2.2.0/utf8proc.h
-)
+add_library(utf8proc lib/utf8proc-2.2.0/utf8proc.c lib/utf8proc-2.2.0/utf8proc.h)
 target_include_directories(utf8proc SYSTEM PUBLIC "lib/utf8proc-2.2.0")
 target_compile_options(utf8proc PRIVATE ${disableWarningsFlags})
 target_compile_definitions(utf8proc PUBLIC UTF8PROC_STATIC=1) # Compile as static lib
 set_target_properties(utf8proc PROPERTIES FOLDER lib)

 # ... Ogg
-add_library(ogg
+add_library(
+    ogg
     lib/ogg-1.3.3/include/ogg/ogg.h
     lib/ogg-1.3.3/src/bitwise.c
     lib/ogg-1.3.3/src/framing.c
@@ -240,7 +233,8 @@ target_compile_options(ogg PRIVATE ${disableWarningsFlags})
 set_target_properties(ogg PROPERTIES FOLDER lib)

 # ... Vorbis
-add_library(vorbis
+add_library(
+    vorbis
     lib/vorbis-1.3.6/include/vorbis/vorbisfile.h
     lib/vorbis-1.3.6/lib/bitrate.c
     lib/vorbis-1.3.6/lib/block.c
@@ -263,9 +257,7 @@ add_library(vorbis
     lib/vorbis-1.3.6/lib/window.c
 )
 target_include_directories(vorbis SYSTEM PUBLIC "lib/vorbis-1.3.6/include")
-target_link_libraries(vorbis
-    ogg
-)
+target_link_libraries(vorbis ogg)
 target_compile_options(vorbis PRIVATE ${disableWarningsFlags})
 set_target_properties(vorbis PROPERTIES FOLDER lib)
@@ -274,7 +266,8 @@ set_target_properties(vorbis PROPERTIES FOLDER lib)
 include_directories("src")

 # ... rhubarb-animation
-add_library(rhubarb-animation
+add_library(
+    rhubarb-animation
     src/animation/animationRules.cpp
     src/animation/animationRules.h
     src/animation/mouthAnimation.cpp
@@ -296,14 +289,11 @@ add_library(rhubarb-animation
     src/animation/tweening.h
 )
 target_include_directories(rhubarb-animation PRIVATE "src/animation")
-target_link_libraries(rhubarb-animation
-    rhubarb-core
-    rhubarb-logging
-    rhubarb-time
-)
+target_link_libraries(rhubarb-animation rhubarb-core rhubarb-logging rhubarb-time)

 # ... rhubarb-audio
-add_library(rhubarb-audio
+add_library(
+    rhubarb-audio
     src/audio/AudioClip.cpp
     src/audio/AudioClip.h
     src/audio/audioFileReading.cpp
@@ -327,7 +317,8 @@ add_library(rhubarb-audio
     src/audio/waveFileWriting.h
 )
 target_include_directories(rhubarb-audio PRIVATE "src/audio")
-target_link_libraries(rhubarb-audio
+target_link_libraries(
+    rhubarb-audio
     webRtc
     vorbis
     rhubarb-logging
@@ -337,7 +328,8 @@ target_link_libraries(rhubarb-audio

 # ... rhubarb-core
 configure_file(src/core/appInfo.cpp.in appInfo.cpp ESCAPE_QUOTES)
-add_library(rhubarb-core
+add_library(
+    rhubarb-core
     ${CMAKE_CURRENT_BINARY_DIR}/appInfo.cpp
     src/core/appInfo.h
     src/core/Phone.cpp
@@ -346,12 +338,11 @@ add_library(rhubarb-core
     src/core/Shape.h
 )
 target_include_directories(rhubarb-core PRIVATE "src/core")
-target_link_libraries(rhubarb-core
-    rhubarb-tools
-)
+target_link_libraries(rhubarb-core rhubarb-tools)

 # ... rhubarb-exporters
-add_library(rhubarb-exporters
+add_library(
+    rhubarb-exporters
     src/exporters/DatExporter.cpp
     src/exporters/DatExporter.h
     src/exporters/Exporter.h
@@ -365,19 +356,13 @@ add_library(rhubarb-exporters
     src/exporters/XmlExporter.h
 )
 target_include_directories(rhubarb-exporters PRIVATE "src/exporters")
-target_link_libraries(rhubarb-exporters
-    rhubarb-animation
-    rhubarb-core
-    rhubarb-time
-)
+target_link_libraries(rhubarb-exporters rhubarb-animation rhubarb-core rhubarb-time)

 # ... rhubarb-lib
-add_library(rhubarb-lib
-    src/lib/rhubarbLib.cpp
-    src/lib/rhubarbLib.h
-)
+add_library(rhubarb-lib src/lib/rhubarbLib.cpp src/lib/rhubarbLib.h)
 target_include_directories(rhubarb-lib PRIVATE "src/lib")
-target_link_libraries(rhubarb-lib
+target_link_libraries(
+    rhubarb-lib
     rhubarb-animation
     rhubarb-audio
     rhubarb-core
@@ -387,7 +372,8 @@ target_link_libraries(rhubarb-lib
 )

 # ... rhubarb-logging
-add_library(rhubarb-logging
+add_library(
+    rhubarb-logging
     src/logging/Entry.cpp
     src/logging/Entry.h
     src/logging/Formatter.h
@@ -402,12 +388,11 @@ add_library(rhubarb-logging
     src/logging/sinks.h
 )
 target_include_directories(rhubarb-logging PRIVATE "src/logging")
-target_link_libraries(rhubarb-logging
-    rhubarb-tools
-)
+target_link_libraries(rhubarb-logging rhubarb-tools)

 # ... rhubarb-recognition
-add_library(rhubarb-recognition
+add_library(
+    rhubarb-recognition
     src/recognition/g2p.cpp
     src/recognition/g2p.h
     src/recognition/languageModels.cpp
@@ -423,7 +408,8 @@ add_library(rhubarb-recognition
     src/recognition/tokenization.h
 )
 target_include_directories(rhubarb-recognition PRIVATE "src/recognition")
-target_link_libraries(rhubarb-recognition
+target_link_libraries(
+    rhubarb-recognition
     flite
     pocketSphinx
     rhubarb-audio
@@ -432,7 +418,8 @@ target_link_libraries(rhubarb-recognition
 )

 # ... rhubarb-time
-add_library(rhubarb-time
+add_library(
+    rhubarb-time
     src/time/BoundedTimeline.h
     src/time/centiseconds.cpp
     src/time/centiseconds.h
@@ -444,13 +431,11 @@ add_library(rhubarb-time
     src/time/TimeRange.h
 )
 target_include_directories(rhubarb-time PRIVATE "src/time")
-target_link_libraries(rhubarb-time
-    cppFormat
-    rhubarb-logging
-)
+target_link_libraries(rhubarb-time cppFormat rhubarb-logging)

 # ... rhubarb-tools
-add_library(rhubarb-tools
+add_library(
+    rhubarb-tools
     src/tools/array.h
     src/tools/EnumConverter.h
     src/tools/exceptions.cpp
@@ -481,15 +466,11 @@ add_library(rhubarb-tools
     src/tools/tupleHash.h
 )
 target_include_directories(rhubarb-tools PRIVATE "src/tools")
-target_link_libraries(rhubarb-tools
-    cppFormat
-    whereami
-    utfcpp
-    utf8proc
-)
+target_link_libraries(rhubarb-tools cppFormat whereami utfcpp utf8proc)

 # Define Rhubarb executable
-add_executable(rhubarb
+add_executable(
+    rhubarb
     src/rhubarb/main.cpp
     src/rhubarb/ExportFormat.cpp
     src/rhubarb/ExportFormat.h
@@ -501,10 +482,7 @@ add_executable(rhubarb
     src/rhubarb/sinks.h
 )
 target_include_directories(rhubarb PUBLIC "src/rhubarb")
-target_link_libraries(rhubarb
-    rhubarb-exporters
-    rhubarb-lib
-)
+target_link_libraries(rhubarb rhubarb-exporters rhubarb-lib)
 target_compile_options(rhubarb PUBLIC ${enableWarningsFlags})

 # Define test project
@@ -521,7 +499,8 @@ set(TEST_FILES
     tests/WaveFileReaderTests.cpp
 )
 add_executable(runTests ${TEST_FILES})
-target_link_libraries(runTests
+target_link_libraries(
+    runTests
     gtest
     gmock
     gmock_main
@@ -534,23 +513,24 @@ target_link_libraries(runTests
 function(copy_and_install sourceGlob relativeTargetDirectory)
     # Set `sourcePaths`
     file(GLOB sourcePaths "${sourceGlob}")
     foreach(sourcePath ${sourcePaths})
         if(NOT IS_DIRECTORY ${sourcePath})
             # Set `fileName`
             get_filename_component(fileName "${sourcePath}" NAME)
             # Copy file during build
-            add_custom_command(TARGET rhubarb POST_BUILD
-                COMMAND ${CMAKE_COMMAND} -E copy "${sourcePath}" "$<TARGET_FILE_DIR:rhubarb>/${relativeTargetDirectory}/${fileName}"
+            add_custom_command(
+                TARGET rhubarb
+                POST_BUILD
+                COMMAND
+                    ${CMAKE_COMMAND} -E copy "${sourcePath}"
+                    "$<TARGET_FILE_DIR:rhubarb>/${relativeTargetDirectory}/${fileName}"
                 COMMENT "Creating '${relativeTargetDirectory}/${fileName}'"
             )
             # Install file
-            install(
-                FILES "${sourcePath}"
-                DESTINATION "${relativeTargetDirectory}"
-            )
+            install(FILES "${sourcePath}" DESTINATION "${relativeTargetDirectory}")
         endif()
     endforeach()
 endfunction()
@@ -559,15 +539,19 @@ endfunction()
 function(copy sourceGlob relativeTargetDirectory)
     # Set `sourcePaths`
     file(GLOB sourcePaths "${sourceGlob}")
     foreach(sourcePath ${sourcePaths})
         if(NOT IS_DIRECTORY ${sourcePath})
             # Set `fileName`
             get_filename_component(fileName "${sourcePath}" NAME)
             # Copy file during build
-            add_custom_command(TARGET rhubarb POST_BUILD
-                COMMAND ${CMAKE_COMMAND} -E copy "${sourcePath}" "$<TARGET_FILE_DIR:rhubarb>/${relativeTargetDirectory}/${fileName}"
+            add_custom_command(
+                TARGET rhubarb
+                POST_BUILD
+                COMMAND
+                    ${CMAKE_COMMAND} -E copy "${sourcePath}"
+                    "$<TARGET_FILE_DIR:rhubarb>/${relativeTargetDirectory}/${fileName}"
                 COMMENT "Creating '${relativeTargetDirectory}/${fileName}'"
             )
         endif()
@@ -579,8 +563,4 @@ copy_and_install("lib/cmusphinx-en-us-5.2/*" "res/sphinx/acoustic-model")
 copy_and_install("tests/resources/*" "tests/resources")

-install(
-    TARGETS rhubarb
-    RUNTIME
-    DESTINATION .
-)
+install(TARGETS rhubarb RUNTIME DESTINATION .)
View File
@ -1,12 +1,14 @@
#include "ShapeRule.h" #include "ShapeRule.h"
#include <boost/range/adaptor/transformed.hpp> #include <boost/range/adaptor/transformed.hpp>
#include <utility> #include <utility>
#include "time/ContinuousTimeline.h" #include "time/ContinuousTimeline.h"
using boost::optional; using boost::optional;
using boost::adaptors::transformed; using boost::adaptors::transformed;
template<typename T, bool AutoJoin> template <typename T, bool AutoJoin>
ContinuousTimeline<optional<T>, AutoJoin> boundedTimelinetoContinuousOptional( ContinuousTimeline<optional<T>, AutoJoin> boundedTimelinetoContinuousOptional(
const BoundedTimeline<T, AutoJoin>& timeline const BoundedTimeline<T, AutoJoin>& timeline
) { ) {
@ -19,18 +21,13 @@ ContinuousTimeline<optional<T>, AutoJoin> boundedTimelinetoContinuousOptional(
}; };
} }
-ShapeRule::ShapeRule(
-    ShapeSet shapeSet,
-    optional<Phone> phone,
-    TimeRange phoneTiming
-) :
+ShapeRule::ShapeRule(ShapeSet shapeSet, optional<Phone> phone, TimeRange phoneTiming) :
    shapeSet(std::move(shapeSet)),
    phone(std::move(phone)),
-    phoneTiming(phoneTiming)
-{}
+    phoneTiming(phoneTiming) {}
ShapeRule ShapeRule::getInvalid() { ShapeRule ShapeRule::getInvalid() {
return { {}, boost::none, { 0_cs, 0_cs } }; return {{}, boost::none, {0_cs, 0_cs}};
} }
bool ShapeRule::operator==(const ShapeRule& rhs) const { bool ShapeRule::operator==(const ShapeRule& rhs) const {
@ -42,8 +39,7 @@ bool ShapeRule::operator!=(const ShapeRule& rhs) const {
} }
bool ShapeRule::operator<(const ShapeRule& rhs) const { bool ShapeRule::operator<(const ShapeRule& rhs) const {
-return shapeSet < rhs.shapeSet
-    || phone < rhs.phone
+return shapeSet < rhs.shapeSet || phone < rhs.phone
    || phoneTiming.getStart() < rhs.phoneTiming.getStart()
    || phoneTiming.getEnd() < rhs.phoneTiming.getEnd();
} }
@ -54,8 +50,7 @@ ContinuousTimeline<ShapeRule> getShapeRules(const BoundedTimeline<Phone>& phones
// Create timeline of shape rules // Create timeline of shape rules
ContinuousTimeline<ShapeRule> shapeRules(
-    phones.getRange(),
-    { { Shape::X }, boost::none, { 0_cs, 0_cs } }
+    phones.getRange(), {{Shape::X}, boost::none, {0_cs, 0_cs}}
);
centiseconds previousDuration = 0_cs; centiseconds previousDuration = 0_cs;
for (const auto& timedPhone : continuousPhones) { for (const auto& timedPhone : continuousPhones) {
View File
@ -1,7 +1,7 @@
#pragma once #pragma once
#include "core/Phone.h"
#include "animationRules.h" #include "animationRules.h"
#include "core/Phone.h"
#include "time/BoundedTimeline.h" #include "time/BoundedTimeline.h"
#include "time/ContinuousTimeline.h" #include "time/ContinuousTimeline.h"
#include "time/TimeRange.h" #include "time/TimeRange.h"
View File
@ -1,15 +1,17 @@
#include "animationRules.h" #include "animationRules.h"
#include <boost/algorithm/clamp.hpp>
#include "shapeShorthands.h"
#include "tools/array.h"
#include "time/ContinuousTimeline.h"
using std::chrono::duration_cast; #include <boost/algorithm/clamp.hpp>
using boost::algorithm::clamp;
#include "shapeShorthands.h"
#include "time/ContinuousTimeline.h"
#include "tools/array.h"
using boost::optional; using boost::optional;
using boost::algorithm::clamp;
using std::array; using std::array;
using std::pair;
using std::map; using std::map;
using std::pair;
using std::chrono::duration_cast;
constexpr size_t shapeValueCount = static_cast<size_t>(Shape::EndSentinel); constexpr size_t shapeValueCount = static_cast<size_t>(Shape::EndSentinel);
@ -32,17 +34,18 @@ Shape getClosestShape(Shape reference, ShapeSet shapes) {
// A matrix that for each shape contains all shapes in ascending order of effort required to // A matrix that for each shape contains all shapes in ascending order of effort required to
// move to them // move to them
-constexpr static array<array<Shape, shapeValueCount>, shapeValueCount> effortMatrix = make_array(
-    /* A */ make_array(A, X, G, B, C, H, E, D, F),
-    /* B */ make_array(B, G, A, X, C, H, E, D, F),
-    /* C */ make_array(C, H, B, G, D, A, X, E, F),
-    /* D */ make_array(D, C, H, B, G, A, X, E, F),
-    /* E */ make_array(E, C, H, B, G, A, X, D, F),
-    /* F */ make_array(F, B, G, A, X, C, H, E, D),
-    /* G */ make_array(G, A, B, C, H, X, E, D, F),
-    /* H */ make_array(H, C, B, G, D, A, X, E, F), // Like C
-    /* X */ make_array(X, A, G, B, C, H, E, D, F) // Like A
-);
+constexpr static array<array<Shape, shapeValueCount>, shapeValueCount> effortMatrix =
+    make_array(
+        /* A */ make_array(A, X, G, B, C, H, E, D, F),
+        /* B */ make_array(B, G, A, X, C, H, E, D, F),
+        /* C */ make_array(C, H, B, G, D, A, X, E, F),
+        /* D */ make_array(D, C, H, B, G, A, X, E, F),
+        /* E */ make_array(E, C, H, B, G, A, X, D, F),
+        /* F */ make_array(F, B, G, A, X, C, H, E, D),
+        /* G */ make_array(G, A, B, C, H, X, E, D, F),
+        /* H */ make_array(H, C, B, G, D, A, X, E, F), // Like C
+        /* X */ make_array(X, A, G, B, C, H, E, D, F) // Like A
+    );
auto& closestShapes = effortMatrix.at(static_cast<size_t>(reference)); auto& closestShapes = effortMatrix.at(static_cast<size_t>(reference));
for (Shape closestShape : closestShapes) { for (Shape closestShape : closestShapes) {
@ -58,32 +61,31 @@ optional<pair<Shape, TweenTiming>> getTween(Shape first, Shape second) {
// Note that most of the following rules work in one direction only. // Note that most of the following rules work in one direction only.
// That's because in animation, the mouth should usually "pop" open without inbetweens, // That's because in animation, the mouth should usually "pop" open without inbetweens,
// then close slowly. // then close slowly.
-static const map<pair<Shape, Shape>, pair<Shape, TweenTiming>> lookup {
-    { { D, A }, { C, TweenTiming::Early } },
-    { { D, B }, { C, TweenTiming::Centered } },
-    { { D, G }, { C, TweenTiming::Early } },
-    { { D, X }, { C, TweenTiming::Late } },
-    { { C, F }, { E, TweenTiming::Centered } }, { { F, C }, { E, TweenTiming::Centered } },
-    { { D, F }, { E, TweenTiming::Centered } },
-    { { H, F }, { E, TweenTiming::Late } }, { { F, H }, { E, TweenTiming::Early } }
+static const map<pair<Shape, Shape>, pair<Shape, TweenTiming>> lookup{
+    {{D, A}, {C, TweenTiming::Early}},
+    {{D, B}, {C, TweenTiming::Centered}},
+    {{D, G}, {C, TweenTiming::Early}},
+    {{D, X}, {C, TweenTiming::Late}},
+    {{C, F}, {E, TweenTiming::Centered}},
+    {{F, C}, {E, TweenTiming::Centered}},
+    {{D, F}, {E, TweenTiming::Centered}},
+    {{H, F}, {E, TweenTiming::Late}},
+    {{F, H}, {E, TweenTiming::Early}}
};
-const auto it = lookup.find({ first, second });
+const auto it = lookup.find({first, second});
return it != lookup.end() ? it->second : optional<pair<Shape, TweenTiming>>();
} }
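The lookup above is deliberately asymmetric: getTween(D, A) produces an inbetween, while the reverse direction does not, so the mouth pops open without a tween and closes gradually. As a standalone illustration (not part of this commit), here is a minimal sketch of the same lookup pattern built from plain standard-library types; the enum names and the single table entry are stand-ins, not Rhubarb's actual data.

#include <iostream>
#include <map>
#include <optional>
#include <utility>

enum class MouthShape { A, C, D };
enum class Timing { Early, Late };

// Only the D -> A direction gets an inbetween; A -> D "pops" open directly.
std::optional<std::pair<MouthShape, Timing>> findTween(MouthShape first, MouthShape second) {
    static const std::map<std::pair<MouthShape, MouthShape>, std::pair<MouthShape, Timing>> lookup{
        {{MouthShape::D, MouthShape::A}, {MouthShape::C, Timing::Early}}
    };
    const auto it = lookup.find({first, second});
    return it != lookup.end() ? it->second : std::optional<std::pair<MouthShape, Timing>>();
}

int main() {
    std::cout << std::boolalpha;
    std::cout << findTween(MouthShape::D, MouthShape::A).has_value() << '\n'; // true
    std::cout << findTween(MouthShape::A, MouthShape::D).has_value() << '\n'; // false
}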
Timeline<ShapeSet> getShapeSets(Phone phone, centiseconds duration, centiseconds previousDuration) { Timeline<ShapeSet> getShapeSets(Phone phone, centiseconds duration, centiseconds previousDuration) {
// Returns a timeline with a single shape set // Returns a timeline with a single shape set
const auto single = [duration](ShapeSet value) { const auto single = [duration](ShapeSet value) {
return Timeline<ShapeSet> { { 0_cs, duration, value } }; return Timeline<ShapeSet>{{0_cs, duration, value}};
}; };
// Returns a timeline with two shape sets, timed as a diphthong // Returns a timeline with two shape sets, timed as a diphthong
const auto diphthong = [duration](ShapeSet first, ShapeSet second) { const auto diphthong = [duration](ShapeSet first, ShapeSet second) {
const centiseconds firstDuration = duration_cast<centiseconds>(duration * 0.6); const centiseconds firstDuration = duration_cast<centiseconds>(duration * 0.6);
return Timeline<ShapeSet> { return Timeline<ShapeSet>{{0_cs, firstDuration, first}, {firstDuration, duration, second}};
{ 0_cs, firstDuration, first },
{ firstDuration, duration, second }
};
}; };
// Returns a timeline with two shape sets, timed as a plosive // Returns a timeline with two shape sets, timed as a plosive
@ -92,10 +94,7 @@ Timeline<ShapeSet> getShapeSets(Phone phone, centiseconds duration, centiseconds
const centiseconds maxOcclusionDuration = 12_cs; const centiseconds maxOcclusionDuration = 12_cs;
const centiseconds occlusionDuration = const centiseconds occlusionDuration =
clamp(previousDuration / 2, minOcclusionDuration, maxOcclusionDuration); clamp(previousDuration / 2, minOcclusionDuration, maxOcclusionDuration);
return Timeline<ShapeSet> { return Timeline<ShapeSet>{{-occlusionDuration, 0_cs, first}, {0_cs, duration, second}};
{ -occlusionDuration, 0_cs, first },
{ 0_cs, duration, second }
};
}; };
// Returns the result of `getShapeSets` when called with identical arguments // Returns the result of `getShapeSets` when called with identical arguments
@ -104,8 +103,8 @@ Timeline<ShapeSet> getShapeSets(Phone phone, centiseconds duration, centiseconds
return getShapeSets(referencePhone, duration, previousDuration); return getShapeSets(referencePhone, duration, previousDuration);
}; };
static const ShapeSet any { A, B, C, D, E, F, G, H, X }; static const ShapeSet any{A, B, C, D, E, F, G, H, X};
static const ShapeSet anyOpen { B, C, D, E, F, G, H }; static const ShapeSet anyOpen{B, C, D, E, F, G, H};
// Note: // Note:
// The shapes {A, B, G, X} are very similar. You should avoid regular shape sets containing more // The shapes {A, B, G, X} are very similar. You should avoid regular shape sets containing more
@ -114,52 +113,52 @@ Timeline<ShapeSet> getShapeSets(Phone phone, centiseconds duration, centiseconds
// As an exception, a very flexible rule may contain *all* these shapes. // As an exception, a very flexible rule may contain *all* these shapes.
switch (phone) { switch (phone) {
case Phone::AO: return single({ E }); case Phone::AO: return single({E});
case Phone::AA: return single({ D }); case Phone::AA: return single({D});
case Phone::IY: return single({ B }); case Phone::IY: return single({B});
case Phone::UW: return single({ F }); case Phone::UW: return single({F});
case Phone::EH: return single({ C }); case Phone::EH: return single({C});
case Phone::IH: return single({ B }); case Phone::IH: return single({B});
case Phone::UH: return single({ F }); case Phone::UH: return single({F});
case Phone::AH: return duration < 20_cs ? single({ C }) : single({ D }); case Phone::AH: return duration < 20_cs ? single({C}) : single({D});
case Phone::Schwa: return single({ B, C }); case Phone::Schwa: return single({B, C});
case Phone::AE: return single({ C }); case Phone::AE: return single({C});
case Phone::EY: return diphthong({ C }, { B }); case Phone::EY: return diphthong({C}, {B});
case Phone::AY: return duration < 20_cs ? diphthong({ C }, { B }) : diphthong({ D }, { B }); case Phone::AY: return duration < 20_cs ? diphthong({C}, {B}) : diphthong({D}, {B});
case Phone::OW: return diphthong({ E }, { F }); case Phone::OW: return diphthong({E}, {F});
case Phone::AW: return duration < 30_cs ? diphthong({ C }, { E }) : diphthong({ D }, { E }); case Phone::AW: return duration < 30_cs ? diphthong({C}, {E}) : diphthong({D}, {E});
case Phone::OY: return diphthong({ E }, { B }); case Phone::OY: return diphthong({E}, {B});
case Phone::ER: return duration < 7_cs ? like(Phone::Schwa) : single({ E }); case Phone::ER: return duration < 7_cs ? like(Phone::Schwa) : single({E});
case Phone::P: case Phone::P:
case Phone::B: return plosive({ A }, any); case Phone::B: return plosive({A}, any);
case Phone::T: case Phone::T:
case Phone::D: return plosive({ B, F }, anyOpen); case Phone::D: return plosive({B, F}, anyOpen);
case Phone::K: case Phone::K:
case Phone::G: return plosive({ B, C, E, F, H }, anyOpen); case Phone::G: return plosive({B, C, E, F, H}, anyOpen);
case Phone::CH: case Phone::CH:
case Phone::JH: return single({ B, F }); case Phone::JH: return single({B, F});
case Phone::F: case Phone::F:
case Phone::V: return single({ G }); case Phone::V: return single({G});
case Phone::TH: case Phone::TH:
case Phone::DH: case Phone::DH:
case Phone::S: case Phone::S:
case Phone::Z: case Phone::Z:
case Phone::SH: case Phone::SH:
case Phone::ZH: return single({ B, F }); case Phone::ZH: return single({B, F});
case Phone::HH: return single(any); // think "m-hm" case Phone::HH: return single(any); // think "m-hm"
case Phone::M: return single({ A }); case Phone::M: return single({A});
case Phone::N: return single({ B, C, F, H }); case Phone::N: return single({B, C, F, H});
case Phone::NG: return single({ B, C, E, F }); case Phone::NG: return single({B, C, E, F});
case Phone::L: return duration < 20_cs ? single({ B, E, F, H }) : single({ H }); case Phone::L: return duration < 20_cs ? single({B, E, F, H}) : single({H});
case Phone::R: return single({ B, E, F }); case Phone::R: return single({B, E, F});
case Phone::Y: return single({ B, C, F }); case Phone::Y: return single({B, C, F});
case Phone::W: return single({ F }); case Phone::W: return single({F});
case Phone::Breath: case Phone::Breath:
case Phone::Cough: case Phone::Cough:
case Phone::Smack: return single({ C }); case Phone::Smack: return single({C});
case Phone::Noise: return single({ B }); case Phone::Noise: return single({B});
default: throw std::invalid_argument("Unexpected phone."); default: throw std::invalid_argument("Unexpected phone.");
} }
View File
@ -1,9 +1,10 @@
#pragma once #pragma once
#include <set> #include <set>
#include "core/Phone.h"
#include "core/Shape.h" #include "core/Shape.h"
#include "time/Timeline.h" #include "time/Timeline.h"
#include "core/Phone.h"
// Returns the basic shape (A-F) that most closely resembles the specified shape. // Returns the basic shape (A-F) that most closely resembles the specified shape.
Shape getBasicShape(Shape shape); Shape getBasicShape(Shape shape);
View File
@ -1,16 +1,16 @@
#include "mouthAnimation.h" #include "mouthAnimation.h"
#include "time/timedLogging.h"
#include "ShapeRule.h"
#include "roughAnimation.h"
#include "pauseAnimation.h" #include "pauseAnimation.h"
#include "tweening.h" #include "roughAnimation.h"
#include "timingOptimization.h" #include "ShapeRule.h"
#include "targetShapeSet.h"
#include "staticSegments.h" #include "staticSegments.h"
#include "targetShapeSet.h"
#include "time/timedLogging.h"
#include "timingOptimization.h"
#include "tweening.h"
JoiningContinuousTimeline<Shape> animate( JoiningContinuousTimeline<Shape> animate(
const BoundedTimeline<Phone>& phones, const BoundedTimeline<Phone>& phones, const ShapeSet& targetShapeSet
const ShapeSet& targetShapeSet
) { ) {
// Create timeline of shape rules // Create timeline of shape rules
ContinuousTimeline<ShapeRule> shapeRules = getShapeRules(phones); ContinuousTimeline<ShapeRule> shapeRules = getShapeRules(phones);
View File
@ -2,10 +2,9 @@
#include "core/Phone.h" #include "core/Phone.h"
#include "core/Shape.h" #include "core/Shape.h"
#include "time/ContinuousTimeline.h"
#include "targetShapeSet.h" #include "targetShapeSet.h"
#include "time/ContinuousTimeline.h"
JoiningContinuousTimeline<Shape> animate( JoiningContinuousTimeline<Shape> animate(
const BoundedTimeline<Phone>& phones, const BoundedTimeline<Phone>& phones, const ShapeSet& targetShapeSet
const ShapeSet& targetShapeSet
); );
View File
@ -1,4 +1,5 @@
#include "pauseAnimation.h" #include "pauseAnimation.h"
#include "animationRules.h" #include "animationRules.h"
Shape getPauseShape(Shape previous, Shape next, centiseconds duration) { Shape getPauseShape(Shape previous, Shape next, centiseconds duration) {
@ -30,7 +31,7 @@ Shape getPauseShape(Shape previous, Shape next, centiseconds duration) {
JoiningContinuousTimeline<Shape> animatePauses(const JoiningContinuousTimeline<Shape>& animation) { JoiningContinuousTimeline<Shape> animatePauses(const JoiningContinuousTimeline<Shape>& animation) {
JoiningContinuousTimeline<Shape> result(animation); JoiningContinuousTimeline<Shape> result(animation);
for_each_adjacent( for_each_adjacent(
animation.begin(), animation.begin(),
animation.end(), animation.end(),
View File
@ -1,4 +1,5 @@
#include "roughAnimation.h" #include "roughAnimation.h"
#include <boost/optional.hpp> #include <boost/optional.hpp>
// Create timeline of shapes using a bidirectional algorithm. // Create timeline of shapes using a bidirectional algorithm.
@ -22,9 +23,8 @@ JoiningContinuousTimeline<Shape> animateRough(const ContinuousTimeline<ShapeRule
const ShapeRule shapeRule = it->getValue(); const ShapeRule shapeRule = it->getValue();
const Shape shape = getClosestShape(referenceShape, shapeRule.shapeSet); const Shape shape = getClosestShape(referenceShape, shapeRule.shapeSet);
animation.set(it->getTimeRange(), shape); animation.set(it->getTimeRange(), shape);
const bool anticipateShape = shapeRule.phone const bool anticipateShape =
&& isVowel(*shapeRule.phone) shapeRule.phone && isVowel(*shapeRule.phone) && shapeRule.shapeSet.size() == 1;
&& shapeRule.shapeSet.size() == 1;
if (anticipateShape) { if (anticipateShape) {
// Animate backwards a little // Animate backwards a little
const Shape anticipatedShape = shape; const Shape anticipatedShape = shape;
View File
@ -1,6 +1,8 @@
#include "staticSegments.h" #include "staticSegments.h"
#include <vector>
#include <numeric> #include <numeric>
#include <vector>
#include "tools/nextCombination.h" #include "tools/nextCombination.h"
using std::vector; using std::vector;
@ -49,7 +51,7 @@ vector<TimeRange> getStaticSegments(
result.push_back(timeRange); result.push_back(timeRange);
} }
} }
return result; return result;
} }
@ -67,8 +69,8 @@ ShapeRule getChangedShapeRule(const ShapeRule& rule) {
ShapeRule result(rule); ShapeRule result(rule);
// So far, I've only encountered B as a static shape. // So far, I've only encountered B as a static shape.
// If there is ever a problem with another static shape, this function can easily be extended. // If there is ever a problem with another static shape, this function can easily be extended.
if (rule.shapeSet == ShapeSet { Shape::B }) { if (rule.shapeSet == ShapeSet{Shape::B}) {
result.shapeSet = { Shape::C }; result.shapeSet = {Shape::C};
} }
return result; return result;
} }
@ -78,8 +80,7 @@ using RuleChanges = vector<centiseconds>;
// Replaces the indicated shape rules with slightly different ones, breaking up long static segments // Replaces the indicated shape rules with slightly different ones, breaking up long static segments
ContinuousTimeline<ShapeRule> applyChanges( ContinuousTimeline<ShapeRule> applyChanges(
const ContinuousTimeline<ShapeRule>& shapeRules, const ContinuousTimeline<ShapeRule>& shapeRules, const RuleChanges& changes
const RuleChanges& changes
) { ) {
ContinuousTimeline<ShapeRule> result(shapeRules); ContinuousTimeline<ShapeRule> result(shapeRules);
for (centiseconds changedRuleStart : changes) { for (centiseconds changedRuleStart : changes) {
@ -99,8 +100,7 @@ public:
) : ) :
changedRules(applyChanges(originalRules, changes)), changedRules(applyChanges(originalRules, changes)),
animation(animate(changedRules)), animation(animate(changedRules)),
staticSegments(getStaticSegments(changedRules, animation)) staticSegments(getStaticSegments(changedRules, animation)) {}
{}
bool isBetterThan(const RuleChangeScenario& rhs) const { bool isBetterThan(const RuleChangeScenario& rhs) const {
// We want zero static segments // We want zero static segments
@ -132,8 +132,9 @@ private:
0.0, 0.0,
[](const double sum, const Timed<Shape>& timedShape) { [](const double sum, const Timed<Shape>& timedShape) {
const double duration = std::chrono::duration_cast<std::chrono::duration<double>>( const double duration = std::chrono::duration_cast<std::chrono::duration<double>>(
timedShape.getDuration() timedShape.getDuration()
).count(); )
.count();
return sum + duration * duration; return sum + duration * duration;
} }
); );
@ -152,8 +153,7 @@ RuleChanges getPossibleRuleChanges(const ContinuousTimeline<ShapeRule>& shapeRul
} }
ContinuousTimeline<ShapeRule> fixStaticSegmentRules( ContinuousTimeline<ShapeRule> fixStaticSegmentRules(
const ContinuousTimeline<ShapeRule>& shapeRules, const ContinuousTimeline<ShapeRule>& shapeRules, const AnimationFunction& animate
const AnimationFunction& animate
) { ) {
// The complexity of this function is exponential with the number of replacements. // The complexity of this function is exponential with the number of replacements.
// So let's cap that value. // So let's cap that value.
@ -164,23 +164,26 @@ ContinuousTimeline<ShapeRule> fixStaticSegmentRules(
// Find best solution. Start with a single replacement, then increase as necessary. // Find best solution. Start with a single replacement, then increase as necessary.
RuleChangeScenario bestScenario(shapeRules, {}, animate); RuleChangeScenario bestScenario(shapeRules, {}, animate);
-for (
-    int replacementCount = 1;
-    bestScenario.getStaticSegmentCount() > 0 && replacementCount <= std::min(static_cast<int>(possibleRuleChanges.size()), maxReplacementCount);
-    ++replacementCount
-) {
+for (int replacementCount = 1; bestScenario.getStaticSegmentCount() > 0
+     && replacementCount
+         <= std::min(static_cast<int>(possibleRuleChanges.size()), maxReplacementCount);
+     ++replacementCount) {
    // Only the first <replacementCount> elements of `currentRuleChanges` count
    auto currentRuleChanges(possibleRuleChanges);
    do {
        RuleChangeScenario currentScenario(
            shapeRules,
-            { currentRuleChanges.begin(), currentRuleChanges.begin() + replacementCount },
+            {currentRuleChanges.begin(), currentRuleChanges.begin() + replacementCount},
            animate
        );
        if (currentScenario.isBetterThan(bestScenario)) {
            bestScenario = currentScenario;
        }
-    } while (next_combination(currentRuleChanges.begin(), currentRuleChanges.begin() + replacementCount, currentRuleChanges.end()));
+    } while (next_combination(
+        currentRuleChanges.begin(),
+        currentRuleChanges.begin() + replacementCount,
+        currentRuleChanges.end()
+    ));
} }
return bestScenario.getChangedRules(); return bestScenario.getChangedRules();
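The loop above tries every combination of replacementCount rule changes, which is why the comment warns about exponential cost and caps the count. As a standalone illustration (not part of this commit), the same enumeration can be written with the standard selection-mask idiom instead of Rhubarb's next_combination helper; the candidate values here are arbitrary.

#include <algorithm>
#include <iostream>
#include <string>
#include <vector>

int main() {
    const std::vector<int> candidates{10, 20, 30, 40};
    const std::size_t k = 2; // capping the subset size keeps the exponential search bounded

    // Classic selection-mask idiom: k ones followed by zeros, permuted in descending order.
    std::string mask(k, 1);
    mask.resize(candidates.size(), 0);
    do {
        // Print the currently selected k-element combination.
        for (std::size_t i = 0; i < candidates.size(); ++i) {
            if (mask[i]) std::cout << candidates[i] << ' ';
        }
        std::cout << '\n';
    } while (std::prev_permutation(mask.begin(), mask.end()));
}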
@ -194,8 +197,7 @@ bool isFlexible(const ShapeRule& rule) {
// Extends the specified time range until it starts and ends with a non-flexible shape rule, if // Extends the specified time range until it starts and ends with a non-flexible shape rule, if
// possible // possible
TimeRange extendToFixedRules( TimeRange extendToFixedRules(
const TimeRange& timeRange, const TimeRange& timeRange, const ContinuousTimeline<ShapeRule>& shapeRules
const ContinuousTimeline<ShapeRule>& shapeRules
) { ) {
auto first = shapeRules.find(timeRange.getStart()); auto first = shapeRules.find(timeRange.getStart());
while (first != shapeRules.begin() && isFlexible(first->getValue())) { while (first != shapeRules.begin() && isFlexible(first->getValue())) {
@ -205,12 +207,11 @@ TimeRange extendToFixedRules(
while (std::next(last) != shapeRules.end() && isFlexible(last->getValue())) { while (std::next(last) != shapeRules.end() && isFlexible(last->getValue())) {
++last; ++last;
} }
return { first->getStart(), last->getEnd() }; return {first->getStart(), last->getEnd()};
} }
JoiningContinuousTimeline<Shape> avoidStaticSegments( JoiningContinuousTimeline<Shape> avoidStaticSegments(
const ContinuousTimeline<ShapeRule>& shapeRules, const ContinuousTimeline<ShapeRule>& shapeRules, const AnimationFunction& animate
const AnimationFunction& animate
) { ) {
const auto animation = animate(shapeRules); const auto animation = animate(shapeRules);
const vector<TimeRange> staticSegments = getStaticSegments(shapeRules, animation); const vector<TimeRange> staticSegments = getStaticSegments(shapeRules, animation);
@ -227,8 +228,7 @@ JoiningContinuousTimeline<Shape> avoidStaticSegments(
// Fix shape rules within the static segment // Fix shape rules within the static segment
const auto fixedSegmentShapeRules = fixStaticSegmentRules( const auto fixedSegmentShapeRules = fixStaticSegmentRules(
{ extendedStaticSegment, ShapeRule::getInvalid(), fixedShapeRules }, {extendedStaticSegment, ShapeRule::getInvalid(), fixedShapeRules}, animate
animate
); );
for (const auto& timedShapeRule : fixedSegmentShapeRules) { for (const auto& timedShapeRule : fixedSegmentShapeRules) {
fixedShapeRules.set(timedShapeRule); fixedShapeRules.set(timedShapeRule);
View File
@ -1,18 +1,20 @@
#pragma once #pragma once
#include "core/Shape.h"
#include "time/ContinuousTimeline.h"
#include "ShapeRule.h"
#include <functional> #include <functional>
using AnimationFunction = std::function<JoiningContinuousTimeline<Shape>(const ContinuousTimeline<ShapeRule>&)>; #include "core/Shape.h"
#include "ShapeRule.h"
#include "time/ContinuousTimeline.h"
using AnimationFunction =
std::function<JoiningContinuousTimeline<Shape>(const ContinuousTimeline<ShapeRule>&)>;
// Calls the specified animation function with the specified shape rules. // Calls the specified animation function with the specified shape rules.
// If the resulting animation contains long static segments, the shape rules are tweaked and // If the resulting animation contains long static segments, the shape rules are tweaked and
// animated again. // animated again.
// Static segments happen rather often. // Static segments happen rather often.
// See http://animateducated.blogspot.de/2016/10/lip-sync-animation-2.html?showComment=1478861729702#c2940729096183546458. // See
// http://animateducated.blogspot.de/2016/10/lip-sync-animation-2.html?showComment=1478861729702#c2940729096183546458.
JoiningContinuousTimeline<Shape> avoidStaticSegments( JoiningContinuousTimeline<Shape> avoidStaticSegments(
const ContinuousTimeline<ShapeRule>& shapeRules, const ContinuousTimeline<ShapeRule>& shapeRules, const AnimationFunction& animate
const AnimationFunction& animate
); );
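avoidStaticSegments receives the animation function as a parameter so that it can re-run it on tweaked rules whenever the first pass yields long static segments. A standalone sketch of that retry pattern (not part of this commit), reduced to plain standard-library types; Rules, Animation, the tweak step, and the detector are illustrative stand-ins for Rhubarb's real logic.

#include <functional>
#include <iostream>
#include <vector>

using Rules = std::vector<int>;
using Animation = std::vector<int>;
using AnimationFn = std::function<Animation(const Rules&)>;

// Pretend detector: a "static segment" is three equal values in a row.
bool hasLongStaticSegment(const Animation& a) {
    for (std::size_t i = 2; i < a.size(); ++i) {
        if (a[i] == a[i - 1] && a[i] == a[i - 2]) return true;
    }
    return false;
}

Animation animateAvoidingStaticSegments(Rules rules, const AnimationFn& animate) {
    Animation result = animate(rules);
    // Tweak the rules and animate again until the long static segment disappears
    // (bounded so the sketch always terminates).
    for (int attempt = 0; attempt < 3 && hasLongStaticSegment(result); ++attempt) {
        for (std::size_t i = 1; i < rules.size(); i += 2) rules[i] += 1; // arbitrary tweak
        result = animate(rules);
    }
    return result;
}

int main() {
    const AnimationFn identity = [](const Rules& r) { return r; };
    for (int v : animateAvoidingStaticSegments({1, 1, 1, 2}, identity)) std::cout << v << ' ';
    std::cout << '\n';
}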
View File
@ -7,7 +7,8 @@ Shape convertToTargetShapeSet(Shape shape, const ShapeSet& targetShapeSet) {
const Shape basicShape = getBasicShape(shape); const Shape basicShape = getBasicShape(shape);
if (targetShapeSet.find(basicShape) == targetShapeSet.end()) { if (targetShapeSet.find(basicShape) == targetShapeSet.end()) {
throw std::invalid_argument( throw std::invalid_argument(
fmt::format("Target shape set must contain basic shape {}.", basicShape)); fmt::format("Target shape set must contain basic shape {}.", basicShape)
);
} }
return basicShape; return basicShape;
} }
@ -21,8 +22,7 @@ ShapeSet convertToTargetShapeSet(const ShapeSet& shapes, const ShapeSet& targetS
} }
ContinuousTimeline<ShapeRule> convertToTargetShapeSet( ContinuousTimeline<ShapeRule> convertToTargetShapeSet(
const ContinuousTimeline<ShapeRule>& shapeRules, const ContinuousTimeline<ShapeRule>& shapeRules, const ShapeSet& targetShapeSet
const ShapeSet& targetShapeSet
) { ) {
ContinuousTimeline<ShapeRule> result(shapeRules); ContinuousTimeline<ShapeRule> result(shapeRules);
for (const auto& timedShapeRule : shapeRules) { for (const auto& timedShapeRule : shapeRules) {
@ -34,8 +34,7 @@ ContinuousTimeline<ShapeRule> convertToTargetShapeSet(
} }
JoiningContinuousTimeline<Shape> convertToTargetShapeSet( JoiningContinuousTimeline<Shape> convertToTargetShapeSet(
const JoiningContinuousTimeline<Shape>& animation, const JoiningContinuousTimeline<Shape>& animation, const ShapeSet& targetShapeSet
const ShapeSet& targetShapeSet
) { ) {
JoiningContinuousTimeline<Shape> result(animation); JoiningContinuousTimeline<Shape> result(animation);
for (const auto& timedShape : animation) { for (const auto& timedShape : animation) {
View File
@ -12,13 +12,11 @@ ShapeSet convertToTargetShapeSet(const ShapeSet& shapes, const ShapeSet& targetS
// Replaces each shape in each rule with the closest shape that occurs in the target shape set. // Replaces each shape in each rule with the closest shape that occurs in the target shape set.
ContinuousTimeline<ShapeRule> convertToTargetShapeSet( ContinuousTimeline<ShapeRule> convertToTargetShapeSet(
const ContinuousTimeline<ShapeRule>& shapeRules, const ContinuousTimeline<ShapeRule>& shapeRules, const ShapeSet& targetShapeSet
const ShapeSet& targetShapeSet
); );
// Replaces each shape in the specified animation with the closest shape that occurs in the target // Replaces each shape in the specified animation with the closest shape that occurs in the target
// shape set. // shape set.
JoiningContinuousTimeline<Shape> convertToTargetShapeSet( JoiningContinuousTimeline<Shape> convertToTargetShapeSet(
const JoiningContinuousTimeline<Shape>& animation, const JoiningContinuousTimeline<Shape>& animation, const ShapeSet& targetShapeSet
const ShapeSet& targetShapeSet
); );
View File
@ -1,12 +1,14 @@
#include "timingOptimization.h" #include "timingOptimization.h"
#include "time/timedLogging.h"
#include <algorithm>
#include <boost/lexical_cast.hpp> #include <boost/lexical_cast.hpp>
#include <map> #include <map>
#include <algorithm>
#include "ShapeRule.h"
using std::string; #include "ShapeRule.h"
#include "time/timedLogging.h"
using std::map; using std::map;
using std::string;
string getShapesString(const JoiningContinuousTimeline<Shape>& shapes) { string getShapesString(const JoiningContinuousTimeline<Shape>& shapes) {
string result; string result;
@ -32,8 +34,9 @@ Shape getRepresentativeShape(const JoiningTimeline<Shape>& timeline) {
// Select shape with highest total duration within the candidate range // Select shape with highest total duration within the candidate range
const Shape bestShape = std::max_element( const Shape bestShape = std::max_element(
candidateShapeWeights.begin(), candidateShapeWeights.end(), candidateShapeWeights.begin(),
[](auto a, auto b) { return a.second < b.second; } candidateShapeWeights.end(),
[](auto a, auto b) { return a.second < b.second; }
)->first; )->first;
// Shapes C and D are similar, but D is more interesting. // Shapes C and D are similar, but D is more interesting.
@ -55,8 +58,11 @@ struct ShapeReduction {
// Returns a time range of candidate shapes for the next shape to draw. // Returns a time range of candidate shapes for the next shape to draw.
// Guaranteed to be non-empty. // Guaranteed to be non-empty.
TimeRange getNextMinimalCandidateRange(const JoiningContinuousTimeline<Shape>& sourceShapes, TimeRange getNextMinimalCandidateRange(
const TimeRange targetRange, const centiseconds writePosition) { const JoiningContinuousTimeline<Shape>& sourceShapes,
const TimeRange targetRange,
const centiseconds writePosition
) {
if (sourceShapes.empty()) { if (sourceShapes.empty()) {
throw std::invalid_argument("Cannot determine candidate range for empty source timeline."); throw std::invalid_argument("Cannot determine candidate range for empty source timeline.");
} }
@ -69,9 +75,8 @@ TimeRange getNextMinimalCandidateRange(const JoiningContinuousTimeline<Shape>& s
const centiseconds remainingTargetDuration = writePosition - targetRange.getStart(); const centiseconds remainingTargetDuration = writePosition - targetRange.getStart();
const bool canFitOneOrLess = remainingTargetDuration <= minShapeDuration; const bool canFitOneOrLess = remainingTargetDuration <= minShapeDuration;
const bool canFitTwo = remainingTargetDuration >= 2 * minShapeDuration; const bool canFitTwo = remainingTargetDuration >= 2 * minShapeDuration;
const centiseconds duration = canFitOneOrLess || canFitTwo const centiseconds duration =
? minShapeDuration canFitOneOrLess || canFitTwo ? minShapeDuration : remainingTargetDuration / 2;
: remainingTargetDuration / 2;
TimeRange candidateRange(writePosition - duration, writePosition); TimeRange candidateRange(writePosition - duration, writePosition);
if (writePosition == targetRange.getEnd()) { if (writePosition == targetRange.getEnd()) {
@ -102,22 +107,24 @@ ShapeReduction getNextShapeReduction(
// Determine the next time range of candidate shapes. Consider two scenarios: // Determine the next time range of candidate shapes. Consider two scenarios:
// ... the shortest-possible candidate range // ... the shortest-possible candidate range
const ShapeReduction minReduction(sourceShapes, const ShapeReduction minReduction(
getNextMinimalCandidateRange(sourceShapes, targetRange, writePosition)); sourceShapes, getNextMinimalCandidateRange(sourceShapes, targetRange, writePosition)
);
// ... a candidate range extended to the left to fully encompass its left-most shape // ... a candidate range extended to the left to fully encompass its left-most shape
const ShapeReduction extendedReduction(sourceShapes, const ShapeReduction extendedReduction(
{ sourceShapes,
minReduction.sourceShapes.begin()->getStart(), {minReduction.sourceShapes.begin()->getStart(),
minReduction.sourceShapes.getRange().getEnd() minReduction.sourceShapes.getRange().getEnd()}
}
); );
// Determine the shape that might be picked *next* if we choose the shortest-possible candidate // Determine the shape that might be picked *next* if we choose the shortest-possible candidate
// range now // range now
const ShapeReduction nextReduction( const ShapeReduction nextReduction(
sourceShapes, sourceShapes,
getNextMinimalCandidateRange(sourceShapes, targetRange, minReduction.sourceShapes.getRange().getStart()) getNextMinimalCandidateRange(
sourceShapes, targetRange, minReduction.sourceShapes.getRange().getStart()
)
); );
const bool minEqualsExtended = minReduction.shape == extendedReduction.shape; const bool minEqualsExtended = minReduction.shape == extendedReduction.shape;
@ -129,8 +136,9 @@ ShapeReduction getNextShapeReduction(
// Modifies the timing of the given animation to fit into the specified target time range without // Modifies the timing of the given animation to fit into the specified target time range without
// jitter. // jitter.
JoiningContinuousTimeline<Shape> retime(const JoiningContinuousTimeline<Shape>& sourceShapes, JoiningContinuousTimeline<Shape> retime(
const TimeRange targetRange) { const JoiningContinuousTimeline<Shape>& sourceShapes, const TimeRange targetRange
) {
logTimedEvent("segment", targetRange, getShapesString(sourceShapes)); logTimedEvent("segment", targetRange, getShapesString(sourceShapes));
JoiningContinuousTimeline<Shape> result(targetRange, Shape::X); JoiningContinuousTimeline<Shape> result(targetRange, Shape::X);
@ -139,7 +147,6 @@ JoiningContinuousTimeline<Shape> retime(const JoiningContinuousTimeline<Shape>&
// Animate backwards // Animate backwards
centiseconds writePosition = targetRange.getEnd(); centiseconds writePosition = targetRange.getEnd();
while (writePosition > targetRange.getStart()) { while (writePosition > targetRange.getStart()) {
// Decide which shape to show next, possibly discarding short shapes // Decide which shape to show next, possibly discarding short shapes
const ShapeReduction shapeReduction = const ShapeReduction shapeReduction =
getNextShapeReduction(sourceShapes, targetRange, writePosition); getNextShapeReduction(sourceShapes, targetRange, writePosition);
@ -162,31 +169,22 @@ JoiningContinuousTimeline<Shape> retime(const JoiningContinuousTimeline<Shape>&
} }
JoiningContinuousTimeline<Shape> retime( JoiningContinuousTimeline<Shape> retime(
const JoiningContinuousTimeline<Shape>& animation, const JoiningContinuousTimeline<Shape>& animation, TimeRange sourceRange, TimeRange targetRange
TimeRange sourceRange,
TimeRange targetRange
) { ) {
const auto sourceShapes = JoiningContinuousTimeline<Shape>(sourceRange, Shape::X, animation); const auto sourceShapes = JoiningContinuousTimeline<Shape>(sourceRange, Shape::X, animation);
return retime(sourceShapes, targetRange); return retime(sourceShapes, targetRange);
} }
-enum class MouthState {
-    Idle,
-    Closed,
-    Open
-};
+enum class MouthState { Idle, Closed, Open };
JoiningContinuousTimeline<Shape> optimizeTiming(const JoiningContinuousTimeline<Shape>& animation) { JoiningContinuousTimeline<Shape> optimizeTiming(const JoiningContinuousTimeline<Shape>& animation) {
// Identify segments with idle, closed, and open mouth shapes // Identify segments with idle, closed, and open mouth shapes
JoiningContinuousTimeline<MouthState> segments(animation.getRange(), MouthState::Idle); JoiningContinuousTimeline<MouthState> segments(animation.getRange(), MouthState::Idle);
for (const auto& timedShape : animation) { for (const auto& timedShape : animation) {
const Shape shape = timedShape.getValue(); const Shape shape = timedShape.getValue();
-const MouthState mouthState =
-    shape == Shape::X
-    ? MouthState::Idle
-    : shape == Shape::A
-    ? MouthState::Closed
-    : MouthState::Open;
+const MouthState mouthState = shape == Shape::X ? MouthState::Idle
+    : shape == Shape::A ? MouthState::Closed
+    : MouthState::Open;
segments.set(timedShape.getTimeRange(), mouthState); segments.set(timedShape.getTimeRange(), mouthState);
} }
@ -219,11 +217,8 @@ JoiningContinuousTimeline<Shape> optimizeTiming(const JoiningContinuousTimeline<
// evenly. // evenly.
const auto begin = segmentIt; const auto begin = segmentIt;
auto end = std::next(begin); auto end = std::next(begin);
while ( while (end != segments.rend() && end->getValue() != MouthState::Idle
end != segments.rend() && end->getDuration() < minSegmentDuration) {
&& end->getValue() != MouthState::Idle
&& end->getDuration() < minSegmentDuration
) {
++end; ++end;
} }
@ -232,20 +227,19 @@ JoiningContinuousTimeline<Shape> optimizeTiming(const JoiningContinuousTimeline<
const centiseconds desiredDuration = minSegmentDuration * shortSegmentCount; const centiseconds desiredDuration = minSegmentDuration * shortSegmentCount;
const centiseconds currentDuration = begin->getEnd() - std::prev(end)->getStart(); const centiseconds currentDuration = begin->getEnd() - std::prev(end)->getStart();
const centiseconds desiredExtensionDuration = desiredDuration - currentDuration; const centiseconds desiredExtensionDuration = desiredDuration - currentDuration;
const centiseconds availableExtensionDuration = end != segments.rend() const centiseconds availableExtensionDuration =
? end->getDuration() - 1_cs end != segments.rend() ? end->getDuration() - 1_cs : 0_cs;
: 0_cs; const centiseconds extensionDuration = std::min(
const centiseconds extensionDuration = std::min({ {desiredExtensionDuration, availableExtensionDuration, maxExtensionDuration}
desiredExtensionDuration, availableExtensionDuration, maxExtensionDuration );
});
// Distribute available time range evenly among all short segments // Distribute available time range evenly among all short segments
const centiseconds shortSegmentsTargetStart = const centiseconds shortSegmentsTargetStart =
std::prev(end)->getStart() - extensionDuration; std::prev(end)->getStart() - extensionDuration;
for (auto shortSegmentIt = begin; shortSegmentIt != end; ++shortSegmentIt) { for (auto shortSegmentIt = begin; shortSegmentIt != end; ++shortSegmentIt) {
size_t remainingShortSegmentCount = std::distance(shortSegmentIt, end); size_t remainingShortSegmentCount = std::distance(shortSegmentIt, end);
const centiseconds segmentDuration = (resultStart - shortSegmentsTargetStart) / const centiseconds segmentDuration =
remainingShortSegmentCount; (resultStart - shortSegmentsTargetStart) / remainingShortSegmentCount;
const TimeRange segmentTargetRange(resultStart - segmentDuration, resultStart); const TimeRange segmentTargetRange(resultStart - segmentDuration, resultStart);
const auto retimedSegment = const auto retimedSegment =
retime(animation, shortSegmentIt->getTimeRange(), segmentTargetRange); retime(animation, shortSegmentIt->getTimeRange(), segmentTargetRange);
View File
@ -1,4 +1,5 @@
#include "tweening.h" #include "tweening.h"
#include "animationRules.h" #include "animationRules.h"
JoiningContinuousTimeline<Shape> insertTweens(const JoiningContinuousTimeline<Shape>& animation) { JoiningContinuousTimeline<Shape> insertTweens(const JoiningContinuousTimeline<Shape>& animation) {
@ -7,48 +8,50 @@ JoiningContinuousTimeline<Shape> insertTweens(const JoiningContinuousTimeline<Sh
JoiningContinuousTimeline<Shape> result(animation); JoiningContinuousTimeline<Shape> result(animation);
for_each_adjacent(animation.begin(), animation.end(), [&](const auto& first, const auto& second) { for_each_adjacent(
auto pair = getTween(first.getValue(), second.getValue()); animation.begin(),
if (!pair) return; animation.end(),
[&](const auto& first, const auto& second) {
auto pair = getTween(first.getValue(), second.getValue());
if (!pair) return;
Shape tweenShape; Shape tweenShape;
TweenTiming tweenTiming; TweenTiming tweenTiming;
std::tie(tweenShape, tweenTiming) = *pair; std::tie(tweenShape, tweenTiming) = *pair;
TimeRange firstTimeRange = first.getTimeRange(); TimeRange firstTimeRange = first.getTimeRange();
TimeRange secondTimeRange = second.getTimeRange(); TimeRange secondTimeRange = second.getTimeRange();
centiseconds tweenStart, tweenDuration; centiseconds tweenStart, tweenDuration;
switch (tweenTiming) { switch (tweenTiming) {
case TweenTiming::Early: case TweenTiming::Early: {
{ tweenDuration = std::min(firstTimeRange.getDuration() / 3, maxTweenDuration);
tweenDuration = std::min(firstTimeRange.getDuration() / 3, maxTweenDuration); tweenStart = firstTimeRange.getEnd() - tweenDuration;
tweenStart = firstTimeRange.getEnd() - tweenDuration; break;
break; }
} case TweenTiming::Centered: {
case TweenTiming::Centered: tweenDuration = std::min(
{ {firstTimeRange.getDuration() / 4,
tweenDuration = std::min({ secondTimeRange.getDuration() / 4,
firstTimeRange.getDuration() / 4, secondTimeRange.getDuration() / 4, maxTweenDuration maxTweenDuration}
}); );
tweenStart = firstTimeRange.getEnd() - tweenDuration / 2; tweenStart = firstTimeRange.getEnd() - tweenDuration / 2;
break; break;
} }
case TweenTiming::Late: case TweenTiming::Late: {
{ tweenDuration = std::min(secondTimeRange.getDuration() / 3, maxTweenDuration);
tweenDuration = std::min(secondTimeRange.getDuration() / 3, maxTweenDuration); tweenStart = secondTimeRange.getStart();
tweenStart = secondTimeRange.getStart(); break;
break; }
} default: {
default: throw std::runtime_error("Unexpected tween timing.");
{ }
throw std::runtime_error("Unexpected tween timing.");
} }
if (tweenDuration < minTweenDuration) return;
result.set(tweenStart, tweenStart + tweenDuration, tweenShape);
} }
);
if (tweenDuration < minTweenDuration) return;
result.set(tweenStart, tweenStart + tweenDuration, tweenShape);
});
return result; return result;
} }
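insertTweens visits the animation in neighboring pairs through for_each_adjacent. Rhubarb's helper lives in its tools library and is not shown in this commit; the standalone sketch below (an assumption for illustration, not the project's actual implementation) shows the call shape used above.

#include <iostream>
#include <iterator>
#include <vector>

template <typename Iterator, typename BinaryFn>
void for_each_adjacent(Iterator first, Iterator last, BinaryFn fn) {
    if (first == last) return;
    // Call fn(a, b) for every pair of neighboring elements.
    for (Iterator next = std::next(first); next != last; ++first, ++next) {
        fn(*first, *next);
    }
}

int main() {
    const std::vector<int> shapes{3, 3, 7, 1};
    for_each_adjacent(shapes.begin(), shapes.end(), [](int a, int b) {
        std::cout << a << " -> " << b << '\n';
    });
}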
View File
@ -1,4 +1,5 @@
#include "AudioClip.h" #include "AudioClip.h"
#include <format.h> #include <format.h>
using std::invalid_argument; using std::invalid_argument;
@ -11,6 +12,7 @@ class SafeSampleReader {
public: public:
SafeSampleReader(SampleReader unsafeRead, AudioClip::size_type size); SafeSampleReader(SampleReader unsafeRead, AudioClip::size_type size);
AudioClip::value_type operator()(AudioClip::size_type index); AudioClip::value_type operator()(AudioClip::size_type index);
private: private:
SampleReader unsafeRead; SampleReader unsafeRead;
AudioClip::size_type size; AudioClip::size_type size;
@ -20,19 +22,16 @@ private:
SafeSampleReader::SafeSampleReader(SampleReader unsafeRead, AudioClip::size_type size) : SafeSampleReader::SafeSampleReader(SampleReader unsafeRead, AudioClip::size_type size) :
unsafeRead(unsafeRead), unsafeRead(unsafeRead),
size(size) size(size) {}
{}
inline AudioClip::value_type SafeSampleReader::operator()(AudioClip::size_type index) { inline AudioClip::value_type SafeSampleReader::operator()(AudioClip::size_type index) {
if (index < 0) { if (index < 0) {
throw invalid_argument(fmt::format("Cannot read from sample index {}. Index < 0.", index)); throw invalid_argument(fmt::format("Cannot read from sample index {}. Index < 0.", index));
} }
if (index >= size) { if (index >= size) {
throw invalid_argument(fmt::format( throw invalid_argument(
"Cannot read from sample index {}. Clip size is {}.", fmt::format("Cannot read from sample index {}. Clip size is {}.", index, size)
index, );
size
));
} }
if (index == lastIndex) { if (index == lastIndex) {
return lastSample; return lastSample;
@ -60,10 +59,8 @@ std::unique_ptr<AudioClip> operator|(std::unique_ptr<AudioClip> clip, const Audi
} }
SampleIterator::SampleIterator() : SampleIterator::SampleIterator() :
sampleIndex(0) sampleIndex(0) {}
{}
SampleIterator::SampleIterator(const AudioClip& audioClip, size_type sampleIndex) : SampleIterator::SampleIterator(const AudioClip& audioClip, size_type sampleIndex) :
sampleReader([&audioClip] { return audioClip.createSampleReader(); }), sampleReader([&audioClip] { return audioClip.createSampleReader(); }),
sampleIndex(sampleIndex) sampleIndex(sampleIndex) {}
{}
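SafeSampleReader, further up in this file, wraps the raw reader with bounds checks and remembers the last sample it returned, so repeated reads of the same index stay cheap. A standalone sketch of that wrapper (not part of this commit); the names and the plain std::function type are simplified stand-ins for AudioClip::SampleReader.

#include <functional>
#include <iostream>
#include <stdexcept>
#include <vector>

using SampleRead = std::function<float(std::size_t)>;

SampleRead makeSafeReader(SampleRead unsafeRead, std::size_t size) {
    return [unsafeRead, size, lastIndex = std::size_t(0), lastSample = 0.0f,
            cached = false](std::size_t index) mutable {
        // Unsigned index, so only the upper bound needs checking here.
        if (index >= size) {
            throw std::invalid_argument("Sample index out of range.");
        }
        if (!cached || index != lastIndex) {
            lastSample = unsafeRead(index); // only hit the underlying reader on a cache miss
            lastIndex = index;
            cached = true;
        }
        return lastSample;
    };
}

int main() {
    const std::vector<float> samples{0.0f, 0.5f, -0.25f};
    const SampleRead raw = [&samples](std::size_t i) { return samples[i]; };
    const SampleRead safe = makeSafeReader(raw, samples.size());
    std::cout << safe(1) << ' ' << safe(1) << '\n'; // second call is served from the cache
}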
View File
@ -1,8 +1,9 @@
#pragma once #pragma once
#include <memory>
#include "time/TimeRange.h"
#include <functional> #include <functional>
#include <memory>
#include "time/TimeRange.h"
#include "tools/Lazy.h" #include "tools/Lazy.h"
class AudioClip; class AudioClip;
@ -17,6 +18,7 @@ public:
using SampleReader = std::function<value_type(size_type)>; using SampleReader = std::function<value_type(size_type)>;
virtual ~AudioClip() {} virtual ~AudioClip() {}
virtual std::unique_ptr<AudioClip> clone() const = 0; virtual std::unique_ptr<AudioClip> clone() const = 0;
virtual int getSampleRate() const = 0; virtual int getSampleRate() const = 0;
virtual size_type size() const = 0; virtual size_type size() const = 0;
@ -24,6 +26,7 @@ public:
SampleReader createSampleReader() const; SampleReader createSampleReader() const;
iterator begin() const; iterator begin() const;
iterator end() const; iterator end() const;
private: private:
virtual SampleReader createUnsafeSampleReader() const = 0; virtual SampleReader createUnsafeSampleReader() const = 0;
}; };
@ -137,6 +140,8 @@ inline SampleIterator operator-(const SampleIterator& it, SampleIterator::differ
return result; return result;
} }
inline SampleIterator::difference_type operator-(const SampleIterator& lhs, const SampleIterator& rhs) { inline SampleIterator::difference_type operator-(
const SampleIterator& lhs, const SampleIterator& rhs
) {
return lhs.getSampleIndex() - rhs.getSampleIndex(); return lhs.getSampleIndex() - rhs.getSampleIndex();
} }
View File
@ -1,13 +1,16 @@
#include "AudioSegment.h" #include "AudioSegment.h"
using std::unique_ptr;
using std::make_unique; using std::make_unique;
using std::unique_ptr;
AudioSegment::AudioSegment(std::unique_ptr<AudioClip> inputClip, const TimeRange& range) : AudioSegment::AudioSegment(std::unique_ptr<AudioClip> inputClip, const TimeRange& range) :
inputClip(std::move(inputClip)), inputClip(std::move(inputClip)),
-sampleOffset(static_cast<int64_t>(range.getStart().count()) * this->inputClip->getSampleRate() / 100),
-sampleCount(static_cast<int64_t>(range.getDuration().count()) * this->inputClip->getSampleRate() / 100)
-{
+sampleOffset(
+    static_cast<int64_t>(range.getStart().count()) * this->inputClip->getSampleRate() / 100
+),
+sampleCount(
+    static_cast<int64_t>(range.getDuration().count()) * this->inputClip->getSampleRate() / 100
+) {
if (sampleOffset < 0 || sampleOffset + sampleCount > this->inputClip->size()) { if (sampleOffset < 0 || sampleOffset + sampleCount > this->inputClip->size()) {
throw std::invalid_argument("Segment extends beyond input clip."); throw std::invalid_argument("Segment extends beyond input clip.");
} }
View File
@ -1,28 +1,26 @@
#include "DcOffset.h" #include "DcOffset.h"
#include <cmath> #include <cmath>
using std::unique_ptr;
using std::make_unique; using std::make_unique;
using std::unique_ptr;
DcOffset::DcOffset(unique_ptr<AudioClip> inputClip, float offset) : DcOffset::DcOffset(unique_ptr<AudioClip> inputClip, float offset) :
inputClip(std::move(inputClip)), inputClip(std::move(inputClip)),
offset(offset), offset(offset),
factor(1 / (1 + std::abs(offset))) factor(1 / (1 + std::abs(offset))) {}
{}
unique_ptr<AudioClip> DcOffset::clone() const { unique_ptr<AudioClip> DcOffset::clone() const {
return make_unique<DcOffset>(*this); return make_unique<DcOffset>(*this);
} }
SampleReader DcOffset::createUnsafeSampleReader() const { SampleReader DcOffset::createUnsafeSampleReader() const {
-return [
-    read = inputClip->createSampleReader(),
-    factor = factor,
-    offset = offset
-](size_type index) {
-    const float sample = read(index);
-    return sample * factor + offset;
-};
+return
+    [read = inputClip->createSampleReader(), factor = factor, offset = offset](size_type index
+    ) {
+        const float sample = read(index);
+        return sample * factor + offset;
+    };
} }
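DcOffset::createUnsafeSampleReader shows the pattern the audio clip classes in this commit share: each stage returns a lambda that captures the upstream reader together with its precomputed constants. A standalone sketch of one such stage (not part of this commit); the sample * factor + offset line mirrors the code above, while the helper name and the example offset value are made up for illustration.

#include <cmath>
#include <functional>
#include <iostream>
#include <vector>

using SampleRead = std::function<float(std::size_t)>;

// Mirrors the sample * factor + offset transformation shown above.
SampleRead withDcOffset(SampleRead read, float offset) {
    const float factor = 1.0f / (1.0f + std::abs(offset));
    return [read, factor, offset](std::size_t i) { return read(i) * factor + offset; };
}

int main() {
    const std::vector<float> samples{1.0f, 0.0f, -1.0f};
    const SampleRead raw = [&samples](std::size_t i) { return samples[i]; };
    const SampleRead corrected = withDcOffset(raw, -0.1f); // -0.1f is an arbitrary example offset
    for (std::size_t i = 0; i < samples.size(); ++i) std::cout << corrected(i) << ' ';
    std::cout << '\n';
}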
View File

@ -10,6 +10,7 @@ public:
std::unique_ptr<AudioClip> clone() const override; std::unique_ptr<AudioClip> clone() const override;
int getSampleRate() const override; int getSampleRate() const override;
size_type size() const override; size_type size() const override;
private: private:
SampleReader createUnsafeSampleReader() const override; SampleReader createUnsafeSampleReader() const override;
View File
@ -1,47 +1,40 @@
#include "OggVorbisFileReader.h" #include "OggVorbisFileReader.h"
#include <format.h>
#include "tools/fileTools.h"
#include "tools/tools.h"
#include "vorbis/codec.h" #include "vorbis/codec.h"
#include "vorbis/vorbisfile.h" #include "vorbis/vorbisfile.h"
#include "tools/tools.h"
#include <format.h>
#include "tools/fileTools.h"
using std::filesystem::path;
using std::vector;
using std::make_shared;
using std::ifstream; using std::ifstream;
using std::ios_base; using std::ios_base;
using std::make_shared;
using std::vector;
using std::filesystem::path;
std::string vorbisErrorToString(int64_t errorCode) { std::string vorbisErrorToString(int64_t errorCode) {
switch (errorCode) { switch (errorCode) {
-    case OV_EREAD:
-        return "Read error while fetching compressed data for decode.";
-    case OV_EFAULT:
-        return "Internal logic fault; indicates a bug or heap/stack corruption.";
-    case OV_EIMPL:
-        return "Feature not implemented";
+    case OV_EREAD: return "Read error while fetching compressed data for decode.";
+    case OV_EFAULT: return "Internal logic fault; indicates a bug or heap/stack corruption.";
+    case OV_EIMPL: return "Feature not implemented";
    case OV_EINVAL:
        return "Either an invalid argument, or incompletely initialized argument passed to a call.";
-    case OV_ENOTVORBIS:
-        return "The given file/data was not recognized as Ogg Vorbis data.";
+    case OV_ENOTVORBIS: return "The given file/data was not recognized as Ogg Vorbis data.";
    case OV_EBADHEADER:
        return "The file/data is apparently an Ogg Vorbis stream, but contains a corrupted or undecipherable header.";
    case OV_EVERSION:
        return "The bitstream format revision of the given Vorbis stream is not supported.";
-    case OV_ENOTAUDIO:
-        return "Packet is not an audio packet.";
-    case OV_EBADPACKET:
-        return "Error in packet.";
+    case OV_ENOTAUDIO: return "Packet is not an audio packet.";
+    case OV_EBADPACKET: return "Error in packet.";
    case OV_EBADLINK:
        return "The given link exists in the Vorbis data stream, but is not decipherable due to garbage or corruption.";
-    case OV_ENOSEEK:
-        return "The given stream is not seekable.";
-    default:
-        return "An unexpected Vorbis error occurred.";
+    case OV_ENOSEEK: return "The given stream is not seekable.";
+    default: return "An unexpected Vorbis error occurred.";
}
} }
template<typename T> template <typename T>
T throwOnError(T code) { T throwOnError(T code) {
// OV_HOLE, though technically an error code, is only informational // OV_HOLE, though technically an error code, is only informational
const bool error = code < 0 && code != OV_HOLE; const bool error = code < 0 && code != OV_HOLE;
@ -64,7 +57,7 @@ size_t readCallback(void* buffer, size_t elementSize, size_t elementCount, void*
} }
int seekCallback(void* dataSource, ogg_int64_t offset, int origin) { int seekCallback(void* dataSource, ogg_int64_t offset, int origin) {
static const vector<ios_base::seekdir> seekDirections { static const vector<ios_base::seekdir> seekDirections{
ios_base::beg, ios_base::cur, ios_base::end ios_base::beg, ios_base::cur, ios_base::end
}; };
@ -104,8 +97,7 @@ private:
OggVorbisFile::OggVorbisFile(const path& filePath) : OggVorbisFile::OggVorbisFile(const path& filePath) :
oggVorbisHandle(), oggVorbisHandle(),
stream(openFile(filePath)) stream(openFile(filePath)) {
{
// Throw only on badbit, not on failbit. // Throw only on badbit, not on failbit.
// Ogg Vorbis expects read operations past the end of the file to // Ogg Vorbis expects read operations past the end of the file to
// succeed, not to throw. // succeed, not to throw.
@ -114,19 +106,18 @@ OggVorbisFile::OggVorbisFile(const path& filePath) :
// Ogg Vorbis normally uses the `FILE` API from the C standard library. // Ogg Vorbis normally uses the `FILE` API from the C standard library.
// This doesn't handle Unicode paths on Windows. // This doesn't handle Unicode paths on Windows.
// Use wrapper functions around `ifstream` instead. // Use wrapper functions around `ifstream` instead.
const ov_callbacks callbacks { readCallback, seekCallback, nullptr, tellCallback }; const ov_callbacks callbacks{readCallback, seekCallback, nullptr, tellCallback};
throwOnError(ov_open_callbacks(&stream, &oggVorbisHandle, nullptr, 0, callbacks)); throwOnError(ov_open_callbacks(&stream, &oggVorbisHandle, nullptr, 0, callbacks));
} }
OggVorbisFileReader::OggVorbisFileReader(const path& filePath) : OggVorbisFileReader::OggVorbisFileReader(const path& filePath) :
filePath(filePath) filePath(filePath) {
{
OggVorbisFile file(filePath); OggVorbisFile file(filePath);
vorbis_info* vorbisInfo = ov_info(file.get(), -1); vorbis_info* vorbisInfo = ov_info(file.get(), -1);
sampleRate = vorbisInfo->rate; sampleRate = vorbisInfo->rate;
channelCount = vorbisInfo->channels; channelCount = vorbisInfo->channels;
sampleCount = throwOnError(ov_pcm_total(file.get(), -1)); sampleCount = throwOnError(ov_pcm_total(file.get(), -1));
} }
@ -135,13 +126,11 @@ std::unique_ptr<AudioClip> OggVorbisFileReader::clone() const {
} }
SampleReader OggVorbisFileReader::createUnsafeSampleReader() const { SampleReader OggVorbisFileReader::createUnsafeSampleReader() const {
-return [
-    channelCount = channelCount,
-    file = make_shared<OggVorbisFile>(filePath),
-    buffer = static_cast<value_type**>(nullptr),
-    bufferStart = size_type(0),
-    bufferSize = size_type(0)
-](size_type index) mutable {
+return [channelCount = channelCount,
+        file = make_shared<OggVorbisFile>(filePath),
+        buffer = static_cast<value_type**>(nullptr),
+        bufferStart = size_type(0),
+        bufferSize = size_type(0)](size_type index) mutable {
if (index < bufferStart || index >= bufferStart + bufferSize) { if (index < bufferStart || index >= bufferStart + bufferSize) {
// Seek // Seek
throwOnError(ov_pcm_seek(file->get(), index)); throwOnError(ov_pcm_seek(file->get(), index));

View File
#pragma once #pragma once
#include "AudioClip.h"
#include <filesystem> #include <filesystem>
#include "AudioClip.h"
class OggVorbisFileReader : public AudioClip { class OggVorbisFileReader : public AudioClip {
public: public:
OggVorbisFileReader(const std::filesystem::path& filePath); OggVorbisFileReader(const std::filesystem::path& filePath);
std::unique_ptr<AudioClip> clone() const override; std::unique_ptr<AudioClip> clone() const override;
-int getSampleRate() const override { return sampleRate; }
-size_type size() const override { return sampleCount; }
+int getSampleRate() const override {
+    return sampleRate;
+}
+
+size_type size() const override {
+    return sampleCount;
+}
private: private:
SampleReader createUnsafeSampleReader() const override; SampleReader createUnsafeSampleReader() const override;
View File
@ -1,25 +1,25 @@
#include <cmath>
#include "SampleRateConverter.h" #include "SampleRateConverter.h"
#include <stdexcept>
#include <format.h> #include <format.h>
#include <cmath>
#include <stdexcept>
using std::invalid_argument; using std::invalid_argument;
using std::unique_ptr;
using std::make_unique; using std::make_unique;
using std::unique_ptr;
SampleRateConverter::SampleRateConverter(unique_ptr<AudioClip> inputClip, int outputSampleRate) : SampleRateConverter::SampleRateConverter(unique_ptr<AudioClip> inputClip, int outputSampleRate) :
inputClip(std::move(inputClip)), inputClip(std::move(inputClip)),
downscalingFactor(static_cast<double>(this->inputClip->getSampleRate()) / outputSampleRate), downscalingFactor(static_cast<double>(this->inputClip->getSampleRate()) / outputSampleRate),
outputSampleRate(outputSampleRate), outputSampleRate(outputSampleRate),
outputSampleCount(std::lround(this->inputClip->size() / downscalingFactor)) outputSampleCount(std::lround(this->inputClip->size() / downscalingFactor)) {
{
if (outputSampleRate <= 0) { if (outputSampleRate <= 0) {
throw invalid_argument("Sample rate must be positive."); throw invalid_argument("Sample rate must be positive.");
} }
if (this->inputClip->getSampleRate() < outputSampleRate) { if (this->inputClip->getSampleRate() < outputSampleRate) {
throw invalid_argument(fmt::format( throw invalid_argument(fmt::format(
"Upsampling not supported. Input sample rate must not be below {}Hz.", "Upsampling not supported. Input sample rate must not be below {}Hz.", outputSampleRate
outputSampleRate
)); ));
} }
} }
@ -51,11 +51,9 @@ float mean(double inputStart, double inputEnd, const SampleReader& read) {
} }
SampleReader SampleRateConverter::createUnsafeSampleReader() const { SampleReader SampleRateConverter::createUnsafeSampleReader() const {
return [ return [read = inputClip->createSampleReader(),
read = inputClip->createSampleReader(), downscalingFactor = downscalingFactor,
downscalingFactor = downscalingFactor, size = inputClip->size()](size_type index) {
size = inputClip->size()
](size_type index) {
const double inputStart = index * downscalingFactor; const double inputStart = index * downscalingFactor;
const double inputEnd = const double inputEnd =
std::min((index + 1) * downscalingFactor, static_cast<double>(size)); std::min((index + 1) * downscalingFactor, static_cast<double>(size));

View File

@ -1,6 +1,7 @@
#pragma once #pragma once
#include <memory> #include <memory>
#include "AudioClip.h" #include "AudioClip.h"
class SampleRateConverter : public AudioClip { class SampleRateConverter : public AudioClip {
@ -9,6 +10,7 @@ public:
std::unique_ptr<AudioClip> clone() const override; std::unique_ptr<AudioClip> clone() const override;
int getSampleRate() const override; int getSampleRate() const override;
size_type size() const override; size_type size() const override;
private: private:
SampleReader createUnsafeSampleReader() const override; SampleReader createUnsafeSampleReader() const override;


@ -1,19 +1,22 @@
#include <format.h>
#include "WaveFileReader.h" #include "WaveFileReader.h"
#include "ioTools.h"
#include <iostream>
#include "tools/platformTools.h"
#include "tools/fileTools.h"
using std::runtime_error; #include <format.h>
#include <iostream>
#include "ioTools.h"
#include "tools/fileTools.h"
#include "tools/platformTools.h"
using fmt::format; using fmt::format;
using std::runtime_error;
using std::string; using std::string;
using namespace little_endian; using namespace little_endian;
using std::unique_ptr;
using std::make_unique;
using std::make_shared; using std::make_shared;
using std::filesystem::path; using std::make_unique;
using std::streamoff; using std::streamoff;
using std::unique_ptr;
using std::filesystem::path;
#define INT24_MIN (-8388608) #define INT24_MIN (-8388608)
#define INT24_MAX 8388607 #define INT24_MAX 8388607
@ -31,15 +34,15 @@ streamoff roundUpToEven(streamoff i) {
} }
namespace Codec { namespace Codec {
constexpr int Pcm = 0x01; constexpr int Pcm = 0x01;
constexpr int Float = 0x03; constexpr int Float = 0x03;
constexpr int Extensible = 0xFFFE; constexpr int Extensible = 0xFFFE;
}; }; // namespace Codec
string codecToString(int codec); string codecToString(int codec);
WaveFormatInfo getWaveFormatInfo(const path& filePath) { WaveFormatInfo getWaveFormatInfo(const path& filePath) {
WaveFormatInfo formatInfo {}; WaveFormatInfo formatInfo{};
auto file = openFile(filePath); auto file = openFile(filePath);
@ -74,8 +77,7 @@ WaveFormatInfo getWaveFormatInfo(const path& filePath) {
const streamoff chunkSize = read<int32_t>(file); const streamoff chunkSize = read<int32_t>(file);
const streamoff chunkEnd = roundUpToEven(file.tellg() + chunkSize); const streamoff chunkEnd = roundUpToEven(file.tellg() + chunkSize);
switch (chunkId) { switch (chunkId) {
case fourcc('f', 'm', 't', ' '): case fourcc('f', 'm', 't', ' '): {
{
// Read relevant data // Read relevant data
uint16_t codec = read<uint16_t>(file); uint16_t codec = read<uint16_t>(file);
formatInfo.channelCount = read<uint16_t>(file); formatInfo.channelCount = read<uint16_t>(file);
@ -118,7 +120,8 @@ WaveFormatInfo getWaveFormatInfo(const path& filePath) {
bytesPerSample = 4; bytesPerSample = 4;
} else { } else {
throw runtime_error( throw runtime_error(
format("Unsupported sample format: {}-bit PCM.", bitsPerSample)); format("Unsupported sample format: {}-bit PCM.", bitsPerSample)
);
} }
if (bytesPerSample != bytesPerFrame / formatInfo.channelCount) { if (bytesPerSample != bytesPerFrame / formatInfo.channelCount) {
throw runtime_error("Unsupported sample organization."); throw runtime_error("Unsupported sample organization.");
@ -132,30 +135,30 @@ WaveFormatInfo getWaveFormatInfo(const path& filePath) {
formatInfo.sampleFormat = SampleFormat::Float64; formatInfo.sampleFormat = SampleFormat::Float64;
bytesPerSample = 8; bytesPerSample = 8;
} else { } else {
throw runtime_error( throw runtime_error(format(
format("Unsupported sample format: {}-bit IEEE Float.", bitsPerSample) "Unsupported sample format: {}-bit IEEE Float.", bitsPerSample
); ));
} }
break; break;
default: default:
throw runtime_error(format( throw runtime_error(format(
"Unsupported audio codec: '{}'. Only uncompressed codecs ('{}' and '{}') are supported.", "Unsupported audio codec: '{}'. Only uncompressed codecs ('{}' and '{}') are supported.",
codecToString(codec), codecToString(Codec::Pcm), codecToString(Codec::Float) codecToString(codec),
codecToString(Codec::Pcm),
codecToString(Codec::Float)
)); ));
} }
formatInfo.bytesPerFrame = bytesPerSample * formatInfo.channelCount; formatInfo.bytesPerFrame = bytesPerSample * formatInfo.channelCount;
processedFormatChunk = true; processedFormatChunk = true;
break; break;
} }
case fourcc('d', 'a', 't', 'a'): case fourcc('d', 'a', 't', 'a'): {
{
formatInfo.dataOffset = file.tellg(); formatInfo.dataOffset = file.tellg();
formatInfo.frameCount = chunkSize / formatInfo.bytesPerFrame; formatInfo.frameCount = chunkSize / formatInfo.bytesPerFrame;
processedDataChunk = true; processedDataChunk = true;
break; break;
} }
default: default: {
{
// Ignore unknown chunk // Ignore unknown chunk
break; break;
} }
@ -180,45 +183,37 @@ unique_ptr<AudioClip> WaveFileReader::clone() const {
} }
inline AudioClip::value_type readSample( inline AudioClip::value_type readSample(
std::ifstream& file, std::ifstream& file, SampleFormat sampleFormat, int channelCount
SampleFormat sampleFormat,
int channelCount
) { ) {
float sum = 0; float sum = 0;
for (int channelIndex = 0; channelIndex < channelCount; channelIndex++) { for (int channelIndex = 0; channelIndex < channelCount; channelIndex++) {
switch (sampleFormat) { switch (sampleFormat) {
case SampleFormat::UInt8: case SampleFormat::UInt8: {
{
const uint8_t raw = read<uint8_t>(file); const uint8_t raw = read<uint8_t>(file);
sum += toNormalizedFloat(raw, 0, UINT8_MAX); sum += toNormalizedFloat(raw, 0, UINT8_MAX);
break; break;
} }
case SampleFormat::Int16: case SampleFormat::Int16: {
{
const int16_t raw = read<int16_t>(file); const int16_t raw = read<int16_t>(file);
sum += toNormalizedFloat(raw, INT16_MIN, INT16_MAX); sum += toNormalizedFloat(raw, INT16_MIN, INT16_MAX);
break; break;
} }
case SampleFormat::Int24: case SampleFormat::Int24: {
{
int raw = read<int, 24>(file); int raw = read<int, 24>(file);
if (raw & 0x800000) raw |= 0xFF000000; // Fix two's complement if (raw & 0x800000) raw |= 0xFF000000; // Fix two's complement
sum += toNormalizedFloat(raw, INT24_MIN, INT24_MAX); sum += toNormalizedFloat(raw, INT24_MIN, INT24_MAX);
break; break;
} }
case SampleFormat::Int32: case SampleFormat::Int32: {
{
const int32_t raw = read<int32_t>(file); const int32_t raw = read<int32_t>(file);
sum += toNormalizedFloat(raw, INT32_MIN, INT32_MAX); sum += toNormalizedFloat(raw, INT32_MIN, INT32_MAX);
break; break;
} }
case SampleFormat::Float32: case SampleFormat::Float32: {
{
sum += read<float>(file); sum += read<float>(file);
break; break;
} }
case SampleFormat::Float64: case SampleFormat::Float64: {
{
sum += static_cast<float>(read<double>(file)); sum += static_cast<float>(read<double>(file));
break; break;
} }
@ -229,14 +224,11 @@ inline AudioClip::value_type readSample(
} }
SampleReader WaveFileReader::createUnsafeSampleReader() const { SampleReader WaveFileReader::createUnsafeSampleReader() const {
return return [formatInfo = formatInfo,
[
formatInfo = formatInfo,
file = std::make_shared<std::ifstream>(openFile(filePath)), file = std::make_shared<std::ifstream>(openFile(filePath)),
filePos = std::streampos(0) filePos = std::streampos(0)](size_type index) mutable {
](size_type index) mutable { const std::streampos newFilePos =
const std::streampos newFilePos = formatInfo.dataOffset formatInfo.dataOffset + static_cast<streamoff>(index * formatInfo.bytesPerFrame);
+ static_cast<streamoff>(index * formatInfo.bytesPerFrame);
if (newFilePos != filePos) { if (newFilePos != filePos) {
file->seekg(newFilePos); file->seekg(newFilePos);
} }
@ -491,7 +483,6 @@ string codecToString(int codec) {
case 0xf1ac: return "Free Lossless Audio Codec FLAC"; case 0xf1ac: return "Free Lossless Audio Codec FLAC";
case 0xfffe: return "Extensible"; case 0xfffe: return "Extensible";
case 0xffff: return "Development"; case 0xffff: return "Development";
default: default: return format("{0:#x}", codec);
return format("{0:#x}", codec);
} }
} }


@ -1,16 +1,10 @@
#pragma once #pragma once
#include <filesystem> #include <filesystem>
#include "AudioClip.h" #include "AudioClip.h"
enum class SampleFormat { enum class SampleFormat { UInt8, Int16, Int24, Int32, Float32, Float64 };
UInt8,
Int16,
Int24,
Int32,
Float32,
Float64
};
struct WaveFormatInfo { struct WaveFormatInfo {
int bytesPerFrame; int bytesPerFrame;


@ -1,18 +1,20 @@
#include "audioFileReading.h" #include "audioFileReading.h"
#include <format.h>
#include "WaveFileReader.h"
#include <boost/algorithm/string.hpp>
#include "OggVorbisFileReader.h"
using std::filesystem::path; #include <format.h>
using std::string;
using std::runtime_error; #include <boost/algorithm/string.hpp>
#include "OggVorbisFileReader.h"
#include "WaveFileReader.h"
using fmt::format; using fmt::format;
using std::runtime_error;
using std::string;
using std::filesystem::path;
std::unique_ptr<AudioClip> createAudioFileClip(path filePath) { std::unique_ptr<AudioClip> createAudioFileClip(path filePath) {
try { try {
const string extension = const string extension = boost::algorithm::to_lower_copy(filePath.extension().u8string());
boost::algorithm::to_lower_copy(filePath.extension().u8string());
if (extension == ".wav") { if (extension == ".wav") {
return std::make_unique<WaveFileReader>(filePath); return std::make_unique<WaveFileReader>(filePath);
} }
@ -24,6 +26,8 @@ std::unique_ptr<AudioClip> createAudioFileClip(path filePath) {
extension extension
)); ));
} catch (...) { } catch (...) {
std::throw_with_nested(runtime_error(format("Could not open sound file {}.", filePath.u8string()))); std::throw_with_nested(
runtime_error(format("Could not open sound file {}.", filePath.u8string()))
);
} }
} }


@ -1,7 +1,8 @@
#pragma once #pragma once
#include <memory>
#include "AudioClip.h"
#include <filesystem> #include <filesystem>
#include <memory>
#include "AudioClip.h"
std::unique_ptr<AudioClip> createAudioFileClip(std::filesystem::path filePath); std::unique_ptr<AudioClip> createAudioFileClip(std::filesystem::path filePath);


@ -4,43 +4,38 @@
namespace little_endian { namespace little_endian {
template<typename Type, int bitsToRead = 8 * sizeof(Type)> template <typename Type, int bitsToRead = 8 * sizeof(Type)>
Type read(std::istream& stream) { Type read(std::istream& stream) {
static_assert(bitsToRead % 8 == 0, "Cannot read fractional bytes."); static_assert(bitsToRead % 8 == 0, "Cannot read fractional bytes.");
static_assert(bitsToRead <= sizeof(Type) * 8, "Bits to read exceed target type size."); static_assert(bitsToRead <= sizeof(Type) * 8, "Bits to read exceed target type size.");
Type result = 0; Type result = 0;
char* p = reinterpret_cast<char*>(&result); char* p = reinterpret_cast<char*>(&result);
const int bytesToRead = bitsToRead / 8; const int bytesToRead = bitsToRead / 8;
for (int byteIndex = 0; byteIndex < bytesToRead; byteIndex++) { for (int byteIndex = 0; byteIndex < bytesToRead; byteIndex++) {
*(p + byteIndex) = static_cast<char>(stream.get()); *(p + byteIndex) = static_cast<char>(stream.get());
}
return result;
} }
return result;
template<typename Type, int bitsToWrite = 8 * sizeof(Type)>
void write(Type value, std::ostream& stream) {
static_assert(bitsToWrite % 8 == 0, "Cannot write fractional bytes.");
static_assert(bitsToWrite <= sizeof(Type) * 8, "Bits to write exceed target type size.");
char* p = reinterpret_cast<char*>(&value);
const int bytesToWrite = bitsToWrite / 8;
for (int byteIndex = 0; byteIndex < bytesToWrite; byteIndex++) {
stream.put(*(p + byteIndex));
}
}
constexpr uint32_t fourcc(
unsigned char c0,
unsigned char c1,
unsigned char c2,
unsigned char c3
) {
return c0 | (c1 << 8) | (c2 << 16) | (c3 << 24);
}
inline std::string fourccToString(uint32_t fourcc) {
return std::string(reinterpret_cast<char*>(&fourcc), 4);
}
} }
template <typename Type, int bitsToWrite = 8 * sizeof(Type)>
void write(Type value, std::ostream& stream) {
static_assert(bitsToWrite % 8 == 0, "Cannot write fractional bytes.");
static_assert(bitsToWrite <= sizeof(Type) * 8, "Bits to write exceed target type size.");
char* p = reinterpret_cast<char*>(&value);
const int bytesToWrite = bitsToWrite / 8;
for (int byteIndex = 0; byteIndex < bytesToWrite; byteIndex++) {
stream.put(*(p + byteIndex));
}
}
constexpr uint32_t fourcc(unsigned char c0, unsigned char c1, unsigned char c2, unsigned char c3) {
return c0 | (c1 << 8) | (c2 << 16) | (c3 << 24);
}
inline std::string fourccToString(uint32_t fourcc) {
return std::string(reinterpret_cast<char*>(&fourcc), 4);
}
} // namespace little_endian


@ -1,4 +1,5 @@
#include "processing.h" #include "processing.h"
#include <algorithm> #include <algorithm>
using std::function; using std::function;
@ -35,7 +36,9 @@ void process16bitAudioClip(
processBuffer(buffer); processBuffer(buffer);
sampleCount += buffer.size(); sampleCount += buffer.size();
progressSink.reportProgress(static_cast<double>(sampleCount) / static_cast<double>(audioClip.size())); progressSink.reportProgress(
static_cast<double>(sampleCount) / static_cast<double>(audioClip.size())
);
} while (!buffer.empty()); } while (!buffer.empty());
} }


@ -1,7 +1,8 @@
#pragma once #pragma once
#include <vector>
#include <functional> #include <functional>
#include <vector>
#include "AudioClip.h" #include "AudioClip.h"
#include "tools/progress.h" #include "tools/progress.h"
@ -18,4 +19,4 @@ void process16bitAudioClip(
ProgressSink& progressSink ProgressSink& progressSink
); );
std::vector<int16_t> copyTo16bitBuffer(const AudioClip& audioClip); std::vector<int16_t> copyTo16bitBuffer(const AudioClip& audioClip);


@ -1,30 +1,31 @@
#include "voiceActivityDetection.h" #include "voiceActivityDetection.h"
#include "DcOffset.h"
#include "SampleRateConverter.h"
#include "logging/logging.h"
#include "tools/pairs.h"
#include <boost/range/adaptor/transformed.hpp>
#include <webrtc/common_audio/vad/include/webrtc_vad.h>
#include "processing.h"
#include <gsl_util.h> #include <gsl_util.h>
#include "tools/parallel.h" #include <webrtc/common_audio/vad/include/webrtc_vad.h>
#include <webrtc/common_audio/vad/vad_core.h> #include <webrtc/common_audio/vad/vad_core.h>
using std::vector; #include <boost/range/adaptor/transformed.hpp>
#include "DcOffset.h"
#include "logging/logging.h"
#include "processing.h"
#include "SampleRateConverter.h"
#include "tools/pairs.h"
#include "tools/parallel.h"
using boost::adaptors::transformed; using boost::adaptors::transformed;
using fmt::format; using fmt::format;
using std::runtime_error; using std::runtime_error;
using std::unique_ptr; using std::unique_ptr;
using std::vector;
JoiningBoundedTimeline<void> detectVoiceActivity( JoiningBoundedTimeline<void> detectVoiceActivity(
const AudioClip& inputAudioClip, const AudioClip& inputAudioClip, ProgressSink& progressSink
ProgressSink& progressSink
) { ) {
// Prepare audio for VAD // Prepare audio for VAD
constexpr int webRtcSamplingRate = 8000; constexpr int webRtcSamplingRate = 8000;
const unique_ptr<AudioClip> audioClip = inputAudioClip.clone() const unique_ptr<AudioClip> audioClip =
| resample(webRtcSamplingRate) inputAudioClip.clone() | resample(webRtcSamplingRate) | removeDcOffset();
| removeDcOffset();
VadInst* vadHandle = WebRtcVad_Create(); VadInst* vadHandle = WebRtcVad_Create();
if (!vadHandle) throw runtime_error("Error creating WebRTC VAD handle."); if (!vadHandle) throw runtime_error("Error creating WebRTC VAD handle.");
@ -46,12 +47,8 @@ JoiningBoundedTimeline<void> detectVoiceActivity(
// WebRTC is picky regarding buffer size // WebRTC is picky regarding buffer size
if (buffer.size() < frameSize) return; if (buffer.size() < frameSize) return;
const int result = WebRtcVad_Process( const int result =
vadHandle, WebRtcVad_Process(vadHandle, webRtcSamplingRate, buffer.data(), buffer.size());
webRtcSamplingRate,
buffer.data(),
buffer.size()
);
if (result == -1) throw runtime_error("Error processing audio buffer using WebRTC VAD."); if (result == -1) throw runtime_error("Error processing audio buffer using WebRTC VAD.");
// Ignore the result of WebRtcVad_Process, instead directly interpret the internal VAD flag. // Ignore the result of WebRtcVad_Process, instead directly interpret the internal VAD flag.
@ -86,9 +83,12 @@ JoiningBoundedTimeline<void> detectVoiceActivity(
logging::debugFormat( logging::debugFormat(
"Found {} sections of voice activity: {}", "Found {} sections of voice activity: {}",
activity.size(), activity.size(),
join(activity | transformed([](const Timed<void>& t) { join(
return format("{0}-{1}", t.getStart(), t.getEnd()); activity | transformed([](const Timed<void>& t) {
}), ", ") return format("{0}-{1}", t.getStart(), t.getEnd());
}),
", "
)
); );
return activity; return activity;


@ -4,6 +4,5 @@
#include "tools/progress.h" #include "tools/progress.h"
JoiningBoundedTimeline<void> detectVoiceActivity( JoiningBoundedTimeline<void> detectVoiceActivity(
const AudioClip& audioClip, const AudioClip& audioClip, ProgressSink& progressSink
ProgressSink& progressSink
); );


@ -1,5 +1,7 @@
#include <fstream>
#include "waveFileWriting.h" #include "waveFileWriting.h"
#include <fstream>
#include "ioTools.h" #include "ioTools.h"
using namespace little_endian; using namespace little_endian;


@ -1,7 +1,7 @@
#include "Phone.h" #include "Phone.h"
using std::string;
using boost::optional; using boost::optional;
using std::string;
PhoneConverter& PhoneConverter::get() { PhoneConverter& PhoneConverter::get() {
static PhoneConverter converter; static PhoneConverter converter;
@ -13,54 +13,24 @@ string PhoneConverter::getTypeName() {
} }
EnumConverter<Phone>::member_data PhoneConverter::getMemberData() { EnumConverter<Phone>::member_data PhoneConverter::getMemberData() {
return member_data { return member_data{{Phone::AO, "AO"}, {Phone::AA, "AA"}, {Phone::IY, "IY"},
{ Phone::AO, "AO" }, {Phone::UW, "UW"}, {Phone::EH, "EH"}, {Phone::IH, "IH"},
{ Phone::AA, "AA" }, {Phone::UH, "UH"}, {Phone::AH, "AH"}, {Phone::Schwa, "Schwa"},
{ Phone::IY, "IY" }, {Phone::AE, "AE"}, {Phone::EY, "EY"}, {Phone::AY, "AY"},
{ Phone::UW, "UW" }, {Phone::OW, "OW"}, {Phone::AW, "AW"}, {Phone::OY, "OY"},
{ Phone::EH, "EH" }, {Phone::ER, "ER"},
{ Phone::IH, "IH" },
{ Phone::UH, "UH" },
{ Phone::AH, "AH" },
{ Phone::Schwa, "Schwa" },
{ Phone::AE, "AE" },
{ Phone::EY, "EY" },
{ Phone::AY, "AY" },
{ Phone::OW, "OW" },
{ Phone::AW, "AW" },
{ Phone::OY, "OY" },
{ Phone::ER, "ER" },
{ Phone::P, "P" }, {Phone::P, "P"}, {Phone::B, "B"}, {Phone::T, "T"},
{ Phone::B, "B" }, {Phone::D, "D"}, {Phone::K, "K"}, {Phone::G, "G"},
{ Phone::T, "T" }, {Phone::CH, "CH"}, {Phone::JH, "JH"}, {Phone::F, "F"},
{ Phone::D, "D" }, {Phone::V, "V"}, {Phone::TH, "TH"}, {Phone::DH, "DH"},
{ Phone::K, "K" }, {Phone::S, "S"}, {Phone::Z, "Z"}, {Phone::SH, "SH"},
{ Phone::G, "G" }, {Phone::ZH, "ZH"}, {Phone::HH, "HH"}, {Phone::M, "M"},
{ Phone::CH, "CH" }, {Phone::N, "N"}, {Phone::NG, "NG"}, {Phone::L, "L"},
{ Phone::JH, "JH" }, {Phone::R, "R"}, {Phone::Y, "Y"}, {Phone::W, "W"},
{ Phone::F, "F" },
{ Phone::V, "V" },
{ Phone::TH, "TH" },
{ Phone::DH, "DH" },
{ Phone::S, "S" },
{ Phone::Z, "Z" },
{ Phone::SH, "SH" },
{ Phone::ZH, "ZH" },
{ Phone::HH, "HH" },
{ Phone::M, "M" },
{ Phone::N, "N" },
{ Phone::NG, "NG" },
{ Phone::L, "L" },
{ Phone::R, "R" },
{ Phone::Y, "Y" },
{ Phone::W, "W" },
{ Phone::Breath, "Breath" }, {Phone::Breath, "Breath"}, {Phone::Cough, "Cough"}, {Phone::Smack, "Smack"},
{ Phone::Cough, "Cough" }, {Phone::Noise, "Noise"}};
{ Phone::Smack, "Smack" },
{ Phone::Noise, "Noise" }
};
} }
optional<Phone> PhoneConverter::tryParse(const string& s) { optional<Phone> PhoneConverter::tryParse(const string& s) {


@ -8,66 +8,66 @@ enum class Phone {
// Vowels // Vowels
// ... monophthongs // ... monophthongs
AO, // [ɔ] as in [o]ff, f[a]ll, fr[o]st AO, // [ɔ] as in [o]ff, f[a]ll, fr[o]st
AA, // [ɑ] as in f[a]ther AA, // [ɑ] as in f[a]ther
IY, // [i] as in b[ee], sh[e] IY, // [i] as in b[ee], sh[e]
UW, // [u] as in y[ou], n[ew], f[oo]d UW, // [u] as in y[ou], n[ew], f[oo]d
EH, // [ɛ] as in r[e]d, m[e]n EH, // [ɛ] as in r[e]d, m[e]n
IH, // [ɪ] as in b[i]g, w[i]n IH, // [ɪ] as in b[i]g, w[i]n
UH, // [ʊ] as in sh[ou]ld, c[ou]ld UH, // [ʊ] as in sh[ou]ld, c[ou]ld
AH, // [ʌ] as in b[u]t, s[u]n AH, // [ʌ] as in b[u]t, s[u]n
Schwa, // [ə] as in [a]lone, disc[u]s Schwa, // [ə] as in [a]lone, disc[u]s
AE, // [æ] as in [a]t, b[a]t AE, // [æ] as in [a]t, b[a]t
// ... diphthongs // ... diphthongs
EY, // [eɪ] as in s[ay], [ei]ght EY, // [eɪ] as in s[ay], [ei]ght
AY, // [aɪ] as in m[y], wh[y], r[i]de AY, // [aɪ] as in m[y], wh[y], r[i]de
OW, // [oʊ] as in sh[ow], c[oa]t OW, // [oʊ] as in sh[ow], c[oa]t
AW, // [aʊ] as in h[ow], n[ow] AW, // [aʊ] as in h[ow], n[ow]
OY, // [ɔɪ] as in b[oy], t[oy] OY, // [ɔɪ] as in b[oy], t[oy]
// ... r-colored // ... r-colored
ER, // [ɝ] as in h[er], b[ir]d, h[ur]t ER, // [ɝ] as in h[er], b[ir]d, h[ur]t
LastVowel = ER, LastVowel = ER,
///////////// /////////////
// Consonants // Consonants
// ... stops // ... stops
P, // [p] as in [p]ay P, // [p] as in [p]ay
B, // [b] as in [b]uy B, // [b] as in [b]uy
T, // [t] as in [t]ake T, // [t] as in [t]ake
D, // [d] as in [d]ay D, // [d] as in [d]ay
K, // [k] as in [k]ey K, // [k] as in [k]ey
G, // [g] as in [g]o G, // [g] as in [g]o
// ... affricates // ... affricates
CH, // [tʃ] as in [ch]air CH, // [tʃ] as in [ch]air
JH, // [dʒ] as in [j]ust JH, // [dʒ] as in [j]ust
// ... fricatives // ... fricatives
F, // [f] as in [f]or F, // [f] as in [f]or
V, // [v] as in [v]ery V, // [v] as in [v]ery
TH, // [θ] as in [th]anks TH, // [θ] as in [th]anks
DH, // [ð] as in [th]at DH, // [ð] as in [th]at
S, // [s] as in [s]ay S, // [s] as in [s]ay
Z, // [z] as in [z]oo Z, // [z] as in [z]oo
SH, // [ʃ] as in [sh]ow SH, // [ʃ] as in [sh]ow
ZH, // [ʒ] as in mea[s]ure, plea[s]ure ZH, // [ʒ] as in mea[s]ure, plea[s]ure
HH, // [h] as in [h]ouse HH, // [h] as in [h]ouse
// ... nasals // ... nasals
M, // [m] as in [m]an M, // [m] as in [m]an
N, // [n] as in [no] N, // [n] as in [no]
NG, // [ŋ] as in si[ng] NG, // [ŋ] as in si[ng]
// ... liquids // ... liquids
L, // [ɫ] as in [l]ate L, // [ɫ] as in [l]ate
R, // [r, ɹ] as in [r]un R, // [r, ɹ] as in [r]un
// ... semivowels // ... semivowels
Y, // [j] as in [y]es Y, // [j] as in [y]es
W, // [w] as in [w]ay W, // [w] as in [w]ay
///////////// /////////////
// Misc. // Misc.
@ -81,9 +81,11 @@ enum class Phone {
class PhoneConverter : public EnumConverter<Phone> { class PhoneConverter : public EnumConverter<Phone> {
public: public:
static PhoneConverter& get(); static PhoneConverter& get();
protected: protected:
std::string getTypeName() override; std::string getTypeName() override;
member_data getMemberData() override; member_data getMemberData() override;
public: public:
boost::optional<Phone> tryParse(const std::string& s) override; boost::optional<Phone> tryParse(const std::string& s) override;
}; };
@ -92,4 +94,4 @@ std::ostream& operator<<(std::ostream& stream, Phone value);
std::istream& operator>>(std::istream& stream, Phone& value); std::istream& operator>>(std::istream& stream, Phone& value);
bool isVowel(Phone phone); bool isVowel(Phone phone);


@ -1,7 +1,7 @@
#include "Shape.h" #include "Shape.h"
using std::string;
using std::set; using std::set;
using std::string;
ShapeConverter& ShapeConverter::get() { ShapeConverter& ShapeConverter::get() {
static ShapeConverter converter; static ShapeConverter converter;
@ -22,7 +22,9 @@ set<Shape> ShapeConverter::getBasicShapes() {
set<Shape> ShapeConverter::getExtendedShapes() { set<Shape> ShapeConverter::getExtendedShapes() {
static const set<Shape> result = [] { static const set<Shape> result = [] {
set<Shape> result; set<Shape> result;
for (int i = static_cast<int>(Shape::LastBasicShape) + 1; i < static_cast<int>(Shape::EndSentinel); ++i) { for (int i = static_cast<int>(Shape::LastBasicShape) + 1;
i < static_cast<int>(Shape::EndSentinel);
++i) {
result.insert(static_cast<Shape>(i)); result.insert(static_cast<Shape>(i));
} }
return result; return result;
@ -35,16 +37,16 @@ string ShapeConverter::getTypeName() {
} }
EnumConverter<Shape>::member_data ShapeConverter::getMemberData() { EnumConverter<Shape>::member_data ShapeConverter::getMemberData() {
return member_data { return member_data{
{ Shape::A, "A" }, {Shape::A, "A"},
{ Shape::B, "B" }, {Shape::B, "B"},
{ Shape::C, "C" }, {Shape::C, "C"},
{ Shape::D, "D" }, {Shape::D, "D"},
{ Shape::E, "E" }, {Shape::E, "E"},
{ Shape::F, "F" }, {Shape::F, "F"},
{ Shape::G, "G" }, {Shape::G, "G"},
{ Shape::H, "H" }, {Shape::H, "H"},
{ Shape::X, "X" } {Shape::X, "X"}
}; };
} }


@ -1,27 +1,28 @@
#pragma once #pragma once
#include "tools/EnumConverter.h"
#include <set> #include <set>
#include "tools/EnumConverter.h"
// The classic Hanna-Barbera mouth shapes A-F plus the common supplements G-H // The classic Hanna-Barbera mouth shapes A-F plus the common supplements G-H
// For reference, see http://sunewatts.dk/lipsync/lipsync/article_02.php // For reference, see http://sunewatts.dk/lipsync/lipsync/article_02.php
// For visual examples, see https://flic.kr/s/aHsj86KR4J. Their shapes "BMP".."L" map to A..H. // For visual examples, see https://flic.kr/s/aHsj86KR4J. Their shapes "BMP".."L" map to A..H.
enum class Shape { enum class Shape {
// Basic shapes // Basic shapes
A, // Closed mouth (M, B, P) A, // Closed mouth (M, B, P)
B, // Clenched teeth (most consonants, some vowels like EE as in b[ee]) B, // Clenched teeth (most consonants, some vowels like EE as in b[ee])
C, // Open mouth (vowels like m[e]n, s[u]n, s[a]y) C, // Open mouth (vowels like m[e]n, s[u]n, s[a]y)
D, // Mouth wide open (vowels like f[a]ther, b[a]t, wh[y]) D, // Mouth wide open (vowels like f[a]ther, b[a]t, wh[y])
E, // Rounded mouth (vowels like [o]ff) E, // Rounded mouth (vowels like [o]ff)
F, // Puckered lips (y[ou], b[o]y, [w]ay) F, // Puckered lips (y[ou], b[o]y, [w]ay)
LastBasicShape = F, LastBasicShape = F,
// Extended shapes // Extended shapes
G, // "F", "V" G, // "F", "V"
H, // "L" H, // "L"
X, // Idle X, // Idle
EndSentinel EndSentinel
}; };
@ -31,6 +32,7 @@ public:
static ShapeConverter& get(); static ShapeConverter& get();
static std::set<Shape> getBasicShapes(); static std::set<Shape> getBasicShapes();
static std::set<Shape> getExtendedShapes(); static std::set<Shape> getExtendedShapes();
protected: protected:
std::string getTypeName() override; std::string getTypeName() override;
member_data getMemberData() override; member_data getMemberData() override;


@ -1,26 +1,29 @@
#include "DatExporter.h" #include "DatExporter.h"
#include "animation/targetShapeSet.h"
#include <boost/lexical_cast.hpp> #include <boost/lexical_cast.hpp>
#include "animation/targetShapeSet.h"
using std::string;
using std::chrono::duration; using std::chrono::duration;
using std::chrono::duration_cast; using std::chrono::duration_cast;
using std::string;
DatExporter::DatExporter(const ShapeSet& targetShapeSet, double frameRate, bool convertToPrestonBlair) : DatExporter::DatExporter(
const ShapeSet& targetShapeSet, double frameRate, bool convertToPrestonBlair
) :
frameRate(frameRate), frameRate(frameRate),
convertToPrestonBlair(convertToPrestonBlair), convertToPrestonBlair(convertToPrestonBlair),
prestonBlairShapeNames { prestonBlairShapeNames{
{ Shape::A, "MBP" }, {Shape::A, "MBP"},
{ Shape::B, "etc" }, {Shape::B, "etc"},
{ Shape::C, "E" }, {Shape::C, "E"},
{ Shape::D, "AI" }, {Shape::D, "AI"},
{ Shape::E, "O" }, {Shape::E, "O"},
{ Shape::F, "U" }, {Shape::F, "U"},
{ Shape::G, "FV" }, {Shape::G, "FV"},
{ Shape::H, "L" }, {Shape::H, "L"},
{ Shape::X, "rest" }, {Shape::X, "rest"},
} } {
{
// Animation works with a fixed frame rate of 100. // Animation works with a fixed frame rate of 100.
// Downsampling to much less than 25 fps may result in dropped frames. // Downsampling to much less than 25 fps may result in dropped frames.
// Upsampling to more than 100 fps doesn't make sense. // Upsampling to more than 100 fps doesn't make sense.
@ -28,13 +31,17 @@ DatExporter::DatExporter(const ShapeSet& targetShapeSet, double frameRate, bool
const double maxFrameRate = 100.0; const double maxFrameRate = 100.0;
if (frameRate < minFrameRate || frameRate > maxFrameRate) { if (frameRate < minFrameRate || frameRate > maxFrameRate) {
throw std::runtime_error(fmt::format("Frame rate must be between {} and {} fps.", minFrameRate, maxFrameRate)); throw std::runtime_error(
fmt::format("Frame rate must be between {} and {} fps.", minFrameRate, maxFrameRate)
);
} }
if (convertToPrestonBlair) { if (convertToPrestonBlair) {
for (Shape shape : targetShapeSet) { for (Shape shape : targetShapeSet) {
if (prestonBlairShapeNames.find(shape) == prestonBlairShapeNames.end()) { if (prestonBlairShapeNames.find(shape) == prestonBlairShapeNames.end()) {
throw std::runtime_error(fmt::format("Mouth shape {} cannot be converted to Preston Blair shape names.")); throw std::runtime_error(
fmt::format("Mouth shape {} cannot be converted to Preston Blair shape names.")
);
} }
} }
} }
@ -62,9 +69,8 @@ void DatExporter::exportAnimation(const ExporterInput& input, std::ostream& outp
} }
string DatExporter::toString(Shape shape) const { string DatExporter::toString(Shape shape) const {
return convertToPrestonBlair return convertToPrestonBlair ? prestonBlairShapeNames.at(shape)
? prestonBlairShapeNames.at(shape) : boost::lexical_cast<std::string>(shape);
: boost::lexical_cast<std::string>(shape);
} }
int DatExporter::toFrameNumber(centiseconds time) const { int DatExporter::toFrameNumber(centiseconds time) const {


@ -1,10 +1,11 @@
#pragma once #pragma once
#include "Exporter.h"
#include "core/Shape.h"
#include <map> #include <map>
#include <string> #include <string>
#include "core/Shape.h"
#include "Exporter.h"
// Exporter for Moho's switch data file format // Exporter for Moho's switch data file format
class DatExporter : public Exporter { class DatExporter : public Exporter {
public: public:


@ -1,15 +1,17 @@
#pragma once #pragma once
#include <filesystem>
#include "core/Shape.h" #include "core/Shape.h"
#include "time/ContinuousTimeline.h" #include "time/ContinuousTimeline.h"
#include <filesystem>
class ExporterInput { class ExporterInput {
public: public:
ExporterInput( ExporterInput(
const std::filesystem::path& inputFilePath, const std::filesystem::path& inputFilePath,
const JoiningContinuousTimeline<Shape>& animation, const JoiningContinuousTimeline<Shape>& animation,
const ShapeSet& targetShapeSet) : const ShapeSet& targetShapeSet
) :
inputFilePath(inputFilePath), inputFilePath(inputFilePath),
animation(animation), animation(animation),
targetShapeSet(targetShapeSet) {} targetShapeSet(targetShapeSet) {}
@ -22,5 +24,6 @@ public:
class Exporter { class Exporter {
public: public:
virtual ~Exporter() {} virtual ~Exporter() {}
virtual void exportAnimation(const ExporterInput& input, std::ostream& outputStream) = 0; virtual void exportAnimation(const ExporterInput& input, std::ostream& outputStream) = 0;
}; };


@ -1,4 +1,5 @@
#include "JsonExporter.h" #include "JsonExporter.h"
#include "exporterTools.h" #include "exporterTools.h"
#include "tools/stringTools.h" #include "tools/stringTools.h"
@ -10,8 +11,10 @@ void JsonExporter::exportAnimation(const ExporterInput& input, std::ostream& out
// the formatting. // the formatting.
outputStream << "{\n"; outputStream << "{\n";
outputStream << " \"metadata\": {\n"; outputStream << " \"metadata\": {\n";
outputStream << " \"soundFile\": \"" << escapeJsonString(absolute(input.inputFilePath).u8string()) << "\",\n"; outputStream << " \"soundFile\": \""
outputStream << " \"duration\": " << formatDuration(input.animation.getRange().getDuration()) << "\n"; << escapeJsonString(absolute(input.inputFilePath).u8string()) << "\",\n";
outputStream << " \"duration\": " << formatDuration(input.animation.getRange().getDuration())
<< "\n";
outputStream << " },\n"; outputStream << " },\n";
outputStream << " \"mouthCues\": [\n"; outputStream << " \"mouthCues\": [\n";
bool isFirst = true; bool isFirst = true;
@ -19,8 +22,8 @@ void JsonExporter::exportAnimation(const ExporterInput& input, std::ostream& out
if (!isFirst) outputStream << ",\n"; if (!isFirst) outputStream << ",\n";
isFirst = false; isFirst = false;
outputStream << " { \"start\": " << formatDuration(timedShape.getStart()) outputStream << " { \"start\": " << formatDuration(timedShape.getStart())
<< ", \"end\": " << formatDuration(timedShape.getEnd()) << ", \"end\": " << formatDuration(timedShape.getEnd()) << ", \"value\": \""
<< ", \"value\": \"" << timedShape.getValue() << "\" }"; << timedShape.getValue() << "\" }";
} }
outputStream << "\n"; outputStream << "\n";
outputStream << " ]\n"; outputStream << " ]\n";


@ -1,20 +1,15 @@
#include "TsvExporter.h" #include "TsvExporter.h"
#include "animation/targetShapeSet.h" #include "animation/targetShapeSet.h"
void TsvExporter::exportAnimation(const ExporterInput& input, std::ostream& outputStream) { void TsvExporter::exportAnimation(const ExporterInput& input, std::ostream& outputStream) {
// Output shapes with start times // Output shapes with start times
for (auto& timedShape : input.animation) { for (auto& timedShape : input.animation) {
outputStream outputStream << formatDuration(timedShape.getStart()) << "\t" << timedShape.getValue()
<< formatDuration(timedShape.getStart()) << "\n";
<< "\t"
<< timedShape.getValue()
<< "\n";
} }
// Output closed mouth with end time // Output closed mouth with end time
outputStream outputStream << formatDuration(input.animation.getRange().getEnd()) << "\t"
<< formatDuration(input.animation.getRange().getEnd()) << convertToTargetShapeSet(Shape::X, input.targetShapeSet) << "\n";
<< "\t"
<< convertToTargetShapeSet(Shape::X, input.targetShapeSet)
<< "\n";
} }


@ -6,4 +6,3 @@ class TsvExporter : public Exporter {
public: public:
void exportAnimation(const ExporterInput& input, std::ostream& outputStream) override; void exportAnimation(const ExporterInput& input, std::ostream& outputStream) override;
}; };


@ -1,11 +1,13 @@
#include "XmlExporter.h" #include "XmlExporter.h"
#include <boost/property_tree/ptree.hpp> #include <boost/property_tree/ptree.hpp>
#include <boost/property_tree/xml_parser.hpp> #include <boost/property_tree/xml_parser.hpp>
#include <boost/version.hpp> #include <boost/version.hpp>
#include "exporterTools.h" #include "exporterTools.h"
using std::string;
using boost::property_tree::ptree; using boost::property_tree::ptree;
using std::string;
void XmlExporter::exportAnimation(const ExporterInput& input, std::ostream& outputStream) { void XmlExporter::exportAnimation(const ExporterInput& input, std::ostream& outputStream) {
ptree tree; ptree tree;
@ -13,22 +15,19 @@ void XmlExporter::exportAnimation(const ExporterInput& input, std::ostream& outp
// Add metadata // Add metadata
tree.put("rhubarbResult.metadata.soundFile", absolute(input.inputFilePath).u8string()); tree.put("rhubarbResult.metadata.soundFile", absolute(input.inputFilePath).u8string());
tree.put( tree.put(
"rhubarbResult.metadata.duration", "rhubarbResult.metadata.duration", formatDuration(input.animation.getRange().getDuration())
formatDuration(input.animation.getRange().getDuration())
); );
// Add mouth cues // Add mouth cues
for (auto& timedShape : dummyShapeIfEmpty(input.animation, input.targetShapeSet)) { for (auto& timedShape : dummyShapeIfEmpty(input.animation, input.targetShapeSet)) {
ptree& mouthCueElement = tree.add( ptree& mouthCueElement =
"rhubarbResult.mouthCues.mouthCue", tree.add("rhubarbResult.mouthCues.mouthCue", timedShape.getValue());
timedShape.getValue()
);
mouthCueElement.put("<xmlattr>.start", formatDuration(timedShape.getStart())); mouthCueElement.put("<xmlattr>.start", formatDuration(timedShape.getStart()));
mouthCueElement.put("<xmlattr>.end", formatDuration(timedShape.getEnd())); mouthCueElement.put("<xmlattr>.end", formatDuration(timedShape.getEnd()));
} }
#ifndef BOOST_VERSION //present in version.hpp #ifndef BOOST_VERSION // present in version.hpp
#error "Could not detect Boost version." #error "Could not detect Boost version."
#endif #endif
#if BOOST_VERSION < 105600 // Support legacy syntax #if BOOST_VERSION < 105600 // Support legacy syntax

View File

@ -1,10 +1,10 @@
#include "exporterTools.h" #include "exporterTools.h"
#include "animation/targetShapeSet.h" #include "animation/targetShapeSet.h"
// Makes sure there is at least one mouth shape // Makes sure there is at least one mouth shape
std::vector<Timed<Shape>> dummyShapeIfEmpty( std::vector<Timed<Shape>> dummyShapeIfEmpty(
const JoiningTimeline<Shape>& animation, const JoiningTimeline<Shape>& animation, const ShapeSet& targetShapeSet
const ShapeSet& targetShapeSet
) { ) {
std::vector<Timed<Shape>> result; std::vector<Timed<Shape>> result;
std::copy(animation.begin(), animation.end(), std::back_inserter(result)); std::copy(animation.begin(), animation.end(), std::back_inserter(result));


@ -5,6 +5,5 @@
// Makes sure there is at least one mouth shape // Makes sure there is at least one mouth shape
std::vector<Timed<Shape>> dummyShapeIfEmpty( std::vector<Timed<Shape>> dummyShapeIfEmpty(
const JoiningTimeline<Shape>& animation, const JoiningTimeline<Shape>& animation, const ShapeSet& targetShapeSet
const ShapeSet& targetShapeSet
); );


@ -1,8 +1,9 @@
#include "rhubarbLib.h" #include "rhubarbLib.h"
#include "core/Phone.h"
#include "tools/textFiles.h"
#include "animation/mouthAnimation.h" #include "animation/mouthAnimation.h"
#include "audio/audioFileReading.h" #include "audio/audioFileReading.h"
#include "core/Phone.h"
#include "tools/textFiles.h"
using boost::optional; using boost::optional;
using std::string; using std::string;
@ -14,8 +15,8 @@ JoiningContinuousTimeline<Shape> animateAudioClip(
const Recognizer& recognizer, const Recognizer& recognizer,
const ShapeSet& targetShapeSet, const ShapeSet& targetShapeSet,
int maxThreadCount, int maxThreadCount,
ProgressSink& progressSink) ProgressSink& progressSink
{ ) {
const BoundedTimeline<Phone> phones = const BoundedTimeline<Phone> phones =
recognizer.recognizePhones(audioClip, dialog, maxThreadCount, progressSink); recognizer.recognizePhones(audioClip, dialog, maxThreadCount, progressSink);
JoiningContinuousTimeline<Shape> result = animate(phones, targetShapeSet); JoiningContinuousTimeline<Shape> result = animate(phones, targetShapeSet);
@ -28,8 +29,10 @@ JoiningContinuousTimeline<Shape> animateWaveFile(
const Recognizer& recognizer, const Recognizer& recognizer,
const ShapeSet& targetShapeSet, const ShapeSet& targetShapeSet,
int maxThreadCount, int maxThreadCount,
ProgressSink& progressSink) ProgressSink& progressSink
{ ) {
const auto audioClip = createAudioFileClip(filePath); const auto audioClip = createAudioFileClip(filePath);
return animateAudioClip(*audioClip, dialog, recognizer, targetShapeSet, maxThreadCount, progressSink); return animateAudioClip(
*audioClip, dialog, recognizer, targetShapeSet, maxThreadCount, progressSink
);
} }


@ -1,12 +1,13 @@
#pragma once #pragma once
#include "core/Shape.h"
#include "time/ContinuousTimeline.h"
#include "audio/AudioClip.h"
#include "tools/progress.h"
#include <filesystem> #include <filesystem>
#include "animation/targetShapeSet.h" #include "animation/targetShapeSet.h"
#include "audio/AudioClip.h"
#include "core/Shape.h"
#include "recognition/Recognizer.h" #include "recognition/Recognizer.h"
#include "time/ContinuousTimeline.h"
#include "tools/progress.h"
JoiningContinuousTimeline<Shape> animateAudioClip( JoiningContinuousTimeline<Shape> animateAudioClip(
const AudioClip& audioClip, const AudioClip& audioClip,
@ -14,7 +15,8 @@ JoiningContinuousTimeline<Shape> animateAudioClip(
const Recognizer& recognizer, const Recognizer& recognizer,
const ShapeSet& targetShapeSet, const ShapeSet& targetShapeSet,
int maxThreadCount, int maxThreadCount,
ProgressSink& progressSink); ProgressSink& progressSink
);
JoiningContinuousTimeline<Shape> animateWaveFile( JoiningContinuousTimeline<Shape> animateWaveFile(
std::filesystem::path filePath, std::filesystem::path filePath,
@ -22,4 +24,5 @@ JoiningContinuousTimeline<Shape> animateWaveFile(
const Recognizer& recognizer, const Recognizer& recognizer,
const ShapeSet& targetShapeSet, const ShapeSet& targetShapeSet,
int maxThreadCount, int maxThreadCount,
ProgressSink& progressSink); ProgressSink& progressSink
);


@ -1,39 +1,38 @@
#include "Entry.h" #include "Entry.h"
#include <thread>
#include <mutex> #include <mutex>
#include <thread>
#include <unordered_map> #include <unordered_map>
using std::lock_guard; using std::lock_guard;
using std::unordered_map;
using std::string; using std::string;
using std::unordered_map;
namespace logging { namespace logging {
// Returns an int representing the current thread. // Returns an int representing the current thread.
// This used to be a simple thread_local variable, but Xcode doesn't support that yet // This used to be a simple thread_local variable, but Xcode doesn't support that yet
int getThreadCounter() { int getThreadCounter() {
using thread_id = std::thread::id; using thread_id = std::thread::id;
static std::mutex counterMutex; static std::mutex counterMutex;
lock_guard<std::mutex> lock(counterMutex); lock_guard<std::mutex> lock(counterMutex);
static unordered_map<thread_id, int> threadCounters; static unordered_map<thread_id, int> threadCounters;
static int lastThreadId = 0; static int lastThreadId = 0;
thread_id threadId = std::this_thread::get_id(); thread_id threadId = std::this_thread::get_id();
if (threadCounters.find(threadId) == threadCounters.end()) { if (threadCounters.find(threadId) == threadCounters.end()) {
threadCounters.insert({ threadId, ++lastThreadId }); threadCounters.insert({threadId, ++lastThreadId});
}
return threadCounters.find(threadId)->second;
} }
return threadCounters.find(threadId)->second;
Entry::Entry(Level level, const string& message) :
timestamp(),
level(level),
message(message)
{
time(&timestamp);
this->threadCounter = getThreadCounter();
}
} }
Entry::Entry(Level level, const string& message) :
timestamp(),
level(level),
message(message) {
time(&timestamp);
this->threadCounter = getThreadCounter();
}
} // namespace logging


@ -3,15 +3,15 @@
#include "Level.h" #include "Level.h"
namespace logging { namespace logging {
struct Entry {
Entry(Level level, const std::string& message);
virtual ~Entry() = default;
time_t timestamp; struct Entry {
int threadCounter; Entry(Level level, const std::string& message);
Level level; virtual ~Entry() = default;
std::string message;
};
} time_t timestamp;
int threadCounter;
Level level;
std::string message;
};
} // namespace logging


@ -1,14 +1,15 @@
#pragma once #pragma once
#include <string> #include <string>
#include "Entry.h" #include "Entry.h"
namespace logging { namespace logging {
class Formatter { class Formatter {
public: public:
virtual ~Formatter() = default; virtual ~Formatter() = default;
virtual std::string format(const Entry& entry) = 0; virtual std::string format(const Entry& entry) = 0;
}; };
} } // namespace logging


@ -4,32 +4,32 @@ using std::string;
namespace logging { namespace logging {
LevelConverter& LevelConverter::get() { LevelConverter& LevelConverter::get() {
static LevelConverter converter; static LevelConverter converter;
return converter; return converter;
}
string LevelConverter::getTypeName() {
return "Level";
}
EnumConverter<Level>::member_data LevelConverter::getMemberData() {
return member_data {
{ Level::Trace, "Trace" },
{ Level::Debug, "Debug" },
{ Level::Info, "Info" },
{ Level::Warn, "Warn" },
{ Level::Error, "Error" },
{ Level::Fatal, "Fatal" }
};
}
std::ostream& operator<<(std::ostream& stream, Level value) {
return LevelConverter::get().write(stream, value);
}
std::istream& operator >>(std::istream& stream, Level& value) {
return LevelConverter::get().read(stream, value);
}
} }
string LevelConverter::getTypeName() {
return "Level";
}
EnumConverter<Level>::member_data LevelConverter::getMemberData() {
return member_data{
{Level::Trace, "Trace"},
{Level::Debug, "Debug"},
{Level::Info, "Info"},
{Level::Warn, "Warn"},
{Level::Error, "Error"},
{Level::Fatal, "Fatal"}
};
}
std::ostream& operator<<(std::ostream& stream, Level value) {
return LevelConverter::get().write(stream, value);
}
std::istream& operator>>(std::istream& stream, Level& value) {
return LevelConverter::get().read(stream, value);
}
} // namespace logging


@ -4,26 +4,19 @@
namespace logging { namespace logging {
enum class Level { enum class Level { Trace, Debug, Info, Warn, Error, Fatal, EndSentinel };
Trace,
Debug,
Info,
Warn,
Error,
Fatal,
EndSentinel
};
class LevelConverter : public EnumConverter<Level> { class LevelConverter : public EnumConverter<Level> {
public: public:
static LevelConverter& get(); static LevelConverter& get();
protected:
std::string getTypeName() override;
member_data getMemberData() override;
};
std::ostream& operator<<(std::ostream& stream, Level value); protected:
std::string getTypeName() override;
member_data getMemberData() override;
};
std::istream& operator >>(std::istream& stream, Level& value); std::ostream& operator<<(std::ostream& stream, Level value);
} std::istream& operator>>(std::istream& stream, Level& value);
} // namespace logging


@ -4,10 +4,10 @@
namespace logging { namespace logging {
class Sink { class Sink {
public: public:
virtual ~Sink() = default; virtual ~Sink() = default;
virtual void receive(const Entry& entry) = 0; virtual void receive(const Entry& entry) = 0;
}; };
} } // namespace logging


@ -1,5 +1,7 @@
#include "formatters.h" #include "formatters.h"
#include <format.h> #include <format.h>
#include "Entry.h" #include "Entry.h"
#include "tools/tools.h" #include "tools/tools.h"
@ -7,17 +9,17 @@ using std::string;
namespace logging { namespace logging {
string SimpleConsoleFormatter::format(const Entry& entry) { string SimpleConsoleFormatter::format(const Entry& entry) {
return fmt::format("[{0}] {1}", entry.level, entry.message); return fmt::format("[{0}] {1}", entry.level, entry.message);
}
string SimpleFileFormatter::format(const Entry& entry) {
return fmt::format(
"[{0}] {1} {2}",
formatTime(entry.timestamp, "%F %H:%M:%S"),
entry.threadCounter,
consoleFormatter.format(entry)
);
}
} }
string SimpleFileFormatter::format(const Entry& entry) {
return fmt::format(
"[{0}] {1} {2}",
formatTime(entry.timestamp, "%F %H:%M:%S"),
entry.threadCounter,
consoleFormatter.format(entry)
);
}
} // namespace logging


@ -4,16 +4,17 @@
namespace logging { namespace logging {
class SimpleConsoleFormatter : public Formatter { class SimpleConsoleFormatter : public Formatter {
public: public:
std::string format(const Entry& entry) override; std::string format(const Entry& entry) override;
}; };
class SimpleFileFormatter : public Formatter { class SimpleFileFormatter : public Formatter {
public: public:
std::string format(const Entry& entry) override; std::string format(const Entry& entry) override;
private:
SimpleConsoleFormatter consoleFormatter;
};
} private:
SimpleConsoleFormatter consoleFormatter;
};
} // namespace logging


@ -1,13 +1,15 @@
#include "logging.h" #include "logging.h"
#include "tools/tools.h"
#include <mutex> #include <mutex>
#include "Entry.h" #include "Entry.h"
#include "tools/tools.h"
using namespace logging; using namespace logging;
using std::lock_guard;
using std::shared_ptr;
using std::string; using std::string;
using std::vector; using std::vector;
using std::shared_ptr;
using std::lock_guard;
std::mutex& getLogMutex() { std::mutex& getLogMutex() {
static std::mutex mutex; static std::mutex mutex;


@ -1,37 +1,37 @@
#pragma once #pragma once
#include "tools/EnumConverter.h"
#include "Sink.h"
#include "Level.h" #include "Level.h"
#include "Sink.h"
#include "tools/EnumConverter.h"
namespace logging { namespace logging {
bool addSink(std::shared_ptr<Sink> sink); bool addSink(std::shared_ptr<Sink> sink);
bool removeSink(std::shared_ptr<Sink> sink); bool removeSink(std::shared_ptr<Sink> sink);
void log(const Entry& entry); void log(const Entry& entry);
void log(Level level, const std::string& message); void log(Level level, const std::string& message);
template<typename... Args> template <typename... Args>
void logFormat(Level level, fmt::CStringRef format, const Args&... args) { void logFormat(Level level, fmt::CStringRef format, const Args&... args) {
log(level, fmt::format(format, args...)); log(level, fmt::format(format, args...));
}
#define LOG_WITH_LEVEL(levelName, levelEnum) \
inline void levelName(const std::string& message) { \
log(Level::levelEnum, message); \
} \
template <typename... Args> \
void levelName ## Format(fmt::CStringRef format, const Args&... args) { \
logFormat(Level::levelEnum, format, args...); \
}
LOG_WITH_LEVEL(trace, Trace)
LOG_WITH_LEVEL(debug, Debug)
LOG_WITH_LEVEL(info, Info)
LOG_WITH_LEVEL(warn, Warn)
LOG_WITH_LEVEL(error, Error)
LOG_WITH_LEVEL(fatal, Fatal)
} }
#define LOG_WITH_LEVEL(levelName, levelEnum) \
inline void levelName(const std::string& message) { \
log(Level::levelEnum, message); \
} \
template <typename... Args> \
void levelName##Format(fmt::CStringRef format, const Args&... args) { \
logFormat(Level::levelEnum, format, args...); \
}
LOG_WITH_LEVEL(trace, Trace)
LOG_WITH_LEVEL(debug, Debug)
LOG_WITH_LEVEL(info, Info)
LOG_WITH_LEVEL(warn, Warn)
LOG_WITH_LEVEL(error, Error)
LOG_WITH_LEVEL(fatal, Fatal)
} // namespace logging


@ -1,35 +1,34 @@
#include "sinks.h" #include "sinks.h"
#include <iostream> #include <iostream>
#include "Entry.h" #include "Entry.h"
using std::string;
using std::shared_ptr; using std::shared_ptr;
using std::string;
namespace logging { namespace logging {
LevelFilter::LevelFilter(shared_ptr<Sink> innerSink, Level minLevel) : LevelFilter::LevelFilter(shared_ptr<Sink> innerSink, Level minLevel) :
innerSink(innerSink), innerSink(innerSink),
minLevel(minLevel) minLevel(minLevel) {}
{}
void LevelFilter::receive(const Entry& entry) { void LevelFilter::receive(const Entry& entry) {
if (entry.level >= minLevel) { if (entry.level >= minLevel) {
innerSink->receive(entry); innerSink->receive(entry);
}
} }
StreamSink::StreamSink(shared_ptr<std::ostream> stream, shared_ptr<Formatter> formatter) :
stream(stream),
formatter(formatter)
{}
void StreamSink::receive(const Entry& entry) {
const string line = formatter->format(entry);
*stream << line << std::endl;
}
StdErrSink::StdErrSink(shared_ptr<Formatter> formatter) :
StreamSink(std::shared_ptr<std::ostream>(&std::cerr, [](void*) {}), formatter)
{}
} }
StreamSink::StreamSink(shared_ptr<std::ostream> stream, shared_ptr<Formatter> formatter) :
stream(stream),
formatter(formatter) {}
void StreamSink::receive(const Entry& entry) {
const string line = formatter->format(entry);
*stream << line << std::endl;
}
StdErrSink::StdErrSink(shared_ptr<Formatter> formatter) :
StreamSink(std::shared_ptr<std::ostream>(&std::cerr, [](void*) {}), formatter) {}
} // namespace logging


@ -1,33 +1,36 @@
#pragma once #pragma once
#include "Sink.h"
#include <memory> #include <memory>
#include "Formatter.h" #include "Formatter.h"
#include "Sink.h"
namespace logging { namespace logging {
enum class Level; enum class Level;
class LevelFilter : public Sink { class LevelFilter : public Sink {
public: public:
LevelFilter(std::shared_ptr<Sink> innerSink, Level minLevel); LevelFilter(std::shared_ptr<Sink> innerSink, Level minLevel);
void receive(const Entry& entry) override; void receive(const Entry& entry) override;
private:
std::shared_ptr<Sink> innerSink;
Level minLevel;
};
class StreamSink : public Sink { private:
public: std::shared_ptr<Sink> innerSink;
StreamSink(std::shared_ptr<std::ostream> stream, std::shared_ptr<Formatter> formatter); Level minLevel;
void receive(const Entry& entry) override; };
private:
std::shared_ptr<std::ostream> stream;
std::shared_ptr<Formatter> formatter;
};
class StdErrSink : public StreamSink { class StreamSink : public Sink {
public: public:
explicit StdErrSink(std::shared_ptr<Formatter> formatter); StreamSink(std::shared_ptr<std::ostream> stream, std::shared_ptr<Formatter> formatter);
}; void receive(const Entry& entry) override;
} private:
std::shared_ptr<std::ostream> stream;
std::shared_ptr<Formatter> formatter;
};
class StdErrSink : public StreamSink {
public:
explicit StdErrSink(std::shared_ptr<Formatter> formatter);
};
} // namespace logging


@ -1,52 +1,66 @@
#include "PhoneticRecognizer.h" #include "PhoneticRecognizer.h"
#include "time/Timeline.h"
#include "audio/AudioSegment.h"
#include "audio/SampleRateConverter.h"
#include "audio/processing.h"
#include "time/timedLogging.h"
using std::runtime_error; #include "audio/AudioSegment.h"
using std::unique_ptr; #include "audio/processing.h"
using std::string; #include "audio/SampleRateConverter.h"
#include "time/timedLogging.h"
#include "time/Timeline.h"
using boost::optional; using boost::optional;
using std::runtime_error;
using std::string;
using std::unique_ptr;
static lambda_unique_ptr<ps_decoder_t> createDecoder(optional<std::string> dialog) { static lambda_unique_ptr<ps_decoder_t> createDecoder(optional<std::string> dialog) {
UNUSED(dialog); UNUSED(dialog);
lambda_unique_ptr<cmd_ln_t> config( lambda_unique_ptr<cmd_ln_t> config(
cmd_ln_init( cmd_ln_init(
nullptr, ps_args(), true, nullptr,
ps_args(),
true,
// Set acoustic model // Set acoustic model
"-hmm", (getSphinxModelDirectory() / "acoustic-model").u8string().c_str(), "-hmm",
(getSphinxModelDirectory() / "acoustic-model").u8string().c_str(),
// Set phonetic language model // Set phonetic language model
"-allphone", (getSphinxModelDirectory() / "en-us-phone.lm.bin").u8string().c_str(), "-allphone",
"-allphone_ci", "yes", (getSphinxModelDirectory() / "en-us-phone.lm.bin").u8string().c_str(),
"-allphone_ci",
"yes",
// Set language model probability weight. // Set language model probability weight.
// Low values (<= 0.4) can lead to fluttering animation. // Low values (<= 0.4) can lead to fluttering animation.
// High values (>= 1.0) can lead to imprecise or freezing animation. // High values (>= 1.0) can lead to imprecise or freezing animation.
"-lw", "0.8", "-lw",
"0.8",
// Add noise against zero silence // Add noise against zero silence
// (see http://cmusphinx.sourceforge.net/wiki/faq#qwhy_my_accuracy_is_poor) // (see http://cmusphinx.sourceforge.net/wiki/faq#qwhy_my_accuracy_is_poor)
"-dither", "yes", "-dither",
"yes",
// Disable VAD -- we're doing that ourselves // Disable VAD -- we're doing that ourselves
"-remove_silence", "no", "-remove_silence",
"no",
// Perform per-utterance cepstral mean normalization // Perform per-utterance cepstral mean normalization
"-cmn", "batch", "-cmn",
"batch",
// The following settings are recommended at // The following settings are recommended at
// http://cmusphinx.sourceforge.net/wiki/phonemerecognition // http://cmusphinx.sourceforge.net/wiki/phonemerecognition
// Set beam width applied to every frame in Viterbi search // Set beam width applied to every frame in Viterbi search
"-beam", "1e-20", "-beam",
"1e-20",
// Set beam width applied to phone transitions // Set beam width applied to phone transitions
"-pbeam", "1e-20", "-pbeam",
nullptr), "1e-20",
[](cmd_ln_t* config) { cmd_ln_free_r(config); }); nullptr
),
[](cmd_ln_t* config) { cmd_ln_free_r(config); }
);
if (!config) throw runtime_error("Error creating configuration."); if (!config) throw runtime_error("Error creating configuration.");
lambda_unique_ptr<ps_decoder_t> decoder( lambda_unique_ptr<ps_decoder_t> decoder(ps_init(config.get()), [](ps_decoder_t* recognizer) {
ps_init(config.get()), ps_free(recognizer);
[](ps_decoder_t* recognizer) { ps_free(recognizer); }); });
if (!decoder) throw runtime_error("Error creating speech decoder."); if (!decoder) throw runtime_error("Error creating speech decoder.");
return decoder; return decoder;
@ -64,9 +78,8 @@ static Timeline<Phone> utteranceToPhones(
paddedTimeRange.grow(padding); paddedTimeRange.grow(padding);
paddedTimeRange.trim(audioClip.getTruncatedRange()); paddedTimeRange.trim(audioClip.getTruncatedRange());
const unique_ptr<AudioClip> clipSegment = audioClip.clone() const unique_ptr<AudioClip> clipSegment =
| segment(paddedTimeRange) audioClip.clone() | segment(paddedTimeRange) | resample(sphinxSampleRate);
| resample(sphinxSampleRate);
const auto audioBuffer = copyTo16bitBuffer(*clipSegment); const auto audioBuffer = copyTo16bitBuffer(*clipSegment);
// Detect phones (returned as words) // Detect phones (returned as words)
@ -109,5 +122,7 @@ BoundedTimeline<Phone> PhoneticRecognizer::recognizePhones(
int maxThreadCount, int maxThreadCount,
ProgressSink& progressSink ProgressSink& progressSink
) const { ) const {
return ::recognizePhones(inputAudioClip, dialog, &createDecoder, &utteranceToPhones, maxThreadCount, progressSink); return ::recognizePhones(
inputAudioClip, dialog, &createDecoder, &utteranceToPhones, maxThreadCount, progressSink
);
} }

View File

@ -1,7 +1,7 @@
#pragma once #pragma once
#include "Recognizer.h"
#include "pocketSphinxTools.h" #include "pocketSphinxTools.h"
#include "Recognizer.h"
class PhoneticRecognizer : public Recognizer { class PhoneticRecognizer : public Recognizer {
public: public:

View File

@ -1,30 +1,33 @@
#include "PocketSphinxRecognizer.h" #include "PocketSphinxRecognizer.h"
#include <regex>
#include <gsl_util.h> #include <gsl_util.h>
#include <regex>
#include "audio/AudioSegment.h" #include "audio/AudioSegment.h"
#include "audio/SampleRateConverter.h"
#include "languageModels.h"
#include "tokenization.h"
#include "g2p.h"
#include "time/ContinuousTimeline.h"
#include "audio/processing.h" #include "audio/processing.h"
#include "audio/SampleRateConverter.h"
#include "g2p.h"
#include "languageModels.h"
#include "time/ContinuousTimeline.h"
#include "time/timedLogging.h" #include "time/timedLogging.h"
#include "tokenization.h"
extern "C" { extern "C" {
#include <state_align_search.h> #include <state_align_search.h>
} }
using std::runtime_error;
using std::invalid_argument;
using std::unique_ptr;
using std::string;
using std::vector;
using std::map;
using std::filesystem::path;
using std::regex;
using std::regex_replace;
using boost::optional; using boost::optional;
using std::array; using std::array;
using std::invalid_argument;
using std::map;
using std::regex;
using std::regex_replace;
using std::runtime_error;
using std::string;
using std::unique_ptr;
using std::vector;
using std::filesystem::path;
bool dictionaryContains(dict_t& dictionary, const string& word) { bool dictionaryContains(dict_t& dictionary, const string& word) {
return dict_wordid(&dictionary, word.c_str()) != BAD_S3WID; return dict_wordid(&dictionary, word.c_str()) != BAD_S3WID;
@ -50,7 +53,9 @@ void addMissingDictionaryWords(const vector<string>& words, ps_decoder_t& decode
} }
for (auto it = missingPronunciations.begin(); it != missingPronunciations.end(); ++it) { for (auto it = missingPronunciations.begin(); it != missingPronunciations.end(); ++it) {
const bool isLast = it == --missingPronunciations.end(); const bool isLast = it == --missingPronunciations.end();
logging::infoFormat("Unknown word '{}'. Guessing pronunciation '{}'.", it->first, it->second); logging::infoFormat(
"Unknown word '{}'. Guessing pronunciation '{}'.", it->first, it->second
);
ps_add_word(&decoder, it->first.c_str(), it->second.c_str(), isLast); ps_add_word(&decoder, it->first.c_str(), it->second.c_str(), isLast);
} }
} }
@ -59,23 +64,24 @@ lambda_unique_ptr<ngram_model_t> createDefaultLanguageModel(ps_decoder_t& decode
path modelPath = getSphinxModelDirectory() / "en-us.lm.bin"; path modelPath = getSphinxModelDirectory() / "en-us.lm.bin";
lambda_unique_ptr<ngram_model_t> result( lambda_unique_ptr<ngram_model_t> result(
ngram_model_read(decoder.config, modelPath.u8string().c_str(), NGRAM_AUTO, decoder.lmath), ngram_model_read(decoder.config, modelPath.u8string().c_str(), NGRAM_AUTO, decoder.lmath),
[](ngram_model_t* lm) { ngram_model_free(lm); }); [](ngram_model_t* lm) { ngram_model_free(lm); }
);
if (!result) { if (!result) {
throw runtime_error(fmt::format("Error reading language model from {}.", modelPath.u8string())); throw runtime_error(
fmt::format("Error reading language model from {}.", modelPath.u8string())
);
} }
return result; return result;
} }
lambda_unique_ptr<ngram_model_t> createDialogLanguageModel( lambda_unique_ptr<ngram_model_t> createDialogLanguageModel(
ps_decoder_t& decoder, ps_decoder_t& decoder, const string& dialog
const string& dialog
) { ) {
// Split dialog into normalized words // Split dialog into normalized words
vector<string> words = tokenizeText( vector<string> words = tokenizeText(dialog, [&](const string& word) {
dialog, return dictionaryContains(*decoder.dict, word);
[&](const string& word) { return dictionaryContains(*decoder.dict, word); } });
);
// Add dialog-specific words to the dictionary // Add dialog-specific words to the dictionary
addMissingDictionaryWords(words, decoder); addMissingDictionaryWords(words, decoder);
@ -87,18 +93,16 @@ lambda_unique_ptr<ngram_model_t> createDialogLanguageModel(
} }
lambda_unique_ptr<ngram_model_t> createBiasedLanguageModel( lambda_unique_ptr<ngram_model_t> createBiasedLanguageModel(
ps_decoder_t& decoder, ps_decoder_t& decoder, const string& dialog
const string& dialog
) { ) {
auto defaultLanguageModel = createDefaultLanguageModel(decoder); auto defaultLanguageModel = createDefaultLanguageModel(decoder);
auto dialogLanguageModel = createDialogLanguageModel(decoder, dialog); auto dialogLanguageModel = createDialogLanguageModel(decoder, dialog);
constexpr int modelCount = 2; constexpr int modelCount = 2;
array<ngram_model_t*, modelCount> languageModels { array<ngram_model_t*, modelCount> languageModels{
defaultLanguageModel.get(), defaultLanguageModel.get(), dialogLanguageModel.get()
dialogLanguageModel.get()
}; };
array<const char*, modelCount> modelNames { "defaultLM", "dialogLM" }; array<const char*, modelCount> modelNames{"defaultLM", "dialogLM"};
array<float, modelCount> modelWeights { 0.1f, 0.9f }; array<float, modelCount> modelWeights{0.1f, 0.9f};
lambda_unique_ptr<ngram_model_t> result( lambda_unique_ptr<ngram_model_t> result(
ngram_model_set_init( ngram_model_set_init(
nullptr, nullptr,
@ -107,7 +111,8 @@ lambda_unique_ptr<ngram_model_t> createBiasedLanguageModel(
modelWeights.data(), modelWeights.data(),
modelCount modelCount
), ),
[](ngram_model_t* lm) { ngram_model_free(lm); }); [](ngram_model_t* lm) { ngram_model_free(lm); }
);
if (!result) { if (!result) {
throw runtime_error("Error creating biased language model."); throw runtime_error("Error creating biased language model.");
} }
@ -118,31 +123,40 @@ lambda_unique_ptr<ngram_model_t> createBiasedLanguageModel(
static lambda_unique_ptr<ps_decoder_t> createDecoder(optional<std::string> dialog) { static lambda_unique_ptr<ps_decoder_t> createDecoder(optional<std::string> dialog) {
lambda_unique_ptr<cmd_ln_t> config( lambda_unique_ptr<cmd_ln_t> config(
cmd_ln_init( cmd_ln_init(
nullptr, ps_args(), true, nullptr,
ps_args(),
true,
// Set acoustic model // Set acoustic model
"-hmm", (getSphinxModelDirectory() / "acoustic-model").u8string().c_str(), "-hmm",
(getSphinxModelDirectory() / "acoustic-model").u8string().c_str(),
// Set pronunciation dictionary // Set pronunciation dictionary
"-dict", (getSphinxModelDirectory() / "cmudict-en-us.dict").u8string().c_str(), "-dict",
(getSphinxModelDirectory() / "cmudict-en-us.dict").u8string().c_str(),
// Add noise against zero silence // Add noise against zero silence
// (see http://cmusphinx.sourceforge.net/wiki/faq#qwhy_my_accuracy_is_poor) // (see http://cmusphinx.sourceforge.net/wiki/faq#qwhy_my_accuracy_is_poor)
"-dither", "yes", "-dither",
"yes",
// Disable VAD -- we're doing that ourselves // Disable VAD -- we're doing that ourselves
"-remove_silence", "no", "-remove_silence",
"no",
// Perform per-utterance cepstral mean normalization // Perform per-utterance cepstral mean normalization
"-cmn", "batch", "-cmn",
nullptr), "batch",
[](cmd_ln_t* config) { cmd_ln_free_r(config); }); nullptr
),
[](cmd_ln_t* config) { cmd_ln_free_r(config); }
);
if (!config) throw runtime_error("Error creating configuration."); if (!config) throw runtime_error("Error creating configuration.");
lambda_unique_ptr<ps_decoder_t> decoder( lambda_unique_ptr<ps_decoder_t> decoder(ps_init(config.get()), [](ps_decoder_t* recognizer) {
ps_init(config.get()), ps_free(recognizer);
[](ps_decoder_t* recognizer) { ps_free(recognizer); }); });
if (!decoder) throw runtime_error("Error creating speech decoder."); if (!decoder) throw runtime_error("Error creating speech decoder.");
// Set language model // Set language model
lambda_unique_ptr<ngram_model_t> languageModel(dialog lambda_unique_ptr<ngram_model_t> languageModel(
? createBiasedLanguageModel(*decoder, *dialog) dialog ? createBiasedLanguageModel(*decoder, *dialog) : createDefaultLanguageModel(*decoder)
: createDefaultLanguageModel(*decoder)); );
ps_set_lm(decoder.get(), "lm", languageModel.get()); ps_set_lm(decoder.get(), "lm", languageModel.get());
ps_set_search(decoder.get(), "lm"); ps_set_search(decoder.get(), "lm");
@ -150,16 +164,15 @@ static lambda_unique_ptr<ps_decoder_t> createDecoder(optional<std::string> dialo
} }
optional<Timeline<Phone>> getPhoneAlignment( optional<Timeline<Phone>> getPhoneAlignment(
const vector<s3wid_t>& wordIds, const vector<s3wid_t>& wordIds, const vector<int16_t>& audioBuffer, ps_decoder_t& decoder
const vector<int16_t>& audioBuffer, ) {
ps_decoder_t& decoder)
{
if (wordIds.empty()) return boost::none; if (wordIds.empty()) return boost::none;
// Create alignment list // Create alignment list
lambda_unique_ptr<ps_alignment_t> alignment( lambda_unique_ptr<ps_alignment_t> alignment(
ps_alignment_init(decoder.d2p), ps_alignment_init(decoder.d2p),
[](ps_alignment_t* alignment) { ps_alignment_free(alignment); }); [](ps_alignment_t* alignment) { ps_alignment_free(alignment); }
);
if (!alignment) throw runtime_error("Error creating alignment."); if (!alignment) throw runtime_error("Error creating alignment.");
for (s3wid_t wordId : wordIds) { for (s3wid_t wordId : wordIds) {
// Add word. Initial value for duration is ignored. // Add word. Initial value for duration is ignored.
@ -172,7 +185,8 @@ optional<Timeline<Phone>> getPhoneAlignment(
acmod_t* acousticModel = decoder.acmod; acmod_t* acousticModel = decoder.acmod;
lambda_unique_ptr<ps_search_t> search( lambda_unique_ptr<ps_search_t> search(
state_align_search_init("state_align", decoder.config, acousticModel, alignment.get()), state_align_search_init("state_align", decoder.config, acousticModel, alignment.get()),
[](ps_search_t* search) { ps_search_free(search); }); [](ps_search_t* search) { ps_search_free(search); }
);
if (!search) throw runtime_error("Error creating search."); if (!search) throw runtime_error("Error creating search.");
// Start recognition // Start recognition
@ -190,7 +204,8 @@ optional<Timeline<Phone>> getPhoneAlignment(
const int16* nextSample = audioBuffer.data(); const int16* nextSample = audioBuffer.data();
size_t remainingSamples = audioBuffer.size(); size_t remainingSamples = audioBuffer.size();
const bool fullUtterance = true; const bool fullUtterance = true;
while (acmod_process_raw(acousticModel, &nextSample, &remainingSamples, fullUtterance) > 0) { while (acmod_process_raw(acousticModel, &nextSample, &remainingSamples, fullUtterance) > 0
) {
while (acousticModel->n_feat_frame > 0) { while (acousticModel->n_feat_frame > 0) {
ps_search_step(search.get(), acousticModel->output_frame); ps_search_step(search.get(), acousticModel->output_frame);
acmod_advance(acousticModel); acmod_advance(acousticModel);
@ -205,11 +220,8 @@ optional<Timeline<Phone>> getPhoneAlignment(
// Extract phones with timestamps // Extract phones with timestamps
char** phoneNames = decoder.dict->mdef->ciname; char** phoneNames = decoder.dict->mdef->ciname;
Timeline<Phone> result; Timeline<Phone> result;
for ( for (ps_alignment_iter_t* it = ps_alignment_phones(alignment.get()); it;
ps_alignment_iter_t* it = ps_alignment_phones(alignment.get()); it = ps_alignment_iter_next(it)) {
it;
it = ps_alignment_iter_next(it)
) {
// Get phone // Get phone
ps_alignment_entry_t* phoneEntry = ps_alignment_iter_get(it); ps_alignment_entry_t* phoneEntry = ps_alignment_iter_get(it);
const s3cipid_t phoneId = phoneEntry->id.pid.cipid; const s3cipid_t phoneId = phoneEntry->id.pid.cipid;
@ -231,16 +243,16 @@ optional<Timeline<Phone>> getPhoneAlignment(
return result; return result;
} }
// Some words have multiple pronunciations, one of which results in better animation than the others. // Some words have multiple pronunciations, one of which results in better animation than the
// This function returns the optimal pronunciation for a select set of these words. // others. This function returns the optimal pronunciation for a select set of these words.
string fixPronunciation(const string& word) { string fixPronunciation(const string& word) {
const static map<string, string> replacements { const static map<string, string> replacements{
{ "into(2)", "into" }, {"into(2)", "into"},
{ "to(2)", "to" }, {"to(2)", "to"},
{ "to(3)", "to" }, {"to(3)", "to"},
{ "today(2)", "today" }, {"today(2)", "today"},
{ "tomorrow(2)", "tomorrow" }, {"tomorrow(2)", "tomorrow"},
{ "tonight(2)", "tonight" } {"tonight(2)", "tonight"}
}; };
const auto pair = replacements.find(word); const auto pair = replacements.find(word);
@ -265,9 +277,8 @@ static Timeline<Phone> utteranceToPhones(
paddedTimeRange.grow(padding); paddedTimeRange.grow(padding);
paddedTimeRange.trim(audioClip.getTruncatedRange()); paddedTimeRange.trim(audioClip.getTruncatedRange());
const unique_ptr<AudioClip> clipSegment = audioClip.clone() const unique_ptr<AudioClip> clipSegment =
| segment(paddedTimeRange) audioClip.clone() | segment(paddedTimeRange) | resample(sphinxSampleRate);
| resample(sphinxSampleRate);
const auto audioBuffer = copyTo16bitBuffer(*clipSegment); const auto audioBuffer = copyTo16bitBuffer(*clipSegment);
// Get words // Get words
@ -307,8 +318,9 @@ static Timeline<Phone> utteranceToPhones(
#if BOOST_VERSION < 105600 // Support legacy syntax #if BOOST_VERSION < 105600 // Support legacy syntax
#define value_or get_value_or #define value_or get_value_or
#endif #endif
Timeline<Phone> utterancePhones = getPhoneAlignment(wordIds, audioBuffer, decoder) Timeline<Phone> utterancePhones =
.value_or(ContinuousTimeline<Phone>(clipSegment->getTruncatedRange(), Phone::Noise)); getPhoneAlignment(wordIds, audioBuffer, decoder)
.value_or(ContinuousTimeline<Phone>(clipSegment->getTruncatedRange(), Phone::Noise));
alignmentProgressSink.reportProgress(1.0); alignmentProgressSink.reportProgress(1.0);
utterancePhones.shift(paddedTimeRange.getStart()); utterancePhones.shift(paddedTimeRange.getStart());
@ -338,5 +350,6 @@ BoundedTimeline<Phone> PocketSphinxRecognizer::recognizePhones(
ProgressSink& progressSink ProgressSink& progressSink
) const { ) const {
return ::recognizePhones( return ::recognizePhones(
inputAudioClip, dialog, &createDecoder, &utteranceToPhones, maxThreadCount, progressSink); inputAudioClip, dialog, &createDecoder, &utteranceToPhones, maxThreadCount, progressSink
);
} }

View File

@ -1,7 +1,7 @@
#pragma once #pragma once
#include "Recognizer.h"
#include "pocketSphinxTools.h" #include "pocketSphinxTools.h"
#include "Recognizer.h"
class PocketSphinxRecognizer : public Recognizer { class PocketSphinxRecognizer : public Recognizer {
public: public:

View File

@ -2,8 +2,8 @@
#include "audio/AudioClip.h" #include "audio/AudioClip.h"
#include "core/Phone.h" #include "core/Phone.h"
#include "tools/progress.h"
#include "time/BoundedTimeline.h" #include "time/BoundedTimeline.h"
#include "tools/progress.h"
class Recognizer { class Recognizer {
public: public:
@ -15,4 +15,4 @@ public:
int maxThreadCount, int maxThreadCount,
ProgressSink& progressSink ProgressSink& progressSink
) const = 0; ) const = 0;
}; };

View File

@ -1,23 +1,25 @@
#include <g2p.h> #include <g2p.h>
#include <regex>
#include "tools/stringTools.h"
#include "logging/logging.h"
using std::vector; #include <regex>
using std::wstring;
using std::regex; #include "logging/logging.h"
using std::wregex; #include "tools/stringTools.h"
using std::invalid_argument; using std::invalid_argument;
using std::pair; using std::pair;
using std::regex;
using std::vector;
using std::wregex;
using std::wstring;
const vector<pair<wregex, wstring>>& getReplacementRules() { const vector<pair<wregex, wstring>>& getReplacementRules() {
static vector<pair<wregex, wstring>> rules { static vector<pair<wregex, wstring>> rules{
#include "g2pRules.cpp" #include "g2pRules.cpp"
// Turn bigrams into unigrams for easier conversion // Turn bigrams into unigrams for easier conversion
{ wregex(L"ôw"), L"Ω" }, {wregex(L"ôw"), L"Ω"},
{ wregex(L"öy"), L"ω" }, {wregex(L"öy"), L"ω"},
{ wregex(L"@r"), L"ɝ" } {wregex(L"@r"), L"ɝ"}
}; };
return rules; return rules;
} }
@ -64,8 +66,7 @@ Phone charToPhone(wchar_t c) {
case L'r': return Phone::R; case L'r': return Phone::R;
case L'l': return Phone::L; case L'l': return Phone::L;
case L'h': return Phone::HH; case L'h': return Phone::HH;
default: default: return Phone::Noise;
return Phone::Noise;
} }
} }

View File

@ -1,6 +1,7 @@
#pragma once #pragma once
#include <vector> #include <vector>
#include "core/Phone.h" #include "core/Phone.h"
std::vector<Phone> wordToPhones(const std::string& word); std::vector<Phone> wordToPhones(const std::string& word);

View File

@ -4,212 +4,253 @@
// Rules // Rules
// //
// get rid of some digraphs // get rid of some digraphs
{ wregex(L"ch"), L"ç" }, {wregex(L"ch"), L"ç"}, {wregex(L"sh"), L"$$"}, {wregex(L"ph"), L"f"}, {wregex(L"th"), L"+"},
{ wregex(L"sh"), L"$$" }, {wregex(L"qu"), L"kw"},
{ wregex(L"ph"), L"f" }, // and other spelling-level changes
{ wregex(L"th"), L"+" }, {wregex(L"w(r)"), L"$1"}, {wregex(L"w(ho)"), L"$1"}, {wregex(L"(w)h"), L"$1"},
{ wregex(L"qu"), L"kw" }, {wregex(L"(^r)h"), L"$1"}, {wregex(L"(x)h"), L"$1"},
// and other spelling-level changes {wregex(L"([aeiouäëïöüâêîôûùò@])h($)"), L"$1$2"},
{ wregex(L"w(r)"), L"$1" }, {wregex(L"(^e)x([aeiouäëïöüâêîôûùò@])"), L"$1gz$2"}, {wregex(L"x"), L"ks"}, {wregex(L"'"), L""},
{ wregex(L"w(ho)"), L"$1" }, // gh is particularly variable
{ wregex(L"(w)h"), L"$1" }, {wregex(L"gh([aeiouäëïöüâêîôûùò@])"), L"g$1"},
{ wregex(L"(^r)h"), L"$1" }, {wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])a(gh)"), L"$1ä$2"},
{ wregex(L"(x)h"), L"$1" }, {wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])e(gh)"), L"$1ë$2"},
{ wregex(L"([aeiouäëïöüâêîôûùò@])h($)"), L"$1$2" }, {wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])i(gh)"), L"$1ï$2"},
{ wregex(L"(^e)x([aeiouäëïöüâêîôûùò@])"), L"$1gz$2" }, {wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])o(gh)"), L"$1ö$2"},
{ wregex(L"x"), L"ks" }, {wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])u(gh)"), L"$1ü$2"},
{ wregex(L"'"), L"" }, {wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])â(gh)"), L"$1ä$2"},
// gh is particularly variable {wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])ê(gh)"), L"$1ë$2"},
{ wregex(L"gh([aeiouäëïöüâêîôûùò@])"), L"g$1" }, {wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])î(gh)"), L"$1ï$2"},
{ wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])a(gh)"), L"$1ä$2" }, { wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])e(gh)"), L"$1ë$2" }, { wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])i(gh)"), L"$1ï$2" }, { wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])o(gh)"), L"$1ö$2" }, { wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])u(gh)"), L"$1ü$2" }, { wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])â(gh)"), L"$1ä$2" }, { wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])ê(gh)"), L"$1ë$2" }, { wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])î(gh)"), L"$1ï$2" }, { wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])ô(gh)"), L"$1ö$2" }, { wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])û(gh)"), L"$1ü$2" }, {wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])ô(gh)"), L"$1ö$2"},
{ wregex(L"ough(t)"), L"ò$1" }, {wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])û(gh)"), L"$1ü$2"}, {wregex(L"ough(t)"), L"ò$1"},
{ wregex(L"augh(t)"), L"ò$1" }, {wregex(L"augh(t)"), L"ò$1"}, {wregex(L"ough"), L"ö"}, {wregex(L"gh"), L""},
{ wregex(L"ough"), L"ö" }, // unpronounceable combinations
{ wregex(L"gh"), L"" }, {wregex(L"(^)g(n)"), L"$1$2"}, {wregex(L"(^)k(n)"), L"$1$2"}, {wregex(L"(^)m(n)"), L"$1$2"},
// unpronounceable combinations {wregex(L"(^)p(t)"), L"$1$2"}, {wregex(L"(^)p(s)"), L"$1$2"}, {wregex(L"(^)t(m)"), L"$1$2"},
{ wregex(L"(^)g(n)"), L"$1$2" }, // medial y = i
{ wregex(L"(^)k(n)"), L"$1$2" }, {wregex(L"(^[bcdfghjklmnpqrstvwxyzç+$ñ])y($)"), L"$1ï$2"},
{ wregex(L"(^)m(n)"), L"$1$2" }, {wregex(L"(^[bcdfghjklmnpqrstvwxyzç+$ñ]{2})y($)"), L"$1ï$2"},
{ wregex(L"(^)p(t)"), L"$1$2" }, {wregex(L"(^[bcdfghjklmnpqrstvwxyzç+$ñ]{3})y($)"), L"$1ï$2"}, {wregex(L"ey"), L"ë"},
{ wregex(L"(^)p(s)"), L"$1$2" }, {wregex(L"ay"), L"ä"}, {wregex(L"oy"), L"öy"},
{ wregex(L"(^)t(m)"), L"$1$2" }, {wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])y([bcdfghjklmnpqrstvwxyzç+$ñ])"), L"$1i$2"},
// medial y = i {wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])y($)"), L"$1i$2"},
{ wregex(L"(^[bcdfghjklmnpqrstvwxyzç+$ñ])y($)"), L"$1ï$2" }, {wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])y(e$)"), L"$1i$2"},
{ wregex(L"(^[bcdfghjklmnpqrstvwxyzç+$ñ]{2})y($)"), L"$1ï$2" }, {wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ]{2})ie($)"), L"$1ï$2"},
{ wregex(L"(^[bcdfghjklmnpqrstvwxyzç+$ñ]{3})y($)"), L"$1ï$2" }, {wregex(L"(^[bcdfghjklmnpqrstvwxyzç+$ñ])ie($)"), L"$1ï$2"},
{ wregex(L"ey"), L"ë" }, // sSl can simplify
{ wregex(L"ay"), L"ä" }, {wregex(L"(s)t(l[aeiouäëïöüâêîôûùò@]$)"), L"$1$2"},
{ wregex(L"oy"), L"öy" }, // affrication of t + front vowel
{ wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])y([bcdfghjklmnpqrstvwxyzç+$ñ])"), L"$1i$2" }, {wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñaeiouäëïöüâêîôûùò@])ci([aeiouäëïöüâêîôûùò@])"), L"$1$$$2"},
{ wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])y($)"), L"$1i$2" }, {wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñaeiouäëïöüâêîôûùò@])ti([aeiouäëïöüâêîôûùò@])"), L"$1$$$2"},
{ wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])y(e$)"), L"$1i$2" }, {wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñaeiouäëïöüâêîôûùò@])tu([aeiouäëïöüâêîôûùò@])"), L"$1çu$2"},
{ wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ]{2})ie($)"), L"$1ï$2" }, {wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñaeiouäëïöüâêîôûùò@])tu([rl][aeiouäëïöüâêîôûùò@])"),
{ wregex(L"(^[bcdfghjklmnpqrstvwxyzç+$ñ])ie($)"), L"$1ï$2" }, L"$1çu$2"},
// sSl can simplify {wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])si(o)"), L"$1$$$2"},
{ wregex(L"(s)t(l[aeiouäëïöüâêîôûùò@]$)"), L"$1$2" }, {wregex(L"([aeiouäëïöüâêîôûùò@])si(o)"), L"$1j$2"},
// affrication of t + front vowel {wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])s(ur)"), L"$1$$$2"},
{ wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñaeiouäëïöüâêîôûùò@])ci([aeiouäëïöüâêîôûùò@])"), L"$1$$$2" }, {wregex(L"([aeiouäëïöüâêîôûùò@])s(ur)"), L"$1j$2"},
{ wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñaeiouäëïöüâêîôûùò@])ti([aeiouäëïöüâêîôûùò@])"), L"$1$$$2" }, {wregex(L"(k)s(u[aeiouäëïöüâêîôûùò@])"), L"$1$$$2"}, {wregex(L"(k)s(u[rl])"), L"$1$$$2"},
{ wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñaeiouäëïöüâêîôûùò@])tu([aeiouäëïöüâêîôûùò@])"), L"$1çu$2" }, // intervocalic s
{ wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñaeiouäëïöüâêîôûùò@])tu([rl][aeiouäëïöüâêîôûùò@])"), L"$1çu$2" }, {wregex(L"([eiou])s([aeiouäëïöüâêîôûùò@])"), L"$1z$2"},
{ wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])si(o)"), L"$1$$$2" }, // al to ol (do this before respelling)
{ wregex(L"([aeiouäëïöüâêîôûùò@])si(o)"), L"$1j$2" }, {wregex(L"a(ls)"), L"ò$1"}, {wregex(L"a(lr)"), L"ò$1"}, {wregex(L"a(l{2}$)"), L"ò$1"},
{ wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])s(ur)"), L"$1$$$2" }, {wregex(L"a(lm(?:[aeiouäëïöüâêîôûùò@])?$)"), L"ò$1"},
{ wregex(L"([aeiouäëïöüâêîôûùò@])s(ur)"), L"$1j$2" }, {wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])a(l[td+])"), L"$1ò$2"},
{ wregex(L"(k)s(u[aeiouäëïöüâêîôûùò@])"), L"$1$$$2" }, {wregex(L"(^)a(l[td+])"), L"$1ò$2"},
{ wregex(L"(k)s(u[rl])"), L"$1$$$2" }, {wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñaeiouäëïöüâêîôûùò@])al(k)"), L"$1ò$2"},
// intervocalic s // soft c and g
{ wregex(L"([eiou])s([aeiouäëïöüâêîôûùò@])"), L"$1z$2" }, {wregex(L"c([eiêîy])"), L"s$1"}, {wregex(L"c"), L"k"},
// al to ol (do this before respelling) {wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñaeiouäëïöüâêîôûùò@])ge(a)"), L"$1j$2"},
{ wregex(L"a(ls)"), L"ò$1" }, {wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñaeiouäëïöüâêîôûùò@])ge(o)"), L"$1j$2"},
{ wregex(L"a(lr)"), L"ò$1" }, {wregex(L"g([eiêîy])"), L"j$1"},
{ wregex(L"a(l{2}$)"), L"ò$1" }, // init/final guF was there just to harden the g
{ wregex(L"a(lm(?:[aeiouäëïöüâêîôûùò@])?$)"), L"ò$1" }, {wregex(L"(^)gu([eiêîy])"), L"$1g$2"}, {wregex(L"gu(e$)"), L"g$1"},
{ wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])a(l[td+])"), L"$1ò$2" }, // untangle reverse-written final liquids
{ wregex(L"(^)a(l[td+])"), L"$1ò$2" }, {wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])re($)"), L"$1@r$2"},
{ wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñaeiouäëïöüâêîôûùò@])al(k)"), L"$1ò$2" }, {wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])le($)"), L"$1@l$2"},
// soft c and g // vowels are long medially
{ wregex(L"c([eiêîy])"), L"s$1" }, {wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])a([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@])"),
{ wregex(L"c"), L"k" }, L"$1ä$2"},
{ wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñaeiouäëïöüâêîôûùò@])ge(a)"), L"$1j$2" }, {wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])e([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@])"),
{ wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñaeiouäëïöüâêîôûùò@])ge(o)"), L"$1j$2" }, L"$1ë$2"},
{ wregex(L"g([eiêîy])"), L"j$1" }, {wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])i([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@])"),
// init/final guF was there just to harden the g L"$1ï$2"},
{ wregex(L"(^)gu([eiêîy])"), L"$1g$2" }, {wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])o([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@])"),
{ wregex(L"gu(e$)"), L"g$1" }, L"$1ö$2"},
// untangle reverse-written final liquids {wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])u([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@])"),
{ wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])re($)"), L"$1@r$2" }, L"$1ü$2"},
{ wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])le($)"), L"$1@l$2" }, {wregex(L"(^)a([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@])"), L"$1ä$2"},
// vowels are long medially {wregex(L"(^)e([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@])"), L"$1ë$2"},
{ wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])a([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@])"), L"$1ä$2" }, { wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])e([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@])"), L"$1ë$2" }, { wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])i([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@])"), L"$1ï$2" }, { wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])o([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@])"), L"$1ö$2" }, { wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])u([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@])"), L"$1ü$2" }, {wregex(L"(^)i([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@])"), L"$1ï$2"},
{ wregex(L"(^)a([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@])"), L"$1ä$2" }, { wregex(L"(^)e([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@])"), L"$1ë$2" }, { wregex(L"(^)i([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@])"), L"$1ï$2" }, { wregex(L"(^)o([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@])"), L"$1ö$2" }, { wregex(L"(^)u([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@])"), L"$1ü$2" }, {wregex(L"(^)o([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@])"), L"$1ö$2"},
// and short before 2 consonants or a final one {wregex(L"(^)u([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@])"), L"$1ü$2"},
{ wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])a([bcdfghjklmnpqrstvwxyzç+$ñ]{2})"), L"$1â$2" }, { wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])e([bcdfghjklmnpqrstvwxyzç+$ñ]{2})"), L"$1ê$2" }, { wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])i([bcdfghjklmnpqrstvwxyzç+$ñ]{2})"), L"$1î$2" }, { wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])o([bcdfghjklmnpqrstvwxyzç+$ñ]{2})"), L"$1ô$2" }, { wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])u([bcdfghjklmnpqrstvwxyzç+$ñ]{2})"), L"$1û$2" }, // and short before 2 consonants or a final one
{ wregex(L"(^)a([bcdfghjklmnpqrstvwxyzç+$ñ]{2})"), L"$1â$2" }, { wregex(L"(^)e([bcdfghjklmnpqrstvwxyzç+$ñ]{2})"), L"$1ê$2" }, { wregex(L"(^)i([bcdfghjklmnpqrstvwxyzç+$ñ]{2})"), L"$1î$2" }, { wregex(L"(^)o([bcdfghjklmnpqrstvwxyzç+$ñ]{2})"), L"$1ô$2" }, { wregex(L"(^)u([bcdfghjklmnpqrstvwxyzç+$ñ]{2})"), L"$1û$2" }, {wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])a([bcdfghjklmnpqrstvwxyzç+$ñ]{2})"), L"$1â$2"},
{ wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])a([bcdfghjklmnpqrstvwxyzç+$ñ]$)"), L"$1â$2" }, { wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])e([bcdfghjklmnpqrstvwxyzç+$ñ]$)"), L"$1ê$2" }, { wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])i([bcdfghjklmnpqrstvwxyzç+$ñ]$)"), L"$1î$2" }, { wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])o([bcdfghjklmnpqrstvwxyzç+$ñ]$)"), L"$1ô$2" }, { wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])u([bcdfghjklmnpqrstvwxyzç+$ñ]$)"), L"$1û$2" }, {wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])e([bcdfghjklmnpqrstvwxyzç+$ñ]{2})"), L"$1ê$2"},
{ wregex(L"(^)a([bcdfghjklmnpqrstvwxyzç+$ñ]$)"), L"$1â$2" }, { wregex(L"(^)e([bcdfghjklmnpqrstvwxyzç+$ñ]$)"), L"$1ê$2" }, { wregex(L"(^)i([bcdfghjklmnpqrstvwxyzç+$ñ]$)"), L"$1î$2" }, { wregex(L"(^)o([bcdfghjklmnpqrstvwxyzç+$ñ]$)"), L"$1ô$2" }, { wregex(L"(^)u([bcdfghjklmnpqrstvwxyzç+$ñ]$)"), L"$1û$2" }, {wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])i([bcdfghjklmnpqrstvwxyzç+$ñ]{2})"), L"$1î$2"},
// special but general rules {wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])o([bcdfghjklmnpqrstvwxyzç+$ñ]{2})"), L"$1ô$2"},
{ wregex(L"î(nd$)"), L"ï$1" }, {wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])u([bcdfghjklmnpqrstvwxyzç+$ñ]{2})"), L"$1û$2"},
{ wregex(L"ô(s{2}$)"), L"ò$1" }, {wregex(L"(^)a([bcdfghjklmnpqrstvwxyzç+$ñ]{2})"), L"$1â$2"},
{ wregex(L"ô(g$)"), L"ò$1" }, {wregex(L"(^)e([bcdfghjklmnpqrstvwxyzç+$ñ]{2})"), L"$1ê$2"},
{ wregex(L"ô(f[bcdfghjklmnpqrstvwxyzç+$ñ])"), L"ò$1" }, {wregex(L"(^)i([bcdfghjklmnpqrstvwxyzç+$ñ]{2})"), L"$1î$2"},
{ wregex(L"ô(l[td+])"), L"ö$1" }, {wregex(L"(^)o([bcdfghjklmnpqrstvwxyzç+$ñ]{2})"), L"$1ô$2"},
{ wregex(L"(w)â(\\$)"), L"$1ò$2" }, {wregex(L"(^)u([bcdfghjklmnpqrstvwxyzç+$ñ]{2})"), L"$1û$2"},
{ wregex(L"(w)â((?:t)?ç)"), L"$1ò$2" }, {wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])a([bcdfghjklmnpqrstvwxyzç+$ñ]$)"), L"$1â$2"},
{ wregex(L"(w)â([tdns+])"), L"$1ô$2" }, {wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])e([bcdfghjklmnpqrstvwxyzç+$ñ]$)"), L"$1ê$2"},
// soft gn {wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])i([bcdfghjklmnpqrstvwxyzç+$ñ]$)"), L"$1î$2"},
{ wregex(L"îg([mnñ]$)"), L"ï$1" }, {wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])o([bcdfghjklmnpqrstvwxyzç+$ñ]$)"), L"$1ô$2"},
{ wregex(L"îg([mnñ][bcdfghjklmnpqrstvwxyzç+$ñ])"), L"ï$1" }, {wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñ])u([bcdfghjklmnpqrstvwxyzç+$ñ]$)"), L"$1û$2"},
{ wregex(L"(ei)g(n)"), L"$1$2" }, {wregex(L"(^)a([bcdfghjklmnpqrstvwxyzç+$ñ]$)"), L"$1â$2"},
// handle ous before removing -e {wregex(L"(^)e([bcdfghjklmnpqrstvwxyzç+$ñ]$)"), L"$1ê$2"},
{ wregex(L"ou(s$)"), L"@$1" }, {wregex(L"(^)i([bcdfghjklmnpqrstvwxyzç+$ñ]$)"), L"$1î$2"},
{ wregex(L"ou(s[bcdfghjklmnpqrstvwxyzç+$ñ])"), L"@$1" }, {wregex(L"(^)o([bcdfghjklmnpqrstvwxyzç+$ñ]$)"), L"$1ô$2"},
// remove silent -e {wregex(L"(^)u([bcdfghjklmnpqrstvwxyzç+$ñ]$)"), L"$1û$2"},
{ wregex(L"([aeiouäëïöüâêîôûùò@][bcdfghjklmnpqrstvwxyzç+$ñ](?:[bcdfghjklmnpqrstvwxyzç+$ñ])?(?:[bcdfghjklmnpqrstvwxyzç+$ñ])?)e($)"), L"$1$2" }, // special but general rules
// common suffixes that hide a silent e {wregex(L"î(nd$)"), L"ï$1"}, {wregex(L"ô(s{2}$)"), L"ò$1"}, {wregex(L"ô(g$)"), L"ò$1"},
{ wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñaeiouäëïöüâêîôûùò@]{3})ë(mênt$)"), L"$1$2" }, {wregex(L"ô(f[bcdfghjklmnpqrstvwxyzç+$ñ])"), L"ò$1"}, {wregex(L"ô(l[td+])"), L"ö$1"},
{ wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñaeiouäëïöüâêîôûùò@]{3})ë(nês{2}$)"), L"$1$2" }, {wregex(L"(w)â(\\$)"), L"$1ò$2"}, {wregex(L"(w)â((?:t)?ç)"), L"$1ò$2"},
{ wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñaeiouäëïöüâêîôûùò@]{3})ë(li$)"), L"$1$2" }, {wregex(L"(w)â([tdns+])"), L"$1ô$2"},
{ wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñaeiouäëïöüâêîôûùò@]{3})ë(fûl$)"), L"$1$2" }, // soft gn
// another common suffix {wregex(L"îg([mnñ]$)"), L"ï$1"}, {wregex(L"îg([mnñ][bcdfghjklmnpqrstvwxyzç+$ñ])"), L"ï$1"},
{ wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñaeiouäëïöüâêîôûùò@]{3})ï(nês{2}$)"), L"$1ë$2" }, {wregex(L"(ei)g(n)"), L"$1$2"},
// shorten (1-char) weak penults after a long // handle ous before removing -e
// note: this error breaks almost as many words as it fixes... {wregex(L"ou(s$)"), L"@$1"}, {wregex(L"ou(s[bcdfghjklmnpqrstvwxyzç+$ñ])"), L"@$1"},
{ wregex(L"([äëïöüäëïöüäëïöüùò@][bcdfghjklmnpqrstvwxyzç+$ñ](?:[bcdfghjklmnpqrstvwxyzç+$ñ])?(?:[bcdfghjklmnpqrstvwxyzç+$ñ])?)ä([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@]$)"), L"$1â$2" }, { wregex(L"([äëïöüäëïöüäëïöüùò@][bcdfghjklmnpqrstvwxyzç+$ñ](?:[bcdfghjklmnpqrstvwxyzç+$ñ])?(?:[bcdfghjklmnpqrstvwxyzç+$ñ])?)ë([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@]$)"), L"$1ê$2" }, { wregex(L"([äëïöüäëïöüäëïöüùò@][bcdfghjklmnpqrstvwxyzç+$ñ](?:[bcdfghjklmnpqrstvwxyzç+$ñ])?(?:[bcdfghjklmnpqrstvwxyzç+$ñ])?)ï([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@]$)"), L"$1î$2" }, { wregex(L"([äëïöüäëïöüäëïöüùò@][bcdfghjklmnpqrstvwxyzç+$ñ](?:[bcdfghjklmnpqrstvwxyzç+$ñ])?(?:[bcdfghjklmnpqrstvwxyzç+$ñ])?)ö([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@]$)"), L"$1ô$2" }, { wregex(L"([äëïöüäëïöüäëïöüùò@][bcdfghjklmnpqrstvwxyzç+$ñ](?:[bcdfghjklmnpqrstvwxyzç+$ñ])?(?:[bcdfghjklmnpqrstvwxyzç+$ñ])?)ü([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@]$)"), L"$1û$2" }, { wregex(L"([äëïöüäëïöüäëïöüùò@][bcdfghjklmnpqrstvwxyzç+$ñ](?:[bcdfghjklmnpqrstvwxyzç+$ñ])?(?:[bcdfghjklmnpqrstvwxyzç+$ñ])?)ä([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@]$)"), L"$1â$2" }, { wregex(L"([äëïöüäëïöüäëïöüùò@][bcdfghjklmnpqrstvwxyzç+$ñ](?:[bcdfghjklmnpqrstvwxyzç+$ñ])?(?:[bcdfghjklmnpqrstvwxyzç+$ñ])?)ë([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@]$)"), L"$1ê$2" }, { wregex(L"([äëïöüäëïöüäëïöüùò@][bcdfghjklmnpqrstvwxyzç+$ñ](?:[bcdfghjklmnpqrstvwxyzç+$ñ])?(?:[bcdfghjklmnpqrstvwxyzç+$ñ])?)ï([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@]$)"), L"$1î$2" }, { wregex(L"([äëïöüäëïöüäëïöüùò@][bcdfghjklmnpqrstvwxyzç+$ñ](?:[bcdfghjklmnpqrstvwxyzç+$ñ])?(?:[bcdfghjklmnpqrstvwxyzç+$ñ])?)ö([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@]$)"), L"$1ô$2" }, { wregex(L"([äëïöüäëïöüäëïöüùò@][bcdfghjklmnpqrstvwxyzç+$ñ](?:[bcdfghjklmnpqrstvwxyzç+$ñ])?(?:[bcdfghjklmnpqrstvwxyzç+$ñ])?)ü([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@]$)"), L"$1û$2" }, { wregex(L"([äëïöüäëïöüäëïöüùò@][bcdfghjklmnpqrstvwxyzç+$ñ](?:[bcdfghjklmnpqrstvwxyzç+$ñ])?(?:[bcdfghjklmnpqrstvwxyzç+$ñ])?)ä([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@]$)"), L"$1â$2" }, { wregex(L"([äëïöüäëïöüäëïöüùò@][bcdfghjklmnpqrstvwxyzç+$ñ](?:[bcdfghjklmnpqrstvwxyzç+$ñ])?(?:[bcdfghjklmnpqrstvwxyzç+$ñ])?)ë([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@]$)"), L"$1ê$2" }, { wregex(L"([äëïöüäëïöüäëïöüùò@][bcdfghjklmnpqrstvwxyzç+$ñ](?:[bcdfghjklmnpqrstvwxyzç+$ñ])?(?:[bcdfghjklmnpqrstvwxyzç+$ñ])?)ï([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@]$)"), L"$1î$2" }, { wregex(L"([äëïöüäëïöüäëïöüùò@][bcdfghjklmnpqrstvwxyzç+$ñ](?:[bcdfghjklmnpqrstvwxyzç+$ñ])?(?:[bcdfghjklmnpqrstvwxyzç+$ñ])?)ö([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@]$)"), L"$1ô$2" }, { wregex(L"([äëïöüäëïöüäëïöüùò@][bcdfghjklmnpqrstvwxyzç+$ñ](?:[bcdfghjklmnpqrstvwxyzç+$ñ])?(?:[bcdfghjklmnpqrstvwxyzç+$ñ])?)ü([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@]$)"), L"$1û$2" }, // remove silent -e
// double vowels {wregex(
{ wregex(L"eau"), L"ö" }, L"([aeiouäëïöüâêîôûùò@][bcdfghjklmnpqrstvwxyzç+$ñ](?:[bcdfghjklmnpqrstvwxyzç+$ñ])?(?:[bcdfghjklmnpqrstvwxyzç+$ñ])?)e($)"
{ wregex(L"ai"), L"ä" }, ),
{ wregex(L"au"), L"ò" }, L"$1$2"},
{ wregex(L"âw"), L"ò" }, // common suffixes that hide a silent e
{ wregex(L"e{2}"), L"ë" }, {wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñaeiouäëïöüâêîôûùò@]{3})ë(mênt$)"), L"$1$2"},
{ wregex(L"ea"), L"ë" }, {wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñaeiouäëïöüâêîôûùò@]{3})ë(nês{2}$)"), L"$1$2"},
{ wregex(L"(s)ei"), L"$1ë" }, {wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñaeiouäëïöüâêîôûùò@]{3})ë(li$)"), L"$1$2"},
{ wregex(L"ei"), L"ä" }, {wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñaeiouäëïöüâêîôûùò@]{3})ë(fûl$)"), L"$1$2"},
{ wregex(L"eo"), L"ë@" }, // another common suffix
{ wregex(L"êw"), L"ü" }, {wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñaeiouäëïöüâêîôûùò@]{3})ï(nês{2}$)"), L"$1ë$2"},
{ wregex(L"eu"), L"ü" }, // shorten (1-char) weak penults after a long
{ wregex(L"ie"), L"ë" }, // note: this error breaks almost as many words as it fixes...
{ wregex(L"(i)[aeiouäëïöüâêîôûùò@]"), L"$1@" }, {wregex(
{ wregex(L"(^[bcdfghjklmnpqrstvwxyzç+$ñ](?:[bcdfghjklmnpqrstvwxyzç+$ñ])?)i"), L"$1ï" }, L"([äëïöüäëïöüäëïöüùò@][bcdfghjklmnpqrstvwxyzç+$ñ](?:[bcdfghjklmnpqrstvwxyzç+$ñ])?(?:[bcdfghjklmnpqrstvwxyzç+$ñ])?)ä([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@]$)"
{ wregex(L"i(@)"), L"ë$1" }, ),
{ wregex(L"oa"), L"ö" }, L"$1â$2"},
{ wregex(L"oe($)"), L"ö$1" }, {wregex(
{ wregex(L"o{2}(k)"), L"ù$1" }, L"([äëïöüäëïöüäëïöüùò@][bcdfghjklmnpqrstvwxyzç+$ñ](?:[bcdfghjklmnpqrstvwxyzç+$ñ])?(?:[bcdfghjklmnpqrstvwxyzç+$ñ])?)ë([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@]$)"
{ wregex(L"o{2}"), L"u" }, ),
{ wregex(L"oul(d$)"), L"ù$1" }, L"$1ê$2"},
{ wregex(L"ou"), L"ôw" }, {wregex(
{ wregex(L"oi"), L"öy" }, L"([äëïöüäëïöüäëïöüùò@][bcdfghjklmnpqrstvwxyzç+$ñ](?:[bcdfghjklmnpqrstvwxyzç+$ñ])?(?:[bcdfghjklmnpqrstvwxyzç+$ñ])?)ï([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@]$)"
{ wregex(L"ua"), L"ü@" }, ),
{ wregex(L"ue"), L"u" }, L"$1î$2"},
{ wregex(L"ui"), L"u" }, {wregex(
{ wregex(L"ôw($)"), L"ö$1" }, L"([äëïöüäëïöüäëïöüùò@][bcdfghjklmnpqrstvwxyzç+$ñ](?:[bcdfghjklmnpqrstvwxyzç+$ñ])?(?:[bcdfghjklmnpqrstvwxyzç+$ñ])?)ö([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@]$)"
// those pesky final syllabics ),
{ wregex(L"([aeiouäëïöüâêîôûùò@][bcdfghjklmnpqrstvwxyzç+$ñ](?:[aeiouäëïöüâêîôûùò@])?)[aeiouäëïöüâêîôûùò@](l$)"), L"$1@$2" }, L"$1ô$2"},
{ wregex(L"([aeiouäëïöüâêîôûùò@][bcdfghjklmnpqrstvwxyzç+$ñ](?:[bcdfghjklmnpqrstvwxyzç+$ñ])?)ê(n$)"), L"$1@$2" }, {wregex(
{ wregex(L"([aeiouäëïöüâêîôûùò@][bcdfghjklmnpqrstvwxyzç+$ñ](?:[bcdfghjklmnpqrstvwxyzç+$ñ])?)î(n$)"), L"$1@$2" }, L"([äëïöüäëïöüäëïöüùò@][bcdfghjklmnpqrstvwxyzç+$ñ](?:[bcdfghjklmnpqrstvwxyzç+$ñ])?(?:[bcdfghjklmnpqrstvwxyzç+$ñ])?)ü([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@]$)"
{ wregex(L"([aeiouäëïöüâêîôûùò@][bcdfghjklmnpqrstvwxyzç+$ñ](?:[bcdfghjklmnpqrstvwxyzç+$ñ])?)â(n$)"), L"$1@$2" }, ),
{ wregex(L"([aeiouäëïöüâêîôûùò@][bcdfghjklmnpqrstvwxyzç+$ñ](?:[bcdfghjklmnpqrstvwxyzç+$ñ])?)ô(n$)"), L"$1@$2" }, L"$1û$2"},
// suffix simplifications {wregex(
{ wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñaeiouäëïöüâêîôûùò@]{3})[aâä](b@l$)"), L"$1@$2" }, L"([äëïöüäëïöüäëïöüùò@][bcdfghjklmnpqrstvwxyzç+$ñ](?:[bcdfghjklmnpqrstvwxyzç+$ñ])?(?:[bcdfghjklmnpqrstvwxyzç+$ñ])?)ä([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@]$)"
{ wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñaeiouäëïöüâêîôûùò@]l)ë(@n$)"), L"$1y$2" }, ),
{ wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñaeiouäëïöüâêîôûùò@]n)ë(@n$)"), L"$1y$2" }, L"$1â$2"},
// unpronounceable finals {wregex(
{ wregex(L"(m)b($)"), L"$1$2" }, L"([äëïöüäëïöüäëïöüùò@][bcdfghjklmnpqrstvwxyzç+$ñ](?:[bcdfghjklmnpqrstvwxyzç+$ñ])?(?:[bcdfghjklmnpqrstvwxyzç+$ñ])?)ë([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@]$)"
{ wregex(L"(m)n($)"), L"$1$2" }, ),
// color the final vowels L"$1ê$2"},
{ wregex(L"a($)"), L"@$1" }, {wregex(
{ wregex(L"e($)"), L"ë$1" }, L"([äëïöüäëïöüäëïöüùò@][bcdfghjklmnpqrstvwxyzç+$ñ](?:[bcdfghjklmnpqrstvwxyzç+$ñ])?(?:[bcdfghjklmnpqrstvwxyzç+$ñ])?)ï([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@]$)"
{ wregex(L"i($)"), L"ë$1" }, ),
{ wregex(L"o($)"), L"ö$1" }, L"$1î$2"},
// vowels before r V=aeiouäëïöüâêîôûùò@ {wregex(
{ wregex(L"ôw(r[bcdfghjklmnpqrstvwxyzç+$ñaeiouäëïöüâêîôûùò@])"), L"ö$1" }, L"([äëïöüäëïöüäëïöüùò@][bcdfghjklmnpqrstvwxyzç+$ñ](?:[bcdfghjklmnpqrstvwxyzç+$ñ])?(?:[bcdfghjklmnpqrstvwxyzç+$ñ])?)ö([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@]$)"
{ wregex(L"ô(r)"), L"ö$1" }, ),
{ wregex(L"ò(r)"), L"ö$1" }, L"$1ô$2"},
{ wregex(L"(w)â(r[bcdfghjklmnpqrstvwxyzç+$ñ])"), L"$1ö$2" }, {wregex(
{ wregex(L"(w)â(r$)"), L"$1ö$2" }, L"([äëïöüäëïöüäëïöüùò@][bcdfghjklmnpqrstvwxyzç+$ñ](?:[bcdfghjklmnpqrstvwxyzç+$ñ])?(?:[bcdfghjklmnpqrstvwxyzç+$ñ])?)ü([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@]$)"
{ wregex(L"ê(r{2})"), L"ä$1" }, ),
{ wregex(L"ë(r[iîï][bcdfghjklmnpqrstvwxyzç+$ñ])"), L"ä$1" }, L"$1û$2"},
{ wregex(L"â(r{2})"), L"ä$1" }, {wregex(
{ wregex(L"â(r[bcdfghjklmnpqrstvwxyzç+$ñ])"), L"ô$1" }, L"([äëïöüäëïöüäëïöüùò@][bcdfghjklmnpqrstvwxyzç+$ñ](?:[bcdfghjklmnpqrstvwxyzç+$ñ])?(?:[bcdfghjklmnpqrstvwxyzç+$ñ])?)ä([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@]$)"
{ wregex(L"â(r$)"), L"ô$1" }, ),
{ wregex(L"â(r)"), L"ä$1" }, L"$1â$2"},
{ wregex(L"ê(r)"), L"@$1" }, {wregex(
{ wregex(L"î(r)"), L"@$1" }, L"([äëïöüäëïöüäëïöüùò@][bcdfghjklmnpqrstvwxyzç+$ñ](?:[bcdfghjklmnpqrstvwxyzç+$ñ])?(?:[bcdfghjklmnpqrstvwxyzç+$ñ])?)ë([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@]$)"
{ wregex(L"û(r)"), L"@$1" }, ),
{ wregex(L"ù(r)"), L"@$1" }, L"$1ê$2"},
// handle ng {wregex(
{ wregex(L"ng([fs$+])"), L"ñ$1" }, L"([äëïöüäëïöüäëïöüùò@][bcdfghjklmnpqrstvwxyzç+$ñ](?:[bcdfghjklmnpqrstvwxyzç+$ñ])?(?:[bcdfghjklmnpqrstvwxyzç+$ñ])?)ï([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@]$)"
{ wregex(L"ng([bdg])"), L"ñ$1" }, ),
{ wregex(L"ng([ptk])"), L"ñ$1" }, L"$1î$2"},
{ wregex(L"ng($)"), L"ñ$1" }, {wregex(
{ wregex(L"n(g)"), L"ñ$1" }, L"([äëïöüäëïöüäëïöüùò@][bcdfghjklmnpqrstvwxyzç+$ñ](?:[bcdfghjklmnpqrstvwxyzç+$ñ])?(?:[bcdfghjklmnpqrstvwxyzç+$ñ])?)ö([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@]$)"
{ wregex(L"n(k)"), L"ñ$1" }, ),
{ wregex(L"ô(ñ)"), L"ò$1" }, L"$1ô$2"},
{ wregex(L"â(ñ)"), L"ä$1" }, {wregex(
// really a morphophonological rule, but it's cute L"([äëïöüäëïöüäëïöüùò@][bcdfghjklmnpqrstvwxyzç+$ñ](?:[bcdfghjklmnpqrstvwxyzç+$ñ])?(?:[bcdfghjklmnpqrstvwxyzç+$ñ])?)ü([bcdfghjklmnpqrstvwxyzç+$ñ][aeiouäëïöüâêîôûùò@]$)"
{ wregex(L"([bdg])s($)"), L"$1z$2" }, ),
{ wregex(L"s(m$)"), L"z$1" }, L"$1û$2"},
// double consonants // double vowels
{ wregex(L"s(s)"), L"$1" }, {wregex(L"eau"), L"ö"}, {wregex(L"ai"), L"ä"}, {wregex(L"au"), L"ò"}, {wregex(L"âw"), L"ò"},
{ wregex(L"s(\\$)"), L"$1" }, {wregex(L"e{2}"), L"ë"}, {wregex(L"ea"), L"ë"}, {wregex(L"(s)ei"), L"$1ë"},
{ wregex(L"t(t)"), L"$1" }, {wregex(L"ei"), L"ä"}, {wregex(L"eo"), L"ë@"}, {wregex(L"êw"), L"ü"}, {wregex(L"eu"), L"ü"},
{ wregex(L"t(ç)"), L"$1" }, {wregex(L"ie"), L"ë"}, {wregex(L"(i)[aeiouäëïöüâêîôûùò@]"), L"$1@"},
{ wregex(L"p(p)"), L"$1" }, {wregex(L"(^[bcdfghjklmnpqrstvwxyzç+$ñ](?:[bcdfghjklmnpqrstvwxyzç+$ñ])?)i"), L"$1ï"},
{ wregex(L"k(k)"), L"$1" }, {wregex(L"i(@)"), L"ë$1"}, {wregex(L"oa"), L"ö"}, {wregex(L"oe($)"), L"ö$1"},
{ wregex(L"b(b)"), L"$1" }, {wregex(L"o{2}(k)"), L"ù$1"}, {wregex(L"o{2}"), L"u"}, {wregex(L"oul(d$)"), L"ù$1"},
{ wregex(L"d(d)"), L"$1" }, {wregex(L"ou"), L"ôw"}, {wregex(L"oi"), L"öy"}, {wregex(L"ua"), L"ü@"}, {wregex(L"ue"), L"u"},
{ wregex(L"d(j)"), L"$1" }, {wregex(L"ui"), L"u"}, {wregex(L"ôw($)"), L"ö$1"},
{ wregex(L"g(g)"), L"$1" }, // those pesky final syllabics
{ wregex(L"n(n)"), L"$1" }, {wregex(
{ wregex(L"m(m)"), L"$1" }, L"([aeiouäëïöüâêîôûùò@][bcdfghjklmnpqrstvwxyzç+$ñ](?:[aeiouäëïöüâêîôûùò@])?)[aeiouäëïöüâêîôûùò@](l$)"
{ wregex(L"r(r)"), L"$1" }, ),
{ wregex(L"l(l)"), L"$1" }, L"$1@$2"},
{ wregex(L"f(f)"), L"$1" }, {wregex(
{ wregex(L"z(z)"), L"$1" }, L"([aeiouäëïöüâêîôûùò@][bcdfghjklmnpqrstvwxyzç+$ñ](?:[bcdfghjklmnpqrstvwxyzç+$ñ])?)ê(n$)"
// There are a number of cases not covered by these rules. ),
// Let's add some reasonable fallback rules. L"$1@$2"},
{ wregex(L"a"), L"â" }, {wregex(
{ wregex(L"e"), L"@" }, L"([aeiouäëïöüâêîôûùò@][bcdfghjklmnpqrstvwxyzç+$ñ](?:[bcdfghjklmnpqrstvwxyzç+$ñ])?)î(n$)"
{ wregex(L"i"), L"ë" }, ),
{ wregex(L"o"), L"ö" }, L"$1@$2"},
{ wregex(L"q"), L"k" }, {wregex(
L"([aeiouäëïöüâêîôûùò@][bcdfghjklmnpqrstvwxyzç+$ñ](?:[bcdfghjklmnpqrstvwxyzç+$ñ])?)â(n$)"
),
L"$1@$2"},
{wregex(
L"([aeiouäëïöüâêîôûùò@][bcdfghjklmnpqrstvwxyzç+$ñ](?:[bcdfghjklmnpqrstvwxyzç+$ñ])?)ô(n$)"
),
L"$1@$2"},
// suffix simplifications
{wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñaeiouäëïöüâêîôûùò@]{3})[aâä](b@l$)"), L"$1@$2"},
{wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñaeiouäëïöüâêîôûùò@]l)ë(@n$)"), L"$1y$2"},
{wregex(L"([bcdfghjklmnpqrstvwxyzç+$ñaeiouäëïöüâêîôûùò@]n)ë(@n$)"), L"$1y$2"},
// unpronounceable finals
{wregex(L"(m)b($)"), L"$1$2"}, {wregex(L"(m)n($)"), L"$1$2"},
// color the final vowels
{wregex(L"a($)"), L"@$1"}, {wregex(L"e($)"), L"ë$1"}, {wregex(L"i($)"), L"ë$1"},
{wregex(L"o($)"), L"ö$1"},
// vowels before r V=aeiouäëïöüâêîôûùò@
{wregex(L"ôw(r[bcdfghjklmnpqrstvwxyzç+$ñaeiouäëïöüâêîôûùò@])"), L"ö$1"},
{wregex(L"ô(r)"), L"ö$1"}, {wregex(L"ò(r)"), L"ö$1"},
{wregex(L"(w)â(r[bcdfghjklmnpqrstvwxyzç+$ñ])"), L"$1ö$2"}, {wregex(L"(w)â(r$)"), L"$1ö$2"},
{wregex(L"ê(r{2})"), L"ä$1"}, {wregex(L"ë(r[iîï][bcdfghjklmnpqrstvwxyzç+$ñ])"), L"ä$1"},
{wregex(L"â(r{2})"), L"ä$1"}, {wregex(L"â(r[bcdfghjklmnpqrstvwxyzç+$ñ])"), L"ô$1"},
{wregex(L"â(r$)"), L"ô$1"}, {wregex(L"â(r)"), L"ä$1"}, {wregex(L"ê(r)"), L"@$1"},
{wregex(L"î(r)"), L"@$1"}, {wregex(L"û(r)"), L"@$1"}, {wregex(L"ù(r)"), L"@$1"},
// handle ng
{wregex(L"ng([fs$+])"), L"ñ$1"}, {wregex(L"ng([bdg])"), L"ñ$1"}, {wregex(L"ng([ptk])"), L"ñ$1"},
{wregex(L"ng($)"), L"ñ$1"}, {wregex(L"n(g)"), L"ñ$1"}, {wregex(L"n(k)"), L"ñ$1"},
{wregex(L"ô(ñ)"), L"ò$1"}, {wregex(L"â(ñ)"), L"ä$1"},
// really a morphophonological rule, but it's cute
{wregex(L"([bdg])s($)"), L"$1z$2"}, {wregex(L"s(m$)"), L"z$1"},
// double consonants
{wregex(L"s(s)"), L"$1"}, {wregex(L"s(\\$)"), L"$1"}, {wregex(L"t(t)"), L"$1"},
{wregex(L"t(ç)"), L"$1"}, {wregex(L"p(p)"), L"$1"}, {wregex(L"k(k)"), L"$1"},
{wregex(L"b(b)"), L"$1"}, {wregex(L"d(d)"), L"$1"}, {wregex(L"d(j)"), L"$1"},
{wregex(L"g(g)"), L"$1"}, {wregex(L"n(n)"), L"$1"}, {wregex(L"m(m)"), L"$1"},
{wregex(L"r(r)"), L"$1"}, {wregex(L"l(l)"), L"$1"}, {wregex(L"f(f)"), L"$1"},
{wregex(L"z(z)"), L"$1"},
// There are a number of cases not covered by these rules.
// Let's add some reasonable fallback rules.
{wregex(L"a"), L"â"}, {wregex(L"e"), L"@"}, {wregex(L"i"), L"ë"}, {wregex(L"o"), L"ö"},
{wregex(L"q"), L"k"},

View File

@ -1,22 +1,25 @@
#include "languageModels.h" #include "languageModels.h"
#include <boost/range/adaptor/map.hpp>
#include <vector>
#include <regex>
#include <map>
#include <tuple>
#include "tools/platformTools.h"
#include <fstream>
#include "core/appInfo.h"
#include <cmath>
#include <gsl_util.h> #include <gsl_util.h>
using std::string; #include <boost/range/adaptor/map.hpp>
using std::vector; #include <cmath>
using std::regex; #include <fstream>
using std::map; #include <map>
using std::tuple; #include <regex>
using std::get; #include <tuple>
#include <vector>
#include "core/appInfo.h"
#include "tools/platformTools.h"
using std::endl; using std::endl;
using std::get;
using std::map;
using std::regex;
using std::string;
using std::tuple;
using std::vector;
using std::filesystem::path; using std::filesystem::path;
using Unigram = string; using Unigram = string;
@ -50,9 +53,7 @@ map<Trigram, int> getTrigramCounts(const vector<string>& words) {
} }
map<Unigram, double> getUnigramProbabilities( map<Unigram, double> getUnigramProbabilities(
const vector<string>& words, const vector<string>& words, const map<Unigram, int>& unigramCounts, const double deflator
const map<Unigram, int>& unigramCounts,
const double deflator
) { ) {
map<Unigram, double> unigramProbabilities; map<Unigram, double> unigramProbabilities;
for (const auto& pair : unigramCounts) { for (const auto& pair : unigramCounts) {
@ -97,8 +98,8 @@ map<Unigram, double> getUnigramBackoffWeights(
const map<Unigram, int>& unigramCounts, const map<Unigram, int>& unigramCounts,
const map<Unigram, double>& unigramProbabilities, const map<Unigram, double>& unigramProbabilities,
const map<Bigram, int>& bigramCounts, const map<Bigram, int>& bigramCounts,
const double discountMass) const double discountMass
{ ) {
map<Unigram, double> unigramBackoffWeights; map<Unigram, double> unigramBackoffWeights;
for (const Unigram& unigram : unigramCounts | boost::adaptors::map_keys) { for (const Unigram& unigram : unigramCounts | boost::adaptors::map_keys) {
double denominator = 1; double denominator = 1;
@ -116,8 +117,8 @@ map<Bigram, double> getBigramBackoffWeights(
const map<Bigram, int>& bigramCounts, const map<Bigram, int>& bigramCounts,
const map<Bigram, double>& bigramProbabilities, const map<Bigram, double>& bigramProbabilities,
const map<Trigram, int>& trigramCounts, const map<Trigram, int>& trigramCounts,
const double discountMass) const double discountMass
{ ) {
map<Bigram, double> bigramBackoffWeights; map<Bigram, double> bigramBackoffWeights;
for (const Bigram& bigram : bigramCounts | boost::adaptors::map_keys) { for (const Bigram& bigram : bigramCounts | boost::adaptors::map_keys) {
double denominator = 1; double denominator = 1;
@ -163,24 +164,22 @@ void createLanguageModelFile(const vector<string>& words, const path& filePath)
file.precision(4); file.precision(4);
file << "\\1-grams:" << endl; file << "\\1-grams:" << endl;
for (const Unigram& unigram : unigramCounts | boost::adaptors::map_keys) { for (const Unigram& unigram : unigramCounts | boost::adaptors::map_keys) {
file << log10(unigramProbabilities.at(unigram)) file << log10(unigramProbabilities.at(unigram)) << " " << unigram << " "
<< " " << unigram << log10(unigramBackoffWeights.at(unigram)) << endl;
<< " " << log10(unigramBackoffWeights.at(unigram)) << endl;
} }
file << endl; file << endl;
file << "\\2-grams:" << endl; file << "\\2-grams:" << endl;
for (const Bigram& bigram : bigramCounts | boost::adaptors::map_keys) { for (const Bigram& bigram : bigramCounts | boost::adaptors::map_keys) {
file << log10(bigramProbabilities.at(bigram)) file << log10(bigramProbabilities.at(bigram)) << " " << get<0>(bigram) << " "
<< " " << get<0>(bigram) << " " << get<1>(bigram) << get<1>(bigram) << " " << log10(bigramBackoffWeights.at(bigram)) << endl;
<< " " << log10(bigramBackoffWeights.at(bigram)) << endl;
} }
file << endl; file << endl;
file << "\\3-grams:" << endl; file << "\\3-grams:" << endl;
for (const Trigram& trigram : trigramCounts | boost::adaptors::map_keys) { for (const Trigram& trigram : trigramCounts | boost::adaptors::map_keys) {
file << log10(trigramProbabilities.at(trigram)) file << log10(trigramProbabilities.at(trigram)) << " " << get<0>(trigram) << " "
<< " " << get<0>(trigram) << " " << get<1>(trigram) << " " << get<2>(trigram) << endl; << get<1>(trigram) << " " << get<2>(trigram) << endl;
} }
file << endl; file << endl;
@ -188,14 +187,16 @@ void createLanguageModelFile(const vector<string>& words, const path& filePath)
} }
lambda_unique_ptr<ngram_model_t> createLanguageModel( lambda_unique_ptr<ngram_model_t> createLanguageModel(
const vector<string>& words, const vector<string>& words, ps_decoder_t& decoder
ps_decoder_t& decoder
) { ) {
path tempFilePath = getTempFilePath(); path tempFilePath = getTempFilePath();
createLanguageModelFile(words, tempFilePath); createLanguageModelFile(words, tempFilePath);
auto deleteTempFile = gsl::finally([&]() { std::filesystem::remove(tempFilePath); }); auto deleteTempFile = gsl::finally([&]() { std::filesystem::remove(tempFilePath); });
return lambda_unique_ptr<ngram_model_t>( return lambda_unique_ptr<ngram_model_t>(
ngram_model_read(decoder.config, tempFilePath.u8string().c_str(), NGRAM_ARPA, decoder.lmath), ngram_model_read(
[](ngram_model_t* lm) { ngram_model_free(lm); }); decoder.config, tempFilePath.u8string().c_str(), NGRAM_ARPA, decoder.lmath
),
[](ngram_model_t* lm) { ngram_model_free(lm); }
);
} }

View File

@ -1,14 +1,14 @@
#pragma once #pragma once
#include <vector> #include <vector>
#include "tools/tools.h" #include "tools/tools.h"
extern "C" { extern "C" {
#include <pocketsphinx.h>
#include <ngram_search.h> #include <ngram_search.h>
#include <pocketsphinx.h>
} }
lambda_unique_ptr<ngram_model_t> createLanguageModel( lambda_unique_ptr<ngram_model_t> createLanguageModel(
const std::vector<std::string>& words, const std::vector<std::string>& words, ps_decoder_t& decoder
ps_decoder_t& decoder
); );

View File

@ -1,43 +1,39 @@
#include "pocketSphinxTools.h" #include "pocketSphinxTools.h"
#include "tools/platformTools.h"
#include <regex> #include <regex>
#include "audio/DcOffset.h" #include "audio/DcOffset.h"
#include "audio/voiceActivityDetection.h" #include "audio/voiceActivityDetection.h"
#include "tools/parallel.h"
#include "tools/ObjectPool.h"
#include "time/timedLogging.h" #include "time/timedLogging.h"
#include "tools/ObjectPool.h"
#include "tools/parallel.h"
#include "tools/platformTools.h"
extern "C" { extern "C" {
#include <sphinxbase/err.h>
#include <pocketsphinx_internal.h>
#include <ngram_search.h> #include <ngram_search.h>
#include <pocketsphinx_internal.h>
#include <sphinxbase/err.h>
} }
using std::runtime_error;
-using std::invalid_argument;
-using std::unique_ptr;
-using std::string;
-using std::vector;
-using std::filesystem::path;
-using std::regex;
 using boost::optional;
+using std::invalid_argument;
+using std::regex;
+using std::runtime_error;
+using std::string;
+using std::unique_ptr;
+using std::vector;
 using std::chrono::duration_cast;
+using std::filesystem::path;
 logging::Level convertSphinxErrorLevel(err_lvl_t errorLevel) {
 switch (errorLevel) {
 case ERR_DEBUG:
 case ERR_INFO:
-case ERR_INFOCONT:
-return logging::Level::Trace;
-case ERR_WARN:
-return logging::Level::Warn;
-case ERR_ERROR:
-return logging::Level::Error;
-case ERR_FATAL:
-return logging::Level::Fatal;
-default:
-throw invalid_argument("Unknown log level.");
+case ERR_INFOCONT: return logging::Level::Trace;
+case ERR_WARN: return logging::Level::Warn;
+case ERR_ERROR: return logging::Level::Error;
+case ERR_FATAL: return logging::Level::Fatal;
+default: throw invalid_argument("Unknown log level.");
 }
 }
@@ -110,19 +106,18 @@ BoundedTimeline<Phone> recognizePhones(
 redirectPocketSphinxOutput();
 // Prepare pool of decoders
-ObjectPool<ps_decoder_t, lambda_unique_ptr<ps_decoder_t>> decoderPool(
-[&] { return createDecoder(dialog); });
+ObjectPool<ps_decoder_t, lambda_unique_ptr<ps_decoder_t>> decoderPool([&] {
+return createDecoder(dialog);
+});
 BoundedTimeline<Phone> phones(audioClip->getTruncatedRange());
 std::mutex resultMutex;
-const auto processUtterance = [&](Timed<void> timedUtterance, ProgressSink& utteranceProgressSink) {
+const auto processUtterance = [&](Timed<void> timedUtterance,
+ProgressSink& utteranceProgressSink) {
 // Detect phones for utterance
 const auto decoder = decoderPool.acquire();
 Timeline<Phone> utterancePhones = utteranceToPhones(
-*audioClip,
-timedUtterance.getTimeRange(),
-*decoder,
-utteranceProgressSink
+*audioClip, timedUtterance.getTimeRange(), *decoder, utteranceProgressSink
 );
 // Copy phones to result timeline
@@ -139,15 +134,18 @@ BoundedTimeline<Phone> recognizePhones(
 // Perform speech recognition
 try {
 // Determine how many parallel threads to use
-int threadCount = std::min({
-maxThreadCount,
+int threadCount = std::min(
+{maxThreadCount,
 // Don't use more threads than there are utterances to be processed
 static_cast<int>(utterances.size()),
-// Don't waste time creating additional threads (and decoders!) if the recording is short
-static_cast<int>(
-duration_cast<std::chrono::seconds>(audioClip->getTruncatedRange().getDuration()).count() / 5
-)
-});
+// Don't waste time creating additional threads (and decoders!) if the recording is
+// short
+static_cast<int>(
+duration_cast<std::chrono::seconds>(audioClip->getTruncatedRange().getDuration())
+.count()
+/ 5
+)}
+);
 if (threadCount < 1) {
 threadCount = 1;
 }
@@ -162,7 +160,9 @@ BoundedTimeline<Phone> recognizePhones(
 );
 logging::debug("Speech recognition -- end");
 } catch (...) {
-std::throw_with_nested(runtime_error("Error performing speech recognition via PocketSphinx tools."));
+std::throw_with_nested(
+runtime_error("Error performing speech recognition via PocketSphinx tools.")
+);
 }
 return phones;
@@ -206,8 +206,9 @@ BoundedTimeline<string> recognizeWords(const vector<int16_t>& audioBuffer, ps_de
 // Process entire audio clip
 const bool noRecognition = false;
 const bool fullUtterance = true;
-const int searchedFrameCount =
-ps_process_raw(&decoder, audioBuffer.data(), audioBuffer.size(), noRecognition, fullUtterance);
+const int searchedFrameCount = ps_process_raw(
+&decoder, audioBuffer.data(), audioBuffer.size(), noRecognition, fullUtterance
+);
 if (searchedFrameCount < 0) {
 throw runtime_error("Error analyzing raw audio data for word recognition.");
 }
@@ -227,7 +228,8 @@ BoundedTimeline<string> recognizeWords(const vector<int16_t>& audioBuffer, ps_de
 // Not every utterance does contain speech, however. In this case, we exit early to prevent
 // the log output.
 // We *don't* to that in phonetic mode because here, the same code would omit valid phones.
-const bool noWordsRecognized = reinterpret_cast<ngram_search_t*>(decoder.search)->bpidx == 0;
+const bool noWordsRecognized =
+reinterpret_cast<ngram_search_t*>(decoder.search)->bpidx == 0;
 if (noWordsRecognized) {
 return result;
 }
@@ -1,25 +1,26 @@
 #pragma once
-#include "time/BoundedTimeline.h"
-#include "core/Phone.h"
-#include "audio/AudioClip.h"
-#include "tools/progress.h"
 #include <filesystem>
+
+#include "audio/AudioClip.h"
+#include "core/Phone.h"
+#include "time/BoundedTimeline.h"
+#include "tools/progress.h"
 extern "C" {
 #include <pocketsphinx.h>
 }
-typedef std::function<lambda_unique_ptr<ps_decoder_t>(
-boost::optional<std::string> dialog
-)> decoderFactory;
+typedef std::function<lambda_unique_ptr<ps_decoder_t>(boost::optional<std::string> dialog)>
+decoderFactory;
 typedef std::function<Timeline<Phone>(
 const AudioClip& audioClip,
 TimeRange utteranceTimeRange,
 ps_decoder_t& decoder,
 ProgressSink& utteranceProgressSink
-)> utteranceToPhonesFunction;
+)>
+utteranceToPhonesFunction;
 BoundedTimeline<Phone> recognizePhones(
 const AudioClip& inputAudioClip,
@@ -37,6 +38,5 @@ const std::filesystem::path& getSphinxModelDirectory();
 JoiningTimeline<void> getNoiseSounds(TimeRange utteranceTimeRange, const Timeline<Phone>& phones);
 BoundedTimeline<std::string> recognizeWords(
-const std::vector<int16_t>& audioBuffer,
-ps_decoder_t& decoder
+const std::vector<int16_t>& audioBuffer, ps_decoder_t& decoder
 );
@@ -1,22 +1,24 @@
 #include "tokenization.h"
-#include "tools/tools.h"
-#include "tools/stringTools.h"
-#include <regex>
+
 #include <boost/optional/optional.hpp>
+#include <regex>
+
+#include "tools/stringTools.h"
+#include "tools/tools.h"
 extern "C" {
 #include <cst_utt_utils.h>
-#include <lang/usenglish/usenglish.h>
 #include <lang/cmulex/cmu_lex.h>
+#include <lang/usenglish/usenglish.h>
 }
+using boost::optional;
+using std::function;
+using std::pair;
+using std::regex;
 using std::runtime_error;
 using std::string;
 using std::vector;
-using std::regex;
-using std::pair;
-using boost::optional;
-using std::function;
 lambda_unique_ptr<cst_voice> createDummyVoice() {
 lambda_unique_ptr<cst_voice> voice(new_voice(), [](cst_voice* voice) { delete_voice(voice); });
@@ -28,9 +30,9 @@ lambda_unique_ptr<cst_voice> createDummyVoice() {
 }
 static const cst_synth_module synth_method_normalize[] = {
-{ "tokenizer_func", default_tokenization }, // split text into tokens
-{ "textanalysis_func", default_textanalysis }, // transform tokens into words
-{ nullptr, nullptr }
+{"tokenizer_func", default_tokenization}, // split text into tokens
+{"textanalysis_func", default_textanalysis}, // transform tokens into words
+{nullptr, nullptr}
 };
 vector<string> tokenizeViaFlite(const string& text) {
@@ -38,10 +40,9 @@ vector<string> tokenizeViaFlite(const string& text) {
 const string asciiText = utf8ToAscii(text);
 // Create utterance object with text
-lambda_unique_ptr<cst_utterance> utterance(
-new_utterance(),
-[](cst_utterance* utterance) { delete_utterance(utterance); }
-);
+lambda_unique_ptr<cst_utterance> utterance(new_utterance(), [](cst_utterance* utterance) {
+delete_utterance(utterance);
+});
 utt_set_input_text(utterance.get(), asciiText.c_str());
 lambda_unique_ptr<cst_voice> voice = createDummyVoice();
 utt_init(utterance.get(), voice.get());
@@ -52,11 +53,8 @@ vector<string> tokenizeViaFlite(const string& text) {
 }
 vector<string> result;
-for (
-cst_item* item = relation_head(utt_relation(utterance.get(), "Word"));
-item;
-item = item_next(item)
-) {
+for (cst_item* item = relation_head(utt_relation(utterance.get(), "Word")); item;
+item = item_next(item)) {
 const char* word = item_feat_string(item, "name");
 result.emplace_back(word);
 }
@@ -64,11 +62,11 @@ vector<string> tokenizeViaFlite(const string& text) {
 }
 optional<string> findSimilarDictionaryWord(
-const string& word,
-const function<bool(const string&)>& dictionaryContains
+const string& word, const function<bool(const string&)>& dictionaryContains
 ) {
-for (bool addPeriod : { false, true }) {
-for (int apostropheIndex = -1; apostropheIndex <= static_cast<int>(word.size()); ++apostropheIndex) {
+for (bool addPeriod : {false, true}) {
+for (int apostropheIndex = -1; apostropheIndex <= static_cast<int>(word.size());
+++apostropheIndex) {
 string modified = word;
 if (apostropheIndex != -1) {
 modified.insert(apostropheIndex, "'");
@@ -87,8 +85,7 @@ optional<string> findSimilarDictionaryWord(
 }
 vector<string> tokenizeText(
-const string& text,
-const function<bool(const string&)>& dictionaryContains
+const string& text, const function<bool(const string&)>& dictionaryContains
 ) {
 vector<string> words = tokenizeViaFlite(text);
@@ -101,13 +98,13 @@ vector<string> tokenizeText(
 }
 // Turn some symbols into words, remove the rest
-const static vector<pair<regex, string>> replacements {
-{ regex("&"), "and" },
-{ regex("\\*"), "times" },
-{ regex("\\+"), "plus" },
-{ regex("="), "equals" },
-{ regex("@"), "at" },
-{ regex("[^a-z']"), "" }
+const static vector<pair<regex, string>> replacements{
+{regex("&"), "and"},
+{regex("\\*"), "times"},
+{regex("\\+"), "plus"},
+{regex("="), "equals"},
+{regex("@"), "at"},
+{regex("[^a-z']"), ""}
 };
 for (auto& word : words) {
 for (const auto& replacement : replacements) {
@@ -1,10 +1,9 @@
 #pragma once
-#include <vector>
 #include <functional>
 #include <string>
+#include <vector>
 std::vector<std::string> tokenizeText(
-const std::string& text,
-const std::function<bool(const std::string&)>& dictionaryContains
+const std::string& text, const std::function<bool(const std::string&)>& dictionaryContains
 );
@@ -12,11 +12,11 @@ string ExportFormatConverter::getTypeName() {
 }
 EnumConverter<ExportFormat>::member_data ExportFormatConverter::getMemberData() {
-return member_data {
-{ ExportFormat::Dat, "dat" },
-{ ExportFormat::Tsv, "tsv" },
-{ ExportFormat::Xml, "xml" },
-{ ExportFormat::Json, "json" }
+return member_data{
+{ExportFormat::Dat, "dat"},
+{ExportFormat::Tsv, "tsv"},
+{ExportFormat::Xml, "xml"},
+{ExportFormat::Json, "json"}
 };
 }
@@ -2,16 +2,12 @@
 #include "tools/EnumConverter.h"
-enum class ExportFormat {
-Dat,
-Tsv,
-Xml,
-Json
-};
+enum class ExportFormat { Dat, Tsv, Xml, Json };
 class ExportFormatConverter : public EnumConverter<ExportFormat> {
 public:
 static ExportFormatConverter& get();
+
 protected:
 std::string getTypeName() override;
 member_data getMemberData() override;
@@ -12,9 +12,8 @@ string RecognizerTypeConverter::getTypeName() {
 }
 EnumConverter<RecognizerType>::member_data RecognizerTypeConverter::getMemberData() {
-return member_data {
-{ RecognizerType::PocketSphinx, "pocketSphinx" },
-{ RecognizerType::Phonetic, "phonetic" }
+return member_data{
+{RecognizerType::PocketSphinx, "pocketSphinx"}, {RecognizerType::Phonetic, "phonetic"}
 };
 }
@@ -2,14 +2,12 @@
 #include "tools/EnumConverter.h"
-enum class RecognizerType {
-PocketSphinx,
-Phonetic
-};
+enum class RecognizerType { PocketSphinx, Phonetic };
 class RecognizerTypeConverter : public EnumConverter<RecognizerType> {
 public:
 static RecognizerTypeConverter& get();
+
 protected:
 std::string getTypeName() override;
 member_data getMemberData() override;
@@ -1,67 +1,68 @@
-#include <iostream>
 #include <format.h>
-#include <tclap/CmdLine.h>
-#include "core/appInfo.h"
-#include "tools/NiceCmdLineOutput.h"
-#include "logging/logging.h"
-#include "logging/sinks.h"
-#include "logging/formatters.h"
 #include <gsl_util.h>
-#include "exporters/Exporter.h"
-#include "time/ContinuousTimeline.h"
-#include "tools/stringTools.h"
+#include <tclap/CmdLine.h>
 #include <boost/range/adaptor/transformed.hpp>
+#include <boost/utility/in_place_factory.hpp>
 #include <fstream>
-#include "tools/parallel.h"
-#include "tools/exceptions.h"
-#include "tools/textFiles.h"
-#include "lib/rhubarbLib.h"
-#include "ExportFormat.h"
+#include <iostream>
+
+#include "animation/targetShapeSet.h"
+#include "core/appInfo.h"
 #include "exporters/DatExporter.h"
+#include "exporters/Exporter.h"
+#include "exporters/JsonExporter.h"
 #include "exporters/TsvExporter.h"
 #include "exporters/XmlExporter.h"
-#include "exporters/JsonExporter.h"
-#include "animation/targetShapeSet.h"
-#include <boost/utility/in_place_factory.hpp>
-#include "tools/platformTools.h"
-#include "sinks.h"
-#include "semanticEntries.h"
-#include "RecognizerType.h"
-#include "recognition/PocketSphinxRecognizer.h"
+#include "ExportFormat.h"
+#include "lib/rhubarbLib.h"
+#include "logging/formatters.h"
+#include "logging/logging.h"
+#include "logging/sinks.h"
 #include "recognition/PhoneticRecognizer.h"
+#include "recognition/PocketSphinxRecognizer.h"
+#include "RecognizerType.h"
+#include "semanticEntries.h"
+#include "sinks.h"
+#include "time/ContinuousTimeline.h"
+#include "tools/exceptions.h"
+#include "tools/NiceCmdLineOutput.h"
+#include "tools/parallel.h"
+#include "tools/platformTools.h"
+#include "tools/stringTools.h"
+#include "tools/textFiles.h"
+using boost::optional;
+using boost::adaptors::transformed;
 using std::exception;
-using std::string;
-using std::vector;
-using std::unique_ptr;
+using std::make_shared;
 using std::make_unique;
 using std::shared_ptr;
-using std::make_shared;
+using std::string;
+using std::unique_ptr;
+using std::vector;
 using std::filesystem::path;
 using std::filesystem::u8path;
-using boost::adaptors::transformed;
-using boost::optional;
 namespace tclap = TCLAP;
 // Tell TCLAP how to handle our types
 namespace TCLAP {
-template<>
+template <>
 struct ArgTraits<logging::Level> {
 typedef ValueLike ValueCategory;
 };
-template<>
+template <>
 struct ArgTraits<ExportFormat> {
 typedef ValueLike ValueCategory;
 };
-template<>
+template <>
 struct ArgTraits<RecognizerType> {
 typedef ValueLike ValueCategory;
 };
-}
+} // namespace TCLAP
 shared_ptr<logging::Sink> createFileSink(const path& path, logging::Level minLevel) {
 auto file = make_shared<std::ofstream>();
@@ -74,12 +75,9 @@ shared_ptr<logging::Sink> createFileSink(const path& path, logging::Level minLev
 unique_ptr<Recognizer> createRecognizer(RecognizerType recognizerType) {
 switch (recognizerType) {
-case RecognizerType::PocketSphinx:
-return make_unique<PocketSphinxRecognizer>();
-case RecognizerType::Phonetic:
-return make_unique<PhoneticRecognizer>();
-default:
-throw std::runtime_error("Unknown recognizer.");
+case RecognizerType::PocketSphinx: return make_unique<PocketSphinxRecognizer>();
+case RecognizerType::Phonetic: return make_unique<PhoneticRecognizer>();
+default: throw std::runtime_error("Unknown recognizer.");
 }
 }
@@ -92,14 +90,10 @@ unique_ptr<Exporter> createExporter(
 switch (exportFormat) {
 case ExportFormat::Dat:
 return make_unique<DatExporter>(targetShapeSet, datFrameRate, datUsePrestonBlair);
-case ExportFormat::Tsv:
-return make_unique<TsvExporter>();
-case ExportFormat::Xml:
-return make_unique<XmlExporter>();
-case ExportFormat::Json:
-return make_unique<JsonExporter>();
-default:
-throw std::runtime_error("Unknown export format.");
+case ExportFormat::Tsv: return make_unique<TsvExporter>();
+case ExportFormat::Xml: return make_unique<XmlExporter>();
+case ExportFormat::Json: return make_unique<JsonExporter>();
+default: throw std::runtime_error("Unknown export format.");
 }
 }
@@ -134,78 +128,118 @@ int main(int platformArgc, char* platformArgv[]) {
 cmd.setOutput(new NiceCmdLineOutput());
 tclap::ValueArg<string> outputFileName(
-"o", "output", "The output file path.",
-false, string(), "string", cmd
+"o", "output", "The output file path.", false, string(), "string", cmd
 );
 auto logLevels = vector<logging::Level>(logging::LevelConverter::get().getValues());
 tclap::ValuesConstraint<logging::Level> logLevelConstraint(logLevels);
 tclap::ValueArg<logging::Level> logLevel(
-"", "logLevel", "The minimum log level that will be written to the log file",
-false, logging::Level::Debug, &logLevelConstraint, cmd
+"",
+"logLevel",
+"The minimum log level that will be written to the log file",
+false,
+logging::Level::Debug,
+&logLevelConstraint,
+cmd
 );
 tclap::ValueArg<string> logFileName(
-"", "logFile", "The log file path.",
-false, string(), "string", cmd
+"", "logFile", "The log file path.", false, string(), "string", cmd
 );
 tclap::ValueArg<logging::Level> consoleLevel(
-"", "consoleLevel", "The minimum log level that will be printed on the console (stderr)",
-false, defaultMinStderrLevel, &logLevelConstraint, cmd
+"",
+"consoleLevel",
+"The minimum log level that will be printed on the console (stderr)",
+false,
+defaultMinStderrLevel,
+&logLevelConstraint,
+cmd
 );
 tclap::SwitchArg machineReadableMode(
-"", "machineReadable", "Formats all output to stderr in a structured JSON format.",
-cmd, false
+"",
+"machineReadable",
+"Formats all output to stderr in a structured JSON format.",
+cmd,
+false
 );
 tclap::SwitchArg quietMode(
-"q", "quiet", "Suppresses all output to stderr except for warnings and error messages.",
-cmd, false
+"q",
+"quiet",
+"Suppresses all output to stderr except for warnings and error messages.",
+cmd,
+false
 );
 tclap::ValueArg<int> maxThreadCount(
-"", "threads", "The maximum number of worker threads to use.",
-false, getProcessorCoreCount(), "number", cmd
+"",
+"threads",
+"The maximum number of worker threads to use.",
+false,
+getProcessorCoreCount(),
+"number",
+cmd
 );
 tclap::ValueArg<string> extendedShapes(
-"", "extendedShapes", "All extended, optional shapes to use.",
-false, "GHX", "string", cmd
+"", "extendedShapes", "All extended, optional shapes to use.", false, "GHX", "string", cmd
 );
 tclap::ValueArg<string> dialogFile(
-"d", "dialogFile", "A file containing the text of the dialog.",
-false, string(), "string", cmd
+"d",
+"dialogFile",
+"A file containing the text of the dialog.",
+false,
+string(),
+"string",
+cmd
 );
 tclap::SwitchArg datUsePrestonBlair(
-"", "datUsePrestonBlair", "Only for dat exporter: uses the Preston Blair mouth shape names.",
-cmd, false
+"",
+"datUsePrestonBlair",
+"Only for dat exporter: uses the Preston Blair mouth shape names.",
+cmd,
+false
 );
 tclap::ValueArg<double> datFrameRate(
-"", "datFrameRate", "Only for dat exporter: the desired frame rate.",
-false, 24.0, "number", cmd
+"",
+"datFrameRate",
+"Only for dat exporter: the desired frame rate.",
+false,
+24.0,
+"number",
+cmd
 );
 auto exportFormats = vector<ExportFormat>(ExportFormatConverter::get().getValues());
 tclap::ValuesConstraint<ExportFormat> exportFormatConstraint(exportFormats);
 tclap::ValueArg<ExportFormat> exportFormat(
-"f", "exportFormat", "The export format.",
-false, ExportFormat::Tsv, &exportFormatConstraint, cmd
+"f",
+"exportFormat",
+"The export format.",
+false,
+ExportFormat::Tsv,
+&exportFormatConstraint,
+cmd
 );
 auto recognizerTypes = vector<RecognizerType>(RecognizerTypeConverter::get().getValues());
 tclap::ValuesConstraint<RecognizerType> recognizerConstraint(recognizerTypes);
 tclap::ValueArg<RecognizerType> recognizerType(
-"r", "recognizer", "The dialog recognizer.",
-false, RecognizerType::PocketSphinx, &recognizerConstraint, cmd
+"r",
+"recognizer",
+"The dialog recognizer.",
+false,
+RecognizerType::PocketSphinx,
+&recognizerConstraint,
+cmd
 );
 tclap::UnlabeledValueArg<string> inputFileName(
-"inputFile", "The input file. Must be a sound file in WAVE format.",
-true, "", "string", cmd
+"inputFile", "The input file. Must be a sound file in WAVE format.", true, "", "string", cmd
 );
 try {
@@ -247,8 +281,10 @@ int main(int platformArgc, char* platformArgv[]) {
 );
 logging::log(StartEntry(inputFilePath));
-logging::debugFormat("Command line: {}",
-join(args | transformed([](string arg) { return fmt::format("\"{}\"", arg); }), " "));
+logging::debugFormat(
+"Command line: {}",
+join(args | transformed([](string arg) { return fmt::format("\"{}\"", arg); }), " ")
+);
 try {
 // On progress change: Create log message
@@ -260,13 +296,13 @@ int main(int platformArgc, char* platformArgv[]) {
 logging::info("Starting animation.");
 JoiningContinuousTimeline<Shape> animation = animateWaveFile(
 inputFilePath,
-dialogFile.isSet()
-? readUtf8File(u8path(dialogFile.getValue()))
-: boost::optional<string>(),
+dialogFile.isSet() ? readUtf8File(u8path(dialogFile.getValue()))
+: boost::optional<string>(),
 *createRecognizer(recognizerType.getValue()),
 targetShapeSet,
 maxThreadCount.getValue(),
-progressSink);
+progressSink
+);
 logging::info("Done animating.");
 // Export animation
@@ -282,9 +318,9 @@ int main(int platformArgc, char* platformArgv[]) {
 logging::log(SuccessEntry());
 } catch (...) {
-std::throw_with_nested(
-std::runtime_error(fmt::format("Error processing file {}.", inputFilePath.u8string()))
-);
+std::throw_with_nested(std::runtime_error(
+fmt::format("Error processing file {}.", inputFilePath.u8string())
+));
 }
 return 0;
@@ -4,13 +4,13 @@ using logging::Level;
 using std::string;
 SemanticEntry::SemanticEntry(Level level, const string& message) :
-Entry(level, message)
-{}
+Entry(level, message) {}
 StartEntry::StartEntry(const std::filesystem::path& inputFilePath) :
-SemanticEntry(Level::Info, fmt::format("Application startup. Input file: {}.", inputFilePath.u8string())),
-inputFilePath(inputFilePath)
-{}
+SemanticEntry(
+Level::Info, fmt::format("Application startup. Input file: {}.", inputFilePath.u8string())
+),
+inputFilePath(inputFilePath) {}
 std::filesystem::path StartEntry::getInputFilePath() const {
 return inputFilePath;
@@ -18,21 +18,18 @@ std::filesystem::path StartEntry::getInputFilePath() const {
 ProgressEntry::ProgressEntry(double progress) :
 SemanticEntry(Level::Trace, fmt::format("Progress: {}%", static_cast<int>(progress * 100))),
-progress(progress)
-{}
+progress(progress) {}
 double ProgressEntry::getProgress() const {
 return progress;
 }
 SuccessEntry::SuccessEntry() :
-SemanticEntry(Level::Info, "Application terminating normally.")
-{}
+SemanticEntry(Level::Info, "Application terminating normally.") {}
 FailureEntry::FailureEntry(const string& reason) :
 SemanticEntry(Level::Fatal, fmt::format("Application terminating with error: {}", reason)),
-reason(reason)
-{}
+reason(reason) {}
 string FailureEntry::getReason() const {
 return reason;
@@ -1,7 +1,8 @@
 #pragma once
-#include "logging/Entry.h"
 #include <filesystem>
+
+#include "logging/Entry.h"
 // Marker class for semantic entries
 class SemanticEntry : public logging::Entry {
 public:
@@ -12,6 +13,7 @@ class StartEntry : public SemanticEntry {
 public:
 StartEntry(const std::filesystem::path& inputFilePath);
 std::filesystem::path getInputFilePath() const;
+
 private:
 std::filesystem::path inputFilePath;
 };
@@ -20,6 +22,7 @@ class ProgressEntry : public SemanticEntry {
 public:
 ProgressEntry(double progress);
 double getProgress() const;
+
 private:
 double progress;
 };
@@ -33,6 +36,7 @@ class FailureEntry : public SemanticEntry {
 public:
 FailureEntry(const std::string& reason);
 std::string getReason() const;
+
 private:
 std::string reason;
 };
@@ -1,31 +1,32 @@
 #include "sinks.h"
-#include "logging/sinks.h"
-#include "logging/formatters.h"
-#include "semanticEntries.h"
-#include "tools/stringTools.h"
-#include "core/appInfo.h"
 #include <boost/utility/in_place_factory.hpp>
-using std::string;
-using std::make_shared;
-using logging::Level;
-using logging::StdErrSink;
-using logging::SimpleConsoleFormatter;
+
+#include "core/appInfo.h"
+#include "logging/formatters.h"
+#include "logging/sinks.h"
+#include "semanticEntries.h"
+#include "tools/stringTools.h"
 using boost::optional;
+using logging::Level;
+using logging::SimpleConsoleFormatter;
+using logging::StdErrSink;
+using std::make_shared;
+using std::string;
 NiceStderrSink::NiceStderrSink(Level minLevel) :
 minLevel(minLevel),
 progress(0.0),
-innerSink(make_shared<StdErrSink>(make_shared<SimpleConsoleFormatter>()))
-{}
+innerSink(make_shared<StdErrSink>(make_shared<SimpleConsoleFormatter>())) {}
 void NiceStderrSink::receive(const logging::Entry& entry) {
 // For selected semantic entries, print a user-friendly message instead of
 // the technical log message.
 if (const auto* startEntry = dynamic_cast<const StartEntry*>(&entry)) {
-std::cerr
-<< fmt::format("Generating lip sync data for {}.", startEntry->getInputFilePath().u8string())
-<< std::endl;
+std::cerr << fmt::format(
+"Generating lip sync data for {}.", startEntry->getInputFilePath().u8string()
+) << std::endl;
 startProgressIndication();
 } else if (const auto* progressEntry = dynamic_cast<const ProgressEntry*>(&entry)) {
 assert(progressBar);
@@ -62,8 +63,7 @@ void NiceStderrSink::resumeProgressIndication() {
 QuietStderrSink::QuietStderrSink(Level minLevel) :
 minLevel(minLevel),
-innerSink(make_shared<StdErrSink>(make_shared<SimpleConsoleFormatter>()))
-{}
+innerSink(make_shared<StdErrSink>(make_shared<SimpleConsoleFormatter>())) {}
 void QuietStderrSink::receive(const logging::Entry& entry) {
 // Set inputFilePath as soon as we get it
@@ -75,7 +75,9 @@ void QuietStderrSink::receive(const logging::Entry& entry) {
 if (quietSoFar) {
 // This is the first message we print. Give a bit of context.
 const string intro = inputFilePath
-? fmt::format("{} {} processing file {}:", appName, appVersion, inputFilePath->u8string())
+? fmt::format(
+"{} {} processing file {}:", appName, appVersion, inputFilePath->u8string()
+)
 : fmt::format("{} {}:", appName, appVersion);
 std::cerr << intro << std::endl;
 quietSoFar = false;
@@ -85,8 +87,7 @@ void QuietStderrSink::receive(const logging::Entry& entry) {
 }
 MachineReadableStderrSink::MachineReadableStderrSink(Level minLevel) :
-minLevel(minLevel)
-{}
+minLevel(minLevel) {}
 string formatLogProperty(const logging::Entry& entry) {
 return fmt::format(
@@ -102,9 +103,7 @@ void MachineReadableStderrSink::receive(const logging::Entry& entry) {
 if (const auto* startEntry = dynamic_cast<const StartEntry*>(&entry)) {
 const string file = escapeJsonString(startEntry->getInputFilePath().u8string());
 line = fmt::format(
-R"({{ "type": "start", "file": "{}", {} }})",
-file,
-formatLogProperty(entry)
+R"({{ "type": "start", "file": "{}", {} }})", file, formatLogProperty(entry)
 );
 } else if (const auto* progressEntry = dynamic_cast<const ProgressEntry*>(&entry)) {
 const int progressPercent = static_cast<int>(progressEntry->getProgress() * 100);
@@ -121,9 +120,7 @@ void MachineReadableStderrSink::receive(const logging::Entry& entry) {
 } else if (const auto* failureEntry = dynamic_cast<const FailureEntry*>(&entry)) {
 const string reason = escapeJsonString(failureEntry->getReason());
 line = fmt::format(
-R"({{ "type": "failure", "reason": "{}", {} }})",
-reason,
-formatLogProperty(entry)
+R"({{ "type": "failure", "reason": "{}", {} }})", reason, formatLogProperty(entry)
 );
 } else {
 throw std::runtime_error("Unsupported type of semantic entry.");
@@ -136,7 +133,8 @@ void MachineReadableStderrSink::receive(const logging::Entry& entry) {
 if (line) {
 std::cerr << *line << std::endl;
-// Make sure the stream is flushed so that applications listening to it get the line immediately
+// Make sure the stream is flushed so that applications listening to it get the line
+// immediately
 fflush(stderr);
 }
 }
@@ -1,16 +1,19 @@
 #pragma once
+#include <filesystem>
+
 #include "logging/Entry.h"
 #include "logging/Sink.h"
 #include "tools/ProgressBar.h"
-#include <filesystem>
 // Prints nicely formatted progress to stderr.
-// Non-semantic entries are only printed if their log level at least matches the specified minimum level.
+// Non-semantic entries are only printed if their log level at least matches the specified minimum
+// level.
 class NiceStderrSink : public logging::Sink {
 public:
 NiceStderrSink(logging::Level minLevel);
 void receive(const logging::Entry& entry) override;
+
 private:
 void startProgressIndication();
 void interruptProgressIndication();
@@ -28,6 +31,7 @@ class QuietStderrSink : public logging::Sink {
 public:
 QuietStderrSink(logging::Level minLevel);
 void receive(const logging::Entry& entry) override;
+
 private:
 logging::Level minLevel;
 bool quietSoFar = true;
@@ -36,11 +40,13 @@ private:
 };
 // Prints machine-readable progress to stderr.
-// Non-semantic entries are only printed if their log level at least matches the specified minimum level.
+// Non-semantic entries are only printed if their log level at least matches the specified minimum
+// level.
 class MachineReadableStderrSink : public logging::Sink {
 public:
 MachineReadableStderrSink(logging::Level minLevel);
 void receive(const logging::Entry& entry) override;
+
 private:
 logging::Level minLevel;
 int lastProgressPercent = -1;
@@ -2,7 +2,7 @@
 #include "Timeline.h"
-template<typename T, bool AutoJoin = false>
+template <typename T, bool AutoJoin = false>
 class BoundedTimeline : public Timeline<T, AutoJoin> {
 using typename Timeline<T, AutoJoin>::time_type;
 using Timeline<T, AutoJoin>::equals;
@@ -12,31 +12,26 @@ public:
 using Timeline<T, AutoJoin>::end;
 BoundedTimeline() :
-range(TimeRange::zero())
-{}
+range(TimeRange::zero()) {}
 explicit BoundedTimeline(TimeRange range) :
-range(range)
-{}
+range(range) {}
-template<typename InputIterator>
+template <typename InputIterator>
 BoundedTimeline(TimeRange range, InputIterator first, InputIterator last) :
-range(range)
-{
+range(range) {
 for (auto it = first; it != last; ++it) {
 // Virtual function call in constructor. Derived constructors shouldn't call this one!
 BoundedTimeline::set(*it);
 }
 }
-template<typename collection_type>
+template <typename collection_type>
 BoundedTimeline(TimeRange range, collection_type collection) :
-BoundedTimeline(range, collection.begin(), collection.end())
-{}
+BoundedTimeline(range, collection.begin(), collection.end()) {}
 BoundedTimeline(TimeRange range, std::initializer_list<Timed<T>> initializerList) :
-BoundedTimeline(range, initializerList.begin(), initializerList.end())
-{}
+BoundedTimeline(range, initializerList.begin(), initializerList.end()) {}
 TimeRange getRange() const override {
 return range;
@@ -53,8 +48,7 @@ public:
 // Clip the value's range to bounds
 TimeRange& valueRange = timedValue.getTimeRange();
 valueRange.resize(
-max(range.getStart(), valueRange.getStart()),
-min(range.getEnd(), valueRange.getEnd())
+max(range.getStart(), valueRange.getStart()), min(range.getEnd(), valueRange.getEnd())
 );
 return Timeline<T, AutoJoin>::set(timedValue);
@@ -77,5 +71,5 @@ private:
 TimeRange range;
 };
-template<typename T>
+template <typename T>
 using JoiningBoundedTimeline = BoundedTimeline<T, true>;
@@ -2,40 +2,33 @@
 #include "BoundedTimeline.h"
-template<typename T, bool AutoJoin = false>
+template <typename T, bool AutoJoin = false>
 class ContinuousTimeline : public BoundedTimeline<T, AutoJoin> {
 public:
 ContinuousTimeline(TimeRange range, T defaultValue) :
 BoundedTimeline<T, AutoJoin>(range),
-defaultValue(defaultValue)
-{
+defaultValue(defaultValue) {
 // Virtual function call in constructor. Derived constructors shouldn't call this one!
 ContinuousTimeline::clear(range);
 }
-template<typename InputIterator>
+template <typename InputIterator>
 ContinuousTimeline(TimeRange range, T defaultValue, InputIterator first, InputIterator last) :
-ContinuousTimeline(range, defaultValue)
-{
+ContinuousTimeline(range, defaultValue) {
 // Virtual function calls in constructor. Derived constructors shouldn't call this one!
 for (auto it = first; it != last; ++it) {
 ContinuousTimeline::set(*it);
 }
 }
-template<typename collection_type>
+template <typename collection_type>
 ContinuousTimeline(TimeRange range, T defaultValue, collection_type collection) :
-ContinuousTimeline(range, defaultValue, collection.begin(), collection.end())
-{}
+ContinuousTimeline(range, defaultValue, collection.begin(), collection.end()) {}
 ContinuousTimeline(
-TimeRange range,
-T defaultValue,
-std::initializer_list<Timed<T>> initializerList
+TimeRange range, T defaultValue, std::initializer_list<Timed<T>> initializerList
 ) :
-ContinuousTimeline(range, defaultValue, initializerList.begin(), initializerList.end())
-{}
+ContinuousTimeline(range, defaultValue, initializerList.begin(), initializerList.end()) {}
 using BoundedTimeline<T, AutoJoin>::clear;
@@ -47,5 +40,5 @@ private:
 T defaultValue;
 };
-template<typename T>
+template <typename T>
 using JoiningContinuousTimeline = ContinuousTimeline<T, true>;
@@ -1,8 +1,10 @@
 #include "TimeRange.h"
-#include <stdexcept>
-#include <ostream>
 #include <format.h>
+#include <ostream>
+#include <stdexcept>
 using time_type = TimeRange::time_type;
 TimeRange TimeRange::zero() {
@@ -12,18 +14,14 @@ TimeRange TimeRange::zero() {
 TimeRange::TimeRange() :
 start(0_cs),
-end(0_cs)
-{}
+end(0_cs) {}
 TimeRange::TimeRange(time_type start, time_type end) :
 start(start),
-end(end)
-{
+end(end) {
 if (start > end) {
 throw std::invalid_argument(fmt::format(
-"Time range start must not be less than end. Start: {0}, end: {1}",
-start,
-end
+"Time range start must not be less than end. Start: {0}, end: {1}", start, end
 ));
 }
 }
@@ -97,11 +95,11 @@ void TimeRange::trim(const TimeRange& limits) {
 }
 void TimeRange::trimLeft(time_type value) {
-trim({ value, end });
+trim({value, end});
 }
 void TimeRange::trimRight(time_type value) {
-trim({ start, value });
+trim({start, value});
 }
 bool TimeRange::operator==(const TimeRange& rhs) const {
Some files were not shown because too many files have changed in this diff.