Fix pthreads with closure compiler (#9569)
It turns out much of the pthreads runtime was not closure-safe. This PR fixes
that, using quoted property names, which closure leaves unminified.
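For background, a minimal sketch of the failure mode and the fix (the worker setup here is illustrative, not code from this commit): closure renames unquoted property names in the code it compiles, but worker.js is loaded separately and is not compiled together with the main JS, so any property it reads by name must be written with a quoted key, which closure keeps verbatim.

```js
// Illustrative setup: a worker loaded separately from the closure-compiled main JS.
var worker = new Worker('example.worker.js');

// Unquoted keys are subject to closure's property renaming, so worker.js
// could receive a message whose key has been minified away:
worker.postMessage({ cmd: 'run' });    // `cmd` may become e.g. `a`

// Quoted keys survive minification verbatim:
worker.postMessage({ 'cmd': 'run' });

// worker.js (not closure-compiled) reads the original name:
//   onmessage = function(e) { if (e.data['cmd'] === 'run') { /* ... */ } };
```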

While doing that I noticed that some of the things in place for pthreads +
MODULARIZE would also help closure. In both cases the key thing is that
worker.js, which loads the main JS file, is separate from it, so it needs proper
interfaces and can't just rely on global variables (which under closure are
minified in the main JS, and under MODULARIZE are in another scope). So I removed
things like makeAsmExportAccessInPthread, makeAsmGlobalAccessInPthread, and
makeAsmExportAndGlobalAssignTargetInPthread, which makes the code simpler.
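The replacement pattern shows up in the library_pthread.js changes below: each entry point worker.js needs is published under a quoted name, pinning the unminified identifier. A self-contained sketch of the idea (the names here are just for illustration):

```js
// Sketch of the export pattern used by PThread.initWorker() in the diff.
var PThread = {
  threadExit: function(status) { /* ... */ }
};
// Quoted self-assignment keeps the original name reachable even after
// closure renames the internal `threadExit` property:
PThread['threadExit'] = PThread.threadExit;
// worker.js can then call PThread['threadExit'](status) by its real name.
```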

Because of that I was also able to remove a bunch of globals from worker.js,
which further simplifies things and should also help node.js workers (where the
global scope is very different, #6567).

Most of this PR is straightforward given the above notes. One slightly
tricky thing is the stack setup, which I refactored to use a new
Module.applyStackValues method that worker.js calls at the right time (as the
stack is set up after the JS loads).
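A sketch of the shape of that handshake (the parameter list and body here are assumptions for illustration, not the exact signature from this commit):

```js
// Main JS (closure-compiled): publish the stack setter under a quoted name
// so the separately-loaded worker.js can call it once loading finishes.
Module['applyStackValues'] = function(stackBase, stackTop, stackMax) {
  // Hypothetical body: record the thread's stack bounds once they are known.
  STACK_BASE = stackBase;
  STACKTOP = stackTop;
  STACK_MAX = stackMax;
};

// worker.js, after the main JS has loaded (values come from the 'run' message):
//   Module['applyStackValues'](msg.stackBase, msg.stackTop, msg.stackMax);
```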

Code size effects:

Before this PR: 69,567 bytes.
After this PR: 69,644 bytes.
After this PR, plus closure: 34,909 bytes.

So this adds only 77 bytes, while also making the code simpler and making it
possible to run closure, which decreases the size by 50%. In addition, I have
followup work to further reduce the non-closure code size too, so this
regression will be fixed (and more).
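For reference, a typical way to exercise this combination (an illustrative invocation using standard emcc flags, not a command from this commit):

```sh
# Build a pthreads program with closure minification enabled.
emcc -O2 -s USE_PTHREADS=1 -s PTHREAD_POOL_SIZE=2 --closure 1 main.c -o main.js
```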
kripken committed Oct 16, 2019
1 parent a04ebef commit a5117d7
Showing 13 changed files with 187 additions and 190 deletions.
emcc.py (8 changes: 7 additions & 1 deletion)
@@ -1195,6 +1195,9 @@ def is_supported_link_flag(f):
# These runtime methods are called from worker.js
shared.Settings.EXPORTED_RUNTIME_METHODS += ['establishStackSpace', 'dynCall_ii']

+ if shared.Settings.STACK_OVERFLOW_CHECK:
+ shared.Settings.EXPORTED_RUNTIME_METHODS += ['writeStackCookie', 'checkStackCookie', 'abortStackOverflow']

if shared.Settings.MODULARIZE_INSTANCE:
shared.Settings.MODULARIZE = 1

@@ -1361,7 +1364,10 @@ def is_supported_link_flag(f):
newargs += ['-pthread']
# some pthreads code is in asm.js library functions, which are auto-exported; for the wasm backend, we must
# manually export them
- shared.Settings.EXPORTED_FUNCTIONS += ['_emscripten_get_global_libc', '___pthread_tsd_run_dtors', '__register_pthread_ptr', '_pthread_self', '___emscripten_pthread_data_constructor']
+ shared.Settings.EXPORTED_FUNCTIONS += [
+ '_emscripten_get_global_libc', '___pthread_tsd_run_dtors',
+ '__register_pthread_ptr', '_pthread_self',
+ '___emscripten_pthread_data_constructor', '_emscripten_futex_wake']

# set location of worker.js
shared.Settings.PTHREAD_WORKER_FILE = unsuffixed(os.path.basename(target)) + '.worker.js'
src/closure-externs.js (12 changes: 12 additions & 0 deletions)
@@ -48,6 +48,18 @@ Math.max = function() {};
Math.clz32 = function() {};
Math.trunc = function() {};

+ /**
+ * Atomics
+ */
+
+ var Atomics = {};
+ Atomics.compareExchange = function() {};
+ Atomics.exchange = function() {};
+ Atomics.wait = function() {};
+ Atomics.notify = function() {};
+ Atomics.load = function() {};
+ Atomics.store = function() {};

/**
* SIMD.js support (not in upstream closure yet).
*/
src/library.js (14 changes: 7 additions & 7 deletions)
@@ -25,8 +25,8 @@
LibraryManager.library = {
// keep this low in memory, because we flatten arrays with them in them
#if USE_PTHREADS
- _impure_ptr: '; if (ENVIRONMENT_IS_PTHREAD) __impure_ptr = PthreadWorkerInit.__impure_ptr; else PthreadWorkerInit.__impure_ptr = __impure_ptr = {{{ makeStaticAlloc(4) }}}',
- __dso_handle: '; if (ENVIRONMENT_IS_PTHREAD) ___dso_handle = PthreadWorkerInit.___dso_handle; else PthreadWorkerInit.___dso_handle = ___dso_handle = {{{ makeStaticAlloc(4) }}}',
+ _impure_ptr: '; if (ENVIRONMENT_IS_PTHREAD) __impure_ptr = PthreadWorkerInit["__impure_ptr"]; else PthreadWorkerInit["__impure_ptr"] = __impure_ptr = {{{ makeStaticAlloc(4) }}}',
+ __dso_handle: '; if (ENVIRONMENT_IS_PTHREAD) ___dso_handle = PthreadWorkerInit["___dso_handle"]; else PthreadWorkerInit["___dso_handle"] = ___dso_handle = {{{ makeStaticAlloc(4) }}}',
#else
_impure_ptr: '{{{ makeStaticAlloc(1) }}}',
__dso_handle: '{{{ makeStaticAlloc(1) }}}',
@@ -1880,9 +1880,9 @@ LibraryManager.library = {

// Statically allocated time struct.
#if USE_PTHREADS
- __tm_current: '; if (ENVIRONMENT_IS_PTHREAD) ___tm_current = PthreadWorkerInit.___tm_current; else PthreadWorkerInit.___tm_current = ___tm_current = {{{ makeStaticAlloc(C_STRUCTS.tm.__size__) }}}',
- __tm_timezone: '; if (ENVIRONMENT_IS_PTHREAD) ___tm_timezone = PthreadWorkerInit.___tm_timezone; else PthreadWorkerInit.___tm_timezone = ___tm_timezone = {{{ makeStaticString("GMT") }}}',
- __tm_formatted: '; if (ENVIRONMENT_IS_PTHREAD) ___tm_formatted = PthreadWorkerInit.___tm_formatted; else PthreadWorkerInit.___tm_formatted = ___tm_formatted = {{{ makeStaticAlloc(C_STRUCTS.tm.__size__) }}}',
+ __tm_current: '; if (ENVIRONMENT_IS_PTHREAD) ___tm_current = PthreadWorkerInit["___tm_current"]; else PthreadWorkerInit["___tm_current"] = ___tm_current = {{{ makeStaticAlloc(C_STRUCTS.tm.__size__) }}}',
+ __tm_timezone: '; if (ENVIRONMENT_IS_PTHREAD) ___tm_timezone = PthreadWorkerInit["___tm_timezone"]; else PthreadWorkerInit["___tm_timezone"] = ___tm_timezone = {{{ makeStaticString("GMT") }}}',
+ __tm_formatted: '; if (ENVIRONMENT_IS_PTHREAD) ___tm_formatted = PthreadWorkerInit["___tm_formatted"]; else PthreadWorkerInit["___tm_formatted"] = ___tm_formatted = {{{ makeStaticAlloc(C_STRUCTS.tm.__size__) }}}',
#else
__tm_current: '{{{ makeStaticAlloc(C_STRUCTS.tm.__size__) }}}',
// Statically allocated copy of the string "GMT" for gmtime() to point to
@@ -3239,8 +3239,8 @@ LibraryManager.library = {
// ==========================================================================

#if USE_PTHREADS
- in6addr_any: '; if (ENVIRONMENT_IS_PTHREAD) _in6addr_any = PthreadWorkerInit._in6addr_any; else PthreadWorkerInit._in6addr_any = _in6addr_any = {{{ makeStaticAlloc(16) }}}',
- in6addr_loopback: '; if (ENVIRONMENT_IS_PTHREAD) _in6addr_loopback = PthreadWorkerInit._in6addr_loopback; else PthreadWorkerInit._in6addr_loopback = _in6addr_loopback = {{{ makeStaticAlloc(16) }}}',
+ in6addr_any: '; if (ENVIRONMENT_IS_PTHREAD) _in6addr_any = PthreadWorkerInit["_in6addr_any"]; else PthreadWorkerInit["_in6addr_any"] = _in6addr_any = {{{ makeStaticAlloc(16) }}}',
+ in6addr_loopback: '; if (ENVIRONMENT_IS_PTHREAD) _in6addr_loopback = PthreadWorkerInit["_in6addr_loopback"]; else PthreadWorkerInit["_in6addr_loopback"] = _in6addr_loopback = {{{ makeStaticAlloc(16) }}}',
#else
in6addr_any:
'{{{ makeStaticAlloc(16) }}}',
src/library_fetch.js (2 changes: 1 addition & 1 deletion)
@@ -8,7 +8,7 @@
var LibraryFetch = {
#if USE_PTHREADS
$Fetch__postset: 'if (!ENVIRONMENT_IS_PTHREAD) Fetch.staticInit();',
- fetch_work_queue: '; if (ENVIRONMENT_IS_PTHREAD) _fetch_work_queue = PthreadWorkerInit._fetch_work_queue; else PthreadWorkerInit._fetch_work_queue = _fetch_work_queue = {{{ makeStaticAlloc(12) }}}',
+ fetch_work_queue: '; if (ENVIRONMENT_IS_PTHREAD) _fetch_work_queue = PthreadWorkerInit["_fetch_work_queue"]; else PthreadWorkerInit["_fetch_work_queue"] = _fetch_work_queue = {{{ makeStaticAlloc(12) }}}',
#else
$Fetch__postset: 'Fetch.staticInit();',
fetch_work_queue: '{{{ makeStaticAlloc(12) }}}',
src/library_pthread.js (130 changes: 72 additions & 58 deletions)
@@ -4,8 +4,11 @@
// found in the LICENSE file.

var LibraryPThread = {
- $PThread__postset: 'if (!ENVIRONMENT_IS_PTHREAD) PThread.initMainThreadBlock();',
- $PThread__deps: ['$PROCINFO', '_register_pthread_ptr', 'emscripten_main_thread_process_queued_calls', '$ERRNO_CODES', 'emscripten_futex_wake'],
+ $PThread__postset: 'if (!ENVIRONMENT_IS_PTHREAD) PThread.initMainThreadBlock(); else PThread.initWorker();',
+ $PThread__deps: ['$PROCINFO', '_register_pthread_ptr',
+ 'emscripten_main_thread_process_queued_calls',
+ '$ERRNO_CODES', 'emscripten_futex_wake', '_kill_thread',
+ '_cancel_thread', '_cleanup_thread'],
$PThread: {
MAIN_THREAD_ID: 1, // A special constant that identifies the main JS thread ID.
mainThreadInfo: {
@@ -65,6 +68,16 @@ var LibraryPThread = {
PThread.createProfilerBlock(PThread.mainThreadBlock);
PThread.setThreadName(PThread.mainThreadBlock, "Browser main thread");
PThread.setThreadStatus(PThread.mainThreadBlock, {{{ cDefine('EM_THREAD_STATUS_RUNNING') }}});
#endif
},
+ initWorker: function() {
+ #if USE_CLOSURE_COMPILER
+ // worker.js is not compiled together with us, and must access certain
+ // things.
+ PThread['receiveObjectTransfer'] = PThread.receiveObjectTransfer;
+ PThread['setThreadStatus'] = PThread.setThreadStatus;
+ PThread['threadCancel'] = PThread.threadCancel;
+ PThread['threadExit'] = PThread.threadExit;
+ #endif
+ },
// Maps pthread_t to pthread info objects
@@ -178,7 +191,7 @@ var LibraryPThread = {
if (ENVIRONMENT_IS_PTHREAD) {
// Note: in theory we would like to return any offscreen canvases back to the main thread,
// but if we ever fetched a rendering context for them that would not be valid, so we don't try.
- postMessage({ cmd: 'exit' });
+ postMessage({ 'cmd': 'exit' });
}
}
},
@@ -190,7 +203,7 @@ var LibraryPThread = {
_emscripten_futex_wake(threadInfoStruct + {{{ C_STRUCTS.pthread.threadStatus }}}, {{{ cDefine('INT_MAX') }}}); // wake all threads
threadInfoStruct = selfThreadId = 0; // Not hosting a pthread anymore in this worker, reset the info structures to null.
__register_pthread_ptr(0, 0, 0); // Unregister the thread block also inside the asm.js scope.
- postMessage({ cmd: 'cancelDone' });
+ postMessage({ 'cmd': 'cancelDone' });
},

terminateAllThreads: function() {
@@ -305,32 +318,32 @@

// Ask the new worker to load up the Emscripten-compiled page. This is a heavy operation.
worker.postMessage({
- cmd: 'load',
+ 'cmd': 'load',
// If the application main .js file was loaded from a Blob, then it is not possible
// to access the URL of the current script that could be passed to a Web Worker so that
// it could load up the same file. In that case, developer must either deliver the Blob
// object in Module['mainScriptUrlOrBlob'], or a URL to it, so that pthread Workers can
// independently load up the same main application file.
- urlOrBlob: Module['mainScriptUrlOrBlob'] || _scriptDir,
+ 'urlOrBlob': Module['mainScriptUrlOrBlob'] || _scriptDir,
#if WASM
- wasmMemory: wasmMemory,
- wasmModule: wasmModule,
+ 'wasmMemory': wasmMemory,
+ 'wasmModule': wasmModule,
#if LOAD_SOURCE_MAP
- wasmSourceMap: wasmSourceMap,
+ 'wasmSourceMap': wasmSourceMap,
#endif
#if USE_OFFSET_CONVERTER
- wasmOffsetConverter: wasmOffsetConverter,
+ 'wasmOffsetConverter': wasmOffsetConverter,
#endif
#else
- buffer: HEAPU8.buffer,
- asmJsUrlOrBlob: Module["asmJsUrlOrBlob"],
+ 'buffer': HEAPU8.buffer,
+ 'asmJsUrlOrBlob': Module["asmJsUrlOrBlob"],
#endif
#if !WASM_BACKEND
- tempDoublePtr: tempDoublePtr,
+ 'tempDoublePtr': tempDoublePtr,
#endif
- DYNAMIC_BASE: DYNAMIC_BASE,
- DYNAMICTOP_PTR: DYNAMICTOP_PTR,
- PthreadWorkerInit: PthreadWorkerInit
+ 'DYNAMIC_BASE': DYNAMIC_BASE,
+ 'DYNAMICTOP_PTR': DYNAMICTOP_PTR,
+ 'PthreadWorkerInit': PthreadWorkerInit
});
PThread.unusedWorkers.push(worker);
}
@@ -346,34 +359,35 @@ var LibraryPThread = {
var worker = workers[i];
(function(worker) {
worker.onmessage = function(e) {
- var d = e.data;
+ var d = e['data'];
+ var cmd = d['cmd'];
// Sometimes we need to backproxy events to the calling thread (e.g. HTML5 DOM events handlers such as emscripten_set_mousemove_callback()), so keep track in a globally accessible variable about the thread that initiated the proxying.
if (worker.pthread) PThread.currentProxiedOperationCallerThread = worker.pthread.threadInfoStruct;

// If this message is intended to a recipient that is not the main thread, forward it to the target thread.
- if (d.targetThread && d.targetThread != _pthread_self()) {
+ if (d['targetThread'] && d['targetThread'] != _pthread_self()) {
var thread = PThread.pthreads[d.targetThread];
if (thread) {
- thread.worker.postMessage(e.data, d.transferList);
+ thread.worker.postMessage(e.data, d['transferList']);
} else {
- console.error('Internal error! Worker sent a message "' + d.cmd + '" to target pthread ' + d.targetThread + ', but that thread no longer exists!');
+ console.error('Internal error! Worker sent a message "' + cmd + '" to target pthread ' + d['targetThread'] + ', but that thread no longer exists!');
}
PThread.currentProxiedOperationCallerThread = undefined;
return;
}

- if (d.cmd === 'processQueuedMainThreadWork') {
+ if (cmd === 'processQueuedMainThreadWork') {
// TODO: Must post message to main Emscripten thread in PROXY_TO_WORKER mode.
_emscripten_main_thread_process_queued_calls();
- } else if (d.cmd === 'spawnThread') {
+ } else if (cmd === 'spawnThread') {
__spawn_thread(e.data);
- } else if (d.cmd === 'cleanupThread') {
- __cleanup_thread(d.thread);
- } else if (d.cmd === 'killThread') {
- __kill_thread(d.thread);
- } else if (d.cmd === 'cancelThread') {
- __cancel_thread(d.thread);
- } else if (d.cmd === 'loaded') {
+ } else if (cmd === 'cleanupThread') {
+ __cleanup_thread(d['thread']);
+ } else if (cmd === 'killThread') {
+ __kill_thread(d['thread']);
+ } else if (cmd === 'cancelThread') {
+ __cancel_thread(d['thread']);
+ } else if (cmd === 'loaded') {
worker.loaded = true;
// If this Worker is already pending to start running a thread, launch the thread now
if (worker.runPthread) {
@@ -384,34 +398,34 @@ var LibraryPThread = {
if (numWorkersLoaded === numWorkers && onFinishedLoading) {
onFinishedLoading();
}
- } else if (d.cmd === 'print') {
- out('Thread ' + d.threadId + ': ' + d.text);
- } else if (d.cmd === 'printErr') {
- err('Thread ' + d.threadId + ': ' + d.text);
- } else if (d.cmd === 'alert') {
- alert('Thread ' + d.threadId + ': ' + d.text);
- } else if (d.cmd === 'exit') {
+ } else if (cmd === 'print') {
+ out('Thread ' + d['threadId'] + ': ' + d['text']);
+ } else if (cmd === 'printErr') {
+ err('Thread ' + d['threadId'] + ': ' + d['text']);
+ } else if (cmd === 'alert') {
+ alert('Thread ' + d['threadId'] + ': ' + d['text']);
+ } else if (cmd === 'exit') {
var detached = worker.pthread && Atomics.load(HEAPU32, (worker.pthread.thread + {{{ C_STRUCTS.pthread.detached }}}) >> 2);
if (detached) {
PThread.returnWorkerToPool(worker);
}
- } else if (d.cmd === 'exitProcess') {
+ } else if (cmd === 'exitProcess') {
// A pthread has requested to exit the whole application process (runtime).
noExitRuntime = false;
try {
- exit(d.returnCode);
+ exit(d['returnCode']);
} catch (e) {
if (e instanceof ExitStatus) return;
throw e;
}
- } else if (d.cmd === 'cancelDone') {
+ } else if (cmd === 'cancelDone') {
PThread.returnWorkerToPool(worker);
- } else if (d.cmd === 'objectTransfer') {
+ } else if (cmd === 'objectTransfer') {
PThread.receiveObjectTransfer(e.data);
} else if (e.data.target === 'setimmediate') {
worker.postMessage(e.data); // Worker wants to postMessage() to itself to implement setImmediate() emulation.
} else {
err("worker sent an unknown command " + d.cmd);
err("worker sent an unknown command " + cmd);
}
PThread.currentProxiedOperationCallerThread = undefined;
};
@@ -483,7 +497,7 @@ var LibraryPThread = {
if (ENVIRONMENT_IS_PTHREAD) throw 'Internal Error! _cancel_thread() can only ever be called from main application thread!';
if (!pthread_ptr) throw 'Internal Error! Null pthread_ptr in _cancel_thread!';
var pthread = PThread.pthreads[pthread_ptr];
- pthread.worker.postMessage({ cmd: 'cancel' });
+ pthread.worker.postMessage({ 'cmd': 'cancel' });
},

_spawn_thread: function(threadParams) {
@@ -537,17 +551,17 @@

worker.pthread = pthread;
var msg = {
- cmd: 'run',
- start_routine: threadParams.startRoutine,
- arg: threadParams.arg,
- threadInfoStruct: threadParams.pthread_ptr,
- selfThreadId: threadParams.pthread_ptr, // TODO: Remove this since thread ID is now the same as the thread address.
- parentThreadId: threadParams.parent_pthread_ptr,
- stackBase: threadParams.stackBase,
- stackSize: threadParams.stackSize,
+ 'cmd': 'run',
+ 'start_routine': threadParams.startRoutine,
+ 'arg': threadParams.arg,
+ 'threadInfoStruct': threadParams.pthread_ptr,
+ 'selfThreadId': threadParams.pthread_ptr, // TODO: Remove this since thread ID is now the same as the thread address.
+ 'parentThreadId': threadParams.parent_pthread_ptr,
+ 'stackBase': threadParams.stackBase,
+ 'stackSize': threadParams.stackSize,
#if OFFSCREENCANVAS_SUPPORT
- moduleCanvasId: threadParams.moduleCanvasId,
- offscreenCanvases: threadParams.offscreenCanvases,
+ 'moduleCanvasId': threadParams.moduleCanvasId,
+ 'offscreenCanvases': threadParams.offscreenCanvases,
#endif
};
worker.runPthread = function() {
@@ -562,7 +576,7 @@
},

_num_logical_cores__deps: ['emscripten_force_num_logical_cores'],
- _num_logical_cores: '; if (ENVIRONMENT_IS_PTHREAD) __num_logical_cores = PthreadWorkerInit.__num_logical_cores; else { PthreadWorkerInit.__num_logical_cores = __num_logical_cores = {{{ makeStaticAlloc(4) }}}; HEAPU32[__num_logical_cores>>2] = navigator["hardwareConcurrency"] || ' + {{{ PTHREAD_HINT_NUM_CORES }}} + '; }',
+ _num_logical_cores: '; if (ENVIRONMENT_IS_PTHREAD) __num_logical_cores = PthreadWorkerInit["__num_logical_cores"]; else { PthreadWorkerInit["__num_logical_cores"] = __num_logical_cores = {{{ makeStaticAlloc(4) }}}; HEAPU32[__num_logical_cores>>2] = navigator["hardwareConcurrency"] || ' + {{{ PTHREAD_HINT_NUM_CORES }}} + '; }',

emscripten_has_threading_support: function() {
return typeof SharedArrayBuffer !== 'undefined';
@@ -822,7 +836,7 @@ var LibraryPThread = {
Atomics.store(HEAPU32, (thread + {{{ C_STRUCTS.pthread.detached }}} ) >> 2, 1); // Mark the thread as detached.
if (!ENVIRONMENT_IS_PTHREAD) __cleanup_thread(thread);
- else postMessage({ cmd: 'cleanupThread', thread: thread });
+ else postMessage({ 'cmd': 'cleanupThread', 'thread': thread });
return 0;
}
// TODO HACK! Replace the _js variant with just _pthread_testcancel:
@@ -854,7 +868,7 @@ var LibraryPThread = {
}
if (signal != 0) {
if (!ENVIRONMENT_IS_PTHREAD) __kill_thread(thread);
- else postMessage({ cmd: 'killThread', thread: thread});
+ else postMessage({ 'cmd': 'killThread', 'thread': thread});
}
return 0;
},
@@ -876,7 +890,7 @@ var LibraryPThread = {
}
Atomics.compareExchange(HEAPU32, (thread + {{{ C_STRUCTS.pthread.threadStatus }}} ) >> 2, 0, 2); // Signal the thread that it needs to cancel itself.
if (!ENVIRONMENT_IS_PTHREAD) __cancel_thread(thread);
- else postMessage({ cmd: 'cancelThread', thread: thread});
+ else postMessage({ 'cmd': 'cancelThread', 'thread': thread});
return 0;
},
@@ -1068,7 +1082,7 @@ var LibraryPThread = {
},
// Stores the memory address that the main thread is waiting on, if any.
- _main_thread_futex_wait_address: '; if (ENVIRONMENT_IS_PTHREAD) __main_thread_futex_wait_address = PthreadWorkerInit.__main_thread_futex_wait_address; else PthreadWorkerInit.__main_thread_futex_wait_address = __main_thread_futex_wait_address = {{{ makeStaticAlloc(4) }}}',
+ _main_thread_futex_wait_address: '; if (ENVIRONMENT_IS_PTHREAD) __main_thread_futex_wait_address = PthreadWorkerInit["__main_thread_futex_wait_address"]; else PthreadWorkerInit["__main_thread_futex_wait_address"] = __main_thread_futex_wait_address = {{{ makeStaticAlloc(4) }}}',
// Returns 0 on success, or one of the values -ETIMEDOUT, -EWOULDBLOCK or -EINVAL on error.
emscripten_futex_wait__deps: ['_main_thread_futex_wait_address', 'emscripten_main_thread_process_queued_calls'],
@@ -1159,7 +1173,7 @@ var LibraryPThread = {

__call_main: function(argc, argv) {
var returnCode = _main(argc, argv);
- if (!noExitRuntime) postMessage({ cmd: 'exitProcess', returnCode: returnCode });
+ if (!noExitRuntime) postMessage({ 'cmd': 'exitProcess', 'returnCode': returnCode });
return returnCode;
},

