Added (and stripped down) a simple C/C++ preprocessor, written in Lua, so the stringifier can handle the #include directive
and embed the included files directly in the stringified files. We need this because we are starting to share struct definitions and code between C/C++ and OpenCL (and potentially other languages). The preprocessor is lcpp, from http://github.com/willsteel/lcpp
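For illustration (the file and variable names here are made up, not part of this commit): a kernel can now share a struct header with the C++ build, and the preprocessing step inlines it before the source is turned into a C string.

    -- kernels/myKernel.cl starts with:  #include "b3SharedTypes.h"
    local lcpp = require("stringifyKernel")              -- the lcpp module appended to the stringify script below
    local expanded = lcpp.compileFile("kernels/myKernel.cl")
    -- 'expanded' has b3SharedTypes.h inlined in place of the #include;
    -- all other directives (#define, #ifdef, ...) pass through untouched in this stripped-down copy.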
@@ -96,4 +96,3 @@ public:
|
||||
};
|
||||
|
||||
#endif
|
||||
|
||||
|
||||
@@ -10,5 +10,4 @@ static const char* createShadowMapInstancingFragmentShader= \
|
||||
"{\n"
|
||||
" fragmentdepth = gl_FragCoord.z;\n"
|
||||
"}\n"
|
||||
"\n"
|
||||
;
|
||||
|
||||
@@ -55,5 +55,4 @@ static const char* createShadowMapInstancingVertexShader= \
|
||||
" gl_Position = vertexPos;\n"
|
||||
"}\n"
|
||||
"\n"
|
||||
"\n"
|
||||
;
|
||||
|
||||
@@ -36,5 +36,4 @@ static const char* instancingFragmentShader= \
|
||||
" \n"
|
||||
" color = vec4(ct * cf, at * af); \n"
|
||||
"}\n"
|
||||
"\n"
|
||||
;
|
||||
|
||||
@@ -82,5 +82,4 @@ static const char* instancingVertexShader= \
|
||||
" vert.texcoord = uvcoords;\n"
|
||||
"}\n"
|
||||
"\n"
|
||||
"\n"
|
||||
;
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
//this file is autogenerated using stringify.bat (premake --stringify) in the build folder of this project
|
||||
static const char* pointSpriteFragmentShader= \
|
||||
"\n"
|
||||
"#version 330\n"
|
||||
"precision highp float;\n"
|
||||
"\n"
|
||||
@@ -37,5 +36,4 @@ static const char* pointSpriteFragmentShader= \
|
||||
" float diffuse = max(0.0, dot(lightDir, N));\n"
|
||||
" color = vec4(ct * diffuse, at * af); \n"
|
||||
"}\n"
|
||||
"\n"
|
||||
;
|
||||
|
||||
@@ -46,5 +46,4 @@ static const char* pointSpriteVertexShader= \
|
||||
" \n"
|
||||
" fragment.color = instance_color;\n"
|
||||
"}\n"
|
||||
"\n"
|
||||
;
|
||||
|
||||
@@ -56,5 +56,4 @@ static const char* useShadowMapInstancingFragmentShader= \
|
||||
" \n"
|
||||
" color = vec4(ct * visibility, 1.f);//at * af); \n"
|
||||
"}\n"
|
||||
"\n"
|
||||
;
|
||||
|
||||
@@ -86,5 +86,4 @@ static const char* useShadowMapInstancingVertexShader= \
|
||||
" vert.texcoord = uvcoords;\n"
|
||||
"}\n"
|
||||
"\n"
|
||||
"\n"
|
||||
;
|
||||
|
||||
@@ -22,14 +22,16 @@ function stringifyKernel(filenameIn, filenameOut, kernelMethod)
|
||||
else
|
||||
endpos = i
|
||||
end
|
||||
oneline = string.sub(lines,startpos,endpos)
|
||||
oneline = string.gsub(oneline,"\n","")
|
||||
oneline = string.gsub(oneline,"\"","\\\"");
|
||||
oneline = '\"' .. oneline .. '\\n\"'
|
||||
oneline = string.gsub(oneline,"\\\\n","")
|
||||
oneline = oneline .. "\n"
|
||||
--print(oneline)
|
||||
fw:write(oneline)
|
||||
|
||||
oneline = string.sub(lines,startpos,endpos)
|
||||
oneline = string.gsub(oneline,"\n","")
|
||||
oneline = string.gsub(oneline,"\"","\\\"");
|
||||
oneline = '\"' .. oneline .. '\\n\"'
|
||||
oneline = string.gsub(oneline,"\\\\n","")
|
||||
oneline = oneline .. "\n"
|
||||
--print(oneline)
|
||||
fw:write(oneline);
|
||||
|
||||
if i == nil then break end
|
||||
startpos = i+1
|
||||
end
|
||||
@@ -50,7 +52,21 @@ function stringifyKernel(filenameIn, filenameOut, kernelMethod)
|
||||
fw:write(";\n")
|
||||
fw:close()
|
||||
end
|
||||
|
||||
|
||||
|
||||
function preprocessKernel(kernelfile, filenameOut, kernelMethod)
|
||||
lcpp=require('stringifyKernel');
|
||||
local out=lcpp.compileFile(kernelfile);
|
||||
|
||||
local fw = io.open("tmp","w");
|
||||
fw:write(out)
|
||||
fw:close()
|
||||
stringifyKernel("tmp",filenameOut, kernelMethod);
|
||||
os.remove("tmp")
|
||||
end
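-- Example invocation (paths and the string name are hypothetical). This is what the
-- "stringify" action defined below ends up calling when --kernelfile, --headerfile and
-- --stringname are passed on the premake command line (see the stringify.bat note in
-- the generated headers):
--
--   preprocessKernel("kernels/sat.cl", "kernels/satKernels.h", "satKernelsCL")
--
-- lcpp expands every #include in kernels/sat.cl, the result is written to a temporary
-- file, that file is stringified into kernels/satKernels.h as the C string
-- "satKernelsCL", and the temporary file is removed again.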
|
||||
|
||||
|
||||
newoption {
|
||||
trigger = "kernelfile",
|
||||
value = "kernelpath",
|
||||
@@ -69,11 +85,781 @@ function stringifyKernel(filenameIn, filenameOut, kernelMethod)
|
||||
description = "name of the kernel string variable"
|
||||
}
|
||||
|
||||
|
||||
|
||||
newaction {
|
||||
trigger = "stringify",
|
||||
description = "stringify kernels source code into strings",
|
||||
execute = function ()
|
||||
stringifyKernel( _OPTIONS["kernelfile"] , _OPTIONS["headerfile"], _OPTIONS["stringname"])
|
||||
|
||||
execute = function()
|
||||
preprocessKernel( _OPTIONS["kernelfile"] , _OPTIONS["headerfile"], _OPTIONS["stringname"])
|
||||
end
|
||||
}
|
||||
|
||||
|
||||
----------------------------------------------------------------------------
|
||||
--## lcpp - a C-PreProcessor in Lua 5.1 for LuaJIT ffi
|
||||
--
|
||||
-- Copyright (C) 2012-2013 Michael Schmoock <michael@willigens.de>
|
||||
--
|
||||
--### Links
|
||||
-- * GitHub page: [http://github.com/willsteel/lcpp](http://github.com/willsteel/lcpp)
|
||||
-- * Project page: [http://lcpp.schmoock.net](http://lcpp.schmoock.net)
|
||||
-- * Lua: [http://www.lua.org](http://www.lua.org)
|
||||
-- * LuaJIT: [http://luajit.org](http://luajit.org)
|
||||
-- * Sponsored by: [http://mmbbq.org](http://mmbbq.org)
|
||||
--
|
||||
-- It can be used to pre-process LuaJIT ffi C header file input.
|
||||
-- It can also be used to preprocess any other code (i.e. Lua itself)
|
||||
--
|
||||
-- git clone https://github.com/willsteel/lcpp.git
|
||||
----------------------------------------------------------------------------
|
||||
--## USAGE
|
||||
-- -- load lcpp
|
||||
-- local lcpp = require("stringify")
|
||||
--
|
||||
-- -- use LuaJIT ffi and lcpp to parse cpp code
|
||||
-- ffi.cdef("#include <your_header.h>")
|
||||
--
|
||||
-- -- compile some input
|
||||
-- local out = lcpp.compile([[
|
||||
-- #include "myheader.h"
|
||||
-- #define MAXPATH 260
|
||||
-- typedef struct somestruct_t {
|
||||
-- void* base;
|
||||
-- size_t size;
|
||||
-- wchar_t path[MAXPATH];
|
||||
-- } t_exe;
|
||||
-- ]])
|
||||
--
|
||||
-- -- the result should be
|
||||
-- out = [[
|
||||
-- // <preprocessed content of file "myheader.h">
|
||||
-- typedef struct somestruct_t {
|
||||
-- void* base;
|
||||
-- size_t size;
|
||||
-- wchar_t path[260];
|
||||
-- } t_exe;
|
||||
-- ]]
|
||||
--
|
||||
--## This CPPs BNF:
|
||||
-- RULES:
|
||||
-- CODE := {LINE}
|
||||
-- LINE := {STUFF NEWML} STUFF NEWL
|
||||
-- STUFF := DIRECTIVE | IGNORED_CONTENT
|
||||
-- DIRECTIVE := OPTSPACES CMD OPTSPACES DIRECTIVE_NAME WHITESPACES DIRECTIVE_CONTENT WHITESPACES NEWL
|
||||
--
|
||||
-- LEAVES:
|
||||
-- NEWL := "\n"
|
||||
-- NEWL_ESC := "\\n"
|
||||
-- WHITESPACES := "[ \t]+"
|
||||
-- OPTSPACES := "[ \t]*"
|
||||
-- COMMENT := "//(.-)$"
|
||||
-- MLCOMMENT := "/[*](.-)[*]/"
|
||||
-- IGNORED_CONTENT := "[^#].*"
|
||||
-- CMD := "#"
|
||||
-- DIRECTIVE_NAME := "include"|"define"|"undef"|"if"|"else"|"elif"|"else if"|"endif"|"ifdef"|"ifndef"|"pragma"|"version"
|
||||
-- DIRECTIVE_CONTENT := ".*?"
|
||||
--
|
||||
--## TODOs:
|
||||
-- - lcpp.LCPP_LUA for: load, loadfile
|
||||
-- - "#" operator for stringification
|
||||
-- - literal concatenation: "foo" "bar" -> "foobar"
|
||||
--
|
||||
--## License (MIT)
|
||||
-- -----------------------------------------------------------------------------
|
||||
-- Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
-- of this software and associated documentation files (the "Software"), to deal
|
||||
-- in the Software without restriction, including without limitation the rights
|
||||
-- to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
-- copies of the Software, and to permit persons to whom the Software is
|
||||
-- furnished to do so, subject to the following conditions:
|
||||
--
|
||||
-- The above copyright notice and this permission notice shall be included in
|
||||
-- all copies or substantial portions of the Software.
|
||||
--
|
||||
-- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
-- IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
-- FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
-- AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
-- LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
-- OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
-- THE SOFTWARE.
|
||||
--
|
||||
-- MIT license: http://www.opensource.org/licenses/mit-license.php
|
||||
-- -----------------------------------------------------------------------------
|
||||
--
|
||||
-- @module lcpp
|
||||
local lcpp = {}
|
||||
|
||||
-- CONFIG
|
||||
lcpp.LCPP_LUA = false -- whether to use lcpp to preprocess Lua code (load, loadfile, loadstring...)
|
||||
lcpp.LCPP_FFI = true -- whether to use lcpp as LuaJIT ffi PreProcessor (if used in luaJIT)
|
||||
lcpp.LCPP_TEST = false -- whether to run lcpp unit tests when loading lcpp module
|
||||
lcpp.ENV = {} -- static predefines (env-like)
|
||||
lcpp.FAST = false -- perf. tweaks when enabled. con: breaks minor stuff like __LINE__ macros
|
||||
lcpp.DEBUG = false
|
||||
|
||||
-- PREDEFINES
|
||||
local __FILE__ = "__FILE__"
|
||||
local __LINE__ = "__LINE__"
|
||||
local __DATE__ = "__DATE__"
|
||||
local __TIME__ = "__TIME__"
|
||||
local __LCPP_INDENT__ = "__LCPP_INDENT__"
|
||||
|
||||
-- BNF LEAVES
|
||||
local ENDL = "$"
|
||||
local STARTL = "^"
|
||||
local NEWL = "\n"
|
||||
local NEWL_BYTE = NEWL:byte(1)
|
||||
local NEWL_ESC = "\\"
|
||||
local NEWML = "\\\n"
|
||||
local CMD = "#"
|
||||
local CMD_BYTE = CMD:byte(1)
|
||||
local COMMENT = "^(.-)//.-$"
|
||||
local MLCOMMENT = "/[*].-[*]/"
|
||||
local WHITESPACES = "%s+"
|
||||
local OPTSPACES = "%s*"
|
||||
local IDENTIFIER = "[_%a][_%w]*"
|
||||
local NOIDENTIFIER = "[^%w_]+"
|
||||
local FILENAME = "[0-9a-zA-Z.-_/\\]+"
|
||||
local TEXT = ".+"
|
||||
|
||||
-- BNF WORDS
|
||||
local _INCLUDE = "include"
|
||||
local _DEFINE = "define"
|
||||
local _IFDEF = "ifdef"
|
||||
local _IFNDEF = "ifndef"
|
||||
local _ENDIF = "endif"
|
||||
local _UNDEF = "undef"
|
||||
local _IF = "if"
|
||||
local _ELSE = "else"
|
||||
local _ELIF = "elif"
|
||||
local _NOT = "!"
|
||||
local _ERROR = "error"
|
||||
local _PRAGMA = "pragma"
|
||||
local _VERSION = "version"
|
||||
|
||||
-- BNF RULES
|
||||
local INCLUDE = STARTL.._INCLUDE..WHITESPACES.."[\"<]("..FILENAME..")[\">]"..OPTSPACES..ENDL
|
||||
local DEFINE = STARTL.._DEFINE
|
||||
local IFDEF = STARTL.._IFDEF..WHITESPACES.."("..IDENTIFIER..")"..OPTSPACES..ENDL
|
||||
local IFNDEF = STARTL.._IFNDEF..WHITESPACES.."("..IDENTIFIER..")"..OPTSPACES..ENDL
|
||||
local ENDIF = STARTL.._ENDIF..OPTSPACES..ENDL
|
||||
local UNDEF = STARTL.._UNDEF..WHITESPACES.."("..IDENTIFIER..")"..OPTSPACES..ENDL
|
||||
local IF = STARTL.._IF..WHITESPACES.."(.*)"..ENDL
|
||||
local ELSE = STARTL.._ELSE..OPTSPACES..ENDL
|
||||
local ELIF = STARTL.._ELIF..WHITESPACES.."(.*)"..ENDL
|
||||
local ELSEIF = STARTL.._ELSE..WHITESPACES.._IF..WHITESPACES.."(.*)"..ENDL
|
||||
local ERROR = STARTL.._ERROR..WHITESPACES.."("..TEXT..")"..OPTSPACES..ENDL
|
||||
local ERROR_NOTEXT = STARTL.._ERROR..OPTSPACES..ENDL --> not required when we have POSIX regex
|
||||
local PRAGMA = STARTL.._PRAGMA
|
||||
local VERSION = STARTL.._VERSION
|
||||
|
||||
|
||||
|
||||
-- speedups
|
||||
local TRUEMACRO = STARTL.."("..IDENTIFIER..")%s*$"
|
||||
local REPLMACRO = STARTL.."("..IDENTIFIER..")"..WHITESPACES.."(.+)$"
|
||||
local FUNCMACRO = STARTL.."("..IDENTIFIER..")%s*%(([%s%w,]*)%)%s*(.*)"
|
||||
|
||||
|
||||
-- ------------
|
||||
-- LOCAL UTILS
|
||||
-- ------------
|
||||
lcpp.STATE = {lineno = 0} -- current state for debugging the last operation
|
||||
local function error(msg) _G.print(debug.traceback()); _G.error(string.format("lcpp ERR [%04i] %s", lcpp.STATE.lineno, msg)) end
|
||||
local function print(msg) _G.print(string.format("lcpp INF [%04i] %s", lcpp.STATE.lineno, msg)) end
|
||||
|
||||
-- splits a string using a pattern into a table of substrings
|
||||
local function gsplit(str, pat)
|
||||
local function _split(str, pat)
|
||||
local t = {} -- NOTE: use {n = 0} in Lua-5.0
|
||||
local fpat = "(.-)"..pat
|
||||
local last_end = 1
|
||||
local s, e, cap = str:find(fpat, 1)
|
||||
while s do
|
||||
if s ~= 1 or cap ~= "" then
|
||||
coroutine.yield(cap)
|
||||
end
|
||||
last_end = e + 1
|
||||
s, e, cap = str:find(fpat, last_end)
|
||||
end
|
||||
if last_end <= #str then
|
||||
cap = str:sub(last_end)
|
||||
coroutine.yield(cap)
|
||||
end
|
||||
end
|
||||
return coroutine.wrap(function() _split(str, pat) end)
|
||||
end
|
||||
local function split(str, pat)
|
||||
local t = {}
|
||||
for str in gsplit(str, pat) do table.insert(t, str) end
|
||||
return t
|
||||
end
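-- Illustration (not part of the original source): splitting on whitespace with the
-- coroutine-based splitter above:
--
--   local words = split("foo bar baz", "%s")   --> { "foo", "bar", "baz" }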
|
||||
|
||||
-- Checks whether a string starts with a given substring
|
||||
-- offset is optional
|
||||
local function strsw(str, pat, offset)
|
||||
if not str then return false end
|
||||
if not offset then offset = 0 end
|
||||
return string.sub(str, 1+offset, string.len(pat)+offset) == pat
|
||||
end
|
||||
|
||||
-- Checks whether a string ends with a given substring
|
||||
local function strew(str, pat)
|
||||
if not str then return false end
|
||||
return pat=='' or string.sub(str,-string.len(pat)) == pat
|
||||
end
|
||||
|
||||
-- string trim12 from lua wiki
|
||||
local function trim(str)
|
||||
local from = str:match"^%s*()"
|
||||
return from > #str and "" or str:match(".*%S", from)
|
||||
end
|
||||
|
||||
-- returns the number of string occurrences
|
||||
local function findn(input, what)
|
||||
local count = 0
|
||||
local offset = 0
|
||||
while true do
|
||||
_, offset = string.find(input, what, offset+1, true)
|
||||
if not offset then return count end
|
||||
count = count + 1
|
||||
end
|
||||
end
|
||||
|
||||
-- a lightweight and flexible tokenizer
|
||||
local function _tokenizer(str, setup)
|
||||
local defsetup = {
|
||||
-- EXAMPLE patterns have to be prepended with "^" for the tokenizer
|
||||
["identifier"] = '^[_%a][_%w]*',
|
||||
["number"] = '^[%+%-]?%d+[%.]?%d*',
|
||||
["ignore"] = '^%s+',
|
||||
["string"] = true,
|
||||
["keywords"] = {
|
||||
-- ["NAME"] = '^pattern',
|
||||
-- ...
|
||||
},
|
||||
}
|
||||
if not setup then
|
||||
setup = defsetup
|
||||
end
|
||||
setup.identifier = setup.identifier or defsetup.identifier
|
||||
setup.number = setup.number or defsetup.number
|
||||
setup.ignore = setup.ignore or defsetup.ignore
|
||||
if nil == setup.string then setup.string = true end
|
||||
setup.keywords = setup.keywords or {}
|
||||
|
||||
local strlen = #str
|
||||
local i = 1
|
||||
local i1, i2
|
||||
local keyword
|
||||
|
||||
local function find(pat)
|
||||
i1, i2 = str:find(pat,i)
|
||||
return i1 ~= nil
|
||||
end
|
||||
|
||||
local function cut()
|
||||
return str:sub(i, i2)
|
||||
end
|
||||
|
||||
local function findKeyword()
|
||||
for name, pat in pairs(setup.keywords) do
|
||||
local result = find(pat)
|
||||
if result then
|
||||
keyword = name
|
||||
return true
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
while true do
|
||||
if i > strlen then return 'eof', nil, strlen, strlen end
|
||||
if find(setup.ignore) then
|
||||
coroutine.yield("ignore", cut(), i1, i2)
|
||||
elseif findKeyword() then
|
||||
coroutine.yield(keyword, cut(), i1, i2)
|
||||
elseif find(setup.number) then
|
||||
coroutine.yield('number', tonumber(cut()), i1, i2)
|
||||
elseif find(setup.identifier) then
|
||||
coroutine.yield('identifier', cut(), i1, i2)
|
||||
elseif setup.string and (find('^"[^"]*"') or find("^'[^']*'")) then
|
||||
-- strip the quotes
|
||||
coroutine.yield('string', cut():sub(2,-2), i1, i2)
|
||||
else -- any other unknown character
|
||||
i1 = i
|
||||
i2 = i
|
||||
coroutine.yield('unknown', cut(), i1, i2)
|
||||
end
|
||||
i = i2+1
|
||||
end
|
||||
end
|
||||
local function tokenizer(str, setup)
|
||||
return coroutine.wrap(function() _tokenizer(str, setup) end)
|
||||
end
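-- Illustration (not part of the original source) of the default token stream. The
-- trailing 'eof' return value is swallowed by the coroutine wrapper, so a plain
-- for-loop simply ends after the last token:
--
--   for kind, value in tokenizer('count = 42') do
--     print(kind, value)
--   end
--   --> identifier "count", ignore " ", unknown "=", ignore " ", number 42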
|
||||
|
||||
|
||||
-- ------------
|
||||
-- PARSER
|
||||
-- ------------
|
||||
|
||||
|
||||
-- screener: remove comments, trim, multi-line concat...
|
||||
-- it only splits into cpp input lines and removes comments. it does not tokenize.
|
||||
local function screener(input)
|
||||
local function _screener(input)
|
||||
|
||||
|
||||
-- concat multi-line input.
|
||||
local count = 1
|
||||
while count > 0 do input, count = string.gsub(input, "^(.-)\\\n(.-)$", "%1 %2\n") end
|
||||
|
||||
-- trim and join blocks not starting with "#"
|
||||
local buffer = {}
|
||||
for line in gsplit(input, NEWL) do
|
||||
--line = trim(line)
|
||||
if #line > 0 then
|
||||
if line:byte(1) == CMD_BYTE then
|
||||
--line = line:gsub("#%s*(.*)", "#%1") -- remove optional whitespace after "#". reduces trimming later.
|
||||
if #buffer > 0 then
|
||||
coroutine.yield(table.concat(buffer, NEWL))
|
||||
buffer = {}
|
||||
end
|
||||
coroutine.yield(line)
|
||||
else
|
||||
if lcpp.FAST then
|
||||
table.insert(buffer, line)
|
||||
else
|
||||
coroutine.yield(line)
|
||||
end
|
||||
end
|
||||
elseif not lcpp.FAST then
|
||||
coroutine.yield(line)
|
||||
end
|
||||
end
|
||||
if #buffer > 0 then
|
||||
coroutine.yield(table.concat(buffer, NEWL))
|
||||
end
|
||||
end
|
||||
|
||||
return coroutine.wrap(function() _screener(input) end)
|
||||
end
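-- Illustration (screener is local to this chunk; shown only to sketch its behaviour):
-- backslash-newline continuations are folded into a single line and each resulting
-- line is yielded on its own; with lcpp.FAST, consecutive non-directive lines are
-- buffered and flushed as one block before a "#" line:
--
--   for line in screener('#define A 1 \\\n        2\nint x;\n') do print(line) end
--   --> the folded "#define A 1 ... 2" line, then "int x;", then a trailing empty line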
|
||||
|
||||
-- apply currently known macros to input (and returns it)
|
||||
local function apply(state, input)
|
||||
local out = {}
|
||||
local functions = {}
|
||||
|
||||
for k, v, start, end_ in tokenizer(input) do
|
||||
if k == "identifier" then
|
||||
local repl = v
|
||||
local macro = state.defines[v]
|
||||
if macro then
|
||||
if type(macro) == "boolean" then
|
||||
repl = ""
|
||||
elseif type(macro) == "string" then
|
||||
repl = macro
|
||||
elseif type(macro) == "number" then
|
||||
repl = tostring(macro)
|
||||
elseif type(macro) == "function" then
|
||||
table.insert(functions, macro) -- we apply functions in a later step
|
||||
end
|
||||
end
|
||||
table.insert(out, repl)
|
||||
else
|
||||
table.insert(out, input:sub(start, end_))
|
||||
end
|
||||
end
|
||||
input = table.concat(out)
|
||||
for _, func in pairs(functions) do -- TODO: looks sucky (but works quite nice)
|
||||
input = func(input)
|
||||
end
|
||||
|
||||
return input
|
||||
end
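-- Illustration (values are made up): in this stripped-down copy macros come in purely
-- through the predefines table (since #define directives are not interpreted), and
-- apply() substitutes them token by token:
--
--   local state = lcpp.init("", { MAXPATH = 260 })
--   print(state:apply("wchar_t path[MAXPATH];"))   --> wchar_t path[260];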
|
||||
|
||||
-- processes an input line. called from lcpp doWork loop
|
||||
local function processLine(state, line)
|
||||
if not line or #line == 0 then return line end
|
||||
local cmd = nil
|
||||
if line:byte(1) == CMD_BYTE then cmd = line:sub(2) end
|
||||
--print("processLine(): "..line)
|
||||
|
||||
|
||||
--[[ SKIPPING ]]--
|
||||
if state:skip() then return end
|
||||
|
||||
|
||||
--[[ READ NEW DIRECTIVES ]]--
|
||||
if cmd then
|
||||
-- handle #include ...
|
||||
local filename = cmd:match(INCLUDE)
|
||||
if filename then
|
||||
return state:includeFile(filename)
|
||||
end
|
||||
|
||||
-- ignore all directives, except #include
|
||||
return line
|
||||
|
||||
end
|
||||
|
||||
|
||||
--[[ APPLY MACROS ]]--
|
||||
line = state:apply(line);
|
||||
|
||||
return line
|
||||
end
|
||||
|
||||
local function doWork(state)
|
||||
local function _doWork(state)
|
||||
if not state:defined(__FILE__) then state:define(__FILE__, "<USER_CHUNK>", true) end
|
||||
local oldIndent = state:getIndent()
|
||||
while true do
|
||||
local input = state:getLine()
|
||||
if not input then break end
|
||||
local output = processLine(state, input)
|
||||
if not lcpp.FAST and not output then output = "" end -- output empty skipped lines
|
||||
if lcpp.DEBUG then output = output.." -- "..input end -- input as comment when DEBUG
|
||||
if output then coroutine.yield(output) end
|
||||
end
|
||||
if (oldIndent ~= state:getIndent()) then error("indentation level must be balanced within a file. was:"..oldIndent.." is:"..state:getIndent()) end
|
||||
end
|
||||
return coroutine.wrap(function() _doWork(state) end)
|
||||
end
|
||||
|
||||
local function includeFile(state, filename)
|
||||
local result, result_state = lcpp.compileFile(filename, state.defines)
|
||||
-- now, we take the define table of the sub file for further processing
|
||||
state.defines = result_state.defines
|
||||
-- and return the compiled result
|
||||
return result
|
||||
end
|
||||
|
||||
-- sets a global define
|
||||
local function define(state, key, value, override)
|
||||
--print("define:"..key.." type:"..type(value))
|
||||
--if value and not override and state:defined(key) then error("already defined: "..key) end
|
||||
value = state:prepareMacro(value)
|
||||
state.defines[key] = value
|
||||
end
|
||||
|
||||
-- parses CPP expressions
|
||||
-- i.e.: #if !defined(_UNICODE) && !defined(UNICODE)
|
||||
--
|
||||
--BNF:
|
||||
-- EXPR -> (BRACKET_OPEN)(EXPR)(BRACKET_CLOSE)
|
||||
-- EXPR -> (EXPR)(OR)(EXPR)
|
||||
-- EXPR -> (EXPR)(AND)(EXPR)
|
||||
-- EXPR -> (NOT)(EXPR)
|
||||
-- EXPR -> (FUNCTION)
|
||||
-- FUNCTION -> (IDENTIFIER)(BRACKET_OPEN)(ARGS)(BRACKET_CLOSE)
|
||||
-- ARGS -> ((IDENTIFIER)[(COMMA)(IDENTIFIER)])?
|
||||
--LEAVES:
|
||||
-- IGNORE -> " \t"
|
||||
-- BRACKET_OPEN -> "("
|
||||
-- BRACKET_CLOSE -> ")"
|
||||
-- OR -> "||"
|
||||
-- AND -> "&&"
|
||||
-- NOT -> "!"
|
||||
-- IDENTIFIER -> "[0-9a-zA-Z_]"
|
||||
--
|
||||
|
||||
local LCPP_TOKENIZE_MACRO = {
|
||||
string = true,
|
||||
keywords = {
|
||||
CONCAT = "^##",
|
||||
},
|
||||
}
|
||||
local LCPP_TOKENIZE_EXPR = {
|
||||
string = false,
|
||||
keywords = {
|
||||
NOT = '^!',
|
||||
DEFINED = '^defined',
|
||||
BROPEN = '^[(]',
|
||||
BRCLOSE = '^[)]',
|
||||
AND = '^&&',
|
||||
OR = '^||',
|
||||
},
|
||||
}
|
||||
|
||||
local function parseDefined(state, input)
|
||||
local result = false
|
||||
local bropen = false
|
||||
local brclose = false
|
||||
local ident = nil
|
||||
|
||||
for key, value in input do
|
||||
if key == "BROPEN" then
|
||||
bropen = true
|
||||
end
|
||||
if key == "identifier" then
|
||||
ident = value
|
||||
if not bropen then break end
|
||||
end
|
||||
if key == "BRCLOSE" and ident then
|
||||
brclose = true
|
||||
break
|
||||
end
|
||||
end
|
||||
|
||||
-- with and w/o brackets allowed
|
||||
if ident and ((bropen and brclose) or (not bropen and not brclose)) then
|
||||
return state:defined(ident)
|
||||
end
|
||||
|
||||
error("expression parse error: defined(ident)")
|
||||
end
|
||||
|
||||
local function parseExpr(state, input)
|
||||
-- first call gets string input. rest uses tokenizer
|
||||
if type(input) == "string" then input = tokenizer(input, LCPP_TOKENIZE_EXPR) end
|
||||
local result = false
|
||||
local _not = false
|
||||
|
||||
for type, value in input do
|
||||
-- print("type:"..type.." value:"..value)
|
||||
if type == "NOT" then
|
||||
_not = true
|
||||
end
|
||||
if type == "BROPEN" then
|
||||
return state:parseExpr(input)
|
||||
end
|
||||
if type == "BRCLOSE" then
|
||||
return result
|
||||
end
|
||||
if type == "AND" then
|
||||
return result and state:parseExpr(input)
|
||||
end
|
||||
if type == "OR" then
|
||||
return result or state:parseExpr(input)
|
||||
end
|
||||
|
||||
if type == "DEFINED" then
|
||||
if _not then
|
||||
result = not parseDefined(state, input)
|
||||
else
|
||||
result = parseDefined(state, input)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
return result
|
||||
end
|
||||
|
||||
-- apply macros chaining and string ops "##" and "#"
|
||||
local function prepareMacro(state, input)
|
||||
if type(input) ~= "string" then return input end
|
||||
input = state:apply(input)
|
||||
local out = {}
|
||||
for k, v, start, end_ in tokenizer(input, LCPP_TOKENIZE_MACRO) do
|
||||
if k == "CONCAT" then
|
||||
-- remove concat op "##"
|
||||
else
|
||||
table.insert(out, input:sub(start, end_))
|
||||
end
|
||||
end
|
||||
return table.concat(out)
|
||||
end
|
||||
|
||||
-- i.e.: "MAX(x, y) (((x) > (y)) ? (x) : (y))"
|
||||
local function parseFunction(state, input)
|
||||
if not input then return end
|
||||
local name, argsstr, repl = input:match(FUNCMACRO)
|
||||
if not name or not argsstr or not repl then return end
|
||||
repl = state:prepareMacro(repl)
|
||||
|
||||
-- rename args to %1,%2... for later gsub
|
||||
local noargs = 0
|
||||
for argname in argsstr:gmatch(IDENTIFIER) do
|
||||
noargs = noargs + 1
|
||||
repl = repl:gsub(argname, "%%"..noargs)
|
||||
end
|
||||
|
||||
-- build pattern string: name(arg, arg, ...)
|
||||
local pattern
|
||||
if noargs == 0 then pattern = name.."%s*%(%s*%)" -- quick 0 arg version
|
||||
elseif noargs == 1 then pattern = name.."%s*%(%s*([^,%)]*)%s*%)" -- quick 1 arg version
|
||||
elseif noargs == 2 then pattern = name.."%s*%(%s*([^,%)]*)%s*,%s*([^,%)]*)%s*%)" -- quick 2 arg version
|
||||
else -- arbitrary arg version
|
||||
local buf = {}
|
||||
table.insert(buf, name)
|
||||
table.insert(buf, "%s*%(%s*")
|
||||
for i = 1, noargs do
|
||||
table.insert(buf, "([^,%)]*)%s*")
|
||||
if i < noargs then
|
||||
table.insert(buf, ",%s*")
|
||||
end
|
||||
end
|
||||
table.insert(buf, "%)")
|
||||
pattern = table.concat(buf)
|
||||
end
|
||||
|
||||
-- build macro function
|
||||
local func = function(input)
|
||||
return input:gsub(pattern, repl)
|
||||
end
|
||||
|
||||
return name, func
|
||||
end
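-- Illustration (not exercised by this stripped-down copy, which never reaches the
-- #define path, but kept from upstream lcpp): the definition is compiled into a Lua
-- pattern plus replacement and the returned closure expands call sites via gsub.
-- Assuming a state obtained from lcpp.init(...):
--
--   local name, expand = state:parseFunction("MAX(x, y) (((x) > (y)) ? (x) : (y))")
--   print(name)                           --> MAX
--   print(expand("int m = MAX(a, b);"))   --> int m = (((a) > (b)) ? (a) : (b));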
|
||||
|
||||
|
||||
-- ------------
|
||||
-- LCPP INTERFACE
|
||||
-- ------------
|
||||
|
||||
--- initializes an lcpp state. not needed manually. handy for testing
|
||||
function lcpp.init(input, predefines)
|
||||
-- create state var
|
||||
local state = {} -- init the state object
|
||||
state.defines = {} -- the table of known defines and replacements
|
||||
state.screener = screener(input)
|
||||
state.lineno = 0 -- the current line number
|
||||
state.stack = {} -- stores whether the current stack level is to be included
|
||||
state.once = {} -- stack level was once true (first if that evals to true)
|
||||
|
||||
-- funcs
|
||||
state.define = define
|
||||
state.undefine = function(state, key)
|
||||
state:define(key, nil)
|
||||
end
|
||||
state.defined = function(state, key)
|
||||
return state.defines[key] ~= nil
|
||||
end
|
||||
state.apply = apply
|
||||
state.includeFile = includeFile
|
||||
state.doWork = doWork
|
||||
state.getIndent = function(state)
|
||||
return #state.stack
|
||||
end
|
||||
state.openBlock = function(state, bool)
|
||||
state.stack[#state.stack+1] = bool
|
||||
state.once [#state.once+1] = bool
|
||||
state:define(__LCPP_INDENT__, state:getIndent(), true)
|
||||
end
|
||||
state.elseBlock = function(state, bool)
|
||||
if state.once[#state.once] then
|
||||
state.stack[#state.stack] = false
|
||||
else
|
||||
state.stack[#state.stack] = bool
|
||||
if bool then state.once[#state.once] = true end
|
||||
end
|
||||
end
|
||||
state.closeBlock = function(state)
|
||||
state.stack[#state.stack] = nil
|
||||
state.once [#state.once] = nil
|
||||
state:define(__LCPP_INDENT__, state:getIndent(), true)
|
||||
if state:getIndent() < 0 then error("Unopened block detected. Indentation problem.") end
|
||||
end
|
||||
state.skip = function(state)
|
||||
for i = 1, #state.stack do
|
||||
if not state.stack[i] then return true end
|
||||
end
|
||||
return false
|
||||
end
|
||||
state.getLine = function(state)
|
||||
state.lineno = state.lineno + 1
|
||||
state:define(__LINE__, state.lineno, true)
|
||||
return state.screener()
|
||||
end
|
||||
state.prepareMacro = prepareMacro
|
||||
state.parseExpr = parseExpr
|
||||
state.parseFunction = parseFunction
|
||||
|
||||
-- predefines
|
||||
state:define(__DATE__, os.date("%B %d %Y"), true)
|
||||
state:define(__TIME__, os.date("%H:%M:%S"), true)
|
||||
state:define(__LINE__, state.lineno, true)
|
||||
state:define(__LCPP_INDENT__, state:getIndent(), true)
|
||||
predefines = predefines or {}
|
||||
for k,v in pairs(lcpp.ENV) do state:define(k, v, true) end -- static ones
|
||||
for k,v in pairs(predefines) do state:define(k, v, true) end
|
||||
|
||||
if lcpp.LCPP_TEST then lcpp.STATE = state end -- activate static state debugging
|
||||
|
||||
return state
|
||||
end
|
||||
|
||||
--- the preprocessors main function.
|
||||
-- returns the preprocessed output as a string.
|
||||
-- @param code data as string
|
||||
-- @param predefines OPTIONAL a table of predefined variables
|
||||
-- @usage lcpp.compile("#define bar 0x1337\nstatic const int foo = bar;")
|
||||
-- @usage lcpp.compile("#define bar 0x1337\nstatic const int foo = bar;", {["bar"] = "0x1338"})
|
||||
function lcpp.compile(code, predefines)
|
||||
local state = lcpp.init(code, predefines)
|
||||
local buf = {}
|
||||
for output in state:doWork() do
|
||||
table.insert(buf, output)
|
||||
end
|
||||
local output = table.concat(buf, NEWL)
|
||||
if lcpp.DEBUG then print(output) end
|
||||
return output, state
|
||||
end
|
||||
|
||||
--- preprocesses a file
|
||||
-- @param filename the file to read
|
||||
-- @param predefines OPTIONAL a table of predefined variables
|
||||
-- @usage out, state = lcpp.compileFile("../odbg/plugin.h", {["MAXPATH"]=260, ["UNICODE"]=true})
|
||||
function lcpp.compileFile(filename, predefines)
|
||||
if not filename then error("compileFile() arg1 has to be a string") end
|
||||
local file = io.open(filename, 'r')
|
||||
if not file then error("file not found: "..filename) end
|
||||
local code = file:read('*a')
|
||||
predefines = predefines or {}
|
||||
predefines[__FILE__] = filename
|
||||
return lcpp.compile(code, predefines)
|
||||
end
|
||||
|
||||
|
||||
|
||||
-- ------------
|
||||
-- REGISTER LCPP
|
||||
-- ------------
|
||||
|
||||
--- disable lcpp processing for ffi, loadstring and such
|
||||
lcpp.disable = function()
|
||||
if lcpp.LCPP_LUA then
|
||||
-- restore loadstring here once LCPP_LUA actually does anything useful
|
||||
-- _G.loadstring = _G.loadstring_lcpp_backup
|
||||
end
|
||||
|
||||
if lcpp.LCPP_FFI and pcall(require, "ffi") then
|
||||
ffi = require("ffi")
|
||||
if ffi.lcpp_cdef_backup then
|
||||
ffi.cdef = ffi.lcpp_cdef_backup
|
||||
ffi.lcpp_cdef_backup = nil
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
--- (re)enable lcpp processing for ffi, loadstring and such
|
||||
lcpp.enable = function()
|
||||
-- Use LCPP to process Lua code (load, loadfile, loadstring...)
|
||||
if lcpp.LCPP_LUA then
|
||||
-- TODO: make it properly work on all functions
|
||||
error("lcpp.LCPP_LUA = true -- not properly implemented yet");
|
||||
_G.loadstring_lcpp_backup = _G.loadstring
|
||||
_G.loadstring = function(str, chunk)
|
||||
return loadstring_lcpp_backup(lcpp.compile(str), chunk)
|
||||
end
|
||||
end
|
||||
-- Use LCPP as LuaJIT PreProcessor if used inside LuaJIT. i.e. Hook ffi.cdef
|
||||
if lcpp.LCPP_FFI and pcall(require, "ffi") then
|
||||
ffi = require("ffi")
|
||||
if not ffi.lcpp_cdef_backup then
|
||||
if not ffi.lcpp_defs then ffi.lcpp_defs = {} end -- defs are stored and reused
|
||||
ffi.lcpp = function(input)
|
||||
local output, state = lcpp.compile(input, ffi.lcpp_defs)
|
||||
ffi.lcpp_defs = state.defines
|
||||
return output
|
||||
end
|
||||
ffi.lcpp_cdef_backup = ffi.cdef
|
||||
ffi.cdef = function(input) return ffi.lcpp_cdef_backup(ffi.lcpp(input)) end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
lcpp.enable()
|
||||
return lcpp
|
||||
|
||||
|
||||
@@ -355,5 +355,4 @@ static const char* sapCL= \
|
||||
" sum[i]=s;\n"
|
||||
" sum2[i]=s*s; \n"
|
||||
"}\n"
|
||||
"\n"
|
||||
;
|
||||
|
||||
305  src/Bullet3OpenCL/NarrowphaseCollision/b3ContactCache.cpp  (new file)
@@ -0,0 +1,305 @@
|
||||
#if 0
|
||||
/*
|
||||
Bullet Continuous Collision Detection and Physics Library
|
||||
Copyright (c) 2003-2006 Erwin Coumans http://continuousphysics.com/Bullet/
|
||||
|
||||
This software is provided 'as-is', without any express or implied warranty.
|
||||
In no event will the authors be held liable for any damages arising from the use of this software.
|
||||
Permission is granted to anyone to use this software for any purpose,
|
||||
including commercial applications, and to alter it and redistribute it freely,
|
||||
subject to the following restrictions:
|
||||
|
||||
1. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required.
|
||||
2. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software.
|
||||
3. This notice may not be removed or altered from any source distribution.
|
||||
*/
|
||||
|
||||
|
||||
#include "b3ContactCache.h"
|
||||
#include "Bullet3Common/b3Transform.h"
|
||||
|
||||
|
||||
b3Scalar gContactBreakingThreshold = b3Scalar(0.02);
|
||||
b3Scalar m_contactBreakingThreshold;
|
||||
b3Scalar m_contactProcessingThreshold;
|
||||
|
||||
///gContactCalcArea3Points will approximate the convex hull area using 3 points
|
||||
///when setting it to false, it will use 4 points to compute the area: it is more accurate but slower
|
||||
bool gContactCalcArea3Points = true;
|
||||
|
||||
|
||||
b3ContactCache::b3ContactCache()
|
||||
:m_index1a(0)
|
||||
{
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
#ifdef DEBUG_PERSISTENCY
|
||||
#include <stdio.h>
|
||||
void b3ContactCache::DebugPersistency()
|
||||
{
|
||||
int i;
|
||||
printf("DebugPersistency : numPoints %d\n",m_cachedPoints);
|
||||
for (i=0;i<m_cachedPoints;i++)
|
||||
{
|
||||
printf("m_pointCache[%d].m_userPersistentData = %x\n",i,m_pointCache[i].m_userPersistentData);
|
||||
}
|
||||
}
|
||||
#endif //DEBUG_PERSISTENCY
|
||||
|
||||
void b3ContactCache::clearUserCache(btManifoldPoint& pt)
|
||||
{
|
||||
|
||||
void* oldPtr = pt.m_userPersistentData;
|
||||
if (oldPtr)
|
||||
{
|
||||
#ifdef DEBUG_PERSISTENCY
|
||||
int i;
|
||||
int occurance = 0;
|
||||
for (i=0;i<m_cachedPoints;i++)
|
||||
{
|
||||
if (m_pointCache[i].m_userPersistentData == oldPtr)
|
||||
{
|
||||
occurance++;
|
||||
if (occurance>1)
|
||||
printf("error in clearUserCache\n");
|
||||
}
|
||||
}
|
||||
btAssert(occurance<=0);
|
||||
#endif //DEBUG_PERSISTENCY
|
||||
|
||||
if (pt.m_userPersistentData && gContactDestroyedCallback)
|
||||
{
|
||||
(*gContactDestroyedCallback)(pt.m_userPersistentData);
|
||||
pt.m_userPersistentData = 0;
|
||||
}
|
||||
|
||||
#ifdef DEBUG_PERSISTENCY
|
||||
DebugPersistency();
|
||||
#endif
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
static inline b3Scalar calcArea4Points(const btVector3 &p0,const btVector3 &p1,const btVector3 &p2,const btVector3 &p3)
|
||||
{
|
||||
// It calculates the 3 possible areas constructed from the 4 points and returns the biggest one.
|
||||
|
||||
btVector3 a[3],b[3];
|
||||
a[0] = p0 - p1;
|
||||
a[1] = p0 - p2;
|
||||
a[2] = p0 - p3;
|
||||
b[0] = p2 - p3;
|
||||
b[1] = p1 - p3;
|
||||
b[2] = p1 - p2;
|
||||
|
||||
//todo: the following 3 cross products can easily be optimized with SIMD.
|
||||
btVector3 tmp0 = a[0].cross(b[0]);
|
||||
btVector3 tmp1 = a[1].cross(b[1]);
|
||||
btVector3 tmp2 = a[2].cross(b[2]);
|
||||
|
||||
return btMax(btMax(tmp0.length2(),tmp1.length2()),tmp2.length2());
|
||||
}
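A note on the geometry above (a reading of the code, not stated in the original comments): in each of the three pairings, $a_i$ and $b_i$ act as the diagonals of the quadrilateral spanned by the four points, and for a coplanar arrangement that quadrilateral's area is $A_i = \tfrac{1}{2}\,\lVert a_i \times b_i \rVert$. Only the largest candidate matters, so the code compares the squared cross-product lengths directly and skips both the square root and the factor $\tfrac{1}{2}$.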
|
||||
|
||||
int b3ContactCache::sortCachedPoints(const btManifoldPoint& pt)
|
||||
{
|
||||
//calculate the 4 possible areas and take the biggest one
|
||||
//also need to keep 'deepest'
|
||||
|
||||
int maxPenetrationIndex = -1;
|
||||
#define KEEP_DEEPEST_POINT 1
|
||||
#ifdef KEEP_DEEPEST_POINT
|
||||
b3Scalar maxPenetration = pt.getDistance();
|
||||
for (int i=0;i<4;i++)
|
||||
{
|
||||
if (m_pointCache[i].getDistance() < maxPenetration)
|
||||
{
|
||||
maxPenetrationIndex = i;
|
||||
maxPenetration = m_pointCache[i].getDistance();
|
||||
}
|
||||
}
|
||||
#endif //KEEP_DEEPEST_POINT
|
||||
|
||||
b3Scalar res0(b3Scalar(0.)),res1(b3Scalar(0.)),res2(b3Scalar(0.)),res3(b3Scalar(0.));
|
||||
|
||||
if (gContactCalcArea3Points)
|
||||
{
|
||||
if (maxPenetrationIndex != 0)
|
||||
{
|
||||
btVector3 a0 = pt.m_localPointA-m_pointCache[1].m_localPointA;
|
||||
btVector3 b0 = m_pointCache[3].m_localPointA-m_pointCache[2].m_localPointA;
|
||||
btVector3 cross = a0.cross(b0);
|
||||
res0 = cross.length2();
|
||||
}
|
||||
if (maxPenetrationIndex != 1)
|
||||
{
|
||||
btVector3 a1 = pt.m_localPointA-m_pointCache[0].m_localPointA;
|
||||
btVector3 b1 = m_pointCache[3].m_localPointA-m_pointCache[2].m_localPointA;
|
||||
btVector3 cross = a1.cross(b1);
|
||||
res1 = cross.length2();
|
||||
}
|
||||
|
||||
if (maxPenetrationIndex != 2)
|
||||
{
|
||||
btVector3 a2 = pt.m_localPointA-m_pointCache[0].m_localPointA;
|
||||
btVector3 b2 = m_pointCache[3].m_localPointA-m_pointCache[1].m_localPointA;
|
||||
btVector3 cross = a2.cross(b2);
|
||||
res2 = cross.length2();
|
||||
}
|
||||
|
||||
if (maxPenetrationIndex != 3)
|
||||
{
|
||||
btVector3 a3 = pt.m_localPointA-m_pointCache[0].m_localPointA;
|
||||
btVector3 b3 = m_pointCache[2].m_localPointA-m_pointCache[1].m_localPointA;
|
||||
btVector3 cross = a3.cross(b3);
|
||||
res3 = cross.length2();
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
if(maxPenetrationIndex != 0) {
|
||||
res0 = calcArea4Points(pt.m_localPointA,m_pointCache[1].m_localPointA,m_pointCache[2].m_localPointA,m_pointCache[3].m_localPointA);
|
||||
}
|
||||
|
||||
if(maxPenetrationIndex != 1) {
|
||||
res1 = calcArea4Points(pt.m_localPointA,m_pointCache[0].m_localPointA,m_pointCache[2].m_localPointA,m_pointCache[3].m_localPointA);
|
||||
}
|
||||
|
||||
if(maxPenetrationIndex != 2) {
|
||||
res2 = calcArea4Points(pt.m_localPointA,m_pointCache[0].m_localPointA,m_pointCache[1].m_localPointA,m_pointCache[3].m_localPointA);
|
||||
}
|
||||
|
||||
if(maxPenetrationIndex != 3) {
|
||||
res3 = calcArea4Points(pt.m_localPointA,m_pointCache[0].m_localPointA,m_pointCache[1].m_localPointA,m_pointCache[2].m_localPointA);
|
||||
}
|
||||
}
|
||||
btVector4 maxvec(res0,res1,res2,res3);
|
||||
int biggestarea = maxvec.closestAxis4();
|
||||
return biggestarea;
|
||||
|
||||
}
|
||||
|
||||
|
||||
int b3ContactCache::getCacheEntry(const btManifoldPoint& newPoint) const
|
||||
{
|
||||
b3Scalar shortestDist = getContactBreakingThreshold() * getContactBreakingThreshold();
|
||||
int size = getNumContacts();
|
||||
int nearestPoint = -1;
|
||||
for( int i = 0; i < size; i++ )
|
||||
{
|
||||
const btManifoldPoint &mp = m_pointCache[i];
|
||||
|
||||
btVector3 diffA = mp.m_localPointA- newPoint.m_localPointA;
|
||||
const b3Scalar distToManiPoint = diffA.dot(diffA);
|
||||
if( distToManiPoint < shortestDist )
|
||||
{
|
||||
shortestDist = distToManiPoint;
|
||||
nearestPoint = i;
|
||||
}
|
||||
}
|
||||
return nearestPoint;
|
||||
}
|
||||
|
||||
int b3ContactCache::addManifoldPoint(const btManifoldPoint& newPoint, bool isPredictive)
|
||||
{
|
||||
if (!isPredictive)
|
||||
{
|
||||
btAssert(validContactDistance(newPoint));
|
||||
}
|
||||
|
||||
int insertIndex = getNumContacts();
|
||||
if (insertIndex == MANIFOLD_CACHE_SIZE)
|
||||
{
|
||||
#if MANIFOLD_CACHE_SIZE >= 4
|
||||
//sort cache so best points come first, based on area
|
||||
insertIndex = sortCachedPoints(newPoint);
|
||||
#else
|
||||
insertIndex = 0;
|
||||
#endif
|
||||
clearUserCache(m_pointCache[insertIndex]);
|
||||
|
||||
} else
|
||||
{
|
||||
m_cachedPoints++;
|
||||
|
||||
|
||||
}
|
||||
if (insertIndex<0)
|
||||
insertIndex=0;
|
||||
|
||||
btAssert(m_pointCache[insertIndex].m_userPersistentData==0);
|
||||
m_pointCache[insertIndex] = newPoint;
|
||||
return insertIndex;
|
||||
}
|
||||
|
||||
b3Scalar b3ContactCache::getContactBreakingThreshold() const
|
||||
{
|
||||
return m_contactBreakingThreshold;
|
||||
}
|
||||
|
||||
|
||||
|
||||
void b3ContactCache::refreshContactPoints(const btTransform& trA,const btTransform& trB)
|
||||
{
|
||||
int i;
|
||||
#ifdef DEBUG_PERSISTENCY
|
||||
printf("refreshContactPoints posA = (%f,%f,%f) posB = (%f,%f,%f)\n",
|
||||
trA.getOrigin().getX(),
|
||||
trA.getOrigin().getY(),
|
||||
trA.getOrigin().getZ(),
|
||||
trB.getOrigin().getX(),
|
||||
trB.getOrigin().getY(),
|
||||
trB.getOrigin().getZ());
|
||||
#endif //DEBUG_PERSISTENCY
|
||||
/// first refresh worldspace positions and distance
|
||||
for (i=getNumContacts()-1;i>=0;i--)
|
||||
{
|
||||
btManifoldPoint &manifoldPoint = m_pointCache[i];
|
||||
manifoldPoint.m_positionWorldOnA = trA( manifoldPoint.m_localPointA );
|
||||
manifoldPoint.m_positionWorldOnB = trB( manifoldPoint.m_localPointB );
|
||||
manifoldPoint.m_distance1 = (manifoldPoint.m_positionWorldOnA - manifoldPoint.m_positionWorldOnB).dot(manifoldPoint.m_normalWorldOnB);
|
||||
manifoldPoint.m_lifeTime++;
|
||||
}
|
||||
|
||||
/// then
|
||||
b3Scalar distance2d;
|
||||
btVector3 projectedDifference,projectedPoint;
|
||||
for (i=getNumContacts()-1;i>=0;i--)
|
||||
{
|
||||
|
||||
btManifoldPoint &manifoldPoint = m_pointCache[i];
|
||||
//contact becomes invalid when signed distance exceeds margin (projected on the contact normal direction)
|
||||
if (!validContactDistance(manifoldPoint))
|
||||
{
|
||||
removeContactPoint(i);
|
||||
} else
|
||||
{
|
||||
//contact also becomes invalid when relative movement orthogonal to normal exceeds margin
|
||||
projectedPoint = manifoldPoint.m_positionWorldOnA - manifoldPoint.m_normalWorldOnB * manifoldPoint.m_distance1;
|
||||
projectedDifference = manifoldPoint.m_positionWorldOnB - projectedPoint;
|
||||
distance2d = projectedDifference.dot(projectedDifference);
|
||||
if (distance2d > getContactBreakingThreshold()*getContactBreakingThreshold() )
|
||||
{
|
||||
removeContactPoint(i);
|
||||
} else
|
||||
{
|
||||
//contact point processed callback
|
||||
if (gContactProcessedCallback)
|
||||
(*gContactProcessedCallback)(manifoldPoint,(void*)m_body0,(void*)m_body1);
|
||||
}
|
||||
}
|
||||
}
|
||||
#ifdef DEBUG_PERSISTENCY
|
||||
DebugPersistency();
|
||||
#endif //
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
#endif
|
||||
216  src/Bullet3OpenCL/NarrowphaseCollision/b3ContactCache.h  (new file)
@@ -0,0 +1,216 @@
|
||||
/*
|
||||
Bullet Continuous Collision Detection and Physics Library
|
||||
Copyright (c) 2003-2006 Erwin Coumans http://continuousphysics.com/Bullet/
|
||||
|
||||
This software is provided 'as-is', without any express or implied warranty.
|
||||
In no event will the authors be held liable for any damages arising from the use of this software.
|
||||
Permission is granted to anyone to use this software for any purpose,
|
||||
including commercial applications, and to alter it and redistribute it freely,
|
||||
subject to the following restrictions:
|
||||
|
||||
1. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required.
|
||||
2. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software.
|
||||
3. This notice may not be removed or altered from any source distribution.
|
||||
*/
|
||||
|
||||
#ifndef B3_CONTACT_CACHE_H
|
||||
#define B3_CONTACT_CACHE_H
|
||||
|
||||
|
||||
#include "LinearMath/btVector3.h"
|
||||
#include "LinearMath/btTransform.h"
|
||||
#include "btManifoldPoint.h"
|
||||
class btCollisionObject;
|
||||
#include "LinearMath/btAlignedAllocator.h"
|
||||
|
||||
struct btCollisionResult;
|
||||
|
||||
///maximum contact breaking and merging threshold
|
||||
extern b3Scalar gContactBreakingThreshold;
|
||||
|
||||
//the enum starts at 1024 to avoid type conflicts with btTypedConstraint
|
||||
enum btContactManifoldTypes
|
||||
{
|
||||
MIN_CONTACT_MANIFOLD_TYPE = 1024,
|
||||
BT_PERSISTENT_MANIFOLD_TYPE
|
||||
};
|
||||
|
||||
#define MANIFOLD_CACHE_SIZE 4
|
||||
|
||||
///b3ContactCache is a contact point cache, it stays persistent as long as objects are overlapping in the broadphase.
|
||||
///Those contact points are created by the collision narrow phase.
|
||||
///The cache can be empty, or hold 1,2,3 or 4 points. Some collision algorithms (GJK) might only add one point at a time.
|
||||
///updates/refreshes old contact points, and throw them away if necessary (distance becomes too large)
|
||||
///reduces the cache to 4 points when more than 4 points are added, using the following rules:
|
||||
///the contact point with deepest penetration is always kept, and it tries to maximize the area covered by the points
|
||||
///note that some pairs of objects might have more than one contact manifold.
|
||||
|
||||
|
||||
B3_ATTRIBUTE_ALIGNED16( class) b3ContactCache
|
||||
{
|
||||
|
||||
|
||||
|
||||
|
||||
/// sort cached points so most isolated points come first
|
||||
int sortCachedPoints(const btManifoldPoint& pt);
|
||||
|
||||
int findContactPoint(const btManifoldPoint* unUsed, int numUnused,const btManifoldPoint& pt);
|
||||
|
||||
public:
|
||||
|
||||
BT_DECLARE_ALIGNED_ALLOCATOR();
|
||||
|
||||
|
||||
int m_index1a;
|
||||
|
||||
b3ContactCache();
|
||||
|
||||
b3ContactCache(const btCollisionObject* body0,const btCollisionObject* body1,int , b3Scalar contactBreakingThreshold,b3Scalar contactProcessingThreshold)
|
||||
: btTypedObject(BT_PERSISTENT_MANIFOLD_TYPE),
|
||||
m_body0(body0),m_body1(body1),m_cachedPoints(0),
|
||||
m_contactBreakingThreshold(contactBreakingThreshold),
|
||||
m_contactProcessingThreshold(contactProcessingThreshold)
|
||||
{
|
||||
}
|
||||
|
||||
B3_FORCE_INLINE const btCollisionObject* getBody0() const { return m_body0;}
|
||||
B3_FORCE_INLINE const btCollisionObject* getBody1() const { return m_body1;}
|
||||
|
||||
void setBodies(const btCollisionObject* body0,const btCollisionObject* body1)
|
||||
{
|
||||
m_body0 = body0;
|
||||
m_body1 = body1;
|
||||
}
|
||||
|
||||
void clearUserCache(btManifoldPoint& pt);
|
||||
|
||||
#ifdef DEBUG_PERSISTENCY
|
||||
void DebugPersistency();
|
||||
#endif //
|
||||
|
||||
B3_FORCE_INLINE int getNumContacts() const { return m_cachedPoints;}
|
||||
/// the setNumContacts API is usually not used, except when you gather/fill all contacts manually
|
||||
void setNumContacts(int cachedPoints)
|
||||
{
|
||||
m_cachedPoints = cachedPoints;
|
||||
}
|
||||
|
||||
|
||||
B3_FORCE_INLINE const btManifoldPoint& getContactPoint(int index) const
|
||||
{
|
||||
btAssert(index < m_cachedPoints);
|
||||
return m_pointCache[index];
|
||||
}
|
||||
|
||||
B3_FORCE_INLINE btManifoldPoint& getContactPoint(int index)
|
||||
{
|
||||
btAssert(index < m_cachedPoints);
|
||||
return m_pointCache[index];
|
||||
}
|
||||
|
||||
|
||||
void setContactBreakingThreshold(b3Scalar contactBreakingThreshold)
|
||||
{
|
||||
m_contactBreakingThreshold = contactBreakingThreshold;
|
||||
}
|
||||
|
||||
void setContactProcessingThreshold(b3Scalar contactProcessingThreshold)
|
||||
{
|
||||
m_contactProcessingThreshold = contactProcessingThreshold;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
int getCacheEntry(const btManifoldPoint& newPoint) const;
|
||||
|
||||
int addManifoldPoint( const btManifoldPoint& newPoint, bool isPredictive=false);
|
||||
|
||||
void removeContactPoint (int index)
|
||||
{
|
||||
clearUserCache(m_pointCache[index]);
|
||||
|
||||
int lastUsedIndex = getNumContacts() - 1;
|
||||
// m_pointCache[index] = m_pointCache[lastUsedIndex];
|
||||
if(index != lastUsedIndex)
|
||||
{
|
||||
m_pointCache[index] = m_pointCache[lastUsedIndex];
|
||||
//get rid of duplicated userPersistentData pointer
|
||||
m_pointCache[lastUsedIndex].m_userPersistentData = 0;
|
||||
m_pointCache[lastUsedIndex].m_appliedImpulse = 0.f;
|
||||
m_pointCache[lastUsedIndex].m_lateralFrictionInitialized = false;
|
||||
m_pointCache[lastUsedIndex].m_appliedImpulseLateral1 = 0.f;
|
||||
m_pointCache[lastUsedIndex].m_appliedImpulseLateral2 = 0.f;
|
||||
m_pointCache[lastUsedIndex].m_lifeTime = 0;
|
||||
}
|
||||
|
||||
btAssert(m_pointCache[lastUsedIndex].m_userPersistentData==0);
|
||||
m_cachedPoints--;
|
||||
}
|
||||
void replaceContactPoint(const btManifoldPoint& newPoint,int insertIndex)
|
||||
{
|
||||
btAssert(validContactDistance(newPoint));
|
||||
|
||||
#define MAINTAIN_PERSISTENCY 1
|
||||
#ifdef MAINTAIN_PERSISTENCY
|
||||
int lifeTime = m_pointCache[insertIndex].getLifeTime();
|
||||
b3Scalar appliedImpulse = m_pointCache[insertIndex].m_appliedImpulse;
|
||||
b3Scalar appliedLateralImpulse1 = m_pointCache[insertIndex].m_appliedImpulseLateral1;
|
||||
b3Scalar appliedLateralImpulse2 = m_pointCache[insertIndex].m_appliedImpulseLateral2;
|
||||
// bool isLateralFrictionInitialized = m_pointCache[insertIndex].m_lateralFrictionInitialized;
|
||||
|
||||
|
||||
|
||||
btAssert(lifeTime>=0);
|
||||
void* cache = m_pointCache[insertIndex].m_userPersistentData;
|
||||
|
||||
m_pointCache[insertIndex] = newPoint;
|
||||
|
||||
m_pointCache[insertIndex].m_userPersistentData = cache;
|
||||
m_pointCache[insertIndex].m_appliedImpulse = appliedImpulse;
|
||||
m_pointCache[insertIndex].m_appliedImpulseLateral1 = appliedLateralImpulse1;
|
||||
m_pointCache[insertIndex].m_appliedImpulseLateral2 = appliedLateralImpulse2;
|
||||
|
||||
m_pointCache[insertIndex].m_appliedImpulse = appliedImpulse;
|
||||
m_pointCache[insertIndex].m_appliedImpulseLateral1 = appliedLateralImpulse1;
|
||||
m_pointCache[insertIndex].m_appliedImpulseLateral2 = appliedLateralImpulse2;
|
||||
|
||||
|
||||
m_pointCache[insertIndex].m_lifeTime = lifeTime;
|
||||
#else
|
||||
clearUserCache(m_pointCache[insertIndex]);
|
||||
m_pointCache[insertIndex] = newPoint;
|
||||
|
||||
#endif
|
||||
}
|
||||
|
||||
|
||||
bool validContactDistance(const btManifoldPoint& pt) const
|
||||
{
|
||||
return pt.m_distance1 <= getContactBreakingThreshold();
|
||||
}
|
||||
/// calculates new worldspace coordinates and depth, and rejects points that exceed the collision margin
|
||||
void refreshContactPoints( const btTransform& trA,const btTransform& trB);
|
||||
|
||||
|
||||
B3_FORCE_INLINE void clearManifold()
|
||||
{
|
||||
int i;
|
||||
for (i=0;i<m_cachedPoints;i++)
|
||||
{
|
||||
clearUserCache(m_pointCache[i]);
|
||||
}
|
||||
m_cachedPoints = 0;
|
||||
}
|
||||
|
||||
|
||||
|
||||
}
|
||||
;
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
#endif //B3_CONTACT_CACHE_H
|
||||
@@ -1571,7 +1571,7 @@ int clipHullHullSingle(
|
||||
#include "b3GjkEpa.h"
|
||||
#include "b3VoronoiSimplexSolver.h"
|
||||
|
||||
int computeContactConvexConvex(
|
||||
int computeContactConvexConvex( b3AlignedObjectArray<b3Int4>& pairs,
|
||||
int pairIndex,
|
||||
int bodyIndexA, int bodyIndexB,
|
||||
int collidableIndexA, int collidableIndexB,
|
||||
@@ -1610,6 +1610,8 @@ int computeContactConvexConvex(
|
||||
int shapeIndexA = collidables[collidableIndexA].m_shapeIndex;
|
||||
int shapeIndexB = collidables[collidableIndexB].m_shapeIndex;
|
||||
|
||||
int sz = sizeof(b3Contact4);
|
||||
|
||||
bool result2 = getClosestPoints(&gjkDetector, transA, transB,
|
||||
convexShapes[shapeIndexA], convexShapes[shapeIndexB],
|
||||
convexVertices,convexVertices,
|
||||
@@ -1617,7 +1619,8 @@ int computeContactConvexConvex(
|
||||
sepAxis2,
|
||||
distance2,
|
||||
resultPointOnB);
|
||||
|
||||
|
||||
|
||||
if (result2)
|
||||
{
|
||||
if (nGlobalContactsOut<maxContactCapacity)
|
||||
@@ -1634,9 +1637,15 @@ int computeContactConvexConvex(
|
||||
|
||||
|
||||
int numPoints = 1;
|
||||
if (pairs[pairIndex].z>=0)
|
||||
{
|
||||
printf("add existing points?\n");
|
||||
|
||||
}
|
||||
for (int p=0;p<numPoints;p++)
|
||||
{
|
||||
resultPointOnB.w = distance2;
|
||||
|
||||
contact.m_worldPos[p] = resultPointOnB;
|
||||
|
||||
contact.m_worldNormal = -sepAxis2;
|
||||
@@ -1900,7 +1909,7 @@ void GpuSatCollision::computeConvexConvexContactsGPUSAT( b3OpenCLArray<b3Int4>*
|
||||
hostCollidables[collidableIndexB].m_shapeType == SHAPE_CONVEX_HULL)
|
||||
{
|
||||
//printf("hostPairs[i].z=%d\n",hostPairs[i].z);
|
||||
int contactIndex = computeContactConvexConvex(i,bodyIndexA,bodyIndexB,collidableIndexA,collidableIndexB,hostBodyBuf,
|
||||
int contactIndex = computeContactConvexConvex(hostPairs,i,bodyIndexA,bodyIndexB,collidableIndexA,collidableIndexB,hostBodyBuf,
|
||||
hostCollidables,hostConvexData,hostVertices,hostUniqueEdges,hostIndices,hostFaces,hostContacts,nContacts,maxContactCapacity,
|
||||
oldHostContacts);
|
||||
|
||||
|
||||
@@ -229,7 +229,7 @@ bool getClosestPoints(b3GjkPairDetector* gjkDetector, const b3Transform& transA,
|
||||
if (l2>B3_EPSILON*B3_EPSILON)
|
||||
{
|
||||
|
||||
b3Vector3 testAxis = gjkDetector->m_cachedSeparatingAxis*(1./b3Sqrt(l2));
|
||||
b3Vector3 testAxis = gjkDetector->m_cachedSeparatingAxis*(1.f/b3Sqrt(l2));
|
||||
float computedDepth=1e30f;
|
||||
if (!TestSepAxis(hullA,hullB,transA.getOrigin(),transA.getRotation(),
|
||||
transB.getOrigin(),transB.getRotation(),testAxis,verticesA,verticesB,computedDepth))
|
||||
@@ -362,7 +362,7 @@ bool getClosestPoints(b3GjkPairDetector* gjkDetector, const b3Transform& transA,
|
||||
if (l2>B3_EPSILON*B3_EPSILON)
|
||||
{
|
||||
|
||||
b3Vector3 testAxis = gjkDetector->m_cachedSeparatingAxis*(1./b3Sqrt(l2));
|
||||
b3Vector3 testAxis = gjkDetector->m_cachedSeparatingAxis*(1.f/b3Sqrt(l2));
|
||||
float computedDepth=1e30f;
|
||||
if (!TestSepAxis(hullA,hullB,transA.getOrigin(),transA.getRotation(),
|
||||
transB.getOrigin(),transB.getRotation(),testAxis,verticesA,verticesB,computedDepth))
|
||||
@@ -576,7 +576,7 @@ bool getClosestPoints(b3GjkPairDetector* gjkDetector, const b3Transform& transA,
|
||||
if (l2>B3_EPSILON*B3_EPSILON)
|
||||
{
|
||||
|
||||
b3Vector3 testAxis = gjkDetector->m_cachedSeparatingAxis*(1./b3Sqrt(l2));
|
||||
b3Vector3 testAxis = gjkDetector->m_cachedSeparatingAxis*(1.f/b3Sqrt(l2));
|
||||
float computedDepth=1e30f;
|
||||
if (!TestSepAxis(hullA,hullB,transA.getOrigin(),transA.getRotation(),
|
||||
transB.getOrigin(),transB.getRotation(),testAxis,verticesA,verticesB,computedDepth))
|
||||
|
||||
@@ -9,7 +9,7 @@
|
||||
#include "Bullet3Common/b3Vector3.h"
|
||||
#include "Bullet3Common/b3AlignedObjectArray.h"
|
||||
|
||||
struct b3Transform;
|
||||
class b3Transform;
|
||||
struct b3GjkEpaSolver2;
|
||||
class b3VoronoiSimplexSolver;
|
||||
struct b3ConvexPolyhedronCL;
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
//this file is autogenerated using stringify.bat (premake --stringify) in the build folder of this project
|
||||
static const char* satClipKernelsCL= \
|
||||
"\n"
|
||||
"#define TRIANGLE_NUM_CONVEX_FACES 5\n"
|
||||
"\n"
|
||||
"#define SHAPE_CONVEX_HULL 3\n"
|
||||
@@ -47,15 +46,15 @@ static const char* satClipKernelsCL= \
|
||||
"{\n"
|
||||
" float4 m_worldPos[4];\n"
|
||||
" float4 m_worldNormal; // w: m_nPoints\n"
|
||||
"\n"
|
||||
" u32 m_coeffs;\n"
|
||||
" u32 m_batchIdx;\n"
|
||||
"\n"
|
||||
" int m_bodyAPtrAndSignBit;//x:m_bodyAPtr, y:m_bodyBPtr\n"
|
||||
" int m_bodyBPtrAndSignBit;\n"
|
||||
"\n"
|
||||
" int m_childIndexA;\n"
|
||||
" int m_childIndexB;\n"
|
||||
" int m_unused1;\n"
|
||||
" float m_unused1;\n"
|
||||
" int m_unused2;\n"
|
||||
"\n"
|
||||
"} Contact4;\n"
|
||||
@@ -678,10 +677,8 @@ static const char* satClipKernelsCL= \
|
||||
"\n"
|
||||
"#define PARALLEL_SUM(v, n) for(int j=1; j<n; j++) v[0] += v[j];\n"
|
||||
"#define PARALLEL_DO(execution, n) for(int ie=0; ie<n; ie++){execution;}\n"
|
||||
"#define REDUCE_MAX(v, n) {int i=0;"
|
||||
"for(int offset=0; offset<n; offset++) v[i] = (v[i].y > v[i+offset].y)? v[i]: v[i+offset]; }\n"
|
||||
"#define REDUCE_MIN(v, n) {int i=0;"
|
||||
"for(int offset=0; offset<n; offset++) v[i] = (v[i].y < v[i+offset].y)? v[i]: v[i+offset]; }\n"
|
||||
"#define REDUCE_MAX(v, n) {int i=0; for(int offset=0; offset<n; offset++) v[i] = (v[i].y > v[i+offset].y)? v[i]: v[i+offset]; }\n"
|
||||
"#define REDUCE_MIN(v, n) {int i=0; for(int offset=0; offset<n; offset++) v[i] = (v[i].y < v[i+offset].y)? v[i]: v[i+offset]; }\n"
|
||||
"\n"
|
||||
"int extractManifoldSequentialGlobal(__global const float4* p, int nPoints, float4 nearNormal, int4* contactIdx)\n"
|
||||
"{\n"
|
||||
@@ -1947,4 +1944,5 @@ static const char* satClipKernelsCL= \
|
||||
" \n"
|
||||
"}\n"
|
||||
"\n"
|
||||
"\n"
|
||||
;
|
||||
|
||||
@@ -1316,5 +1316,4 @@ static const char* satKernelsCL= \
|
||||
" concavePairs[pairIdx].w = -1;\n"
|
||||
" }\n"
|
||||
"}\n"
|
||||
"\n"
|
||||
;
|
||||
|
||||
@@ -106,5 +106,4 @@ static const char* boundSearchKernelsCL= \
|
||||
" }\n"
|
||||
"}\n"
|
||||
"\n"
|
||||
"\n"
|
||||
;
|
||||
|
||||
@@ -107,5 +107,4 @@ static const char* fillKernelsCL= \
|
||||
" }\n"
|
||||
"}\n"
|
||||
"\n"
|
||||
"\n"
|
||||
;
|
||||
|
||||
@@ -154,5 +154,4 @@ static const char* prefixScanKernelsCL= \
|
||||
" dst[cb.m_numBlocks] = sum;\n"
|
||||
" }\n"
|
||||
"}\n"
|
||||
"\n"
|
||||
;
|
||||
|
||||
@@ -154,5 +154,4 @@ static const char* prefixScanKernelsFloat4CL= \
|
||||
" dst[cb.m_numBlocks] = sum;\n"
|
||||
" }\n"
|
||||
"}\n"
|
||||
"\n"
|
||||
;
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
//this file is autogenerated using stringify.bat (premake --stringify) in the build folder of this project
|
||||
static const char* rayCastKernelCL= \
|
||||
"\n"
|
||||
"#define SHAPE_CONVEX_HULL 3\n"
|
||||
"#define SHAPE_PLANE 4\n"
|
||||
"#define SHAPE_CONCAVE_TRIMESH 5\n"
|
||||
@@ -339,5 +338,4 @@ static const char* rayCastKernelCL= \
|
||||
" }\n"
|
||||
"\n"
|
||||
"}\n"
|
||||
"\n"
|
||||
;
|
||||
|
||||
@@ -570,4 +570,3 @@ void b3GpuRigidBodyPipeline::castRays(const b3AlignedObjectArray<b3RayInfo>& ray
|
||||
m_data->m_narrowphase->getNumCollidablesGpu(), m_data->m_narrowphase->getCollidablesCpu(), m_data->m_narrowphase->getInternalData()
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@@ -352,5 +352,4 @@ static const char* batchingKernelsCL= \
|
||||
"\n"
|
||||
"\n"
|
||||
"\n"
|
||||
"\n"
|
||||
;
|
||||
|
||||
@@ -242,5 +242,4 @@ static const char* batchingKernelsNewCL= \
|
||||
" \n"
|
||||
" //return batchIdx;\n"
|
||||
"}\n"
|
||||
"\n"
|
||||
;
|
||||
|
||||
@@ -106,5 +106,4 @@ static const char* integrateKernelCL= \
|
||||
" \n"
|
||||
" }\n"
|
||||
"}\n"
|
||||
"\n"
|
||||
;
|
||||
|
||||
@@ -877,5 +877,4 @@ static const char* solveConstraintRowsCL= \
|
||||
" }\n"
|
||||
" }\n"
|
||||
"}\n"
|
||||
"\n"
|
||||
;
|
||||
|
||||
@@ -493,5 +493,4 @@ static const char* solveContactCL= \
|
||||
" \n"
|
||||
" \n"
|
||||
"}\n"
|
||||
"\n"
|
||||
;
|
||||
|
||||
@@ -515,5 +515,4 @@ static const char* solveFrictionCL= \
|
||||
" \n"
|
||||
" \n"
|
||||
"}\n"
|
||||
"\n"
|
||||
;
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
//this file is autogenerated using stringify.bat (premake --stringify) in the build folder of this project
|
||||
static const char* solverSetupCL= \
|
||||
"\n"
|
||||
"/*\n"
|
||||
"Copyright (c) 2012 Advanced Micro Devices, Inc. \n"
|
||||
"\n"
|
||||
@@ -666,5 +665,4 @@ static const char* solverSetupCL= \
|
||||
"\n"
|
||||
"\n"
|
||||
"\n"
|
||||
"\n"
|
||||
;
|
||||
|
||||
@@ -626,5 +626,4 @@ static const char* solverSetup2CL= \
|
||||
"\n"
|
||||
"\n"
|
||||
"\n"
|
||||
"\n"
|
||||
;
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
//this file is autogenerated using stringify.bat (premake --stringify) in the build folder of this project
|
||||
static const char* updateAabbsKernelCL= \
|
||||
"\n"
|
||||
"#define SHAPE_CONVEX_HULL 3\n"
|
||||
"\n"
|
||||
"typedef float4 Quaternion;\n"
|
||||
@@ -195,5 +194,4 @@ static const char* updateAabbsKernelCL= \
|
||||
" }\n"
|
||||
" } \n"
|
||||
"}\n"
|
||||
"\n"
|
||||
;
|
||||
|
||||