Mirror of https://github.com/bulletphysics/bullet3 (synced 2024-12-13 13:20:07 +00:00)
fix issues related to camera width/height
add width and height as arguments to pybullet.renderImage(width, height, [viewMat4x4], [projMat4x4])
This commit is contained in:
parent bd668d11b0
commit d2e50d045b
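
At the C-API level the change slots into the existing camera-image request flow. A minimal usage sketch, assuming an already-connected b3PhysicsClientHandle sm and two 16-float view/projection matrices; the include path and the helper name are illustrative, not part of this commit:

#include "SharedMemory/PhysicsClientC_API.h"   // illustrative include path

void requestCameraImageSketch(b3PhysicsClientHandle sm, float viewMatrix[16], float projectionMatrix[16])
{
    // Build the request and attach the camera matrices (pre-existing API).
    b3SharedMemoryCommandHandle command = b3InitRequestCameraImage(sm);
    b3RequestCameraImageSetCameraMatrices(command, viewMatrix, projectionMatrix);

    // New in this commit: pick the pixel resolution of the software-rendered image.
    b3RequestCameraImageSetPixelResolution(command, 320, 240);

    if (b3CanSubmitCommand(sm))
    {
        b3SharedMemoryStatusHandle statusHandle = b3SubmitClientCommandAndWaitStatus(sm, command);
        if (b3GetStatusType(statusHandle) == CMD_CAMERA_IMAGE_COMPLETED)
        {
            struct b3CameraImageData imageData;
            b3GetCameraImageData(sm, &imageData);
            // imageData.m_pixelWidth/m_pixelHeight reflect the requested size;
            // m_rgbColorData holds the 8-bit RGB pixels, m_depthValues the float depth buffer.
        }
    }
}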
@@ -183,7 +183,7 @@ TinyRendererSetup::TinyRendererSetup(struct GUIHelperInterface* gui)
 m_guiHelper->getRenderInterface()->writeTransforms();
 
 m_internalData->m_shapePtr.push_back(0);
-TinyRenderObjectData* ob = new TinyRenderObjectData(m_internalData->m_width,m_internalData->m_height,
+TinyRenderObjectData* ob = new TinyRenderObjectData(
 m_internalData->m_rgbColorBuffer,
 m_internalData->m_depthBuffer);
 //ob->loadModel("cube.obj");
@@ -363,7 +363,7 @@ void TinyRendererSetup::stepSimulation(float deltaTime)
 
 }
 }
-TinyRenderer::renderObject(*m_internalData->m_renderObjects[o]);
+TinyRenderer::renderObject(*m_internalData->m_renderObjects[o], m_internalData->m_width,m_internalData->m_height);
 }
 //m_app->drawText("hello",500,500);
 render->activateTexture(m_internalData->m_textureHandle);
@@ -735,6 +735,16 @@ void b3RequestCameraImageSetCameraMatrices(b3SharedMemoryCommandHandle commandHa
 command->m_updateFlags |= REQUEST_PIXEL_ARGS_HAS_CAMERA_MATRICES;
 }
 
+void b3RequestCameraImageSetPixelResolution(b3SharedMemoryCommandHandle commandHandle, int width, int height )
+{
+struct SharedMemoryCommand* command = (struct SharedMemoryCommand*) commandHandle;
+b3Assert(command);
+b3Assert(command->m_type == CMD_REQUEST_CAMERA_IMAGE_DATA);
+command->m_requestPixelDataArguments.m_pixelWidth = width;
+command->m_requestPixelDataArguments.m_pixelHeight = height;
+command->m_updateFlags |= REQUEST_PIXEL_ARGS_SET_PIXEL_WIDTH_HEIGHT;
+}
+
 void b3GetCameraImageData(b3PhysicsClientHandle physClient, struct b3CameraImageData* imageData)
 {
 PhysicsClient* cl = (PhysicsClient* ) physClient;
@@ -68,6 +68,7 @@ void b3GetDebugLines(b3PhysicsClientHandle physClient, struct b3DebugLines* l
 ///request an image from a simulated camera, using a software renderer.
 b3SharedMemoryCommandHandle b3InitRequestCameraImage(b3PhysicsClientHandle physClient);
 void b3RequestCameraImageSetCameraMatrices(b3SharedMemoryCommandHandle command, float viewMatrix[16], float projectionMatrix[16]);
+void b3RequestCameraImageSetPixelResolution(b3SharedMemoryCommandHandle command, int width, int height );
 void b3GetCameraImageData(b3PhysicsClientHandle physClient, struct b3CameraImageData* imageData);
 
 
@@ -21,8 +21,8 @@ struct MyMotorInfo2
 int m_qIndex;
 };
 
-int camVisualizerWidth = 640;//1024/3;
-int camVisualizerHeight = 480;//768/3;
+int camVisualizerWidth = 320;//1024/3;
+int camVisualizerHeight = 240;//768/3;
 
 
 #define MAX_NUM_MOTORS 128
@@ -77,10 +77,10 @@ protected:
 
 virtual void resetCamera()
 {
-float dist = 5;
+float dist = 1.1;
 float pitch = 50;
 float yaw = 35;
-float targetPos[3]={0,0,0};//-3,2.8,-2.5};
+float targetPos[3]={0,0,0.5};//-3,2.8,-2.5};
 m_guiHelper->resetCamera(dist,pitch,yaw,targetPos[0],targetPos[1],targetPos[2]);
 
 }
@@ -248,7 +248,8 @@ void PhysicsClientExample::prepareAndSubmitCommand(int commandId)
 this->m_guiHelper->getRenderInterface()->getActiveCamera()->getCameraProjectionMatrix(projectionMatrix);
 this->m_guiHelper->getRenderInterface()->getActiveCamera()->getCameraViewMatrix(viewMatrix);
 b3RequestCameraImageSetCameraMatrices(commandHandle, viewMatrix,projectionMatrix);
-b3SubmitClientCommand(m_physicsClientHandle, commandHandle);
+b3RequestCameraImageSetPixelResolution(commandHandle, camVisualizerWidth,camVisualizerHeight);
+b3SubmitClientCommand(m_physicsClientHandle, commandHandle);
 break;
 }
 case CMD_CREATE_BOX_COLLISION_SHAPE:
@@ -619,18 +620,18 @@ void PhysicsClientExample::stepSimulation(float deltaTime)
 b3GetCameraImageData(m_physicsClientHandle,&imageData);
 if (m_canvas && m_canvasIndex >=0)
 {
-for (int i=0;i<imageData.m_pixelWidth;i++)
+for (int i=0;i<camVisualizerWidth;i++)
 {
-for (int j=0;j<imageData.m_pixelHeight;j++)
+for (int j=0;j<camVisualizerHeight;j++)
 {
-int xIndex = int(float(i)*(float(camVisualizerWidth)/float(imageData.m_pixelWidth)));
-int yIndex = int(float(j)*(float(camVisualizerHeight)/float(imageData.m_pixelHeight)));
+int xIndex = int(float(i)*(float(imageData.m_pixelWidth)/float(camVisualizerWidth)));
+int yIndex = int(float(j)*(float(imageData.m_pixelHeight)/float(camVisualizerHeight)));
 btClamp(yIndex,0,imageData.m_pixelHeight);
 btClamp(xIndex,0,imageData.m_pixelWidth);
 int bytesPerPixel = 4;
 
-int pixelIndex = (i+j*imageData.m_pixelWidth)*bytesPerPixel;
-m_canvas->setPixel(m_canvasIndex,xIndex,camVisualizerHeight-1-yIndex,
+int pixelIndex = (xIndex+yIndex*imageData.m_pixelWidth)*bytesPerPixel;
+m_canvas->setPixel(m_canvasIndex,i,camVisualizerHeight-1-j,
 
 imageData.m_rgbColorData[pixelIndex],
 imageData.m_rgbColorData[pixelIndex+1],
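
The canvas update now iterates over canvas pixels and maps each one back into the rendered image, instead of iterating over image pixels, so the canvas is fully covered whatever resolution was requested. A standalone sketch of that nearest-neighbour mapping; the function and buffer names are hypothetical, and a 3-byte RGB layout is assumed here (the example above walks a 4-byte stride):

#include <algorithm>

// Nearest-neighbour copy of a w x h RGB image into a canvasW x canvasH canvas.
void blitNearest(const unsigned char* src, int w, int h,
                 unsigned char* dst, int canvasW, int canvasH)
{
    for (int i = 0; i < canvasW; i++)
    {
        for (int j = 0; j < canvasH; j++)
        {
            // Map destination pixel (i,j) to the closest source pixel, as in the loop above.
            int xIndex = int(float(i) * (float(w) / float(canvasW)));
            int yIndex = int(float(j) * (float(h) / float(canvasH)));
            xIndex = std::min(std::max(xIndex, 0), w - 1);
            yIndex = std::min(std::max(yIndex, 0), h - 1);
            for (int p = 0; p < 3; p++)
                dst[3 * (i + j * canvasW) + p] = src[3 * (xIndex + yIndex * w) + p];
        }
    }
}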
@@ -450,6 +450,13 @@ const SharedMemoryStatus* PhysicsClientSharedMemory::processServerStatus() {
 (unsigned char*)&m_data->m_testBlock1->m_bulletStreamDataServerToClientRefactor[0];
 // printf("pixel = %d\n", rgbaPixelsReceived[0]);
 
+float* depthBuffer = (float*)&(m_data->m_testBlock1->m_bulletStreamDataServerToClientRefactor[serverCmd.m_sendPixelDataArguments.m_numPixelsCopied*4]);
+
+for (int i=0;i<serverCmd.m_sendPixelDataArguments.m_numPixelsCopied;i++)
+{
+m_data->m_cachedCameraDepthBuffer[i + serverCmd.m_sendPixelDataArguments.m_startingPixelIndex] = depthBuffer[i];
+}
+
 for (int i=0;i<serverCmd.m_sendPixelDataArguments.m_numPixelsCopied*numBytesPerPixel;i++)
 {
 m_data->m_cachedCameraPixelsRGBA[i + serverCmd.m_sendPixelDataArguments.m_startingPixelIndex*numBytesPerPixel]
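
This copy relies on the layout of the shared-memory stream: each chunk starts with m_numPixelsCopied RGBA pixels (4 bytes per pixel, hence the *4 in the offset), immediately followed by m_numPixelsCopied 32-bit float depth values. A hedged sketch of that unpacking with hypothetical buffer names:

// Split one received chunk into the cached RGBA and depth planes.
// Assumes the layout used above: numPixelsCopied RGBA pixels, then numPixelsCopied floats.
void unpackPixelChunk(const unsigned char* stream, int numPixelsCopied, int startingPixelIndex,
                      unsigned char* cachedRGBA, float* cachedDepth)
{
    const int numBytesPerPixel = 4;
    const float* depthBuffer = (const float*)(stream + numPixelsCopied * numBytesPerPixel);

    for (int i = 0; i < numPixelsCopied * numBytesPerPixel; i++)
        cachedRGBA[i + startingPixelIndex * numBytesPerPixel] = stream[i];

    for (int i = 0; i < numPixelsCopied; i++)
        cachedDepth[i + startingPixelIndex] = depthBuffer[i];
}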
@@ -210,8 +210,15 @@ bool PhysicsDirect::processCamera(const struct SharedMemoryCommand& orgCommand)
 
 unsigned char* rgbaPixelsReceived =
 (unsigned char*)&m_data->m_bulletStreamDataServerToClient[0];
-printf("pixel = %d\n", rgbaPixelsReceived[0]);
-
+float* depthBuffer = (float*)&(m_data->m_bulletStreamDataServerToClient[serverCmd.m_sendPixelDataArguments.m_numPixelsCopied*4]);
+
+// printf("pixel = %d\n", rgbaPixelsReceived[0]);
+
+for (int i=0;i<serverCmd.m_sendPixelDataArguments.m_numPixelsCopied;i++)
+{
+m_data->m_cachedCameraDepthBuffer[i + serverCmd.m_sendPixelDataArguments.m_startingPixelIndex] = depthBuffer[i];
+}
 for (int i=0;i<serverCmd.m_sendPixelDataArguments.m_numPixelsCopied*numBytesPerPixel;i++)
 {
 m_data->m_cachedCameraPixelsRGBA[i + serverCmd.m_sendPixelDataArguments.m_startingPixelIndex*numBytesPerPixel]
@@ -227,6 +234,7 @@ bool PhysicsDirect::processCamera(const struct SharedMemoryCommand& orgCommand)
 command.m_requestPixelDataArguments.m_startPixelIndex =
 serverCmd.m_sendPixelDataArguments.m_startingPixelIndex +
 serverCmd.m_sendPixelDataArguments.m_numPixelsCopied;
+
 } else
 {
 m_data->m_cachedCameraPixelsWidth = serverCmd.m_sendPixelDataArguments.m_imageWidth;
@@ -972,6 +972,14 @@ bool PhysicsServerCommandProcessor::processCommand(const struct SharedMemoryComm
 int width, height;
 int numPixelsCopied = 0;
 
+if (
+(clientCmd.m_requestPixelDataArguments.m_startPixelIndex==0) &&
+(clientCmd.m_updateFlags & REQUEST_PIXEL_ARGS_SET_PIXEL_WIDTH_HEIGHT)!=0)
+{
+m_data->m_visualConverter.setWidthAndHeight(clientCmd.m_requestPixelDataArguments.m_pixelWidth,
+clientCmd.m_requestPixelDataArguments.m_pixelHeight);
+}
+
 if ((clientCmd.m_updateFlags & REQUEST_PIXEL_ARGS_USE_HARDWARE_OPENGL)!=0)
 {
 m_data->m_guiHelper->copyCameraImageData(0,0,0,0,0,&width,&height,0);
@@ -1005,6 +1013,7 @@ bool PhysicsServerCommandProcessor::processCommand(const struct SharedMemoryComm
 {
 // printf("-------------------------------\nRendering\n");
 
+
 if ((clientCmd.m_updateFlags & REQUEST_PIXEL_ARGS_HAS_CAMERA_MATRICES)!=0)
 {
 m_data->m_visualConverter.render(
@@ -129,12 +129,16 @@ struct RequestPixelDataArgs
 float m_viewMatrix[16];
 float m_projectionMatrix[16];
 int m_startPixelIndex;
+int m_pixelWidth;
+int m_pixelHeight;
 };
 
 enum EnumRequestPixelDataUpdateFlags
 {
 REQUEST_PIXEL_ARGS_HAS_CAMERA_MATRICES=1,
 REQUEST_PIXEL_ARGS_USE_HARDWARE_OPENGL=2,
+REQUEST_PIXEL_ARGS_SET_PIXEL_WIDTH_HEIGHT=4,
+
 };
 
 
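
The new flag follows the existing m_updateFlags convention: the client ORs REQUEST_PIXEL_ARGS_SET_PIXEL_WIDTH_HEIGHT into the command when it fills in m_pixelWidth/m_pixelHeight, and the server only applies the resolution when that bit is set (and, per the command-processor hunk above, only when m_startPixelIndex is 0, i.e. at the start of a request). A self-contained sketch of the pattern; the struct name and fields are stand-ins for the real SharedMemoryCommands.h types:

enum EnumRequestPixelDataUpdateFlags
{
    REQUEST_PIXEL_ARGS_HAS_CAMERA_MATRICES = 1,
    REQUEST_PIXEL_ARGS_USE_HARDWARE_OPENGL = 2,
    REQUEST_PIXEL_ARGS_SET_PIXEL_WIDTH_HEIGHT = 4,
};

struct PixelDataRequestSketch { int m_updateFlags; int m_pixelWidth; int m_pixelHeight; int m_startPixelIndex; };

// Client side, mirroring b3RequestCameraImageSetPixelResolution: store the values and raise the flag.
void setPixelResolution(PixelDataRequestSketch& req, int width, int height)
{
    req.m_pixelWidth = width;
    req.m_pixelHeight = height;
    req.m_updateFlags |= REQUEST_PIXEL_ARGS_SET_PIXEL_WIDTH_HEIGHT;
}

// Server side, mirroring the command processor: only honour a resolution that was explicitly set.
bool wantsCustomResolution(const PixelDataRequestSketch& req)
{
    return (req.m_startPixelIndex == 0) &&
           (req.m_updateFlags & REQUEST_PIXEL_ARGS_SET_PIXEL_WIDTH_HEIGHT) != 0;
}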
@@ -462,7 +462,7 @@ void TinyRendererVisualShapeConverter::convertVisualShapes(int linkIndex, const
 
 if (vertices.size() && indices.size())
 {
-TinyRenderObjectData* tinyObj = new TinyRenderObjectData(m_data->m_swWidth,m_data->m_swHeight,m_data->m_rgbColorBuffer,m_data->m_depthBuffer);
+TinyRenderObjectData* tinyObj = new TinyRenderObjectData(m_data->m_rgbColorBuffer,m_data->m_depthBuffer);
 tinyObj->registerMeshShape(&vertices[0].xyzw[0],vertices.size(),&indices[0],indices.size(),rgbaColor);
 visuals->m_renderObjects.push_back(tinyObj);
 }
@@ -576,7 +576,7 @@ void TinyRendererVisualShapeConverter::render(const float viewMat[16], const flo
 renderObj->m_lightDirWorld = lightDirWorld;
 }
 }
-TinyRenderer::renderObject(*renderObj);
+TinyRenderer::renderObject(*renderObj,m_data->m_swWidth,m_data->m_swHeight);
 }
 }
 //printf("write tga \n");
@@ -590,6 +590,17 @@ void TinyRendererVisualShapeConverter::getWidthAndHeight(int& width, int& height
 height = m_data->m_swHeight;
 }
 
+
+void TinyRendererVisualShapeConverter::setWidthAndHeight(int width, int height)
+{
+m_data->m_swWidth = width;
+m_data->m_swHeight = height;
+
+m_data->m_depthBuffer.resize(m_data->m_swWidth*m_data->m_swHeight);
+m_data->m_rgbColorBuffer = TGAImage(width, height, TGAImage::RGB);
+
+}
+
 void TinyRendererVisualShapeConverter::copyCameraImageData(unsigned char* pixelsRGBA, int rgbaBufferSizeInPixels, float* depthBuffer, int depthBufferSizeInPixels, int startPixelIndex, int* widthPtr, int* heightPtr, int* numPixelsCopied)
 {
 int w = m_data->m_rgbColorBuffer.get_width();
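
setWidthAndHeight reallocates both software buffers: the depth buffer is resized to width*height floats and the TGA colour buffer is recreated at the new size, which is why the server only applies a new resolution at the start of a request. A hedged, standalone sketch of an equivalent helper built on standard containers; the early-out guard is added here for illustration and is not part of this patch:

#include <vector>

struct SoftRendererBuffersSketch   // stand-in for the converter's internal data
{
    int swWidth = 0;
    int swHeight = 0;
    std::vector<float> depthBuffer;          // the real code uses b3AlignedObjectArray<float>
    std::vector<unsigned char> rgbBuffer;    // the real code uses a TGAImage (3 bytes per pixel)
};

void setWidthAndHeightSketch(SoftRendererBuffersSketch& buf, int width, int height)
{
    if (buf.swWidth == width && buf.swHeight == height)
        return;   // optional guard, not in the original patch

    buf.swWidth = width;
    buf.swHeight = height;
    buf.depthBuffer.assign(width * height, 0.f);
    buf.rgbBuffer.assign(width * height * 3, 0);   // RGB, matching TGAImage::RGB above
}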
@@ -612,6 +623,10 @@ void TinyRendererVisualShapeConverter::copyCameraImageData(unsigned char* pixels
 {
 for (int i=0;i<numRequestedPixels;i++)
 {
+if (depthBuffer)
+{
+depthBuffer[i] = m_data->m_depthBuffer[i+startPixelIndex];
+}
 if (pixelsRGBA)
 {
 pixelsRGBA[i*numBytesPerPixel] = m_data->m_rgbColorBuffer.buffer()[(i+startPixelIndex)*3+0];
@@ -24,6 +24,7 @@ struct TinyRendererVisualShapeConverter : public LinkVisualShapesConverter
 void resetAll();
 
 void getWidthAndHeight(int& width, int& height);
+void setWidthAndHeight(int width, int height);
 
 void copyCameraImageData(unsigned char* pixelsRGBA, int rgbaBufferSizeInPixels, float* depthBuffer, int depthBufferSizeInPixels, int startPixelIndex, int* widthPtr, int* heightPtr, int* numPixelsCopied);
 
@@ -88,10 +88,8 @@ struct Shader : public IShader {
 };
 
 
-TinyRenderObjectData::TinyRenderObjectData(int width, int height,TGAImage& rgbColorBuffer,b3AlignedObjectArray<float>&depthBuffer)
-:m_width(width),
-m_height(height),
-m_rgbColorBuffer(rgbColorBuffer),
+TinyRenderObjectData::TinyRenderObjectData(TGAImage& rgbColorBuffer,b3AlignedObjectArray<float>&depthBuffer)
+:m_rgbColorBuffer(rgbColorBuffer),
 m_depthBuffer(depthBuffer),
 m_model(0),
 m_userData(0),
@@ -103,11 +101,6 @@ m_userIndex(-1)
 m_lightDirWorld.setValue(0,0,0);
 m_localScaling.setValue(1,1,1);
 m_modelMatrix = Matrix::identity();
-m_viewMatrix = lookat(eye, center, up);
-//m_viewportMatrix = viewport(width/8, height/8, width*3/4, height*3/4);
-//m_viewportMatrix = viewport(width/8, height/8, width*3/4, height*3/4);
-m_viewportMatrix = viewport(0,0,width,height);
-m_projectionMatrix = projection(-1.f/(eye-center).norm());
 
 }
 
@@ -238,7 +231,7 @@ TinyRenderObjectData::~TinyRenderObjectData()
 delete m_model;
 }
 
-void TinyRenderer::renderObject(TinyRenderObjectData& renderData)
+void TinyRenderer::renderObject(TinyRenderObjectData& renderData, int width, int height)
 {
 Vec3f light_dir_local = Vec3f(renderData.m_lightDirWorld[0],renderData.m_lightDirWorld[1],renderData.m_lightDirWorld[2]);
 Model* model = renderData.m_model;
@@ -247,13 +240,8 @@ void TinyRenderer::renderObject(TinyRenderObjectData& renderData)
 
 
 
 //renderData.m_viewMatrix = lookat(eye, center, up);
-int width = renderData.m_width;
-int height = renderData.m_height;
-//renderData.m_viewportMatrix = viewport(width/8, height/8, width*3/4, height*3/4);
-renderData.m_viewportMatrix = viewport(0,0,renderData.m_width,renderData.m_height);
-//renderData.m_projectionMatrix = projection(-1.f/(eye-center).norm());
-
+renderData.m_viewportMatrix = viewport(0,0,width, height);
 
 b3AlignedObjectArray<float>& zbuffer = renderData.m_depthBuffer;
 
 TGAImage& frame = renderData.m_rgbColorBuffer;
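
With the stored width/height gone, renderObject rebuilds the viewport transform on every call from the caller-supplied size, so the same TinyRenderObjectData can be rendered into whatever resolution the current colour/depth buffers have. Conceptually, viewport(0,0,width,height) maps normalized device coordinates onto pixel coordinates; a hedged sketch of that mapping (the exact matrix and depth handling in this tinyrenderer port may differ):

struct PixelCoordSketch { float x, y; };

// Map NDC in [-1,1] to pixel coordinates in a width x height target.
PixelCoordSketch ndcToPixel(float ndcX, float ndcY, int width, int height)
{
    PixelCoordSketch p;
    p.x = (ndcX + 1.0f) * 0.5f * float(width);
    p.y = (ndcY + 1.0f) * 0.5f * float(height);
    return p;
}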
@@ -25,12 +25,11 @@ struct TinyRenderObjectData
 //class IShader* m_shader; todo(erwincoumans) expose the shader, for now we use a default shader
 
 //Output
-int m_width;
-int m_height;
+
 TGAImage& m_rgbColorBuffer;
 b3AlignedObjectArray<float>& m_depthBuffer;
 
-TinyRenderObjectData(int width, int height,TGAImage& rgbColorBuffer,b3AlignedObjectArray<float>& depthBuffer);
+TinyRenderObjectData(TGAImage& rgbColorBuffer,b3AlignedObjectArray<float>& depthBuffer);
 virtual ~TinyRenderObjectData();
 
 void loadModel(const char* fileName);
@@ -48,7 +47,7 @@ struct TinyRenderObjectData
 class TinyRenderer
 {
 public:
-static void renderObject(TinyRenderObjectData& renderData);
+static void renderObject(TinyRenderObjectData& renderData, int width, int height);
 };
 
 #endif // TINY_RENDERER_Hbla
@@ -362,6 +362,11 @@ pybullet_getNumJoints(PyObject* self, PyObject* args)
 static PyObject*
 pybullet_setJointPositions(PyObject* self, PyObject* args)
 {
+if (0==sm)
+{
+PyErr_SetString(SpamError, "Not connected to physics server.");
+return NULL;
+}
 
 Py_INCREF(Py_None);
 return Py_None;
@@ -372,17 +377,22 @@ pybullet_setJointPositions(PyObject* self, PyObject* args)
 
 static PyObject* pybullet_renderImage(PyObject* self, PyObject* args)
 {
 
+if (0==sm)
+{
+PyErr_SetString(SpamError, "Not connected to physics server.");
+return NULL;
+}
+
 ///request an image from a simulated camera, using a software renderer.
 struct b3CameraImageData imageData;
 PyObject* objViewMat,* objProjMat;
-if (PyArg_ParseTuple(args, "OO", &objViewMat, &objProjMat))
+int width, height;
+
+b3SharedMemoryCommandHandle command = b3InitRequestCameraImage(sm);
+
+if (PyArg_ParseTuple(args, "iiOO", &width, &height, &objViewMat, &objProjMat))
 {
-PyObject* seq;
-int i, len;
-
+PyObject* seq;
+int i, len;
 PyObject* item;
 float viewMatrix[16];
 float projectionMatrix[16];
@@ -391,124 +401,133 @@ static PyObject* pybullet_renderImage(PyObject* self, PyObject* args)
seq = PySequence_Fast(objViewMat, "expected a sequence");
len = PySequence_Size(objViewMat);
//printf("objViewMat size = %d\n", len);
if (len==16)
{

if (PyList_Check(seq))
{
for (i = 0; i < len; i++)
if (len==16)
{

if (PyList_Check(seq))
{
for (i = 0; i < len; i++)
{
item = PyList_GET_ITEM(seq, i);
viewMatrix[i] = PyFloat_AsDouble(item);
float v = viewMatrix[i];
viewMatrix[i] = PyFloat_AsDouble(item);
float v = viewMatrix[i];
//printf("view %d to %f\n", i,v);

}
}
else
{
for (i = 0; i < len; i++)
{
item = PyTuple_GET_ITEM(seq,i);
viewMatrix[i] = PyFloat_AsDouble(item);
}
}
} else
{
valid = 0;
}
}
else
{
for (i = 0; i < len; i++)
{
item = PyTuple_GET_ITEM(seq,i);
viewMatrix[i] = PyFloat_AsDouble(item);
}
}
} else
{
valid = 0;
}
}


{
seq = PySequence_Fast(objProjMat, "expected a sequence");
len = PySequence_Size(objProjMat);
//printf("projMat len = %d\n", len);
if (len==16)
{
if (PyList_Check(seq))
{
for (i = 0; i < len; i++)
{
item = PyList_GET_ITEM(seq, i);
projectionMatrix[i] = PyFloat_AsDouble(item);
}
}
else
{
for (i = 0; i < len; i++)
{
item = PyTuple_GET_ITEM(seq,i);
projectionMatrix[i] = PyFloat_AsDouble(item);
}
}
} else
{
valid = 0;
}
}

seq = PySequence_Fast(objProjMat, "expected a sequence");
len = PySequence_Size(objProjMat);
//printf("projMat len = %d\n", len);
if (len==16)
{
if (PyList_Check(seq))
{
for (i = 0; i < len; i++)
{
item = PyList_GET_ITEM(seq, i);
projectionMatrix[i] = PyFloat_AsDouble(item);
}
}
else
{
for (i = 0; i < len; i++)
{
item = PyTuple_GET_ITEM(seq,i);
projectionMatrix[i] = PyFloat_AsDouble(item);
}
}
} else
{
valid = 0;
}
}

Py_DECREF(seq);
if (valid)
{
b3SharedMemoryCommandHandle command;

command = b3InitRequestCameraImage(sm);

//printf("set b3RequestCameraImageSetCameraMatrices\n");
b3RequestCameraImageSetCameraMatrices(command, viewMatrix, projectionMatrix);
}
}

if (b3CanSubmitCommand(sm))
{
b3SharedMemoryStatusHandle statusHandle;
int statusType;
statusHandle = b3SubmitClientCommandAndWaitStatus(sm, command);
statusType = b3GetStatusType(statusHandle);
if (statusType==CMD_CAMERA_IMAGE_COMPLETED)
{
PyObject *item2;
PyObject* pyResultList;//store 4 elements in this result: width, height, rgbData, depth

b3GetCameraImageData(sm, &imageData);
//todo: error handling if image size is 0
pyResultList = PyTuple_New(4);
PyTuple_SetItem(pyResultList, 0, PyInt_FromLong(imageData.m_pixelWidth));
PyTuple_SetItem(pyResultList, 1, PyInt_FromLong(imageData.m_pixelHeight));

PyObject *pylistPos;
PyObject* pylistDep;

//printf("image width = %d, height = %d\n", imageData.m_pixelWidth, imageData.m_pixelHeight);
{

PyObject *item;
int bytesPerPixel = 3;//Red, Green, Blue, each 8 bit values
int num=bytesPerPixel*imageData.m_pixelWidth*imageData.m_pixelHeight;
pylistPos = PyTuple_New(num);
pylistDep = PyTuple_New(imageData.m_pixelWidth*imageData.m_pixelHeight);

for (int i=0;i<imageData.m_pixelWidth;i++)

b3RequestCameraImageSetPixelResolution(command,width,height);

if (b3CanSubmitCommand(sm))
{
b3SharedMemoryStatusHandle statusHandle;
int statusType;
statusHandle = b3SubmitClientCommandAndWaitStatus(sm, command);
statusType = b3GetStatusType(statusHandle);
if (statusType==CMD_CAMERA_IMAGE_COMPLETED)
{
for (int j=0;j<imageData.m_pixelHeight;j++)
{
int depIndex = i+j*imageData.m_pixelWidth;
item = PyFloat_FromDouble(imageData.m_depthValues[depIndex]);
PyTuple_SetItem(pylistDep, depIndex, item);
for (int p=0;p<bytesPerPixel;p++)
PyObject *item2;
PyObject* pyResultList;//store 4 elements in this result: width, height, rgbData, depth

b3GetCameraImageData(sm, &imageData);
//todo: error handling if image size is 0
pyResultList = PyTuple_New(4);
PyTuple_SetItem(pyResultList, 0, PyInt_FromLong(imageData.m_pixelWidth));
PyTuple_SetItem(pyResultList, 1, PyInt_FromLong(imageData.m_pixelHeight));

PyObject *pylistPos;
PyObject* pylistDep;

//printf("image width = %d, height = %d\n", imageData.m_pixelWidth, imageData.m_pixelHeight);
{
int pixelIndex = bytesPerPixel*(i+j*imageData.m_pixelWidth)+p;
item = PyInt_FromLong(imageData.m_rgbColorData[pixelIndex]);
PyTuple_SetItem(pylistPos, pixelIndex, item);
}

PyObject *item;
int bytesPerPixel = 3;//Red, Green, Blue, each 8 bit values
int num=bytesPerPixel*imageData.m_pixelWidth*imageData.m_pixelHeight;
pylistPos = PyTuple_New(num);
pylistDep = PyTuple_New(imageData.m_pixelWidth*imageData.m_pixelHeight);

for (int i=0;i<imageData.m_pixelWidth;i++)
{
for (int j=0;j<imageData.m_pixelHeight;j++)
{
int depIndex = i+j*imageData.m_pixelWidth;
item = PyFloat_FromDouble(imageData.m_depthValues[depIndex]);
PyTuple_SetItem(pylistDep, depIndex, item);
for (int p=0;p<bytesPerPixel;p++)
{
int pixelIndex = bytesPerPixel*(i+j*imageData.m_pixelWidth)+p;
item = PyInt_FromLong(imageData.m_rgbColorData[pixelIndex]);
PyTuple_SetItem(pylistPos, pixelIndex, item);
}
}
}
}

PyTuple_SetItem(pyResultList, 2,pylistPos);
PyTuple_SetItem(pyResultList, 3,pylistDep);
return pyResultList;
}
}
PyTuple_SetItem(pyResultList, 2,pylistPos);
PyTuple_SetItem(pyResultList, 3,pylistDep);
return pyResultList;
}
}

}

}

Py_INCREF(Py_None);
return Py_None;
return Py_None;
}

static PyMethodDef SpamMethods[] = {
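
After this rewrite, pybullet.renderImage(width, height, viewMat4x4, projMat4x4) returns a 4-tuple (width, height, rgb, depth): rgb is a flat tuple of 3*width*height 8-bit channel values and depth a flat tuple of width*height floats. The index math used by the flattening loops above, written out as small helpers (the helper names are illustrative):

// channel: 0 = red, 1 = green, 2 = blue
int rgbIndexOf(int x, int y, int width, int channel)
{
    const int bytesPerPixel = 3;
    return bytesPerPixel * (x + y * width) + channel;
}

int depthIndexOf(int x, int y, int width)
{
    return x + y * width;
}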
@@ -534,7 +553,7 @@ static PyMethodDef SpamMethods[] = {
 "Initialize the joint positions for all joints. This method skips any physics simulation and teleports all joints to the new positions."},
 
 {"renderImage", pybullet_renderImage, METH_VARARGS,
-"Render an image (given the camera view & projection matrices and resolution), and return the 8-8-8bit RGB pixel data and floating point depth values"},
+"Render an image (given the pixel resolution width & height and camera view & projection matrices), and return the 8-8-8bit RGB pixel data and floating point depth values"},
 
 {"getBasePositionAndOrientation", pybullet_getBasePositionAndOrientation, METH_VARARGS,
 "Get the world position and orientation of the base of the object. (x,y,z) position vector and (x,y,z,w) quaternion orientation."},