gl renderer: Declare color matrix child separately

Otherwise I end up typing out the full gsk_color_matrix_node_get_child()
call all the time when debugging.
This commit is contained in:
Timm Bäder 2019-03-31 07:50:33 +02:00
parent 9e8e3eb0ca
commit 3569348f9c

View File

@@ -1215,13 +1215,14 @@ render_color_matrix_node (GskGLRenderer *self,
   const float min_y = builder->dy + node->bounds.origin.y;
   const float max_x = min_x + node->bounds.size.width;
   const float max_y = min_y + node->bounds.size.height;
+  GskRenderNode *child = gsk_color_matrix_node_get_child (node);
   int texture_id;
   gboolean is_offscreen;

   /* Pass min_x/max_x/min_y/max_y without builder->dx/dy! */
   add_offscreen_ops (self, builder,
                      &node->bounds,
-                     gsk_color_matrix_node_get_child (node),
+                     child,
                      &texture_id, &is_offscreen,
                      RESET_CLIP | RESET_OPACITY);