First you need to convert the 3D Beziers to 2D. If I remember correctly, it is enough to project the curves the same way you project 3D points for rendering.
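In case it helps, here is what "project the control points like you project render points" could look like. This is only a sketch under my own assumptions (a plain perspective divide; the struct and function names are made up). Note that projecting the control points is exact for affine projections such as orthographic, while under a perspective projection it is only an approximation of the true projected curve, which is usually good enough for a bounding box.

typedef struct { float x, y, z; } Vec3;
typedef struct { float x, y;    } Vec2;

/* Project one 3D point onto the drawing plane with a simple perspective
   divide (focalLength is a made-up camera parameter, p.z is assumed > 0).
   Apply this to all four control points and treat the results as the
   control points of a 2D Bezier curve. */
Vec2 ProjectPoint (Vec3 p, float focalLength)
{
    Vec2 q;
    q.x = p.x * focalLength / p.z;
    q.y = p.y * focalLength / p.z;
    return q;
}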
Subsequently, you must find the extrema of the curves.
Little HowTo:
Transform the Bezier curve from its Bezier (control-point) representation into a polynomial of the form
x(t) = a*t^3 + b*t^2 + c*t + d
y(t) = e*t^3 + f*t^2 + g*t + h
Here t is your interpolation variable that goes from 0 to 1.
a to d are the coefficients for the curve along the x-axis
e to h are the coefficients for the curve along the y-axis.
(How to get these coefficients from the usual control-point form is shown further down, in the EDIT.)
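For the steps below you will need to evaluate these polynomials and their first derivatives. A minimal sketch (the function names are mine, not taken from any library):

/* x(t) = a*t^3 + b*t^2 + c*t + d, evaluated with Horner's scheme. */
float EvalCubic (float a, float b, float c, float d, float t)
{
    return ((a*t + b)*t + c)*t + d;
}

/* First derivative: x'(t) = 3a*t^2 + 2b*t + c. */
float EvalCubicDerivative (float a, float b, float c, float t)
{
    return (3.0f*a*t + 2.0f*b)*t + c;
}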
Compute the first derivative of each of the two polynomials. Find the roots of the derivatives that lie in the interval 0..1. Those t values are where the curve has its extrema.
Evaluate the curves at those roots and also at the endpoints t = 0 and t = 1. The minimum and maximum of these values give you the bounding box.
Note that the roots of x'(t) give you the extrema in the x direction (left and right edges), and the roots of y'(t) give you the extrema in the y direction (top and bottom edges).
The derivative of a cubic is a quadratic, so the ordinary quadratic formula is all you need. Keep in mind that a quadratic can have zero, one or two real roots, and you have to handle all three cases.
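As a sketch of the last step, assume you have already collected the derivative roots that lie in 0..1, plus the endpoints t = 0 and t = 1, into an array of candidate t values (the name and signature below are mine):

#include <float.h>   /* FLT_MAX */

/* Returns the smallest and largest coordinate of the curve along one axis,
   given its polynomial coefficients and the candidate t values. */
void MinMaxOnAxis (float a, float b, float c, float d,
                   const float *candidates, int count,
                   float *outMin, float *outMax)
{
    int i;
    *outMin =  FLT_MAX;
    *outMax = -FLT_MAX;
    for (i = 0; i < count; i++)
    {
        float t = candidates[i];
        float v = ((a*t + b)*t + c)*t + d;   /* Horner evaluation of the cubic */
        if (v < *outMin) *outMin = v;
        if (v > *outMax) *outMax = v;
    }
}

Run it once with the x coefficients and once with the y coefficients and you have all four sides of the bounding box.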
EDIT:
A cubic Bezier curve is usually given in the control-point form:
x(t) = x0*(1-t)³ + 3*x1*(1-t)²*t + 3*x2*(1-t)*t² + x3*t³
(x0 to x3 are the x coordinates of the four control points).
To get the polynomial coefficients a to d you have to expand this expression by powers of t. That is a bit of paperwork, but nothing hard :-) Or do you have something like Matlab at hand that could do the expansion for you?
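For reference, my own expansion works out to the following (shown for x, with x[0]..x[3] being the four control point x coordinates, i.e. the x0..x3 above; y works the same way). Note that the A, B, C values given next are the coefficients of the derivative of this polynomial, so C = 3*a, B = 2*b and A = c:

a = -x[0] + 3.0f*x[1] - 3.0f*x[2] + x[3];
b =  3.0f*x[0] - 6.0f*x[1] + 3.0f*x[2];
c =  3.0f*(x[1] - x[0]);
d =  x[0];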
Here is how you find the extrema in practice (shown for x; y works exactly the same way):
A = 3.0f*(x[1] - x[0]);
B = 6.0f*(x[2] - 2.0f*x[1] + x[0]);
C = 3.0f*(x[3] - 3.0f*x[2] + 3.0f*x[1] - x[0]);
If you feed these values into the function below as A, B and C, you get the t values at which the first derivative
x'(t) = C*t^2 + B*t + A
becomes zero; those are the extrema. Only the roots that fall between 0 and 1 are of interest. Here is a plain C function that computes them:
#include <float.h>   /* FLT_EPSILON */
#include <math.h>    /* sqrt        */

/* Solves C*t^2 + B*t + A = 0.
   Returns the number of real roots (0, 1 or 2) and writes them to roots. */
int GetQuadraticRoots (float A, float B, float C, float *roots)
{
    /* C is not (nearly) zero: a genuine quadratic. */
    if ((C < -FLT_EPSILON) || (C > FLT_EPSILON))
    {
        float d, p;
        p = B*B - 4.0f*C*A;      /* discriminant */
        d = 0.5f / C;            /* 1 / (2C)     */
        if (p >= 0)
        {
            p = (float) sqrt(p);
            if ((p < -FLT_EPSILON) || (p > FLT_EPSILON))
            {
                /* Two distinct real roots. */
                roots[0] = (-B + p)*d;
                roots[1] = (-B - p)*d;
                return 2;
            }
            /* Discriminant is (nearly) zero: one double root. */
            roots[0] = -B*d;
            return 1;
        }
        else
        {
            /* Negative discriminant: no real roots. */
            return 0;
        }
    }

    /* C is (nearly) zero: the equation degenerates to B*t + A = 0. */
    if ((B < -FLT_EPSILON) || (B > FLT_EPSILON))
    {
        roots[0] = -A/B;
        return 1;
    }

    /* Both B and C are (nearly) zero: constant, no roots. */
    return 0;
}
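To tie the pieces together, here is one way the function above could be used for the x axis (a usage sketch of my own, not part of the original code; GetXExtent and the local names are made up, and GetQuadraticRoots is assumed to be declared before it). The same routine run on the y coordinates gives the other two sides of the bounding box.

#include <float.h>   /* FLT_MAX */

/* x[] holds the four control point x coordinates. */
void GetXExtent (const float x[4], float *xmin, float *xmax)
{
    /* Coefficients of the derivative x'(t) = C*t^2 + B*t + A. */
    float A = 3.0f*(x[1] - x[0]);
    float B = 6.0f*(x[2] - 2.0f*x[1] + x[0]);
    float C = 3.0f*(x[3] - 3.0f*x[2] + 3.0f*x[1] - x[0]);

    float roots[2];
    float candidates[4];
    int   count = 0, i, n;

    /* Keep only the derivative roots that lie inside the curve segment. */
    n = GetQuadraticRoots(A, B, C, roots);
    for (i = 0; i < n; i++)
        if (roots[i] > 0.0f && roots[i] < 1.0f)
            candidates[count++] = roots[i];

    /* Always test the endpoints as well. */
    candidates[count++] = 0.0f;
    candidates[count++] = 1.0f;

    *xmin =  FLT_MAX;
    *xmax = -FLT_MAX;
    for (i = 0; i < count; i++)
    {
        float t = candidates[i];
        float s = 1.0f - t;
        /* Bernstein form: x(t) = x0*(1-t)^3 + 3*x1*(1-t)^2*t + 3*x2*(1-t)*t^2 + x3*t^3 */
        float v = x[0]*s*s*s + 3.0f*x[1]*s*s*t + 3.0f*x[2]*s*t*t + x[3]*t*t*t;
        if (v < *xmin) *xmin = v;
        if (v > *xmax) *xmax = v;
    }
}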