At the end, the comparison is done with EpsilonEquals, but all curves have to be converted to NURBS curves first.
Remark: the hash-code algorithm was implemented in two steps, but they could be merged into a single hash-code method.
Usage: call the FindDuplicteCurves(curves, tolerance) method
//
/// <summary>
/// Signature shared by the curve-hashing strategies (see CreateCurveHashCodeX1/X2):
/// computes a salted hash of <paramref name="crv"/>; implementations round point
/// data using <paramref name="tolerance"/> so nearly-equal curves hash alike.
/// </summary>
public delegate uint CreateCurveHashCodeDelegateX(NurbsCurve crv, Double tolerance, int salt);
/// <summary>
/// Pairs a computed curve hash value with the index of the curve it was
/// computed for. Used as the intermediate element when building the
/// hash-to-index lookups.
/// </summary>
public struct HashIndexPair
{
    /// <summary>Hash value computed for the curve.</summary>
    public uint Hash { get; set; }

    /// <summary>Index of the curve in the source list.</summary>
    public int Id { get; set; }

    /// <summary>Creates a pair from a hash value and a curve index.</summary>
    public HashIndexPair(uint hashPrm, int idPrm)
    {
        this.Id = idPrm;
        this.Hash = hashPrm;
    }
}
/// <summary>
/// Detects duplicate curves within <paramref name="curves"/>: curves are bucketed by
/// successive salted hash codes (ShrinkLookup), then the survivors of each bucket are
/// compared pairwise with EpsilonEquals.
/// </summary>
/// <param name="curves">Curves to inspect. NOTE: curves may be reversed in place to
/// normalize their direction before hashing.</param>
/// <param name="tolerance">Tolerance used both for point rounding in the hash codes
/// and for the final EpsilonEquals comparison.</param>
/// <returns>
/// One flag per input curve, true when that curve has at least one duplicate.
/// Returns an empty list when any curve fails to convert to a NURBS curve
/// (including null entries, which make the count check below fail).
/// </returns>
public static List<bool> FindDuplicteCurves(List<Curve> curves, double tolerance)
{
    // Normalize curve direction so two geometrically equal curves hash the same
    // regardless of parameterization direction. Reverse() mutates the input curve.
    var curvesToCheck = curves.Where(x => x != null)
        .Select(x =>
        {
            if (x.PointAtStart > x.PointAtEnd) x.Reverse();
            return x;
        }).ToList();
    var nurbsCurves = curvesToCheck.Select(x => x.ToNurbsCurve()).Where(x => x != null).ToList();
    if (curves.Count != nurbsCurves.Count)
    {
        // BUGFIX: the original passed the list object itself to the {1} placeholder
        // (printing its type name); report the count instead.
        RhinoApp.WriteLine("FAILED TO CONVERT TO NURBS CURVES. CurvesCount={0}, NurbesCurveCount={1}", curves.Count, nurbsCurves.Count);
        return new List<bool>();
    }
    if (nurbsCurves.Count < 2)
    {
        // Fewer than two curves cannot contain duplicates. Return one "false" per
        // curve so the result length matches the input on every successful path
        // (the original returned an empty list here, breaking that contract).
        return Enumerable.Repeat(false, nurbsCurves.Count).ToList();
    }
    // Group curves by hash code (via ILookup); first pass uses CreateCurveHashCodeX1...
    CreateCurveHashCodeDelegateX createMyCurveHashCode1 = CreateCurveHashCodeX1;
    var curvesLookupStart = nurbsCurves.Select((x, i) => new HashIndexPair(createMyCurveHashCode1(x, tolerance, 101), i)).ToLookup(y => y.Hash, y => y.Id);
    var curvesLookUpShrinked = ShrinkLookup(curvesLookupStart, nurbsCurves, tolerance, 313, createMyCurveHashCode1);
    // ... the second pass refines the buckets with CreateCurveHashCodeX2.
    CreateCurveHashCodeDelegateX createMyCurveHashCode2 = CreateCurveHashCodeX2;
    var curvesLookUpShrinked2 = ShrinkLookup(curvesLookUpShrinked, nurbsCurves, tolerance, 313, createMyCurveHashCode2);
    // Final check: within each remaining bucket, compare curve pairs with
    // EpsilonEquals and mark both members of each equal pair as duplicates.
    bool[] DuplicateStatus = new bool[curves.Count];
    foreach (var group in curvesLookUpShrinked2)
    {
        var ids = group.ToList();
        for (int i = 0; i < ids.Count - 1; i++)
        {
            for (int j = i + 1; j < ids.Count; j++)
            {
                // Skip the expensive comparison when both are already flagged.
                if (!DuplicateStatus[ids[i]] || !DuplicateStatus[ids[j]])
                {
                    if (nurbsCurves[ids[i]].EpsilonEquals(nurbsCurves[ids[j]], tolerance))
                    {
                        DuplicateStatus[ids[i]] = true;
                        DuplicateStatus[ids[j]] = true;
                    }
                }
            }
        }
    }
    return DuplicateStatus.ToList();
}
/// <summary>
/// Refines an existing hash-to-curve-index lookup by re-hashing each multi-member
/// bucket with the supplied hashing delegate and a new salt. Buckets with a single
/// member are dropped (a lone curve has no duplicates). When re-hashing splits a
/// bucket, the method recurses on the sub-buckets with a fresh salt.
/// </summary>
/// <param name="lookUpToShrink">Current grouping of curve indices by hash.</param>
/// <param name="curves">All candidate NURBS curves, indexed by the lookup values.</param>
/// <param name="tolerance">Tolerance forwarded to the hashing delegate.</param>
/// <param name="salt">Salt for this pass; recursion uses salt * 2 + 1.</param>
/// <param name="createMyCurveHashCodeDelegate">Hashing strategy to apply.</param>
/// <returns>A lookup keyed by the newly computed hashes.</returns>
public static ILookup<uint, int> ShrinkLookup(ILookup<uint, int> lookUpToShrink, List<NurbsCurve> curves, double tolerance, int salt,
CreateCurveHashCodeDelegateX createMyCurveHashCodeDelegate)
{
    var refined = new List<IGrouping<uint, int>>();
    foreach (var bucket in lookUpToShrink)
    {
        // A single-member bucket cannot hold duplicates -- discard it.
        if (bucket.Count() == 1)
        {
            continue;
        }
        var subdivided = bucket
            .Select(id => new HashIndexPair(createMyCurveHashCodeDelegate(curves[id], tolerance, salt), id))
            .ToLookup(p => p.Hash, p => p.Id);
        if (subdivided.Count > 1)
        {
            // The new hash separated this bucket into sub-buckets; try to shrink
            // those further with a different salt.
            refined.AddRange(ShrinkLookup(subdivided, curves, tolerance, salt * 2 + 1, createMyCurveHashCodeDelegate));
        }
        else
        {
            refined.AddRange(subdivided);
        }
    }
    // Flatten the surviving groups back into a single lookup keyed by the
    // most recently computed hash values.
    return refined
        .SelectMany(g => g.Select(id => new HashIndexPair(g.Key, id)))
        .ToLookup(p => p.Hash, p => p.Id);
}
/// <summary>
/// First-pass curve hash: combines knot values, degree, dimension, domain,
/// topology flags (closed/periodic/valid), span count, and the end points
/// rounded to the given tolerance. The salt perturbs the mix so repeated
/// passes produce independent bucketings.
/// </summary>
public static uint CreateCurveHashCodeX1(NurbsCurve crv, Double tolerance, int salt)
{
    int hash = 0;
    unchecked
    {
        int knotCount = crv.Knots.Count;
        for (int i = 0; i < knotCount; i++)
        {
            hash ^= crv.Knots[i].GetHashCode() * (2 * i + salt);
        }
        hash ^= crv.Degree * 1559;
        hash ^= crv.Dimension * 2029;
        hash ^= saltHash(crv.Domain.GetHashCode(), salt) * 1699;
        hash ^= saltHash(crv.IsClosed.GetHashCode(), salt) * 4099;
        hash ^= saltHash(crv.IsPeriodic.GetHashCode(), salt) * 4447;
        hash ^= saltHash(crv.IsValid.GetHashCode(), salt) * 5557;
        // Round the end points to the number of decimals significant at the
        // given tolerance, so nearly identical curves land in the same bucket.
        int roundPrecision = (int)Math.Log10(1d / tolerance);
        hash ^= saltHash(PointRounded(crv.PointAtEnd, roundPrecision).GetHashCode(), salt) * 6007;
        hash ^= saltHash(PointRounded(crv.PointAtStart, roundPrecision).GetHashCode(), salt) * 6977;
        hash ^= crv.SpanCount * 9199;
    }
    return (uint)hash;
}
/// <summary>
/// Second-pass curve hash: combines the control-point locations (rounded to the
/// given tolerance) and control-point weights, salted and position-weighted so
/// reordered points hash differently.
/// </summary>
public static uint CreateCurveHashCodeX2(NurbsCurve crv, Double tolerance, int salt)
{
    int hash = 0;
    // Number of decimal digits that are significant at the given tolerance.
    int roundPrecision = (int)Math.Log10(1d / tolerance);
    unchecked
    {
        int pointCount = crv.Points.Count;
        for (int i = 0; i < pointCount; i++)
        {
            var location = PointRounded(crv.Points[i].Location, roundPrecision);
            hash ^= saltHash(location.GetHashCode(), salt) * (31 * i + 1);
            hash ^= saltHash(crv.Points[i].Weight.GetHashCode(), salt) * (97 * i + 1);
        }
    }
    return (uint)hash;
}
/// <summary>
/// Returns a copy of <paramref name="point"/> with each coordinate rounded to
/// <paramref name="precision"/> decimal places (Math.Round, banker's rounding).
/// </summary>
public static Point3d PointRounded(Point3d point, int precision)
{
    return new Point3d(
        Math.Round(point.X, precision),
        Math.Round(point.Y, precision),
        Math.Round(point.Z, precision));
}
/// <summary>
/// Perturbs a hash with a salt: returns hash + (hash % divisor). Throws
/// DivideByZeroException when <paramref name="divisor"/> is zero (callers
/// always pass non-zero salts). Name kept lower-case for existing callers.
/// </summary>
public static int saltHash(int hash, int divisor)
{
    // C#'s % operator yields the same remainder as Math.DivRem.
    return hash + hash % divisor;
}
//