Yes, you are absolutely correct. Thanks for pointing it out.
It is not possible to use tolerance the way I did; some points will be "rounded" to different values even if they are very close to each other (i.e., their distance is less than the tolerance).
So I updated the code to do the job but WITHOUT TOLERANCE. I tested it on a few million different curves and it seems to give the same result as the SelDupAll command in Rhino. And it is very fast at the same time.
Here is the code:
//
// Signature of a curve-hashing method: computes a salted hash for a NURBS curve.
// The salt lets one method produce independent hash families, which ShrinkLookup
// uses to break up accidental hash collisions.
public delegate uint CreateCurveHashCodeDelegate(NurbsCurve crv, int salt);
/// <summary>
/// Pairs a curve's computed hash with the curve's index in the input list,
/// so curves can be grouped by hash while remembering where they came from.
/// </summary>
public struct HashIndexPair
{
    // Hash produced by a CreateCurveHashCodeDelegate.
    public uint Hash { get; set; }
    // Zero-based index of the curve in the original list.
    public int Id { get; set; }
    public HashIndexPair(uint hashPrm, int idPrm)
    {
        Hash = hashPrm;
        Id = idPrm;
    }
}
/// <summary>
/// Flags duplicate curves without using a tolerance: curves are hashed exactly
/// (knots, control points, weights, basic properties) and grouped by hash.
/// </summary>
/// <param name="curves">Input curves; null entries cause the failure path below.</param>
/// <returns>
/// A list parallel to <paramref name="curves"/> where true means the curve has at
/// least one exact duplicate in the input. Returns an empty list when NURBS
/// conversion fails or when there are fewer than two curves.
/// </returns>
/// <remarks>NOTE(review): curves are reversed IN PLACE where needed — this mutates
/// the caller's objects. Confirm callers accept that side effect.</remarks>
public static List<bool> FindDuplicteCurvesA(List<Curve> curves)
{
    // Normalize direction so a curve and its reversed twin produce the same hash.
    var curvesToCheck = curves.Where(x => x != null)
        .Select(x =>
        {
            if (x.PointAtStart > x.PointAtEnd) x.Reverse();
            return x;
        }).ToList();
    var nurbsCurves = curvesToCheck.Select(x => x.ToNurbsCurve()).Where(x => x != null).ToList();
    if (curves.Count != nurbsCurves.Count)
    {
        // BUG FIX: argument {1} was the list itself (which prints the type name),
        // not its count.
        RhinoApp.WriteLine("*** FAILED TO CONVERT TO NURBS CURVES. CurvesCount={0}, NurbesCurveCount={1}", curves.Count, nurbsCurves.Count);
        return new List<bool>();
    }
    if (nurbsCurves.Count < 2)
    {
        // Fewer than two curves: nothing can be a duplicate.
        return new List<bool>();
    }
    // Group curves by an initial salted hash; ShrinkLookup then re-hashes each
    // group with fresh salts to split accidental collisions apart.
    CreateCurveHashCodeDelegate createMyCurveHashCode1 = CreateCurveHashCodeX3;
    var curvesLookupStart = nurbsCurves
        .Select((x, i) => new HashIndexPair(createMyCurveHashCode1(x, 101), i))
        .ToLookup(y => y.Hash, y => y.Id);
    var curvesLookUpShrinked = ShrinkLookup(curvesLookupStart, nurbsCurves, 313, createMyCurveHashCode1);
    // Every index that survives shrinking belongs to a group of 2+ identical curves.
    bool[] duplicateStatus = new bool[curves.Count];
    foreach (var group in curvesLookUpShrinked)
    {
        foreach (int id in group)
        {
            duplicateStatus[id] = true;
        }
    }
    return duplicateStatus.ToList();
}
/// <summary>
/// Recursively refines hash groups: drops single-member groups (no duplicate
/// possible) and re-hashes multi-member groups with a new salt. If re-hashing
/// splits a group, recursion continues with yet another salt; otherwise the
/// group is kept as a confirmed set of identical curves.
/// </summary>
/// <param name="lookUpToShrink">Current grouping of curve indices by hash.</param>
/// <param name="curves">All NURBS curves, indexed by the values in the lookup.</param>
/// <param name="salt">Salt for this refinement pass; recursion uses salt * 2 + 1.</param>
/// <param name="createMyCurveHashCodeDelegate">Hashing method to apply.</param>
/// <returns>A lookup containing only groups of two or more matching indices.</returns>
public static ILookup<uint, int> ShrinkLookup(ILookup<uint, int> lookUpToShrink, List<NurbsCurve> curves, int salt,
    CreateCurveHashCodeDelegate createMyCurveHashCodeDelegate)
{
    var keptGroups = new List<IGrouping<uint, int>>();
    foreach (var candidateGroup in lookUpToShrink)
    {
        // A lone member has nothing to be a duplicate of — discard it.
        if (candidateGroup.Count() == 1)
        {
            continue;
        }
        // Re-hash this group's members with the current salt.
        var rehashed = candidateGroup
            .Select(id => new HashIndexPair(createMyCurveHashCodeDelegate(curves[id], salt), id))
            .ToLookup(pair => pair.Hash, pair => pair.Id);
        if (rehashed.Count > 1)
        {
            // The new salt separated members that only collided by accident;
            // keep refining the resulting subgroups with a fresh salt.
            keptGroups.AddRange(ShrinkLookup(rehashed, curves, salt * 2 + 1, createMyCurveHashCodeDelegate));
        }
        else
        {
            // The group stayed together under a second hash — keep it as-is.
            keptGroups.AddRange(rehashed);
        }
    }
    // Flatten the surviving groups back into a single lookup.
    return keptGroups
        .SelectMany(grp => grp.Select(id => new HashIndexPair(grp.Key, id)))
        .ToLookup(pair => pair.Hash, pair => pair.Id);
}
/// <summary>
/// Computes a salted hash of a NURBS curve from its knot vector, control point
/// locations and weights, and basic scalar properties. No tolerance is involved:
/// two curves hash equal only when the underlying doubles hash equal.
/// </summary>
/// <param name="crv">Curve to hash.</param>
/// <param name="salt">Salt mixed into the hash so repeated passes are independent.</param>
/// <returns>The accumulated hash reinterpreted as an unsigned integer.</returns>
public static uint CreateCurveHashCodeX3(NurbsCurve crv, int salt)
{
    int hash = 0;
    unchecked
    {
        // Knot vector: position-dependent multiplier keeps permutations distinct.
        int knotCount = crv.Knots.Count;
        for (int k = 0; k < knotCount; k++)
        {
            hash ^= crv.Knots[k].GetHashCode() * (2 * k + salt);
        }
        // Control points: mix in both location and rational weight.
        int pointCount = crv.Points.Count;
        for (int p = 0; p < pointCount; p++)
        {
            var controlPoint = crv.Points[p];
            hash ^= saltHash(controlPoint.Location.GetHashCode(), salt) * (31 * p + 1);
            hash ^= saltHash(controlPoint.Weight.GetHashCode(), salt) * (97 * p + 1);
        }
        // Scalar curve properties, each spread by a distinct prime multiplier.
        hash ^= crv.Degree * 1559;
        hash ^= crv.Dimension * 2029;
        hash ^= saltHash(crv.Domain.GetHashCode(), salt) * 1699;
        hash ^= saltHash(crv.IsClosed.GetHashCode(), salt) * 4099;
        hash ^= saltHash(crv.IsPeriodic.GetHashCode(), salt) * 4447;
        hash ^= saltHash(crv.IsValid.GetHashCode(), salt) * 5557;
        hash ^= crv.SpanCount * 9199;
    }
    return (uint)hash;
}
/// <summary>
/// Perturbs a hash value by adding the remainder of dividing it by the salt,
/// giving a cheap salt-dependent variation of the input hash.
/// </summary>
/// <param name="hash">Hash value to perturb.</param>
/// <param name="divisor">Salt used as divisor; callers always pass non-zero odd salts.</param>
/// <returns>hash + (hash % divisor).</returns>
public static int saltHash(int hash, int divisor)
{
    // The % operator yields exactly the remainder Math.DivRem would produce.
    return hash + hash % divisor;
}