Improving Mitchell's best candidate algorithm


Problem description


I have successfully implemented Mitchell's best candidate algorithm. Mitchell’s best-candidate algorithm generates a new random sample by creating k candidate samples and picking the best of k. Here the "best" sample is defined as the sample that is farthest away from previous samples. The algorithm approximates Poisson-disc sampling, producing a much more natural appearance (better blue noise spectral characteristics) than uniform random sampling.
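
For reference, the unmodified algorithm compares every candidate against all previously accepted points and keeps the candidate whose nearest accepted point is farthest away. A minimal Java sketch of that baseline (using java.awt.Point and java.util.Random instead of my own Point and Randomizer helpers) could look like this:

import java.awt.Point;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;

public class BestCandidateSketch {

    private static final Random RND = new Random();

    // Standard Mitchell best-candidate: each candidate is scored by the distance
    // to its NEAREST already-accepted point, and the candidate with the largest
    // such distance is kept. This costs O(k * n) distance checks per new point.
    public static List<Point> generate(int numPoints, int numCandidates, int width, int height) {
        List<Point> samples = new ArrayList<>();
        samples.add(new Point(RND.nextInt(width), RND.nextInt(height)));
        while (samples.size() < numPoints) {
            Point best = null;
            double bestScore = -1.0;
            for (int c = 0; c < numCandidates; c++) {
                Point candidate = new Point(RND.nextInt(width), RND.nextInt(height));
                double nearest = Double.MAX_VALUE;
                for (Point sample : samples) {          // compare against ALL accepted points
                    nearest = Math.min(nearest, sample.distance(candidate));
                }
                if (nearest > bestScore) {              // farthest from its nearest neighbor wins
                    bestScore = nearest;
                    best = candidate;
                }
            }
            samples.add(best);
        }
        return samples;
    }
}

The inner loop over all accepted samples is what makes this baseline expensive as the sample grows, and it is the cost I am trying to reduce.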

I am trying to improve it especially in the area of speed. So the first idea that came to my mind was to compare the candidate samples only to the last added element instead of comparing them to the whole previous sample. This would bias Poisson-disc sampling but may produce some interesting results.

Here is the main part of my implementation:

public class MitchellBestCandidateII extends JFrame {

    private List<Point> mitchellPoints = new ArrayList<Point>();
    private Point currentPoint;
    private int currentPointIndex = 0;
    private boolean isBeginning = true;
    private Point[] candidatesBunch = new Point[MAX_CANDIDATES_AT_TIME];

    public MitchellBestCandidateII() {      
        computeBestPoints();
        initComponents();
    }

The method computeBestPoints computes the points differently from Mitchell's algorithm in the sense that it compares candidates only to the last added point instead of comparing them to the whole sample.

    private void computeBestPoints() {

        do {
            if (isBeginning) {
                currentPoint = getRandomPoint();
                mitchellPoints.add(currentPoint);
                isBeginning = false;
                currentPointIndex = 0;              
            } 

            setCandidates();
            Point bestCandidate = pickUpCandidateFor(currentPoint);
            mitchellPoints.add(bestCandidate);
            currentPoint = bestCandidate;
            currentPointIndex++;    
        } while (currentPointIndex < MAX_NUMBER_OF_POINTS);

    }

    private Point pickUpCandidateFor(Point p) {
        double biggestDistance = 0.0D;
        Point  result = null;
        for (int i = 0; i < MAX_CANDIDATES_AT_TIME; i++) {

            double d = distanceBetween(p, candidatesBunch[i]);
            if (biggestDistance < d) {
                biggestDistance = d;
                result = candidatesBunch[i];

            }
        }

        return result;
    }

The setCandidates method generates random candidates. Only one of them will end up being part of the sample: the others will be discarded.

    private void setCandidates() {
        for (int i = 0; i < MAX_CANDIDATES_AT_TIME; i++) {
            candidatesBunch[i] = getRandomPoint();
        }
    }


    private Point getRandomPoint() {
        return new Point(Randomizer.getHelper().nextInt(SCREEN_WIDTH), Randomizer.getHelper().nextInt(SCREEN_HEIGHT));

    }

The initComponents method sets up the JFrame and the JPanel and passes the list of points to draw to the JPanel.

    private void initComponents() {
        this.setSize(SCREEN_WIDTH,SCREEN_HEIGHT);
        PaintPanel panel = new PaintPanel(mitchellPoints);
        panel.setPreferredSize(new Dimension(SCREEN_WIDTH,SCREEN_HEIGHT));
        this.setContentPane(panel);
        this.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);

    }

The distanceBetween method computes the Euclidean distance between two points.

    public double distanceBetween(Point p1, Point p2) {

        double deltaX = p1.getX() - p2.getX();
        double deltaY = p1.getY() - p2.getY();
        double deltaXSquare = Math.pow(deltaX, 2);
        double deltaYSquare = Math.pow(deltaY, 2);

        return Math.sqrt(deltaXSquare + deltaYSquare);

    }

}

Here is an illustration of the execution:

Every run seems to produce the same type of point distribution, and as you can see in the pictures above, the points seem to be avoiding the central area. I can't understand why it is behaving this way. Can someone help me understand this behavior? Is there any other approach (or known algorithm) that significantly improves Mitchell's best candidate algorithm? My implementation of Mitchell's best candidate algorithm (not the above code) is under review on Code Review: http://codereview.stackexchange.com/questions/87843/implementing-mitchells-best-candidate-algorithm

Thank you for helping.

Solution

Every run seems to produce the same type of point distribution, and as you can see in the pictures above, the points seem to be avoiding the central area. I can't understand why it is behaving this way. Can someone help me understand this behavior?

As already pointed out in @pens-fan-69's answer, you will basically end up oscillating between the edges of your space if you base the selection of the new point to add only on its distance from the previous one (the exact opposite of the exact opposite of a point is the point itself).
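
You can also see this directly by simulating the "compare only to the last point" rule in isolation; a small throwaway sketch (plain java.awt.Point and java.util.Random, not your project classes) is enough:

import java.awt.Point;
import java.util.Random;

public class LastPointOnlyDemo {

    public static void main(String[] args) {
        Random rnd = new Random();
        int width = 420, height = 320, candidates = 10;
        Point current = new Point(rnd.nextInt(width), rnd.nextInt(height));
        for (int i = 0; i < 20; i++) {
            Point best = null;
            double bestDistance = -1.0;
            for (int c = 0; c < candidates; c++) {
                Point candidate = new Point(rnd.nextInt(width), rnd.nextInt(height));
                double d = candidate.distance(current);   // distance to the LAST accepted point only
                if (d > bestDistance) {
                    bestDistance = d;
                    best = candidate;
                }
            }
            System.out.println(best);   // tends to land near the border roughly opposite 'current'
            current = best;             // the next round is measured against this point
        }
    }
}

Each accepted point becomes the sole reference for the next round, so the winning candidate keeps landing near the edge opposite the previous point, and the centre is almost never the farthest spot from anything.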

Is there any other approach (or known algorithm) that significantly improves Mitchell's best candidate algorithm?

For the problem you described, I believe a data structure that is specifically intended to model spatial data in k dimensions, and that allows fast searching of the already occupied space for the nearest neighbor of a new coordinate, would make sense. The k-d tree is such a structure:

In computer science, a k-d tree (short for k-dimensional tree) is a space-partitioning data structure for organizing points in a k-dimensional space. k-d trees are a useful data structure for several applications, such as searches involving a multidimensional search key (e.g. range searches and nearest neighbor searches). k-d trees are a special case of binary space partitioning trees.

Generally, a k-d tree is constructed from a set of (sorted) data points by recursively dividing (partitioning) the search space along one of the dimensional axes, splitting at the median value of the remaining points along that axis.
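
That construction step is not needed for your use case, but for illustration, a median-split build might look like the following Java sketch (a hypothetical helper, not part of the code below, which only ever inserts points one at a time):

import java.awt.Point;
import java.util.Comparator;
import java.util.List;

public class BalancedKdBuilder {

    static class Node {
        final Point point;
        Node left, right;
        Node(Point p) { point = p; }
    }

    // Builds a balanced 2-d tree: sort the remaining points along the current
    // axis (x on even levels, y on odd levels) and split at the median.
    // 'points' must be a modifiable list (e.g. an ArrayList).
    static Node build(List<Point> points, int level) {
        if (points.isEmpty()) {
            return null;
        }
        Comparator<Point> byAxis = (level % 2 == 0)
                ? Comparator.comparingDouble(Point::getX)
                : Comparator.comparingDouble(Point::getY);
        points.sort(byAxis);
        int median = points.size() / 2;
        Node node = new Node(points.get(median));
        node.left = build(points.subList(0, median), level + 1);
        node.right = build(points.subList(median + 1, points.size()), level + 1);
        return node;
    }
}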

I have added a very simple and naive implementation that contains only the operations used in your problem and performs no tree rebalancing on insert. Due to the specific nature of the points being inserted (largest distance from the existing points), this seems to work pretty well; see the images below (500 candidates per round with 5,000 points total for the left image, and 10 candidates per round with 1,000 points total for the right image).

The code is in C# because I had some parts already available, but it should be very straightforward to translate to Java. I have omitted the code for the Point class.

// A very naive partial K-D Tree implementation with K = 2 for Points.
public class TwoDTree
{
    private Node _root;

    public void Insert(Point coordinate)
    {
        _root = Node.Insert(coordinate, _root, 0);
    }

    public Point FindNearest(Point to, out double bestDistance)
    {
        bestDistance = double.MaxValue;
        var best = Node.FindNearest(to, _root, 0, null, ref bestDistance);
        return best != null ? best.Coordinate : null;
    }

    public IEnumerable<Point> GetPoints()
    {
        if (_root != null)
            return _root.GetPoints();
        return Enumerable.Empty<Point>();
    }

    private class Node
    {
        private Node _left;
        private Node _right;

        public Node(Point coord)
        {
            Coordinate = coord;
        }

        public readonly Point Coordinate;

        public IEnumerable<Point> GetPoints()
        {
            if (_left != null)
            {
                foreach (var pt in _left.GetPoints())
                    yield return pt;
            }
            yield return Coordinate;

            if (_right != null)
            {
                foreach (var pt in _right.GetPoints())
                    yield return pt;
            }
        }

        // recursive insert (non-balanced).
        public static Node Insert(Point coord, Node root, int level)
        {
            if (root == null)
                return new Node(coord);

            var compareResult = ((level % 2) == 0)
                ? coord.X.CompareTo(root.Coordinate.X)
                : coord.Y.CompareTo(root.Coordinate.Y);

            if (compareResult > 0)
                root._right = Insert(coord, root._right, level + 1);
            else
                root._left = Insert(coord, root._left, level + 1);
            return root;
        }

        public static Node FindNearest(Point coord, Node root, int level, Node best, ref double bestDistance)
        {
            if (root == null)
                return best;

            var axis_dif = ((level % 2) == 0)
                ? coord.X - root.Coordinate.X
                : coord.Y - root.Coordinate.Y;

            // recurse near & maybe far as well
            var near = axis_dif <= 0.0d ? root._left : root._right;
            best = Node.FindNearest(coord, near, level + 1, best, ref bestDistance);
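            // NOTE: this prune compares the SQUARED axis difference against bestDistance,
            // which is only strictly correct if Point.DistanceTo returns a squared distance.
            // If DistanceTo returns the plain Euclidean distance, the safer test is
            // Math.Abs(axis_dif) < bestDistance.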
            if (axis_dif * axis_dif < bestDistance)
            {
                var far = axis_dif <= 0.0d ? root._right : root._left;
                best = Node.FindNearest(coord, far, level + 1, best, ref bestDistance);
            }

            // do we beat the old best.
            var dist = root.Coordinate.DistanceTo(coord);
            if (dist < bestDistance)
            {
                bestDistance = dist;
                return root;
            }
            return best;
        }
    }
}

// Mitchell Best Candidate algorithm, using the K-D Tree.
public class MitchellBestCandidate
{
    private const int MaxX = 420;
    private const int MaxY = 320;
    private readonly int _maxCandidates;
    private readonly int _maxPoints;
    private readonly Random _rnd;
    private readonly TwoDTree _tree = new TwoDTree();

    public MitchellBestCandidate(int maxPoints, int maxCandidatesPerRound)
    {
        _maxPoints = maxPoints;
        _maxCandidates = maxCandidatesPerRound;
        _rnd = new Random();
    }

    public IEnumerable<Point> CurrentPoints
    {
        get { return _tree.GetPoints(); }
    }

    public void Generate()
    {
        _tree.Insert(GetRandomPoint(_rnd, MaxX, MaxY));
        for (var i = 1; i < _maxPoints; i++)
        {
            var bestDistance = double.MinValue;
            var bestCandidate = default(Point);
            for (var ci = 0; ci < _maxCandidates; ci++)
            {
                var distance = default(double);
                var candidate = GetRandomPoint(_rnd, MaxX, MaxY);
                var nearest = _tree.FindNearest(candidate, out distance);
                if (distance > bestDistance)
                {
                    bestDistance = distance;
                    bestCandidate = candidate;
                }
            }
            _tree.Insert(bestCandidate);
        }
    }

    private static Point GetRandomPoint(Random rnd, int maxX, int maxY)
    {
        return new Point(rnd.Next(maxX), rnd.Next(maxY));
    }
}
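
If you do translate the code to Java, a minimal, hypothetical stand-in for the omitted Point class only needs the members the tree relies on (coordinate access and a distance measure); adjust it to whatever Point type you already use:

// Hypothetical Java counterpart of the omitted Point class.
public final class Point {

    public final double x;
    public final double y;

    public Point(double x, double y) {
        this.x = x;
        this.y = y;
    }

    // Plain Euclidean distance; if you keep this, use Math.abs(axisDif) in the
    // k-d tree prune test (see the note in FindNearest above).
    public double distanceTo(Point other) {
        return Math.hypot(x - other.x, y - other.y);
    }
}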
