Design and implement a web browser


  • 0
    R

    Design and implement a web browser that supports the following functionality: at any given instant, you can efficiently tell the top 5 most visited websites, ranked by number of visits.

    // A single visited page: the address plus a running visit counter.
    struct Webpage
    {
        std::string url;            // page address, used as the unique key
        size_t numberOfVisits;      // how many times this page has been opened
    };
     
    // Browsing history to be implemented: record visits and report the
    // most visited pages.
    struct History
    {
        // Record one visit to `url` (stub — to be implemented).
        void visit(const std::string & url) {
        }
     
        // Print the five most visited urls, ordered by visit count (stub).
        void printTop5() {
        }
    };
    

    Any ideas?


  • 0
    N

    // Basically, you can use a priority queue to maintain the history of the web browser.
    // You will also need a map that maps each url to its webpage reference.

    // I think the code is pretty self-explanatory, but let me know if any part needs explanation.

    public class WebBrowser {
    
    private PriorityQueue<WebPage> history;
    private Map<String,WebPage> webPageMap;
    
    class WebPage {
        private String url;
        private int visitCount;
    
        public WebPage(String url){
            this.url = url;
            this.visitCount = 0;
        }
    
        public void visited() {
            this.visitCount++;
        }
    }
    
    public WebBrowser() {
        this.history = new PriorityQueue<>(new Comparator<WebPage>() {
            @Override
            public int compare(WebPage o1, WebPage o2) {
                return o2.visitCount-o1.visitCount;
            }
        });
        this.webPageMap = new HashMap<>();
    }
    
    
    public void navigate(String url) {
    
        if(!this.webPageMap.containsKey(url)){
            this.webPageMap.put(url,new WebPage(url));
            WebPage page = this.webPageMap.get(url);
            page.visited();
            this.history.add(page);
        }else {
            WebPage page = this.webPageMap.get(url);
            this.history.remove(page);
            page.visited();
            this.history.add(page);
        }
    
    
    
    
    
    }
    
    public List<WebPage> mostVisisted() {
    
        List<WebPage> list = new ArrayList<>();
        Iterator<WebPage> itr = this.history.iterator();
    
        int count  = 5;
        while(itr.hasNext() && count>0) {
            list.add(itr.next());
            count--;
        }
        return list;
    }
    
    
    public static void main(String[] args) {
    
        WebBrowser webBrowser = new WebBrowser();
        webBrowser.navigate("https://www.google.com");
        webBrowser.navigate("https://www.twitter.com");
        webBrowser.navigate("https://www.yahoo.com");
        webBrowser.navigate("https://www.bloomberg.com");
        webBrowser.navigate("https://www.gmail.com");
        webBrowser.navigate("https://www.google.com");
        webBrowser.navigate("https://www.yahoo.com");
        webBrowser.navigate("https://www.yahoo.com");
    
    
        List<WebPage> list = webBrowser.mostVisisted();
        for(WebPage page : list ) {
            System.out.println(page.url);
        }
        System.out.println("");
    }
    

    }


  • 0
    A

    Create a multiset that stores the web pages sorted by number of visits.
    Create a page map that maps the url string to the corresponding iterator into the multiset.

    Insertion / update is O(log n) -> multiset erase + insert
    Getting the top 5 urls is O(k), where k is 5

    #include <string>
    #include <unordered_map>
    #include <set>
    #include <list>
    #include <iostream>
    
    // One browsed page: the address and how often it has been opened.
    class Webpage
    {
    private:
        std::string url;
        size_t numVisits;
    
    public:
        // A freshly constructed page counts as already visited once.
        Webpage( const std::string& pageUrl )
            : url( pageUrl ),
              numVisits( 1 )
        {
        }
    
        // Bump the visit counter by one.
        void visited()
        {
            ++numVisits;
        }
    
        // History reads url/numVisits directly for ranking and printing.
        friend class History;
    };
    
    class History
    {
    private:
        class Comp
        {
        public:
            Comp() {}
            ~Comp() {}
            bool operator()( const Webpage& p1, const Webpage& p2 )
            {
                return p1.numVisits > p2.numVisits;
            }
        };
    
        std::multiset<Webpage, Comp> pageRank;
        std::unordered_map<std::string, std::multiset<Webpage>::iterator> pageMap;
    
    public:
    
        // Time complexity: O(logn)
        void visit( const std::string& url )
        {
            // Two cases, adding webpage for the first time, or updating the count of webpages
            // Case 1: adding webpage for the first time
            if( pageMap.find( url ) == pageMap.end() )
            {
                Webpage p( url );
                std::multiset<Webpage>::iterator it = pageRank.insert( p );
                pageMap[ url ] = it;
            }
    
            // Case 2: updating the count of webpages
            else
            {
                std::multiset<Webpage>::iterator it = pageMap[ url ];
                Webpage p = *it;
                p.visited();
                pageRank.erase( it );
                it = pageRank.insert( p );
                pageMap[ url ] = it;
            }
        }
    
        // Time complexity: O(k)
        void printTop5()
        {
            std::cout << "Printing the top 5 most visited websites:" << std::endl;
            std::multiset<Webpage>::iterator it;
            int count = 0;
            for( it = pageRank.begin(); it != pageRank.end() && count < 5; it++, count++ )
            {            
                std::cout << (*it).url << " " << it->numVisits << std::endl;
            }
            std::cout << std::endl;
        }
    };
    
    // Exercise History: two batches of visits, printing the ranking after each.
    int main()
    {
        History h;
    
        const char* firstBatch[] = {
            "www.facebook.com",
            "www.quora.com",
            "www.leetcode.com",
            "www.google.com",
            "www.google.com",
        };
        for( const char* site : firstBatch )
        {
            h.visit( site );
        }
        h.printTop5();
    
        const char* secondBatch[] = {
            "www.reddit.com",
            "www.facebook.com",
            "www.geeksforgeeks.com",
            "www.facebook.com",
            "www.amazon.com",
            "www.leetcode.com",
        };
        for( const char* site : secondBatch )
        {
            h.visit( site );
        }
        h.printTop5();
    }
    

Log in to reply
 

Looks like your connection to LeetCode Discuss was lost, please wait while we try to reconnect.