
I am making a Facebook group member scraper, and I found that the page auto-loads more members when you scroll to the bottom. To handle that automatically, I used the following code:

    setInterval(function () {
        $(document).scrollTop($(document).height());
    }, 1000);

It works fine, but the rest of my code runs at the same time.

    var main = [];

    $('#groupsMemberSection_self_bio').find('.uiList').each(function () {
        $(this).find('._6a').each(function () {
            $(this).find('div._6a:last').each(function () {
                var href = $(this).find('a').attr('href');
                if (href != undefined) {
                    var obj = {};
                    obj['url'] = href;
                    obj['name'] = $(this).find('a').text();
                    obj['company_name'] = $(this).find('._60rj').next().text();
                    main.push(obj);
                }
            });
        });
    });


    $('#groupsMemberSection_admins_moderators').find('.uiList').each(function () {
        $(this).find('._6a').each(function () {
            $(this).find('div._6a:last').each(function () {
                var href = $(this).find('a').attr('href');
                if (href != undefined) {
                    var obj = {};
                    obj['url'] = href;
                    obj['name'] = $(this).find('a').text();
                    obj['company_name'] = $(this).find('._60rj').next().text();
                    main.push(obj);
                }
            });
        });
    });


    $('#groupsMemberSection_things_in_common').find('.uiList').each(function () {
        $(this).find('._6a').each(function () {
            $(this).find('div._6a:last').each(function () {
                var href = $(this).find('a').attr('href');
                if (href != undefined) {
                    var obj = {};
                    obj['url'] = href;
                    obj['name'] = $(this).find('a').text();
                    obj['company_name'] = $(this).find('._60rj').next().text();
                    main.push(obj);
                }
            });
        });
    });

    $('#groupsMemberSection_recently_joined').find('.uiList').each(function () {
        $(this).find('._6a').each(function () {
            $(this).find('div._6a:last').each(function () {
                var href = $(this).find('a').attr('href');
                if (href != undefined) {
                    var obj = {};
                    obj['url'] = href;
                    obj['name'] = $(this).find('a').text();
                    obj['company_name'] = $(this).find('._60rj').next().text();
                    main.push(obj);
                }
            });
        });
    });


    console.log('data', main);
    downloadCSV({ filename: 'data.csv' }, main);

    function convertArrayOfObjectsToCSV(args) {
        var result, ctr, keys, columnDelimiter, lineDelimiter, data;

        data = args.data || null;
        if (data == null || !data.length) {
            return null;
        }

        columnDelimiter = args.columnDelimiter || ',';
        lineDelimiter = args.lineDelimiter || '\n';

        keys = Object.keys(data[0]);

        result = '';
        result += keys.join(columnDelimiter);
        result += lineDelimiter;

        data.forEach(function (item) {
            ctr = 0;
            keys.forEach(function (key) {
                if (ctr > 0) result += columnDelimiter;
                result += item[key];
                ctr++;
            });
            result += lineDelimiter;
        });

        return result;
    }

    function downloadCSV(args, stockData) {  
        var data, filename, link;
        var csv = convertArrayOfObjectsToCSV({
            data: stockData
        });
        if (csv == null) return;

        filename = args.filename || 'export.csv';

        if (!csv.match(/^data:text\/csv/i)) {
            csv = 'data:text/csv;charset=utf-8,' + csv;
        }
        data = encodeURI(csv);

        link = document.createElement('a');
        link.setAttribute('href', data);
        link.setAttribute('download', filename);
        link.click();
    }

What I want is for the CSV file to download only once, after the page has finished loading, that is, after the auto-loading pagination has reached the last page. How can I detect that condition so the CSV is downloaded with all of the data on the page? If there is another way to do this, I would appreciate the help.
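One direction I am considering (a rough sketch only, and it assumes the document height stops growing once the last page of members has loaded) is to keep the scroll interval, but stop it once the height has not changed for a few ticks, and only then run the scraping and download code, wrapped in a function:

    // Rough sketch: assumes jQuery is available and that the document height
    // stops growing once Facebook has loaded the last page of members.
    var lastHeight = 0;
    var unchangedTicks = 0;

    var scroller = setInterval(function () {
        var height = $(document).height();
        $(document).scrollTop(height);

        if (height === lastHeight) {
            unchangedTicks++;        // no new content was appended this tick
        } else {
            unchangedTicks = 0;      // the list grew, keep scrolling
            lastHeight = height;
        }

        // After 5 ticks (about 5 seconds) with no growth, assume we are done.
        if (unchangedTicks >= 5) {
            clearInterval(scroller);
            scrapeAndDownload();     // hypothetical wrapper around the scraping + downloadCSV code above
        }
    }, 1000);

Another possibility might be a MutationObserver on the member-list container that fires the same wrapper once no new child nodes have been added for a few seconds, but I have not tried that.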

Rahul shukla
  • Just a helpful bit of advice - be prepared for Facebook to shut down your account if they manage to find that you are trying to programmatically access information they are not providing via their API. [According to their API](https://developers.facebook.com/docs/graph-api/reference/v2.12/group/members), only the group owner has access to the `/{group-id}/members` endpoint. – Lix Apr 03 '18 at 13:37
  • Actually, I am making this for group owners only, so that they can get the data of the members in their group. – Rahul shukla Apr 03 '18 at 13:40
  • You are in breach of their terms of service (chapter 3.2) unless you get written consent to do this. This is subject to change without notice. Technically, there is some information already on SO; start here: https://stackoverflow.com/q/10404699/1132334 – Cee McSharpface Apr 03 '18 at 13:40
  • So why not use the official API endpoint? There is no need to scrape any pages for this. Have your users authenticate your facebook app and then with their user access token you can query for the group members. – Lix Apr 03 '18 at 13:41
  • So is there an official API? How can I get access to it? – Rahul shukla Apr 03 '18 at 13:42
  • Yes - there is an official Facebook API - Take a look at this page of the API docs - https://developers.facebook.com/docs/graph-api/reference/v2.12/group/members – Lix Apr 03 '18 at 13:42
  • And here is a link to the "getting started" page for the Facebook Javascript SDK - https://developers.facebook.com/docs/javascript/quickstart – Lix Apr 03 '18 at 13:43
  • OK, thanks, but is there any way to make the function run only when my page has completely loaded? – Rahul shukla Apr 03 '18 at 13:44
  • Perhaps - but there is a large possibility of your code breaking the moment Facebook makes any changes to their frontend code. I would recommend reading up on the official API and using their official SDK libraries to access the data. Besides that - you are breaking Facebook's rules and will not be able to provide your users ANY data once they suspend your account. – Lix Apr 03 '18 at 13:47
  • OK, thanks for clearing my doubts. One last question: if I want to create a scraper like Grouply and others, what do I need to do for that? – Rahul shukla Apr 03 '18 at 13:51
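For reference, a rough sketch of the API-based route suggested in the comments above, assuming the FB JavaScript SDK has been initialised with FB.init(), the user has logged in via FB.login() with whatever group permission Facebook currently requires, and GROUP_ID is the target group's id (the endpoint and required scopes are subject to change, so check the linked docs):

    // Rough sketch: paginate through /{group-id}/members with the Graph API
    // instead of scraping the page. Field names in the response depend on the
    // API version and the fields requested.
    function fetchMembers(groupId, after, members, done) {
        var params = { limit: 100 };
        if (after) params.after = after;

        FB.api('/' + groupId + '/members', params, function (response) {
            if (!response || response.error) {
                console.error('Graph API error', response && response.error);
                return;
            }
            members = members.concat(response.data);

            // Follow cursor-based pagination until there are no more pages.
            if (response.paging && response.paging.next) {
                fetchMembers(groupId, response.paging.cursors.after, members, done);
            } else {
                done(members);
            }
        });
    }

    fetchMembers('GROUP_ID', null, [], function (members) {
        downloadCSV({ filename: 'data.csv' }, members);
    });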

0 Answers