sqlmapproject / sqlmap

Automatic SQL injection and database takeover tool
http://sqlmap.org

Only get the last result when crawling using sqlmapapi #2503

Open walio opened 7 years ago

walio commented 7 years ago

System information

Python version: 2.7.13
Operating system: Windows 7
sqlmap version: 1.1.4.48#dev

Related webpages:

intro.php

<a href="test.php?username="></a>
<a href="test2.php?username="></a>

test.php and test2.php are identical:

<?php
    // deliberately injectable: 'username' is concatenated into the query unsanitized
    $conn = @mysql_connect("localhost", 'root', '') or die("connect failed!");
    mysql_select_db("test", $conn) or die("db not exist!");
    $name = $_GET['username'];
    $query = mysql_query("select * from users where username='$name'");
    $arr = mysql_fetch_array($query);
    if (is_array($arr)) {
        echo "success";
    } else {
        echo "failed";
    }
?>

Issue detail

I use python sqlmapapi.py -s to start the API server, and I use the following script, which enables crawling, to detect injections:

#!/usr/bin/python
import requests
import time
import json
from pprint import pprint

class AutoSqli(object):

    def __init__(self, server='', target='',data = '',referer = '',cookie = ''):
        super(AutoSqli, self).__init__()
        self.server = server
        if self.server[-1] != '/':
            self.server = self.server + '/'
        self.target = target
        self.taskid = ''
        self.engineid = ''
        self.status = ''
        self.data = data
        self.referer = referer
        self.cookie = cookie
        self.start_time = time.time()

    def task_new(self):
        self.taskid = json.loads(requests.get(self.server + 'task/new').text)['taskid']

    def task_delete(self):
        if json.loads(requests.get(self.server + 'task/' + self.taskid + '/delete').text)['success']:
            return True
        return False

    def scan_start(self):
        headers = {'Content-Type': 'application/json'}
        payload = {'url': self.target}
        url = self.server + 'scan/' + self.taskid + '/start'
        t = json.loads(requests.post(url, data=json.dumps(payload), headers=headers).text)
        self.engineid = t['engineid']

    def scan_status(self):
        self.status = json.loads(
            requests.get(self.server + 'scan/' + self.taskid + '/status').text)['status']
        if self.status == 'running':
            return 'running'
        elif self.status == 'terminated':
            return 'terminated'
        else:
            return 'error'

    def scan_data(self):
        self.data = json.loads(requests.get(self.server + 'scan/' + self.taskid + '/data').text)['data']

    def option_set(self):
        headers = {'Content-Type': 'application/json'}
        # enable smart detection and crawl the target one level deep
        option = {
                    "smart": True,
                    "crawlDepth": 1
                 }
        url = self.server + 'option/' + self.taskid + '/set'
        requests.post(url, data=json.dumps(option), headers=headers)

    def scan_stop(self):
        requests.get(self.server + 'scan/' + self.taskid + '/stop')         

    def scan_kill(self):
        requests.get(self.server + 'scan/' + self.taskid + '/kill')

    def run(self):
        self.task_new()
        self.option_set()
        self.scan_start()
        while True:
            if self.scan_status() == 'running':
                time.sleep(1)
            elif self.scan_status() == 'terminated':
                break
            else:
                break
            # give up if the scan runs longer than 3000 seconds
            if time.time() - self.start_time > 3000:
                self.scan_stop()
                self.scan_kill()
                break
        self.scan_data()
        self.task_delete()
        # type 0 entries describe the injectable targets found
        for result in self.data:
            if result['type'] == 0:
                pprint(result)

if __name__ == '__main__':
    t = AutoSqli('http://127.0.0.1:8775', 'http://localhost/intro.php')
    t.run()

The script starts a new scan from intro.php with a crawl depth of 1. I expect results for both test.php and test2.php, since they are identical, but only the result for test2.php is returned:

{u'status': 1,
 u'type': 0,
 u'value': {u'data': None,
            u'query': u'username=',
            u'url': u'http://localhost:80/test2.php'}}
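
To confirm that both crawled pages were actually scanned (i.e. that the test.php result is being overwritten rather than test.php being skipped by the crawler), the task log can be fetched before the task is deleted. A minimal sketch using the scan/<taskid>/log endpoint; the 'level' and 'message' field names are assumed from the sqlmapapi log format:

import json
import requests

SERVER = 'http://127.0.0.1:8775/'

def print_scan_log(taskid):
    # scan/<taskid>/log returns the messages sqlmap logged for this task;
    # both test.php and test2.php should appear as tested targets
    resp = requests.get(SERVER + 'scan/' + taskid + '/log')
    for entry in json.loads(resp.text)['log']:
        print('[%s] %s' % (entry.get('level'), entry.get('message')))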

Suggested solution

When I comment out the if block in the write method of class StdDbOut in /lib/utils/api.py, lines 241 to 244:

if len(output) > 0:
    for index in xrange(len(output)):
        conf.databaseCursor.execute("DELETE FROM data WHERE id = ?",
                                    (output[index][0],))

the scan returns both results, as I expected:

{u'status': 1,
 u'type': 0,
 u'value': {u'data': None,
            u'query': u'username=',
            u'url': u'http://localhost:80/test.php'}}
{u'status': 1,
 u'type': 0,
 u'value': {u'data': None,
            u'query': u'username=',
            u'url': u'http://localhost:80/test2.php'}}

I guess this is not a good solution, but it has worked well enough so far. I hope the underlying problem can be properly fixed.
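
Until the underlying behaviour is fixed, a client-side workaround may also be possible without patching api.py: poll scan/<taskid>/data while the scan is still running and merge the entries by URL and query, so a result that is later removed on the server side is still captured. This is only a sketch, and it assumes the test.php entry is present in the data endpoint at some point before it is deleted, which the behaviour described above suggests it is:

#!/usr/bin/python
# Sketch of a client-side workaround: collect results while the scan runs,
# keyed by (url, query), instead of reading the data endpoint once at the end.
import json
import time
import requests

SERVER = 'http://127.0.0.1:8775/'
TARGET = 'http://localhost/intro.php'

def workaround_scan():
    taskid = json.loads(requests.get(SERVER + 'task/new').text)['taskid']
    requests.post(SERVER + 'option/' + taskid + '/set',
                  data=json.dumps({'smart': True, 'crawlDepth': 1}),
                  headers={'Content-Type': 'application/json'})
    requests.post(SERVER + 'scan/' + taskid + '/start',
                  data=json.dumps({'url': TARGET}),
                  headers={'Content-Type': 'application/json'})

    results = {}
    while True:
        # merge whatever is currently in the data table, keyed by URL and query,
        # so entries deleted later on the server side are not lost
        data = json.loads(requests.get(SERVER + 'scan/' + taskid + '/data').text)['data']
        for entry in data:
            if entry['type'] == 0:
                key = (entry['value']['url'], entry['value']['query'])
                results[key] = entry
        status = json.loads(requests.get(SERVER + 'scan/' + taskid + '/status').text)['status']
        if status != 'running':
            break
        time.sleep(1)

    requests.get(SERVER + 'task/' + taskid + '/delete')
    return list(results.values())

if __name__ == '__main__':
    for result in workaround_scan():
        print(json.dumps(result, indent=2))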
