2019-05-08 12:47:52 +02:00
#!/usr/bin/env python
2008-10-15 15:38:22 +00:00
"""
2024-01-03 23:11:52 +01:00
Copyright (c) 2006-2024 sqlmap developers (https://sqlmap.org/)
2017-10-11 14:50:46 +02:00
See the file 'LICENSE' for copying permission
2008-10-15 15:38:22 +00:00
"""
2015-09-12 15:13:30 +02:00
import binascii
2021-01-06 15:39:51 +01:00
import inspect
2013-02-19 09:46:51 +01:00
import logging
2020-10-29 13:51:11 +01:00
import os
2019-07-19 12:17:07 +02:00
import random
2008-10-15 15:38:22 +00:00
import re
2008-11-09 16:57:47 +00:00
import socket
2012-08-20 22:17:39 +02:00
import string
2014-12-07 16:11:07 +01:00
import struct
2021-01-06 15:39:51 +01:00
import sys
2008-11-09 16:57:47 +00:00
import time
2014-03-21 20:28:16 +01:00
import traceback
2008-10-15 15:38:22 +00:00
2015-03-24 22:25:16 +08:00
try:
    import websocket
    from websocket import WebSocketException
except ImportError:
    # Optional third-party dependency: when the 'websocket' package is not
    # installed, provide a stub exception class so that
    # "except WebSocketException" clauses elsewhere remain syntactically
    # valid (they will simply never match a raised exception)
    class WebSocketException(Exception):
        pass
2008-10-15 15:38:22 +00:00
2010-10-29 16:11:50 +00:00
from lib . core . agent import agent
2011-10-23 17:02:48 +00:00
from lib . core . common import asciifyUrl
2010-12-07 19:19:12 +00:00
from lib . core . common import calculateDeltaSeconds
2020-10-29 13:51:11 +01:00
from lib . core . common import checkFile
2016-12-20 09:53:44 +01:00
from lib . core . common import checkSameHost
2019-03-19 14:07:39 +01:00
from lib . core . common import chunkSplitPostData
2010-12-21 01:09:39 +00:00
from lib . core . common import clearConsoleLine
2014-03-06 21:08:31 +01:00
from lib . core . common import dataToStdout
2018-04-11 15:19:44 +02:00
from lib . core . common import escapeJsonValue
2012-02-16 14:42:28 +00:00
from lib . core . common import evaluateCode
2011-03-29 14:16:28 +00:00
from lib . core . common import extractRegexResult
2019-03-29 02:28:16 +01:00
from lib . core . common import filterNone
2012-10-16 12:32:58 +02:00
from lib . core . common import findMultipartPostBoundary
2010-12-20 22:45:01 +00:00
from lib . core . common import getCurrentThreadData
2015-06-05 17:02:56 +02:00
from lib . core . common import getHeader
2011-11-11 11:28:27 +00:00
from lib . core . common import getHostHeader
2012-08-31 12:15:09 +02:00
from lib . core . common import getRequestHeader
2015-09-10 15:51:33 +02:00
from lib . core . common import getSafeExString
2010-11-08 11:22:47 +00:00
from lib . core . common import logHTTPTraffic
2019-08-02 20:29:52 +02:00
from lib . core . common import openFile
2013-05-17 16:04:05 +02:00
from lib . core . common import popValue
2022-03-07 20:17:51 +01:00
from lib . core . common import parseJson
2019-06-04 14:44:06 +02:00
from lib . core . common import pushValue
2011-08-29 12:50:52 +00:00
from lib . core . common import randomizeParameterValue
2013-02-28 20:20:08 +01:00
from lib . core . common import randomInt
from lib . core . common import randomStr
2010-11-16 10:42:42 +00:00
from lib . core . common import readInput
2011-02-25 09:22:44 +00:00
from lib . core . common import removeReflectiveValues
2017-10-10 16:08:13 +02:00
from lib . core . common import safeVariableNaming
2013-02-19 09:46:51 +01:00
from lib . core . common import singleTimeLogMessage
2011-06-08 14:35:23 +00:00
from lib . core . common import singleTimeWarnMessage
2010-12-08 11:26:54 +00:00
from lib . core . common import stdev
2022-02-10 22:30:17 +01:00
from lib . core . common import unArrayizeValue
2017-10-10 16:08:13 +02:00
from lib . core . common import unsafeVariableNaming
2013-02-12 17:01:47 +01:00
from lib . core . common import urldecode
2012-07-31 11:03:44 +02:00
from lib . core . common import urlencode
2019-06-04 14:44:06 +02:00
from lib . core . common import wasLastResponseDelayed
2019-05-02 16:54:54 +02:00
from lib . core . compat import patchHeaders
2019-03-28 16:04:38 +01:00
from lib . core . compat import xrange
2022-02-10 22:30:17 +01:00
from lib . core . convert import encodeBase64
2019-05-03 13:20:15 +02:00
from lib . core . convert import getBytes
2019-08-02 20:29:52 +02:00
from lib . core . convert import getText
2019-05-06 00:54:21 +02:00
from lib . core . convert import getUnicode
2020-12-10 22:47:29 +01:00
from lib . core . data import cmdLineOptions
2008-10-15 15:38:22 +00:00
from lib . core . data import conf
from lib . core . data import kb
from lib . core . data import logger
2018-12-10 14:53:11 +01:00
from lib . core . datatype import AttribDict
2018-04-01 12:45:47 +02:00
from lib . core . decorators import stackedmethod
2012-10-04 11:25:44 +02:00
from lib . core . dicts import POST_HINT_CONTENT_TYPES
2012-10-09 15:19:47 +02:00
from lib . core . enums import ADJUST_TIME_DELAY
2013-03-12 21:16:44 +01:00
from lib . core . enums import AUTH_TYPE
2011-12-26 12:24:39 +00:00
from lib . core . enums import CUSTOM_LOGGING
2018-10-26 12:08:04 +02:00
from lib . core . enums import HINT
2013-03-20 11:10:24 +01:00
from lib . core . enums import HTTP_HEADER
2010-11-08 09:44:32 +00:00
from lib . core . enums import HTTPMETHOD
2010-11-08 09:49:57 +00:00
from lib . core . enums import NULLCONNECTION
2011-05-26 21:54:19 +00:00
from lib . core . enums import PAYLOAD
2010-11-08 09:20:02 +00:00
from lib . core . enums import PLACE
2012-10-04 11:25:44 +02:00
from lib . core . enums import POST_HINT
2011-12-04 22:42:19 +00:00
from lib . core . enums import REDIRECTION
2018-12-21 11:29:57 +01:00
from lib . core . enums import WEB_PLATFORM
2012-12-06 14:14:19 +01:00
from lib . core . exception import SqlmapCompressionException
from lib . core . exception import SqlmapConnectionException
2014-11-05 10:03:19 +01:00
from lib . core . exception import SqlmapGenericException
2020-04-29 14:36:11 +02:00
from lib . core . exception import SqlmapSkipTargetException
2012-12-06 14:14:19 +01:00
from lib . core . exception import SqlmapSyntaxException
2014-10-23 11:23:53 +02:00
from lib . core . exception import SqlmapTokenException
2012-12-06 14:14:19 +01:00
from lib . core . exception import SqlmapValueException
2013-02-13 12:24:42 +01:00
from lib . core . settings import ASTERISK_MARKER
2016-02-05 12:00:57 +01:00
from lib . core . settings import BOUNDARY_BACKSLASH_MARKER
2012-10-04 11:25:44 +02:00
from lib . core . settings import DEFAULT_CONTENT_TYPE
2013-07-31 17:28:22 +02:00
from lib . core . settings import DEFAULT_COOKIE_DELIMITER
2012-12-10 11:55:31 +01:00
from lib . core . settings import DEFAULT_GET_POST_DELIMITER
2018-11-29 00:09:05 +01:00
from lib . core . settings import DEFAULT_USER_AGENT
2019-03-05 12:24:41 +01:00
from lib . core . settings import EVALCODE_ENCODED_PREFIX
2012-07-23 14:14:22 +02:00
from lib . core . settings import HTTP_ACCEPT_ENCODING_HEADER_VALUE
2019-06-04 14:44:06 +02:00
from lib . core . settings import HTTP_ACCEPT_HEADER_VALUE
from lib . core . settings import IPS_WAF_CHECK_PAYLOAD
from lib . core . settings import IS_WIN
2019-11-07 16:23:52 +01:00
from lib . core . settings import JAVASCRIPT_HREF_REGEX
2019-06-13 10:59:56 +02:00
from lib . core . settings import LARGE_READ_TRIM_MARKER
2020-10-29 13:51:11 +01:00
from lib . core . settings import LIVE_COOKIES_TIMEOUT
2019-06-13 10:58:21 +02:00
from lib . core . settings import MAX_CONNECTION_READ_SIZE
2012-10-02 13:36:15 +02:00
from lib . core . settings import MAX_CONNECTIONS_REGEX
2012-08-07 00:50:58 +02:00
from lib . core . settings import MAX_CONNECTION_TOTAL_SIZE
2016-10-02 11:13:40 +02:00
from lib . core . settings import MAX_CONSECUTIVE_CONNECTION_ERRORS
2016-09-27 14:03:59 +02:00
from lib . core . settings import MAX_MURPHY_SLEEP_TIME
2011-03-29 14:16:28 +00:00
from lib . core . settings import META_REFRESH_REGEX
2018-11-22 08:07:27 +01:00
from lib . core . settings import MAX_TIME_RESPONSES
2019-06-04 14:44:06 +02:00
from lib . core . settings import MIN_TIME_RESPONSES
2012-12-10 11:55:31 +01:00
from lib . core . settings import PAYLOAD_DELIMITER
2012-10-02 13:36:15 +02:00
from lib . core . settings import PERMISSION_DENIED_REGEX
2013-03-27 13:39:27 +01:00
from lib . core . settings import PLAIN_TEXT_CONTENT_TYPE
2016-01-09 17:32:19 +01:00
from lib . core . settings import RANDOM_INTEGER_MARKER
from lib . core . settings import RANDOM_STRING_MARKER
2014-02-26 09:30:37 +01:00
from lib . core . settings import REPLACEMENT_MARKER
2023-10-09 11:07:09 +02:00
from lib . core . settings import SAFE_HEX_MARKER
2014-08-20 23:42:40 +02:00
from lib . core . settings import TEXT_CONTENT_TYPE_REGEX
2011-10-25 09:53:44 +00:00
from lib . core . settings import UNENCODED_ORIGINAL_VALUE
2017-05-04 15:45:15 +02:00
from lib . core . settings import UNICODE_ENCODING
2011-04-19 14:50:09 +00:00
from lib . core . settings import URI_HTTP_HEADER
2012-04-06 08:42:36 +00:00
from lib . core . settings import WARN_TIME_STDEV
2019-11-28 13:53:47 +01:00
from lib . core . settings import WEBSOCKET_INITIAL_TIMEOUT
2021-03-03 23:28:27 +01:00
from lib . core . settings import YUGE_FACTOR
2010-01-02 02:02:12 +00:00
from lib . request . basic import decodePage
2008-10-15 15:38:22 +00:00
from lib . request . basic import forgeHeaders
2010-12-25 10:16:20 +00:00
from lib . request . basic import processResponse
2008-12-05 15:34:13 +00:00
from lib . request . comparison import comparison
2019-06-04 14:44:06 +02:00
from lib . request . direct import direct
2010-09-15 12:45:41 +00:00
from lib . request . methodrequest import MethodRequest
2019-09-11 14:05:25 +02:00
from lib . utils . safe2bin import safecharencode
2019-03-28 13:53:54 +01:00
from thirdparty import six
2019-03-11 14:36:01 +01:00
from thirdparty . odict import OrderedDict
2019-05-15 10:57:22 +02:00
from thirdparty . six import unichr as _unichr
2019-03-27 02:46:59 +01:00
from thirdparty . six . moves import http_client as _http_client
from thirdparty . six . moves import urllib as _urllib
2014-10-22 13:41:36 +02:00
from thirdparty . socks . socks import ProxyError
2008-10-15 15:38:22 +00:00
2012-12-06 10:42:53 +01:00
class Connect ( object ) :
2008-10-15 15:38:22 +00:00
"""
This class defines methods used to perform HTTP requests
"""
2008-12-04 17:40:03 +00:00
@staticmethod
2012-12-06 14:14:19 +01:00
def _getPageProxy ( * * kwargs ) :
2022-01-24 13:38:44 +01:00
try :
if ( len ( inspect . stack ( ) ) > sys . getrecursionlimit ( ) / / 2 ) : # Note: https://github.com/sqlmapproject/sqlmap/issues/4525
warnMsg = " unable to connect to the target URL "
raise SqlmapConnectionException ( warnMsg )
2022-06-29 15:30:34 +02:00
except ( TypeError , UnicodeError ) :
2022-01-24 13:38:44 +01:00
pass
2021-01-06 15:39:51 +01:00
2016-06-17 16:51:23 +02:00
try :
return Connect . getPage ( * * kwargs )
except RuntimeError :
return None , None , None
2008-12-04 17:40:03 +00:00
2011-06-19 09:57:41 +00:00
    @staticmethod
    def _retryProxy(**kwargs):
        """
        Retries the last request (via Connect._getPageProxy()) after a
        connection problem, bumping the per-thread retry counter, optionally
        rotating the proxy, and printing a one-time diagnostic hint about the
        most probable cause
        """

        threadData = getCurrentThreadData()
        threadData.retriesCount += 1

        # When a proxy list is in use and this thread has exhausted its
        # retries, rotate to another proxy (unless another thread is already
        # re-initializing the HTTP handlers)
        if conf.proxyList and threadData.retriesCount >= conf.retries and not kb.locks.handlers.locked():
            warnMsg = "changing proxy"
            logger.warning(warnMsg)

            conf.proxy = None
            threadData.retriesCount = 0

            # NOTE(review): setHTTPHandlers is not imported in the visible part
            # of this file — presumably a cross-referenced function injected at
            # runtime; confirm against the rest of the module
            setHTTPHandlers()

        if kb.testMode and kb.previousMethod == PAYLOAD.METHOD.TIME:
            # timed based payloads can cause web server unresponsiveness
            # if the injectable piece of code is some kind of JOIN-like query
            warnMsg = "most likely web server instance hasn't recovered yet "
            warnMsg += "from previous timed based payload. If the problem "
            warnMsg += "persists please wait for a few minutes and rerun "
            warnMsg += "without flag 'T' in option '--technique' "
            warnMsg += "(e.g. '--flush-session --technique=BEUS') or try to "
            warnMsg += "lower the value of option '--time-sec' (e.g. '--time-sec=2')"
            # singleTimeWarnMessage() deduplicates on the exact message text,
            # so this hint is printed at most once per run
            singleTimeWarnMessage(warnMsg)

        elif kb.originalPage is None:
            # No page was ever retrieved from the target — the problem is most
            # likely reachability (Tor not running, bad proxy, blocked agent)
            if conf.tor:
                warnMsg = "please make sure that you have "
                warnMsg += "Tor installed and running so "
                warnMsg += "you could successfully use "
                warnMsg += "switch '--tor' "
                if IS_WIN:
                    warnMsg += "(e.g. 'https://www.torproject.org/download/')"
                else:
                    warnMsg += "(e.g. 'https://help.ubuntu.com/community/Tor')"
            else:
                warnMsg = "if the problem persists please check that the provided "
                warnMsg += "target URL is reachable"
                # Suggest only the mitigations the user has not already enabled
                items = []
                if not conf.randomAgent:
                    items.append("switch '--random-agent'")
                if not any((conf.proxy, conf.proxyFile, conf.tor)):
                    items.append("proxy switches ('--proxy', '--proxy-file'...)")
                if items:
                    warnMsg += ". In case that it is, "
                    warnMsg += "you can try to rerun with "
                    warnMsg += " and/or ".join(items)

            singleTimeWarnMessage(warnMsg)

        elif conf.threads > 1:
            warnMsg = "if the problem persists please try to lower "
            warnMsg += "the number of used threads (option '--threads')"
            singleTimeWarnMessage(warnMsg)

        # Mark the request as a retry so getPage() can reset the counter logic
        kwargs['retrying'] = True

        return Connect._getPageProxy(**kwargs)
2011-06-19 09:57:41 +00:00
2012-04-06 08:42:36 +00:00
    @staticmethod
    def _connReadProxy(conn):
        """
        Reads the (raw bytes) response body from the given connection object,
        trimming overly large responses and transparently handling the
        compressed-response and --yuge cases. Returns bytes.
        """

        retVal = b""

        if not kb.dnsMode and conn:
            headers = conn.info()
            # Compressed (or non-text) responses are read in a single bounded
            # chunk, since they cannot be safely trimmed mid-stream
            if kb.pageCompress and headers and hasattr(headers, "getheader") and (headers.getheader(HTTP_HEADER.CONTENT_ENCODING, "").lower() in ("gzip", "deflate") or "text" not in headers.getheader(HTTP_HEADER.CONTENT_TYPE, "").lower()):
                retVal = conn.read(MAX_CONNECTION_TOTAL_SIZE)
                if len(retVal) == MAX_CONNECTION_TOTAL_SIZE:
                    # Hit the hard cap — turn compression off for subsequent
                    # requests and signal the caller to retry uncompressed
                    warnMsg = "large compressed response detected. Disabling compression"
                    singleTimeWarnMessage(warnMsg)
                    kb.pageCompress = False
                    raise SqlmapCompressionException
            else:
                # Plain-text responses are read in bounded chunks so that huge
                # payload dumps can be trimmed between the marker characters
                while True:
                    if not conn:
                        break
                    else:
                        try:
                            part = conn.read(MAX_CONNECTION_READ_SIZE)
                        except AssertionError:
                            # e.g. internal http.client state glitch — treat as EOF
                            part = b""

                    if len(part) == MAX_CONNECTION_READ_SIZE:
                        warnMsg = "large response detected. This could take a while"
                        singleTimeWarnMessage(warnMsg)
                        # Collapse everything between the payload stop/start
                        # marker chars into a single trim marker to keep memory
                        # usage bounded
                        part = re.sub(getBytes(r"(?si)%s.+?%s" % (kb.chars.stop, kb.chars.start)), getBytes("%s%s%s" % (kb.chars.stop, LARGE_READ_TRIM_MARKER, kb.chars.start)), part)
                        retVal += part
                    else:
                        # Short read means end of body
                        retVal += part
                        break

                    if len(retVal) > MAX_CONNECTION_TOTAL_SIZE:
                        warnMsg = "too large response detected. Automatically trimming it"
                        singleTimeWarnMessage(warnMsg)
                        break

        # --yuge: artificially inflate the response for testing purposes
        if conf.yuge:
            retVal = YUGE_FACTOR * retVal

        return retVal
2008-10-15 15:38:22 +00:00
@staticmethod
def getPage ( * * kwargs ) :
"""
2013-04-09 11:48:42 +02:00
This method connects to the target URL or proxy and returns
the target URL page content
2008-10-15 15:38:22 +00:00
"""
2015-07-10 16:10:24 +02:00
if conf . offline :
return None , None , None
2010-11-08 11:22:47 +00:00
2018-03-13 13:45:42 +01:00
url = kwargs . get ( " url " , None ) or conf . url
get = kwargs . get ( " get " , None )
post = kwargs . get ( " post " , None )
method = kwargs . get ( " method " , None )
cookie = kwargs . get ( " cookie " , None )
ua = kwargs . get ( " ua " , None ) or conf . agent
referer = kwargs . get ( " referer " , None ) or conf . referer
host = kwargs . get ( " host " , None ) or conf . host
direct_ = kwargs . get ( " direct " , False )
multipart = kwargs . get ( " multipart " , None )
silent = kwargs . get ( " silent " , False )
raise404 = kwargs . get ( " raise404 " , True )
timeout = kwargs . get ( " timeout " , None ) or conf . timeout
auxHeaders = kwargs . get ( " auxHeaders " , None )
response = kwargs . get ( " response " , False )
2016-10-13 23:17:54 +02:00
ignoreTimeout = kwargs . get ( " ignoreTimeout " , False ) or kb . ignoreTimeout or conf . ignoreTimeouts
2018-03-13 13:45:42 +01:00
refreshing = kwargs . get ( " refreshing " , False )
retrying = kwargs . get ( " retrying " , False )
crawling = kwargs . get ( " crawling " , False )
checking = kwargs . get ( " checking " , False )
skipRead = kwargs . get ( " skipRead " , False )
2019-01-09 16:26:11 +01:00
finalCode = kwargs . get ( " finalCode " , False )
2019-03-19 14:07:39 +01:00
chunked = kwargs . get ( " chunked " , False ) or conf . chunked
2015-05-11 10:56:10 +02:00
2019-05-24 10:58:47 +02:00
start = time . time ( )
if isinstance ( conf . delay , ( int , float ) ) and conf . delay > 0 :
time . sleep ( conf . delay )
threadData = getCurrentThreadData ( )
with kb . locks . request :
kb . requestCounter + = 1
threadData . lastRequestUID = kb . requestCounter
2020-12-26 23:04:48 +01:00
if conf . proxyFreq :
2022-12-13 23:52:04 +01:00
if kb . requestCounter % conf . proxyFreq == 0 :
2020-12-26 23:04:48 +01:00
conf . proxy = None
warnMsg = " changing proxy "
2022-06-22 12:04:34 +02:00
logger . warning ( warnMsg )
2020-12-26 23:04:48 +01:00
setHTTPHandlers ( )
2019-05-24 10:58:47 +02:00
if conf . dummy or conf . murphyRate and randomInt ( ) % conf . murphyRate == 0 :
if conf . murphyRate :
time . sleep ( randomInt ( ) % ( MAX_MURPHY_SLEEP_TIME + 1 ) )
page , headers , code = randomStr ( int ( randomInt ( ) ) , alphabet = [ _unichr ( _ ) for _ in xrange ( 256 ) ] ) , None , None if not conf . murphyRate else randomInt ( 3 )
threadData . lastPage = page
threadData . lastCode = code
return page , headers , code
2020-10-29 13:51:11 +01:00
if conf . liveCookies :
with kb . locks . liveCookies :
if not checkFile ( conf . liveCookies , raiseOnError = False ) or os . path . getsize ( conf . liveCookies ) == 0 :
warnMsg = " [ %s ] [WARNING] live cookies file ' %s ' is empty or non-existent. Waiting for timeout ( %d seconds) " % ( time . strftime ( " %X " ) , conf . liveCookies , LIVE_COOKIES_TIMEOUT )
dataToStdout ( warnMsg )
valid = False
for _ in xrange ( LIVE_COOKIES_TIMEOUT ) :
if checkFile ( conf . liveCookies , raiseOnError = False ) and os . path . getsize ( conf . liveCookies ) > 0 :
valid = True
break
else :
dataToStdout ( ' . ' )
time . sleep ( 1 )
dataToStdout ( " \n " )
if not valid :
errMsg = " problem occurred while loading cookies from file ' %s ' " % conf . liveCookies
raise SqlmapValueException ( errMsg )
cookie = openFile ( conf . liveCookies ) . read ( ) . strip ( )
cookie = re . sub ( r " (?i) \ ACookie: \ s* " , " " , cookie )
2016-09-02 14:14:17 +02:00
if multipart :
post = multipart
2019-05-24 15:01:43 +02:00
else :
if not post :
chunked = False
2019-03-19 14:07:39 +01:00
2019-05-24 15:01:43 +02:00
elif chunked :
post = _urllib . parse . unquote ( post )
post = chunkSplitPostData ( post )
2016-09-02 14:14:17 +02:00
2019-06-06 23:13:34 +02:00
webSocket = url . lower ( ) . startswith ( " ws " )
2011-05-22 07:46:09 +00:00
2019-03-27 02:46:59 +01:00
if not _urllib . parse . urlsplit ( url ) . netloc :
url = _urllib . parse . urljoin ( conf . url , url )
2011-05-24 05:26:51 +00:00
2011-05-22 07:46:09 +00:00
# flag to know if we are dealing with the same target host
2016-12-20 09:53:44 +01:00
target = checkSameHost ( url , conf . url )
2011-05-22 07:46:09 +00:00
2011-05-22 10:59:56 +00:00
if not retrying :
# Reset the number of connection retries
threadData . retriesCount = 0
2011-05-22 07:46:09 +00:00
# fix for known issue when urllib2 just skips the other part of provided
# url splitted with space char while urlencoding it in the later phase
url = url . replace ( " " , " % 20 " )
2011-04-30 13:20:05 +00:00
2017-05-21 22:52:27 +02:00
if " :// " not in url :
url = " http:// %s " % url
2013-04-30 17:46:26 +02:00
conn = None
2011-11-11 11:07:49 +00:00
page = None
2017-05-17 00:22:18 +02:00
code = None
status = None
2012-09-08 17:58:03 +02:00
2019-03-27 02:46:59 +01:00
_ = _urllib . parse . urlsplit ( url )
2017-07-04 12:14:17 +02:00
requestMsg = u " HTTP request [# %d ]: \r \n %s " % ( threadData . lastRequestUID , method or ( HTTPMETHOD . POST if post is not None else HTTPMETHOD . GET ) )
2017-06-18 13:19:11 +02:00
requestMsg + = getUnicode ( ( " %s %s " % ( _ . path or " / " , ( " ? %s " % _ . query ) if _ . query else " " ) ) if not any ( ( refreshing , crawling , checking ) ) else url )
2011-11-14 11:39:18 +00:00
responseMsg = u " HTTP response "
requestHeaders = u " "
2011-01-25 16:05:06 +00:00
responseHeaders = None
2011-11-14 11:39:18 +00:00
logHeaders = u " "
2012-03-14 14:31:41 +00:00
skipLogTraffic = False
2008-10-15 15:38:22 +00:00
2011-12-05 09:25:56 +00:00
raise404 = raise404 and not kb . ignoreNotFound
2011-10-23 20:19:42 +00:00
# support for non-latin (e.g. cyrillic) URLs as urllib/urllib2 doesn't
2011-10-23 17:02:48 +00:00
# support those by default
url = asciifyUrl ( url )
2008-10-15 15:38:22 +00:00
try :
2013-03-19 19:24:14 +01:00
socket . setdefaulttimeout ( timeout )
2010-06-09 14:40:36 +00:00
2012-12-06 11:15:05 +01:00
if direct_ :
2013-05-27 10:38:47 +02:00
if ' ? ' in url :
url , params = url . split ( ' ? ' , 1 )
2010-02-09 14:02:47 +00:00
params = urlencode ( params )
url = " %s ? %s " % ( url , params )
2010-06-09 14:40:36 +00:00
2017-05-07 23:12:42 +02:00
elif any ( ( refreshing , crawling , checking ) ) :
2011-05-27 16:26:00 +00:00
pass
2011-03-29 14:16:28 +00:00
2011-05-13 09:56:12 +00:00
elif target :
2019-06-06 23:45:30 +02:00
if conf . forceSSL :
url = re . sub ( r " (?i) \ A(http|ws): " , r " \ g<1>s: " , url )
2017-10-31 11:38:09 +01:00
url = re . sub ( r " (?i):80/ " , " :443/ " , url )
2013-04-24 12:35:39 +02:00
2012-07-14 11:01:30 +02:00
if PLACE . GET in conf . parameters and not get :
2010-11-08 08:02:36 +00:00
get = conf . parameters [ PLACE . GET ]
2010-06-09 14:40:36 +00:00
2013-06-04 00:05:25 +02:00
if not conf . skipUrlEncode :
get = urlencode ( get , limit = True )
2010-02-09 14:02:47 +00:00
if get :
2015-01-17 17:31:00 +01:00
if ' ? ' in url :
url = " %s %s %s " % ( url , DEFAULT_GET_POST_DELIMITER , get )
requestMsg + = " %s %s " % ( DEFAULT_GET_POST_DELIMITER , get )
else :
url = " %s ? %s " % ( url , get )
requestMsg + = " ? %s " % get
2010-06-10 14:42:17 +00:00
2014-11-21 09:41:39 +01:00
if PLACE . POST in conf . parameters and not post and method != HTTPMETHOD . GET :
2013-06-03 15:14:56 +02:00
post = conf . parameters [ PLACE . POST ]
2010-03-23 10:27:39 +00:00
2011-05-13 09:56:12 +00:00
elif get :
url = " %s ? %s " % ( url , get )
requestMsg + = " ? %s " % get
2019-03-27 02:46:59 +01:00
requestMsg + = " %s " % _http_client . HTTPConnection . _http_vsn_str
2010-02-09 14:02:47 +00:00
2011-12-20 12:52:41 +00:00
# Prepare HTTP headers
2023-06-29 15:43:38 +02:00
headers = forgeHeaders ( { HTTP_HEADER . COOKIE : cookie , HTTP_HEADER . USER_AGENT : ua , HTTP_HEADER . REFERER : referer , HTTP_HEADER . HOST : getHeader ( dict ( conf . httpHeaders ) , HTTP_HEADER . HOST ) or getHostHeader ( url ) } , base = None if target else { } )
2010-09-15 12:45:41 +00:00
2015-12-03 01:43:37 +01:00
if HTTP_HEADER . COOKIE in headers :
cookie = headers [ HTTP_HEADER . COOKIE ]
2010-10-18 08:54:08 +00:00
if kb . authHeader :
2013-03-20 11:10:24 +01:00
headers [ HTTP_HEADER . AUTHORIZATION ] = kb . authHeader
2010-10-18 08:54:08 +00:00
2010-10-18 09:02:56 +00:00
if kb . proxyAuthHeader :
2013-03-20 11:10:24 +01:00
headers [ HTTP_HEADER . PROXY_AUTHORIZATION ] = kb . proxyAuthHeader
2010-10-18 09:02:56 +00:00
2018-11-29 00:09:05 +01:00
if not conf . requestFile or not target :
if not getHeader ( headers , HTTP_HEADER . ACCEPT ) :
headers [ HTTP_HEADER . ACCEPT ] = HTTP_ACCEPT_HEADER_VALUE
2015-03-20 00:56:36 +01:00
2018-11-29 00:09:05 +01:00
if not getHeader ( headers , HTTP_HEADER . ACCEPT_ENCODING ) :
headers [ HTTP_HEADER . ACCEPT_ENCODING ] = HTTP_ACCEPT_ENCODING_HEADER_VALUE if kb . pageCompress else " identity "
elif conf . requestFile and getHeader ( headers , HTTP_HEADER . USER_AGENT ) == DEFAULT_USER_AGENT :
for header in headers :
if header . upper ( ) == HTTP_HEADER . USER_AGENT . upper ( ) :
del headers [ header ]
break
2015-03-03 14:37:36 +01:00
2016-09-02 14:14:17 +02:00
if post is not None and not multipart and not getHeader ( headers , HTTP_HEADER . CONTENT_TYPE ) :
2022-02-10 22:30:17 +01:00
headers [ HTTP_HEADER . CONTENT_TYPE ] = POST_HINT_CONTENT_TYPES . get ( kb . postHint , DEFAULT_CONTENT_TYPE if unArrayizeValue ( conf . base64Parameter ) != HTTPMETHOD . POST else PLAIN_TEXT_CONTENT_TYPE )
2012-10-04 11:25:44 +02:00
2013-03-20 11:10:24 +01:00
if headers . get ( HTTP_HEADER . CONTENT_TYPE ) == POST_HINT_CONTENT_TYPES [ POST_HINT . MULTIPART ] :
warnMsg = " missing ' boundary parameter ' in ' %s ' header. " % HTTP_HEADER . CONTENT_TYPE
2012-10-16 12:32:58 +02:00
warnMsg + = " Will try to reconstruct "
singleTimeWarnMessage ( warnMsg )
boundary = findMultipartPostBoundary ( conf . data )
if boundary :
2013-03-20 11:10:24 +01:00
headers [ HTTP_HEADER . CONTENT_TYPE ] = " %s ; boundary= %s " % ( headers [ HTTP_HEADER . CONTENT_TYPE ] , boundary )
2012-10-16 12:32:58 +02:00
2016-05-17 10:47:17 +02:00
if conf . keepAlive :
headers [ HTTP_HEADER . CONNECTION ] = " keep-alive "
2019-04-19 13:54:48 +02:00
2019-03-19 20:26:29 +08:00
if chunked :
2019-03-19 14:07:39 +01:00
headers [ HTTP_HEADER . TRANSFER_ENCODING ] = " chunked "
2016-05-17 10:47:17 +02:00
2010-09-16 08:43:10 +00:00
if auxHeaders :
2017-08-16 03:08:58 +02:00
headers = forgeHeaders ( auxHeaders , headers )
2010-09-16 08:43:10 +00:00
2019-08-02 20:29:52 +02:00
if kb . headersFile :
content = openFile ( kb . headersFile , " rb " ) . read ( )
for line in content . split ( " \n " ) :
line = getText ( line . strip ( ) )
if ' : ' in line :
header , value = line . split ( ' : ' , 1 )
headers [ header ] = value
2021-02-21 22:49:57 +01:00
if conf . localhost :
headers [ HTTP_HEADER . HOST ] = " localhost "
2019-05-02 00:45:44 +02:00
for key , value in list ( headers . items ( ) ) :
2022-07-20 20:56:40 +02:00
if key . upper ( ) == HTTP_HEADER . ACCEPT_ENCODING . upper ( ) :
value = re . sub ( r " (?i)(,)br(,)? " , lambda match : ' , ' if match . group ( 1 ) and match . group ( 2 ) else " " , value ) or " identity "
2011-02-25 09:43:04 +00:00
del headers [ key ]
2019-05-05 23:37:48 +02:00
if isinstance ( value , six . string_types ) :
for char in ( r " \ r " , r " \ n " ) :
value = re . sub ( r " ( %s )([^ \ t]) " % char , r " \ g<1> \ t \ g<2> " , value )
2019-05-07 15:49:03 +02:00
headers [ getBytes ( key ) if six . PY2 else key ] = getBytes ( value . strip ( " \r \n " ) ) # Note: Python3 has_header() expects non-bytes value
2010-12-28 14:40:34 +00:00
2019-05-03 16:03:08 +02:00
if six . PY2 :
url = getBytes ( url ) # Note: Python3 requires text while Python2 has problems when mixing text with binary POST
2019-06-06 23:13:34 +02:00
if webSocket :
2015-03-24 18:21:50 +08:00
ws = websocket . WebSocket ( )
2019-11-28 13:53:47 +01:00
ws . settimeout ( WEBSOCKET_INITIAL_TIMEOUT if kb . webSocketRecvCount is None else timeout )
2015-05-11 10:56:10 +02:00
ws . connect ( url , header = ( " %s : %s " % _ for _ in headers . items ( ) if _ [ 0 ] not in ( " Host " , ) ) , cookie = cookie ) # WebSocket will add Host field of headers automatically
ws . send ( urldecode ( post or " " ) )
2019-11-28 13:53:47 +01:00
_page = [ ]
if kb . webSocketRecvCount is None :
while True :
try :
_page . append ( ws . recv ( ) )
except websocket . WebSocketTimeoutException :
kb . webSocketRecvCount = len ( _page )
break
else :
for i in xrange ( max ( 1 , kb . webSocketRecvCount ) ) :
_page . append ( ws . recv ( ) )
page = " \n " . join ( _page )
2015-03-24 18:21:50 +08:00
ws . close ( )
2015-05-11 10:56:10 +02:00
code = ws . status
2019-03-27 13:33:46 +01:00
status = _http_client . responses [ code ]
2018-06-09 23:38:00 +02:00
2015-05-11 10:56:10 +02:00
class _ ( dict ) :
pass
2018-06-09 23:38:00 +02:00
2015-05-11 10:56:10 +02:00
responseHeaders = _ ( ws . getheaders ( ) )
responseHeaders . headers = [ " %s : %s \r \n " % ( _ [ 0 ] . capitalize ( ) , _ [ 1 ] ) for _ in responseHeaders . items ( ) ]
2023-05-22 11:23:04 +02:00
requestHeaders + = " \r \n " . join ( [ " %s : %s " % ( u " - " . join ( _ . capitalize ( ) for _ in getUnicode ( key ) . split ( u ' - ' ) ) if hasattr ( key , " capitalize " ) else getUnicode ( key ) , getUnicode ( value ) ) for ( key , value ) in responseHeaders . items ( ) ] )
2017-07-04 12:14:17 +02:00
requestMsg + = " \r \n %s " % requestHeaders
2015-05-11 10:56:10 +02:00
if post is not None :
2017-07-04 12:14:17 +02:00
requestMsg + = " \r \n \r \n %s " % getUnicode ( post )
2015-03-24 17:19:37 +08:00
2017-07-04 12:14:17 +02:00
requestMsg + = " \r \n "
2015-05-11 10:56:10 +02:00
threadData . lastRequestMsg = requestMsg
logger . log ( CUSTOM_LOGGING . TRAFFIC_OUT , requestMsg )
2010-09-15 12:45:41 +00:00
else :
2021-02-08 11:18:27 +01:00
post = getBytes ( post )
2022-02-10 22:30:17 +01:00
if unArrayizeValue ( conf . base64Parameter ) == HTTPMETHOD . POST :
if kb . place != HTTPMETHOD . POST :
conf . data = getattr ( conf . data , UNENCODED_ORIGINAL_VALUE , conf . data )
else :
post = urldecode ( post , convall = True )
post = encodeBase64 ( post )
2020-12-10 22:47:29 +01:00
if target and cmdLineOptions . method or method and method not in ( HTTPMETHOD . GET , HTTPMETHOD . POST ) :
2015-05-11 10:56:10 +02:00
req = MethodRequest ( url , post , headers )
2020-12-10 22:47:29 +01:00
req . set_method ( cmdLineOptions . method or method )
2018-04-24 19:45:53 +02:00
elif url is not None :
2019-03-27 02:46:59 +01:00
req = _urllib . request . Request ( url , post , headers )
2018-04-24 19:45:53 +02:00
else :
return None , None , None
2008-12-04 17:40:03 +00:00
2020-09-21 17:04:44 +02:00
for function in kb . preprocessFunctions :
try :
function ( req )
except Exception as ex :
errMsg = " error occurred while running preprocess "
errMsg + = " function ' %s ' ( ' %s ' ) " % ( function . __name__ , getSafeExString ( ex ) )
raise SqlmapGenericException ( errMsg )
else :
post , headers = req . data , req . headers
2023-05-22 11:23:04 +02:00
requestHeaders + = " \r \n " . join ( [ " %s : %s " % ( u " - " . join ( _ . capitalize ( ) for _ in getUnicode ( key ) . split ( u ' - ' ) ) if hasattr ( key , " capitalize " ) else getUnicode ( key ) , getUnicode ( value ) ) for ( key , value ) in req . header_items ( ) ] )
2010-10-28 23:22:13 +00:00
2015-05-11 10:56:10 +02:00
if not getRequestHeader ( req , HTTP_HEADER . COOKIE ) and conf . cj :
conf . cj . _policy . _now = conf . cj . _now = int ( time . time ( ) )
2022-10-21 19:10:43 +02:00
with conf . cj . _cookies_lock :
cookies = conf . cj . _cookies_for_request ( req )
requestHeaders + = " \r \n %s " % ( " Cookie: %s " % " ; " . join ( " %s = %s " % ( getUnicode ( cookie . name ) , getUnicode ( cookie . value ) ) for cookie in cookies ) )
2010-10-28 23:22:13 +00:00
2015-05-11 10:56:10 +02:00
if post is not None :
2019-03-19 20:26:29 +08:00
if not getRequestHeader ( req , HTTP_HEADER . CONTENT_LENGTH ) and not chunked :
2017-07-04 12:14:17 +02:00
requestHeaders + = " \r \n %s : %d " % ( string . capwords ( HTTP_HEADER . CONTENT_LENGTH ) , len ( post ) )
2012-08-20 22:17:39 +02:00
2015-05-11 10:56:10 +02:00
if not getRequestHeader ( req , HTTP_HEADER . CONNECTION ) :
2017-07-04 12:14:17 +02:00
requestHeaders + = " \r \n %s : %s " % ( HTTP_HEADER . CONNECTION , " close " if not conf . keepAlive else " keep-alive " )
2010-10-28 23:22:13 +00:00
2017-07-04 12:14:17 +02:00
requestMsg + = " \r \n %s " % requestHeaders
2008-10-15 15:38:22 +00:00
2015-05-11 10:56:10 +02:00
if post is not None :
2017-07-04 12:14:17 +02:00
requestMsg + = " \r \n \r \n %s " % getUnicode ( post )
2008-10-15 15:38:22 +00:00
2019-03-19 20:26:29 +08:00
if not chunked :
requestMsg + = " \r \n "
2008-10-15 15:38:22 +00:00
2016-09-02 14:14:17 +02:00
if not multipart :
threadData . lastRequestMsg = requestMsg
2011-09-28 08:13:46 +00:00
2016-09-02 14:14:17 +02:00
logger . log ( CUSTOM_LOGGING . TRAFFIC_OUT , requestMsg )
2008-10-15 15:38:22 +00:00
2015-10-13 13:31:28 +02:00
if conf . cj :
for cookie in conf . cj :
if cookie . value is None :
cookie . value = " "
2015-12-15 11:29:37 +01:00
else :
for char in ( r " \ r " , r " \ n " ) :
cookie . value = re . sub ( r " ( %s )([^ \ t]) " % char , r " \ g<1> \ t \ g<2> " , cookie . value )
2015-10-13 13:31:28 +02:00
2019-03-27 02:46:59 +01:00
conn = _urllib . request . urlopen ( req )
2010-12-22 13:41:36 +00:00
2015-05-11 10:56:10 +02:00
if not kb . authHeader and getRequestHeader ( req , HTTP_HEADER . AUTHORIZATION ) and ( conf . authType or " " ) . lower ( ) == AUTH_TYPE . BASIC . lower ( ) :
2020-09-01 15:35:14 +02:00
kb . authHeader = getUnicode ( getRequestHeader ( req , HTTP_HEADER . AUTHORIZATION ) )
2010-10-18 08:54:08 +00:00
2015-05-11 10:56:10 +02:00
if not kb . proxyAuthHeader and getRequestHeader ( req , HTTP_HEADER . PROXY_AUTHORIZATION ) :
kb . proxyAuthHeader = getRequestHeader ( req , HTTP_HEADER . PROXY_AUTHORIZATION )
2010-10-18 09:02:56 +00:00
2015-05-11 10:56:10 +02:00
# Return response object
if response :
return conn , None , None
2010-06-10 14:42:17 +00:00
2015-05-11 10:56:10 +02:00
# Get HTTP response
2017-01-16 14:29:23 +01:00
if hasattr ( conn , " redurl " ) :
2021-01-12 13:21:51 +01:00
page = ( threadData . lastRedirectMsg [ 1 ] if kb . choices . redirect == REDIRECTION . NO else Connect . _connReadProxy ( conn ) ) if not skipRead else None
skipLogTraffic = kb . choices . redirect == REDIRECTION . NO
2019-01-09 16:26:11 +01:00
code = conn . redcode if not finalCode else code
2015-05-11 10:56:10 +02:00
else :
page = Connect . _connReadProxy ( conn ) if not skipRead else None
2011-12-21 22:59:23 +00:00
2017-03-12 09:52:37 +01:00
if conn :
2018-08-09 15:39:37 +02:00
code = ( code or conn . code ) if conn . code == kb . originalCode else conn . code # do not override redirection code (for comparison purposes)
2017-03-12 09:52:37 +01:00
responseHeaders = conn . info ( )
2021-06-15 21:04:51 +02:00
responseHeaders [ URI_HTTP_HEADER ] = conn . geturl ( ) if hasattr ( conn , " geturl " ) else url
2020-12-01 15:56:47 +01:00
2023-10-06 19:48:30 +02:00
if getattr ( conn , " redurl " , None ) is not None :
2020-12-01 15:56:47 +01:00
responseHeaders [ HTTP_HEADER . LOCATION ] = conn . redurl
2022-03-07 18:34:34 +01:00
responseHeaders = patchHeaders ( responseHeaders )
2018-09-04 23:01:17 +02:00
kb . serverHeader = responseHeaders . get ( HTTP_HEADER . SERVER , kb . serverHeader )
2017-03-12 09:52:37 +01:00
else :
code = None
responseHeaders = { }
2019-11-12 22:51:11 +01:00
page = decodePage ( page , responseHeaders . get ( HTTP_HEADER . CONTENT_ENCODING ) , responseHeaders . get ( HTTP_HEADER . CONTENT_TYPE ) , percentDecode = not crawling )
2018-04-06 01:13:04 +02:00
status = getUnicode ( conn . msg ) if conn and getattr ( conn , " msg " , None ) else None
2010-01-02 02:02:12 +00:00
2016-10-02 11:13:40 +02:00
kb . connErrorCounter = 0
2017-01-16 14:29:23 +01:00
if not refreshing :
2017-01-16 15:23:38 +01:00
refresh = responseHeaders . get ( HTTP_HEADER . REFRESH , " " ) . split ( " url= " ) [ - 1 ] . strip ( )
2011-03-29 14:16:28 +00:00
2017-01-16 14:29:23 +01:00
if extractRegexResult ( META_REFRESH_REGEX , page ) :
refresh = extractRegexResult ( META_REFRESH_REGEX , page )
2011-03-29 14:16:28 +00:00
2017-01-16 14:29:23 +01:00
debugMsg = " got HTML meta refresh header "
logger . debug ( debugMsg )
2011-05-27 22:42:23 +00:00
2019-11-07 16:23:52 +01:00
if not refresh :
refresh = extractRegexResult ( JAVASCRIPT_HREF_REGEX , page )
2019-11-08 23:28:51 +01:00
if refresh :
2020-02-07 10:12:33 +01:00
debugMsg = " got Javascript redirect logic "
2019-11-08 23:28:51 +01:00
logger . debug ( debugMsg )
2019-11-07 16:23:52 +01:00
2017-01-16 14:29:23 +01:00
if refresh :
if kb . alwaysRefresh is None :
2020-02-07 10:12:33 +01:00
msg = " got a refresh intent "
2019-11-07 16:23:52 +01:00
msg + = " (redirect like response common to login pages) to ' %s ' . " % refresh
2020-02-07 10:12:33 +01:00
msg + = " Do you want to apply it from now on? [Y/n] "
2011-05-27 22:42:23 +00:00
2017-04-18 15:48:05 +02:00
kb . alwaysRefresh = readInput ( msg , default = ' Y ' , boolean = True )
2017-01-16 14:29:23 +01:00
if kb . alwaysRefresh :
if re . search ( r " \ Ahttps?:// " , refresh , re . I ) :
url = refresh
else :
2019-03-27 02:46:59 +01:00
url = _urllib . parse . urljoin ( url , refresh )
2017-01-16 14:29:23 +01:00
threadData . lastRedirectMsg = ( threadData . lastRequestUID , page )
kwargs [ " refreshing " ] = True
kwargs [ " url " ] = url
kwargs [ " get " ] = None
kwargs [ " post " ] = None
try :
return Connect . _getPageProxy ( * * kwargs )
except SqlmapSyntaxException :
pass
2011-03-29 14:16:28 +00:00
2010-12-26 14:36:51 +00:00
# Explicit closing of connection object
2015-05-11 10:56:10 +02:00
if conn and not conf . keepAlive :
2010-12-26 14:36:51 +00:00
try :
2011-10-21 09:06:00 +00:00
if hasattr ( conn . fp , ' _sock ' ) :
conn . fp . _sock . close ( )
2010-12-26 14:36:51 +00:00
conn . close ( )
2019-01-22 00:40:48 +01:00
except Exception as ex :
2015-09-10 15:51:33 +02:00
warnMsg = " problem occurred during connection closing ( ' %s ' ) " % getSafeExString ( ex )
2022-06-22 12:04:34 +02:00
logger . warning ( warnMsg )
2010-12-26 14:36:51 +00:00
2019-01-22 00:40:48 +01:00
except SqlmapConnectionException as ex :
2017-08-28 11:08:36 +02:00
if conf . proxyList and not kb . threadException :
2021-09-02 21:44:41 +02:00
warnMsg = " unable to connect to the target URL ( ' %s ' ) " % getSafeExString ( ex )
2017-08-28 11:08:36 +02:00
logger . critical ( warnMsg )
threadData . retriesCount = conf . retries
return Connect . _retryProxy ( * * kwargs )
2017-08-23 13:52:51 +02:00
else :
raise
2019-03-27 02:46:59 +01:00
except _urllib . error . HTTPError as ex :
2011-01-03 22:02:58 +00:00
page = None
2011-01-25 16:05:06 +00:00
responseHeaders = None
2011-01-31 22:51:14 +00:00
2017-05-07 23:12:42 +02:00
if checking :
return None , None , None
2010-11-17 12:16:48 +00:00
try :
2015-09-10 15:51:33 +02:00
page = ex . read ( ) if not skipRead else None
responseHeaders = ex . info ( )
responseHeaders [ URI_HTTP_HEADER ] = ex . geturl ( )
2022-03-07 18:34:34 +01:00
responseHeaders = patchHeaders ( responseHeaders )
2019-11-12 22:51:11 +01:00
page = decodePage ( page , responseHeaders . get ( HTTP_HEADER . CONTENT_ENCODING ) , responseHeaders . get ( HTTP_HEADER . CONTENT_TYPE ) , percentDecode = not crawling )
2010-11-17 12:16:48 +00:00
except socket . timeout :
2011-04-30 13:20:05 +00:00
warnMsg = " connection timed out while trying "
2015-09-10 15:51:33 +02:00
warnMsg + = " to get error page information ( %d ) " % ex . code
2022-06-22 12:04:34 +02:00
logger . warning ( warnMsg )
2011-08-12 16:48:11 +00:00
return None , None , None
2012-01-16 10:04:18 +00:00
except KeyboardInterrupt :
raise
2010-11-17 12:16:48 +00:00
except :
pass
2012-01-16 10:04:18 +00:00
finally :
2019-03-29 02:28:16 +01:00
page = getUnicode ( page )
2010-11-17 12:16:48 +00:00
2015-09-10 15:51:33 +02:00
code = ex . code
2019-05-06 12:19:27 +02:00
status = getUnicode ( getattr ( ex , " reason " , None ) or getSafeExString ( ex ) . split ( " : " , 1 ) [ - 1 ] )
2011-01-03 22:02:58 +00:00
2014-12-03 13:22:55 +01:00
kb . originalCode = kb . originalCode or code
2017-05-17 00:22:18 +02:00
threadData . lastHTTPError = ( threadData . lastRequestUID , code , status )
2012-03-15 11:10:58 +00:00
kb . httpErrorCodes [ code ] = kb . httpErrorCodes . get ( code , 0 ) + 1
2011-01-03 22:02:58 +00:00
2018-11-04 14:17:53 +01:00
responseMsg + = " [# %d ] ( %s %s ): \r \n " % ( threadData . lastRequestUID , code , status )
2011-01-03 22:02:58 +00:00
2023-02-27 18:21:35 +01:00
if responseHeaders and getattr ( responseHeaders , " headers " , None ) :
2020-12-07 11:42:46 +01:00
logHeaders = " " . join ( getUnicode ( responseHeaders . headers ) ) . strip ( )
2011-01-03 22:02:58 +00:00
2019-06-13 10:58:21 +02:00
logHTTPTraffic ( requestMsg , " %s %s \r \n \r \n %s " % ( responseMsg , logHeaders , ( page or " " ) [ : MAX_CONNECTION_READ_SIZE ] ) , start , time . time ( ) )
2010-11-17 12:04:33 +00:00
2012-03-15 14:51:16 +00:00
skipLogTraffic = True
2010-12-22 13:41:36 +00:00
if conf . verbose < = 5 :
responseMsg + = getUnicode ( logHeaders )
elif conf . verbose > 5 :
2019-06-13 10:58:21 +02:00
responseMsg + = " %s \r \n \r \n %s " % ( logHeaders , ( page or " " ) [ : MAX_CONNECTION_READ_SIZE ] )
2010-12-22 13:41:36 +00:00
2016-09-02 14:14:17 +02:00
if not multipart :
logger . log ( CUSTOM_LOGGING . TRAFFIC_IN , responseMsg )
2010-12-22 13:41:36 +00:00
2023-01-24 12:00:23 +01:00
if code in conf . abortCode :
errMsg = " aborting due to detected HTTP code ' %d ' " % code
singleTimeLogMessage ( errMsg , logging . CRITICAL )
raise SystemExit
2019-07-17 13:20:24 +02:00
if ex . code not in ( conf . ignoreCode or [ ] ) :
2019-03-27 02:46:59 +01:00
if ex . code == _http_client . UNAUTHORIZED :
2017-08-23 13:17:37 +02:00
errMsg = " not authorized, try to provide right HTTP "
2021-02-25 11:22:10 +01:00
errMsg + = " authentication type and valid credentials ( %d ). " % code
errMsg + = " If this is intended, try to rerun by providing "
errMsg + = " a valid value for option ' --ignore-code ' "
2013-01-03 23:20:55 +01:00
raise SqlmapConnectionException ( errMsg )
2019-04-18 10:24:38 +02:00
elif chunked and ex . code in ( _http_client . METHOD_NOT_ALLOWED , _http_client . LENGTH_REQUIRED ) :
2019-04-18 10:36:41 +02:00
warnMsg = " turning off HTTP chunked transfer encoding "
warnMsg + = " as it seems that the target site doesn ' t support it ( %d ) " % code
singleTimeWarnMessage ( warnMsg )
conf . chunked = kwargs [ " chunked " ] = False
return Connect . getPage ( * * kwargs )
2021-09-15 11:14:43 +02:00
elif ex . code == _http_client . REQUEST_URI_TOO_LONG :
warnMsg = " request URI is marked as too long by the target. "
warnMsg + = " you are advised to try a switch ' --no-cast ' and/or ' --no-escape ' "
singleTimeWarnMessage ( warnMsg )
2019-03-27 02:46:59 +01:00
elif ex . code == _http_client . NOT_FOUND :
2017-08-23 13:17:37 +02:00
if raise404 :
errMsg = " page not found ( %d ) " % code
raise SqlmapConnectionException ( errMsg )
2011-06-19 09:57:41 +00:00
else :
2017-08-23 13:17:37 +02:00
debugMsg = " page not found ( %d ) " % code
singleTimeLogMessage ( debugMsg , logging . DEBUG )
2019-03-27 02:46:59 +01:00
elif ex . code == _http_client . GATEWAY_TIMEOUT :
2017-08-23 13:17:37 +02:00
if ignoreTimeout :
return None if not conf . ignoreTimeouts else " " , None , None
else :
2019-03-27 13:33:46 +01:00
warnMsg = " unable to connect to the target URL ( %d - %s ) " % ( ex . code , _http_client . responses [ ex . code ] )
2017-08-23 13:17:37 +02:00
if threadData . retriesCount < conf . retries and not kb . threadException :
warnMsg + = " . sqlmap is going to retry the request "
logger . critical ( warnMsg )
return Connect . _retryProxy ( * * kwargs )
elif kb . testMode :
logger . critical ( warnMsg )
return None , None , None
else :
raise SqlmapConnectionException ( warnMsg )
else :
2019-11-09 00:54:47 +01:00
debugMsg = " got HTTP error code: %d ( ' %s ' ) " % ( code , status )
2017-08-23 13:17:37 +02:00
logger . debug ( debugMsg )
2010-01-19 10:27:54 +00:00
2023-06-02 11:25:07 +02:00
except ( _urllib . error . URLError , socket . error , socket . timeout , _http_client . HTTPException , struct . error , binascii . Error , ProxyError , SqlmapCompressionException , WebSocketException , TypeError , ValueError , OverflowError , AttributeError , OSError , AssertionError , KeyError ) :
2008-12-04 17:40:03 +00:00
tbMsg = traceback . format_exc ( )
2010-12-21 10:31:56 +00:00
2019-05-08 12:28:50 +02:00
if conf . debug :
dataToStdout ( tbMsg )
2017-05-07 23:12:42 +02:00
if checking :
return None , None , None
2023-06-02 11:25:07 +02:00
elif " KeyError: " in tbMsg :
if " content-length " in tbMsg :
return None , None , None
else :
raise
2021-03-28 19:58:06 +02:00
elif " AttributeError: " in tbMsg :
if " WSAECONNREFUSED " in tbMsg :
return None , None , None
else :
raise
2017-05-07 23:12:42 +02:00
elif " no host given " in tbMsg :
2013-04-09 11:48:42 +02:00
warnMsg = " invalid URL address used ( %s ) " % repr ( url )
2013-01-03 23:20:55 +01:00
raise SqlmapSyntaxException ( warnMsg )
2022-06-29 15:30:34 +02:00
elif any ( _ in tbMsg for _ in ( " forcibly closed " , " Connection is already closed " , " ConnectionAbortedError " ) ) :
2013-04-09 11:48:42 +02:00
warnMsg = " connection was forcibly closed by the target URL "
2011-01-03 13:04:20 +00:00
elif " timed out " in tbMsg :
2014-09-08 14:33:13 +02:00
if kb . testMode and kb . testType not in ( None , PAYLOAD . TECHNIQUE . TIME , PAYLOAD . TECHNIQUE . STACKED ) :
2018-09-14 10:01:31 +02:00
singleTimeWarnMessage ( " there is a possibility that the target (or WAF/IPS) is dropping ' suspicious ' requests " )
2017-05-26 14:14:35 +02:00
kb . droppingRequests = True
2013-04-09 11:48:42 +02:00
warnMsg = " connection timed out to the target URL "
2017-03-30 12:05:05 +02:00
elif " Connection reset " in tbMsg :
if not conf . disablePrecon :
singleTimeWarnMessage ( " turning off pre-connect mechanism because of connection reset(s) " )
conf . disablePrecon = True
if kb . testMode :
2018-09-14 10:01:31 +02:00
singleTimeWarnMessage ( " there is a possibility that the target (or WAF/IPS) is resetting ' suspicious ' requests " )
2017-05-26 14:14:35 +02:00
kb . droppingRequests = True
2017-03-30 12:05:05 +02:00
warnMsg = " connection reset to the target URL "
2010-12-11 21:28:11 +00:00
elif " URLError " in tbMsg or " error " in tbMsg :
2013-04-09 11:48:42 +02:00
warnMsg = " unable to connect to the target URL "
2019-05-31 15:42:20 +02:00
match = re . search ( r " Errno \ d+ \ ] ([^> \ n]+) " , tbMsg )
2016-06-01 10:53:32 +02:00
if match :
2016-06-15 07:54:47 +02:00
warnMsg + = " ( ' %s ' ) " % match . group ( 1 ) . strip ( )
2014-12-07 16:14:48 +01:00
elif " NTLM " in tbMsg :
2014-12-07 16:11:07 +01:00
warnMsg = " there has been a problem with NTLM authentication "
2017-06-05 10:38:05 +02:00
elif " Invalid header name " in tbMsg : # (e.g. PostgreSQL ::Text payload)
return None , None , None
2008-12-04 17:40:03 +00:00
elif " BadStatusLine " in tbMsg :
2012-04-20 20:33:15 +00:00
warnMsg = " connection dropped or unknown HTTP "
2014-06-16 09:51:24 +02:00
warnMsg + = " status code received "
if not conf . agent and not conf . randomAgent :
warnMsg + = " . Try to force the HTTP User-Agent "
warnMsg + = " header with option ' --user-agent ' or switch ' --random-agent ' "
2010-11-12 22:57:33 +00:00
elif " IncompleteRead " in tbMsg :
warnMsg = " there was an incomplete read error while retrieving data "
2013-04-09 11:48:42 +02:00
warnMsg + = " from the target URL "
2015-03-24 18:21:50 +08:00
elif " Handshake status " in tbMsg :
2017-10-31 11:38:09 +01:00
status = re . search ( r " Handshake status ([ \ d] {3} ) " , tbMsg )
2015-05-11 11:01:21 +02:00
errMsg = " websocket handshake status %s " % status . group ( 1 ) if status else " unknown "
2015-03-24 18:21:50 +08:00
raise SqlmapConnectionException ( errMsg )
2018-03-19 00:33:30 +01:00
elif " SqlmapCompressionException " in tbMsg :
warnMsg = " problems with response (de)compression "
retrying = True
2009-12-31 12:34:18 +00:00
else :
2013-04-09 11:48:42 +02:00
warnMsg = " unable to connect to the target URL "
2009-12-31 12:34:18 +00:00
2016-06-15 07:57:10 +02:00
if " BadStatusLine " not in tbMsg and any ( ( conf . proxy , conf . tor ) ) :
2008-12-05 15:34:13 +00:00
warnMsg + = " or proxy "
2017-06-05 10:38:05 +02:00
if silent :
return None , None , None
2016-10-02 11:13:40 +02:00
with kb . locks . connError :
kb . connErrorCounter + = 1
2021-01-12 13:21:51 +01:00
if kb . connErrorCounter > = MAX_CONSECUTIVE_CONNECTION_ERRORS and kb . choices . connError is None :
2016-10-02 11:13:40 +02:00
message = " there seems to be a continuous problem with connection to the target. "
2019-08-21 15:19:42 +02:00
message + = " Are you sure that you want to continue? [y/N] "
2017-04-18 15:48:05 +02:00
2021-01-12 13:21:51 +01:00
kb . choices . connError = readInput ( message , default = ' N ' , boolean = True )
2016-10-02 11:13:40 +02:00
2021-01-12 13:21:51 +01:00
if kb . choices . connError is False :
2020-04-29 14:36:11 +02:00
raise SqlmapSkipTargetException
2016-10-02 11:13:40 +02:00
2017-06-05 10:38:05 +02:00
if " forcibly closed " in tbMsg :
2011-01-03 13:04:20 +00:00
logger . critical ( warnMsg )
2011-08-12 16:48:11 +00:00
return None , None , None
2019-08-21 15:29:51 +02:00
elif ignoreTimeout and any ( _ in tbMsg for _ in ( " timed out " , " IncompleteRead " , " Interrupted system call " ) ) :
2016-10-13 23:25:46 +02:00
return None if not conf . ignoreTimeouts else " " , None , None
2012-08-20 11:40:49 +02:00
elif threadData . retriesCount < conf . retries and not kb . threadException :
2012-10-04 18:28:36 +02:00
warnMsg + = " . sqlmap is going to retry the request "
2015-09-27 16:36:20 +02:00
if not retrying :
warnMsg + = " (s) "
logger . critical ( warnMsg )
else :
logger . debug ( warnMsg )
2012-12-06 14:14:19 +01:00
return Connect . _retryProxy ( * * kwargs )
2021-09-29 16:13:13 +02:00
elif kb . testMode or kb . multiThreadMode :
2011-05-22 10:59:56 +00:00
logger . critical ( warnMsg )
2011-08-12 16:48:11 +00:00
return None , None , None
2008-11-15 12:25:19 +00:00
else :
2013-01-03 23:20:55 +01:00
raise SqlmapConnectionException ( warnMsg )
2008-11-15 12:25:19 +00:00
2011-06-19 09:57:41 +00:00
finally :
2019-03-28 14:13:52 +01:00
if isinstance ( page , six . binary_type ) :
2014-08-20 23:42:40 +02:00
if HTTP_HEADER . CONTENT_TYPE in ( responseHeaders or { } ) and not re . search ( TEXT_CONTENT_TYPE_REGEX , responseHeaders [ HTTP_HEADER . CONTENT_TYPE ] ) :
2019-03-28 14:13:52 +01:00
page = six . text_type ( page , errors = " ignore " )
2014-08-20 23:42:40 +02:00
else :
page = getUnicode ( page )
2009-12-17 22:04:01 +00:00
2020-09-21 17:04:44 +02:00
for function in kb . postprocessFunctions :
2019-03-20 11:33:10 +01:00
try :
page , responseHeaders , code = function ( page , responseHeaders , code )
except Exception as ex :
2020-09-21 17:04:44 +02:00
errMsg = " error occurred while running postprocess "
2019-03-29 02:28:16 +01:00
errMsg + = " function ' %s ' ( ' %s ' ) " % ( function . __name__ , getSafeExString ( ex ) )
2019-03-20 11:33:10 +01:00
raise SqlmapGenericException ( errMsg )
2023-01-24 12:08:02 +01:00
for _ in ( getattr ( conn , " redcode " , None ) , code ) :
if _ is not None and _ in conf . abortCode :
errMsg = " aborting due to detected HTTP code ' %d ' " % _
singleTimeLogMessage ( errMsg , logging . CRITICAL )
raise SystemExit
2023-01-24 12:00:23 +01:00
2019-03-20 11:33:10 +01:00
threadData . lastPage = page
threadData . lastCode = code
socket . setdefaulttimeout ( conf . timeout )
2019-03-04 15:24:12 +01:00
2022-05-09 15:15:06 +02:00
# Dirty patch for Python3.11.0a7 (e.g. https://github.com/sqlmapproject/sqlmap/issues/5091)
if not sys . version . startswith ( " 3.11. " ) :
if conf . retryOn and re . search ( conf . retryOn , page , re . I ) :
if threadData . retriesCount < conf . retries :
warnMsg = " forced retry of the request because of undesired page content "
2022-06-22 12:04:34 +02:00
logger . warning ( warnMsg )
2022-05-09 15:15:06 +02:00
return Connect . _retryProxy ( * * kwargs )
2021-11-01 21:50:16 +01:00
2019-05-24 13:54:10 +02:00
processResponse ( page , responseHeaders , code , status )
2010-05-04 08:43:14 +00:00
2019-04-18 10:55:58 +02:00
if not skipLogTraffic :
if conn and getattr ( conn , " redurl " , None ) :
_ = _urllib . parse . urlsplit ( conn . redurl )
_ = ( " %s %s " % ( _ . path or " / " , ( " ? %s " % _ . query ) if _ . query else " " ) )
requestMsg = re . sub ( r " ( \ n[A-Z]+ ).+?( HTTP/ \ d) " , r " \ g<1> %s \ g<2> " % getUnicode ( _ ) . replace ( " \\ " , " \\ \\ " ) , requestMsg , 1 )
2015-06-16 12:00:56 +02:00
2019-04-18 10:55:58 +02:00
if kb . resendPostOnRedirect is False :
requestMsg = re . sub ( r " ( \ [# \ d+ \ ]: \ n)POST " , r " \ g<1>GET " , requestMsg )
requestMsg = re . sub ( r " (?i)Content-length: \ d+ \ n " , " " , requestMsg )
requestMsg = re . sub ( r " (?s) \ n \ n.+ " , " \n " , requestMsg )
2015-06-16 12:00:56 +02:00
2019-04-18 10:55:58 +02:00
responseMsg + = " [# %d ] ( %s %s ): \r \n " % ( threadData . lastRequestUID , conn . code , status )
elif " \n " not in responseMsg :
responseMsg + = " [# %d ] ( %s %s ): \r \n " % ( threadData . lastRequestUID , code , status )
2013-04-30 17:46:26 +02:00
2019-04-18 10:55:58 +02:00
if responseHeaders :
2020-12-07 11:42:46 +01:00
logHeaders = " " . join ( getUnicode ( responseHeaders . headers ) ) . strip ( )
2011-03-17 12:35:40 +00:00
2019-06-13 10:58:21 +02:00
logHTTPTraffic ( requestMsg , " %s %s \r \n \r \n %s " % ( responseMsg , logHeaders , ( page or " " ) [ : MAX_CONNECTION_READ_SIZE ] ) , start , time . time ( ) )
2010-05-04 08:43:14 +00:00
2019-04-18 10:55:58 +02:00
if conf . verbose < = 5 :
responseMsg + = getUnicode ( logHeaders )
elif conf . verbose > 5 :
2019-06-13 10:58:21 +02:00
responseMsg + = " %s \r \n \r \n %s " % ( logHeaders , ( page or " " ) [ : MAX_CONNECTION_READ_SIZE ] )
2010-05-04 08:43:14 +00:00
2019-04-18 10:55:58 +02:00
if not multipart :
logger . log ( CUSTOM_LOGGING . TRAFFIC_IN , responseMsg )
2008-10-15 15:38:22 +00:00
2011-08-12 16:48:11 +00:00
return page , responseHeaders , code
2008-10-15 15:38:22 +00:00
@staticmethod
2018-04-01 12:45:47 +02:00
@stackedmethod
2019-10-09 20:41:33 +03:00
def queryPage ( value = None , place = None , content = False , getRatioValue = False , silent = False , method = None , timeBasedCompare = False , noteResponseTime = True , auxHeaders = None , response = False , raise404 = None , removeReflection = True , disableTampering = False , ignoreSecondOrder = False ) :
2008-10-15 15:38:22 +00:00
"""
2013-04-09 11:48:42 +02:00
This method calls a function to get the target URL page content
2017-09-11 10:00:35 +02:00
and returns its page ratio (0 <= ratio <= 1) or a boolean value
representing False/True match in case of !getRatioValue
2008-10-15 15:38:22 +00:00
"""
2010-03-26 23:23:25 +00:00
if conf . direct :
2010-03-31 10:50:47 +00:00
return direct ( value , content )
2010-03-26 23:23:25 +00:00
2011-04-30 13:20:05 +00:00
get = None
post = None
cookie = None
ua = None
referer = None
2011-12-20 12:52:41 +00:00
host = None
2011-04-30 13:20:05 +00:00
page = None
pageLength = None
uri = None
2012-03-15 20:17:40 +00:00
code = None
2008-10-15 15:38:22 +00:00
if not place :
2011-07-06 05:44:47 +00:00
place = kb . injection . place or PLACE . GET
2022-02-10 22:30:17 +01:00
kb . place = place
2014-07-10 08:49:20 +02:00
if not auxHeaders :
auxHeaders = { }
2011-07-06 05:44:47 +00:00
raise404 = place != PLACE . URI if raise404 is None else raise404
2014-11-21 09:41:39 +01:00
method = method or conf . method
2010-10-14 11:06:28 +00:00
2017-11-19 02:51:29 +01:00
postUrlEncode = kb . postUrlEncode
2017-11-13 14:07:12 +01:00
2012-05-22 09:33:22 +00:00
value = agent . adjustLateValues ( value )
2010-11-07 21:55:24 +00:00
payload = agent . extractPayload ( value )
2010-12-20 22:45:01 +00:00
threadData = getCurrentThreadData ( )
2010-10-29 23:00:48 +00:00
2013-03-27 13:39:27 +01:00
if conf . httpHeaders :
2014-10-22 13:41:36 +02:00
headers = OrderedDict ( conf . httpHeaders )
2021-08-31 13:07:38 +02:00
contentType = max ( headers [ _ ] or " " if _ . upper ( ) == HTTP_HEADER . CONTENT_TYPE . upper ( ) else " " for _ in headers ) or None
2013-03-27 13:39:27 +01:00
2017-11-19 02:51:29 +01:00
if ( kb . postHint or conf . skipUrlEncode ) and postUrlEncode :
postUrlEncode = False
2022-09-25 16:34:40 +02:00
if not ( conf . skipUrlEncode and contentType ) : # NOTE: https://github.com/sqlmapproject/sqlmap/issues/5092
conf . httpHeaders = [ _ for _ in conf . httpHeaders if _ [ 1 ] != contentType ]
contentType = POST_HINT_CONTENT_TYPES . get ( kb . postHint , PLAIN_TEXT_CONTENT_TYPE )
conf . httpHeaders . append ( ( HTTP_HEADER . CONTENT_TYPE , contentType ) )
2023-11-12 20:38:47 +01:00
if " urlencoded " in contentType :
postUrlEncode = True
2012-08-31 12:38:02 +02:00
2010-11-07 21:55:24 +00:00
if payload :
2018-10-26 12:08:04 +02:00
delimiter = conf . paramDel or ( DEFAULT_GET_POST_DELIMITER if place != PLACE . COOKIE else DEFAULT_COOKIE_DELIMITER )
2018-04-11 14:48:54 +02:00
if not disableTampering and kb . tamperFunctions :
2010-10-29 16:11:50 +00:00
for function in kb . tamperFunctions :
2018-10-26 12:08:04 +02:00
hints = { }
2014-11-05 10:03:19 +01:00
try :
2018-10-26 12:08:04 +02:00
payload = function ( payload = payload , headers = auxHeaders , delimiter = delimiter , hints = hints )
2019-01-22 00:40:48 +01:00
except Exception as ex :
2014-11-05 10:03:19 +01:00
errMsg = " error occurred while running tamper "
2019-03-29 02:28:16 +01:00
errMsg + = " function ' %s ' ( ' %s ' ) " % ( function . __name__ , getSafeExString ( ex ) )
2014-11-05 10:03:19 +01:00
raise SqlmapGenericException ( errMsg )
2019-03-28 13:53:54 +01:00
if not isinstance ( payload , six . string_types ) :
2019-03-29 02:28:16 +01:00
errMsg = " tamper function ' %s ' returns " % function . __name__
2012-11-10 11:01:29 +01:00
errMsg + = " invalid payload type ( ' %s ' ) " % type ( payload )
2013-01-03 23:20:55 +01:00
raise SqlmapValueException ( errMsg )
2010-10-29 23:00:48 +00:00
2010-10-29 16:11:50 +00:00
value = agent . replacePayload ( value , payload )
2018-10-26 12:08:04 +02:00
if hints :
if HINT . APPEND in hints :
value = " %s %s %s " % ( value , delimiter , hints [ HINT . APPEND ] )
if HINT . PREPEND in hints :
2018-10-26 14:00:51 +02:00
if place == PLACE . URI :
match = re . search ( r " \ w+ \ s*= \ s* %s " % PAYLOAD_DELIMITER , value ) or re . search ( r " [^? %s /]= \ s* %s " % ( re . escape ( delimiter ) , PAYLOAD_DELIMITER ) , value )
if match :
value = value . replace ( match . group ( 0 ) , " %s %s %s " % ( hints [ HINT . PREPEND ] , delimiter , match . group ( 0 ) ) )
else :
value = " %s %s %s " % ( hints [ HINT . PREPEND ] , delimiter , value )
2018-10-26 12:08:04 +02:00
2016-02-05 12:00:57 +01:00
logger . log ( CUSTOM_LOGGING . PAYLOAD , safecharencode ( payload . replace ( ' \\ ' , BOUNDARY_BACKSLASH_MARKER ) ) . replace ( BOUNDARY_BACKSLASH_MARKER , ' \\ ' ) )
2010-11-07 21:18:09 +00:00
2013-12-04 10:09:54 +01:00
if place == PLACE . CUSTOM_POST and kb . postHint :
2012-10-04 18:44:12 +02:00
if kb . postHint in ( POST_HINT . SOAP , POST_HINT . XML ) :
# payloads in SOAP/XML should have chars > and < replaced
2012-10-04 11:25:44 +02:00
# with their HTML encoded counterparts
2023-10-09 11:07:09 +02:00
payload = payload . replace ( " &# " , SAFE_HEX_MARKER )
2019-11-14 11:49:30 +01:00
payload = payload . replace ( ' & ' , " & " ) . replace ( ' > ' , " > " ) . replace ( ' < ' , " < " ) . replace ( ' " ' , " " " ) . replace ( " ' " , " ' " ) # Reference: https://stackoverflow.com/a/1091953
2023-10-09 11:07:09 +02:00
payload = payload . replace ( SAFE_HEX_MARKER , " &# " )
2012-10-04 11:25:44 +02:00
elif kb . postHint == POST_HINT . JSON :
2018-04-11 15:19:44 +02:00
payload = escapeJsonValue ( payload )
2014-02-26 08:56:17 +01:00
elif kb . postHint == POST_HINT . JSON_LIKE :
2014-02-26 09:30:37 +01:00
payload = payload . replace ( " ' " , REPLACEMENT_MARKER ) . replace ( ' " ' , " ' " ) . replace ( REPLACEMENT_MARKER , ' " ' )
2018-04-11 15:19:44 +02:00
payload = escapeJsonValue ( payload )
2014-02-26 09:30:37 +01:00
payload = payload . replace ( " ' " , REPLACEMENT_MARKER ) . replace ( ' " ' , " ' " ) . replace ( REPLACEMENT_MARKER , ' " ' )
2012-09-22 20:59:40 +02:00
value = agent . replacePayload ( value , payload )
else :
2015-03-04 13:31:29 +01:00
# GET, POST, URI and Cookie payload needs to be thoroughly URL encoded
2018-11-28 00:29:17 +01:00
if ( place in ( PLACE . GET , PLACE . URI , PLACE . COOKIE ) or place == PLACE . CUSTOM_HEADER and value . split ( ' , ' ) [ 0 ] . upper ( ) == HTTP_HEADER . COOKIE . upper ( ) ) and not conf . skipUrlEncode or place in ( PLACE . POST , PLACE . CUSTOM_POST ) and postUrlEncode :
2016-05-30 17:47:08 +02:00
skip = False
2018-11-28 00:29:17 +01:00
if place == PLACE . COOKIE or place == PLACE . CUSTOM_HEADER and value . split ( ' , ' ) [ 0 ] . upper ( ) == HTTP_HEADER . COOKIE . upper ( ) :
2021-01-12 13:21:51 +01:00
if kb . choices . cookieEncode is None :
2016-05-30 17:47:08 +02:00
msg = " do you want to URL encode cookie values (implementation specific)? %s " % ( " [Y/n] " if not conf . url . endswith ( " .aspx " ) else " [y/N] " ) # Reference: https://support.microsoft.com/en-us/kb/313282
2021-01-12 13:21:51 +01:00
kb . choices . cookieEncode = readInput ( msg , default = ' Y ' if not conf . url . endswith ( " .aspx " ) else ' N ' , boolean = True )
if not kb . choices . cookieEncode :
2016-05-30 17:47:08 +02:00
skip = True
if not skip :
2018-03-11 02:46:37 +01:00
if place in ( PLACE . POST , PLACE . CUSTOM_POST ) : # potential problems in other cases (e.g. URL encoding of whole URI - including path)
value = urlencode ( value , spaceplus = kb . postSpaceToPlus )
payload = urlencode ( payload , safe = ' % ' , spaceplus = kb . postSpaceToPlus )
2016-05-30 17:47:08 +02:00
value = agent . replacePayload ( value , payload )
2017-11-19 02:51:29 +01:00
postUrlEncode = False
2012-05-10 13:39:54 +00:00
2012-12-10 11:55:31 +01:00
if conf . hpp :
2018-12-21 11:29:57 +01:00
if not any ( conf . url . lower ( ) . endswith ( _ . lower ( ) ) for _ in ( WEB_PLATFORM . ASP , WEB_PLATFORM . ASPX ) ) :
2012-12-10 11:55:31 +01:00
warnMsg = " HTTP parameter pollution should work only against "
warnMsg + = " ASP(.NET) targets "
singleTimeWarnMessage ( warnMsg )
if place in ( PLACE . GET , PLACE . POST ) :
_ = re . escape ( PAYLOAD_DELIMITER )
2017-10-31 11:38:09 +01:00
match = re . search ( r " (?P<name> \ w+)= %s (?P<value>.+?) %s " % ( _ , _ ) , value )
2012-12-10 11:55:31 +01:00
if match :
2012-12-10 13:05:41 +01:00
payload = match . group ( " value " )
2012-12-10 12:00:15 +01:00
for splitter in ( urlencode ( ' ' ) , ' ' ) :
2012-12-10 12:58:17 +01:00
if splitter in payload :
2012-12-10 12:00:15 +01:00
prefix , suffix = ( " */ " , " /* " ) if splitter == ' ' else ( urlencode ( _ ) for _ in ( " */ " , " /* " ) )
2012-12-10 13:07:36 +01:00
parts = payload . split ( splitter )
2012-12-10 12:00:15 +01:00
parts [ 0 ] = " %s %s " % ( parts [ 0 ] , suffix )
2012-12-10 12:54:01 +01:00
parts [ - 1 ] = " %s %s = %s %s " % ( DEFAULT_GET_POST_DELIMITER , match . group ( " name " ) , prefix , parts [ - 1 ] )
2012-12-10 12:00:15 +01:00
for i in xrange ( 1 , len ( parts ) - 1 ) :
2012-12-10 12:54:01 +01:00
parts [ i ] = " %s %s = %s %s %s " % ( DEFAULT_GET_POST_DELIMITER , match . group ( " name " ) , prefix , parts [ i ] , suffix )
2012-12-10 12:00:15 +01:00
payload = " " . join ( parts )
2012-12-10 13:05:41 +01:00
2012-12-10 12:58:17 +01:00
for splitter in ( urlencode ( ' , ' ) , ' , ' ) :
payload = payload . replace ( splitter , " %s %s = " % ( DEFAULT_GET_POST_DELIMITER , match . group ( " name " ) ) )
2012-12-10 13:05:41 +01:00
2012-12-10 12:58:17 +01:00
value = agent . replacePayload ( value , payload )
2012-12-10 11:55:31 +01:00
else :
warnMsg = " HTTP parameter pollution works only with regular "
warnMsg + = " GET and POST parameters "
singleTimeWarnMessage ( warnMsg )
2012-05-10 13:39:54 +00:00
if place :
2011-01-27 19:44:24 +00:00
value = agent . removePayloadDelimiters ( value )
2008-10-15 15:38:22 +00:00
2010-11-08 08:02:36 +00:00
if PLACE . GET in conf . parameters :
2011-08-29 12:50:52 +00:00
get = conf . parameters [ PLACE . GET ] if place != PLACE . GET or not value else value
2016-05-27 13:33:14 +02:00
elif place == PLACE . GET : # Note: for (e.g.) checkWaf() when there are no GET parameters
get = value
2008-10-15 15:38:22 +00:00
2010-11-08 08:02:36 +00:00
if PLACE . POST in conf . parameters :
2011-08-29 12:50:52 +00:00
post = conf . parameters [ PLACE . POST ] if place != PLACE . POST or not value else value
2016-05-27 13:33:14 +02:00
elif place == PLACE . POST :
post = value
2008-10-15 15:38:22 +00:00
2012-04-17 14:23:00 +00:00
if PLACE . CUSTOM_POST in conf . parameters :
2017-07-20 02:41:47 +02:00
post = conf . parameters [ PLACE . CUSTOM_POST ] . replace ( kb . customInjectionMark , " " ) if place != PLACE . CUSTOM_POST or not value else value
2013-02-13 12:24:42 +01:00
post = post . replace ( ASTERISK_MARKER , ' * ' ) if post else post
2012-04-17 14:23:00 +00:00
2010-11-08 08:02:36 +00:00
if PLACE . COOKIE in conf . parameters :
cookie = conf . parameters [ PLACE . COOKIE ] if place != PLACE . COOKIE or not value else value
2010-05-14 15:20:34 +00:00
2012-07-26 12:26:57 +02:00
if PLACE . USER_AGENT in conf . parameters :
ua = conf . parameters [ PLACE . USER_AGENT ] if place != PLACE . USER_AGENT or not value else value
2008-10-15 15:38:22 +00:00
2011-02-11 23:07:03 +00:00
if PLACE . REFERER in conf . parameters :
referer = conf . parameters [ PLACE . REFERER ] if place != PLACE . REFERER or not value else value
2011-12-20 12:52:41 +00:00
if PLACE . HOST in conf . parameters :
host = conf . parameters [ PLACE . HOST ] if place != PLACE . HOST or not value else value
2010-11-08 08:02:36 +00:00
if PLACE . URI in conf . parameters :
uri = conf . url if place != PLACE . URI or not value else value
2010-09-23 14:07:23 +00:00
else :
uri = conf . url
2010-09-22 11:56:35 +00:00
2013-01-25 12:41:51 +01:00
if value and place == PLACE . CUSTOM_HEADER :
2016-10-20 00:47:53 +02:00
if value . split ( ' , ' ) [ 0 ] . capitalize ( ) == PLACE . COOKIE :
2018-03-08 01:21:34 +01:00
cookie = value . split ( ' , ' , 1 ) [ - 1 ]
2016-10-20 00:47:53 +02:00
else :
2018-03-08 01:21:34 +01:00
auxHeaders [ value . split ( ' , ' ) [ 0 ] ] = value . split ( ' , ' , 1 ) [ - 1 ]
2013-01-13 16:22:43 +01:00
2014-10-23 11:23:53 +02:00
if conf . csrfToken :
2020-06-10 12:49:35 +02:00
token = AttribDict ( )
2014-10-23 11:23:53 +02:00
def _adjustParameter(paramString, parameter, newValue):
    """
    Replace the value of *parameter* inside *paramString* with *newValue*.

    Handles both regular "name=value" pairs and JSON-like '"name": "value"'
    entries. When the URL-encoded spelling of the parameter name is what
    actually appears in the string, that spelling is matched instead.
    """

    # Prefer the URL-encoded form of the name if that is what occurs
    if urlencode(parameter) in paramString:
        parameter = urlencode(parameter)

    # Regular "name=value" occurrence (query string / POST body)
    hit = re.search(r"%s=[^&]*" % re.escape(parameter), paramString, re.I)
    if hit:
        # Backslashes in the replacement must be doubled for re.sub
        replacement = ("%s=%s" % (parameter, newValue)).replace('\\', r'\\')
        return re.sub(r"(?i)%s" % re.escape(hit.group(0)), replacement, paramString)

    # JSON-like occurrence (e.g. "name":"value")
    hit = re.search(r"(%s[\"']:[\"'])([^\"']+)" % re.escape(parameter), paramString, re.I)
    if hit:
        return re.sub(r"(?i)%s" % re.escape(hit.group(0)), "%s%s" % (hit.group(1), newValue), paramString)

    # Parameter not found - leave the string untouched
    return paramString
2020-06-10 12:49:35 +02:00
for attempt in xrange ( conf . csrfRetries + 1 ) :
if token :
break
if attempt > 0 :
warnMsg = " unable to find anti-CSRF token ' %s ' at ' %s ' " % ( conf . csrfToken . _original , conf . csrfUrl or conf . url )
warnMsg + = " . sqlmap is going to retry the request "
2022-06-22 12:04:34 +02:00
logger . warning ( warnMsg )
2019-03-06 11:20:57 +01:00
2022-10-17 11:52:22 +02:00
page , headers , code = Connect . getPage ( url = conf . csrfUrl or conf . url , data = conf . csrfData or ( conf . data if conf . csrfUrl == conf . url else None ) , method = conf . csrfMethod or ( conf . method if conf . csrfUrl == conf . url else None ) , cookie = conf . parameters . get ( PLACE . COOKIE ) , direct = True , silent = True , ua = conf . parameters . get ( PLACE . USER_AGENT ) , referer = conf . parameters . get ( PLACE . REFERER ) , host = conf . parameters . get ( PLACE . HOST ) )
2020-06-10 12:49:35 +02:00
page = urldecode ( page ) # for anti-CSRF tokens with special characters in their name (e.g. 'foo:bar=...')
2014-10-23 11:23:53 +02:00
2020-06-10 12:49:35 +02:00
match = re . search ( r " (?i)<input[^>]+ \ bname=[ \" ' ]?(?P<name> %s ) \ b[^>]* \ bvalue=[ \" ' ]?(?P<value>[^> ' \" ]*) " % conf . csrfToken , page or " " , re . I )
2017-08-20 10:00:04 +02:00
2018-12-10 14:53:11 +01:00
if not match :
2020-06-10 12:49:35 +02:00
match = re . search ( r " (?i)<input[^>]+ \ bvalue=[ \" ' ]?(?P<value>[^> ' \" ]*)[ \" ' ]?[^>]* \ bname=[ \" ' ]?(?P<name> %s ) \ b " % conf . csrfToken , page or " " , re . I )
2016-05-26 16:08:59 +02:00
2018-12-10 14:53:11 +01:00
if not match :
2020-06-10 12:49:35 +02:00
match = re . search ( r " (?P<name> %s )[ \" ' ]:[ \" ' ](?P<value>[^ \" ' ]+) " % conf . csrfToken , page or " " , re . I )
2018-09-18 22:05:52 +02:00
2018-12-10 14:53:11 +01:00
if not match :
2020-12-04 11:45:40 +01:00
match = re . search ( r " \ b(?P<name> %s ) \ s*[:=] \ s*(?P<value> \ w+) " % conf . csrfToken , getUnicode ( headers ) , re . I )
2018-09-18 22:05:52 +02:00
2020-06-10 12:49:35 +02:00
if not match :
match = re . search ( r " \ b(?P<name> %s ) \ s*= \ s*[ ' \" ]?(?P<value>[^; ' \" ]+) " % conf . csrfToken , page or " " , re . I )
2018-09-18 22:05:52 +02:00
2020-09-06 23:32:47 +02:00
if not match :
match = re . search ( r " <meta \ s+name=[ \" ' ]?(?P<name> %s )[ \" ' ]?[^>]+ \ b(value|content)=[ \" ' ]?(?P<value>[^> \" ' ]+) " % conf . csrfToken , page or " " , re . I )
2018-12-10 14:53:11 +01:00
if match :
2020-06-10 12:49:35 +02:00
token . name , token . value = match . group ( " name " ) , match . group ( " value " )
2018-09-18 22:05:52 +02:00
2020-06-10 12:49:35 +02:00
match = re . search ( r " String \ .fromCharCode \ (([ \ d+, ]+) \ ) " , token . value )
if match :
token . value = " " . join ( _unichr ( int ( _ ) ) for _ in match . group ( 1 ) . replace ( ' ' , " " ) . split ( ' , ' ) )
2014-10-24 09:37:51 +02:00
2014-10-23 14:33:22 +02:00
if not token :
2020-06-10 12:49:35 +02:00
if conf . csrfUrl and conf . csrfToken and conf . csrfUrl != conf . url and code == _http_client . OK :
2022-02-10 22:30:17 +01:00
if headers and PLAIN_TEXT_CONTENT_TYPE in headers . get ( HTTP_HEADER . CONTENT_TYPE , " " ) :
2020-06-10 12:49:35 +02:00
token . name = conf . csrfToken
token . value = page
if not token and conf . cj and any ( re . search ( conf . csrfToken , _ . name , re . I ) for _ in conf . cj ) :
for _ in conf . cj :
if re . search ( conf . csrfToken , _ . name , re . I ) :
token . name , token . value = _ . name , _ . value
if not any ( re . search ( conf . csrfToken , ' ' . join ( _ ) , re . I ) for _ in ( conf . paramDict . get ( PLACE . GET , { } ) , conf . paramDict . get ( PLACE . POST , { } ) ) ) :
if post :
post = " %s %s %s = %s " % ( post , conf . paramDel or DEFAULT_GET_POST_DELIMITER , token . name , token . value )
elif get :
get = " %s %s %s = %s " % ( get , conf . paramDel or DEFAULT_GET_POST_DELIMITER , token . name , token . value )
else :
get = " %s = %s " % ( token . name , token . value )
break
if not token :
errMsg = " anti-CSRF token ' %s ' can ' t be found at ' %s ' " % ( conf . csrfToken . _original , conf . csrfUrl or conf . url )
if not conf . csrfUrl :
errMsg + = " . You can try to rerun by providing "
errMsg + = " a valid value for option ' --csrf-url ' "
raise SqlmapTokenException ( errMsg )
2014-10-23 11:23:53 +02:00
if token :
2018-12-10 14:53:11 +01:00
token . value = token . value . strip ( " ' \" " )
2017-08-20 10:00:04 +02:00
2020-08-13 22:18:31 +08:00
for candidate in ( PLACE . GET , PLACE . POST , PLACE . CUSTOM_POST , PLACE . URI ) :
2019-05-30 21:27:00 +02:00
if candidate in conf . parameters :
2020-08-13 22:18:31 +08:00
if candidate == PLACE . URI and uri :
uri = _adjustParameter ( uri , token . name , token . value )
elif candidate == PLACE . GET and get :
2018-12-10 14:53:11 +01:00
get = _adjustParameter ( get , token . name , token . value )
2020-08-13 16:22:09 +02:00
elif candidate in ( PLACE . POST , PLACE . CUSTOM_POST ) and post :
2018-12-10 14:53:11 +01:00
post = _adjustParameter ( post , token . name , token . value )
2014-10-23 11:23:53 +02:00
2014-10-23 14:33:22 +02:00
for i in xrange ( len ( conf . httpHeaders ) ) :
2018-12-10 14:53:11 +01:00
if conf . httpHeaders [ i ] [ 0 ] . lower ( ) == token . name . lower ( ) :
conf . httpHeaders [ i ] = ( conf . httpHeaders [ i ] [ 0 ] , token . value )
2014-10-23 14:33:22 +02:00
2011-08-29 12:50:52 +00:00
if conf . rParam :
def _randomizeParameter(paramString, randomParameter):
    """
    Return *paramString* with the value of *randomParameter* replaced by a
    randomized one (drawn from kb.randomPool when a pool exists for that
    parameter, otherwise derived from the original value).
    """

    def _newValue(origValue):
        # Draw from the user-supplied pool when available, otherwise mutate the original value
        if randomParameter in kb.randomPool:
            return random.sample(kb.randomPool[randomParameter], 1)[0]
        return randomizeParameterValue(origValue)

    # Regular "name=value" style occurrence
    match = re.search(r"(\A|\b)%s=(?P<value>[^&;]*)" % re.escape(randomParameter), paramString)
    if match:
        return re.sub(r"(\A|\b)%s=[^&;]*" % re.escape(randomParameter), "%s=%s" % (randomParameter, _newValue(match.group("value"))), paramString)

    # Non key=value occurrence (e.g. JSON/XML-like "name" followed by a word value)
    match = re.search(r"(\A|\b)(%s\b[^\w]+)(?P<value>\w+)" % re.escape(randomParameter), paramString)
    if match:
        return paramString.replace(match.group(0), "%s%s" % (match.group(2), _newValue(match.group("value"))))

    # Parameter not found - leave the string untouched
    return paramString
2011-08-29 13:08:25 +00:00
for randomParameter in conf . rParam :
2015-01-17 21:47:57 +01:00
for item in ( PLACE . GET , PLACE . POST , PLACE . COOKIE , PLACE . URI , PLACE . CUSTOM_POST ) :
2011-08-29 13:08:25 +00:00
if item in conf . parameters :
if item == PLACE . GET and get :
get = _randomizeParameter ( get , randomParameter )
2015-01-17 21:47:57 +01:00
elif item in ( PLACE . POST , PLACE . CUSTOM_POST ) and post :
2011-08-29 13:08:25 +00:00
post = _randomizeParameter ( post , randomParameter )
elif item == PLACE . COOKIE and cookie :
cookie = _randomizeParameter ( cookie , randomParameter )
2015-01-17 21:47:57 +01:00
elif item == PLACE . URI and uri :
uri = _randomizeParameter ( uri , randomParameter )
2011-08-29 12:50:52 +00:00
2011-11-21 16:41:02 +00:00
if conf . evalCode :
2014-04-06 16:48:46 +02:00
delimiter = conf . paramDel or DEFAULT_GET_POST_DELIMITER
2020-05-05 23:57:15 +02:00
variables = { " uri " : uri , " lastPage " : threadData . lastPage , " _locals " : locals ( ) , " cookie " : cookie }
2011-11-21 16:41:02 +00:00
originals = { }
2015-08-25 02:03:56 +02:00
if not get and PLACE . URI in conf . parameters :
2019-03-27 02:46:59 +01:00
query = _urllib . parse . urlsplit ( uri ) . query or " "
2015-08-25 02:03:56 +02:00
else :
query = None
2019-03-29 02:28:16 +01:00
for item in filterNone ( ( get , post if not kb . postHint else None , query ) ) :
2011-11-21 17:39:18 +00:00
for part in item . split ( delimiter ) :
if ' = ' in part :
name , value = part . split ( ' = ' , 1 )
2017-10-10 16:08:13 +02:00
name = name . strip ( )
if safeVariableNaming ( name ) != name :
conf . evalCode = re . sub ( r " \ b %s \ b " % re . escape ( name ) , safeVariableNaming ( name ) , conf . evalCode )
name = safeVariableNaming ( name )
2018-03-13 13:45:42 +01:00
value = urldecode ( value , convall = True , spaceplus = ( item == post and kb . postSpaceToPlus ) )
2015-02-25 10:19:51 +01:00
variables [ name ] = value
2011-11-21 16:41:02 +00:00
2022-03-07 20:17:51 +01:00
if post and kb . postHint in ( POST_HINT . JSON , POST_HINT . JSON_LIKE ) :
for name , value in ( parseJson ( post ) or { } ) . items ( ) :
if safeVariableNaming ( name ) != name :
conf . evalCode = re . sub ( r " \ b %s \ b " % re . escape ( name ) , safeVariableNaming ( name ) , conf . evalCode )
name = safeVariableNaming ( name )
variables [ name ] = value
2013-07-31 17:28:22 +02:00
if cookie :
2014-04-06 16:50:58 +02:00
for part in cookie . split ( conf . cookieDel or DEFAULT_COOKIE_DELIMITER ) :
2013-07-31 17:28:22 +02:00
if ' = ' in part :
name , value = part . split ( ' = ' , 1 )
2017-10-10 16:08:13 +02:00
name = name . strip ( )
if safeVariableNaming ( name ) != name :
conf . evalCode = re . sub ( r " \ b %s \ b " % re . escape ( name ) , safeVariableNaming ( name ) , conf . evalCode )
name = safeVariableNaming ( name )
2013-07-31 17:28:22 +02:00
value = urldecode ( value , convall = True )
2015-02-25 10:19:51 +01:00
variables [ name ] = value
2015-01-09 15:33:53 +01:00
while True :
try :
2021-07-14 01:10:33 +02:00
compile ( getBytes ( re . sub ( r " \ s*; \ s* " , " \n " , conf . evalCode ) ) , " " , " exec " )
2019-01-22 00:40:48 +01:00
except SyntaxError as ex :
2017-06-23 23:46:25 +02:00
if ex . text :
2023-03-20 11:50:44 +01:00
original = replacement = getUnicode ( ex . text . strip ( ) )
2019-03-05 12:24:41 +01:00
2017-10-10 16:08:13 +02:00
if ' = ' in original :
name , value = original . split ( ' = ' , 1 )
name = name . strip ( )
if safeVariableNaming ( name ) != name :
replacement = re . sub ( r " \ b %s \ b " % re . escape ( name ) , safeVariableNaming ( name ) , replacement )
else :
for _ in re . findall ( r " [A-Za-z_]+ " , original ) [ : : - 1 ] :
2019-03-05 12:24:41 +01:00
if safeVariableNaming ( _ ) != _ :
replacement = replacement . replace ( _ , safeVariableNaming ( _ ) )
2017-10-10 16:08:13 +02:00
break
2019-03-05 12:24:41 +01:00
2017-06-23 23:46:25 +02:00
if original == replacement :
2019-03-05 12:24:41 +01:00
conf . evalCode = conf . evalCode . replace ( EVALCODE_ENCODED_PREFIX , " " )
2015-01-09 15:33:53 +01:00
break
2017-06-23 23:46:25 +02:00
else :
conf . evalCode = conf . evalCode . replace ( getUnicode ( ex . text . strip ( ) , UNICODE_ENCODING ) , replacement )
2015-01-09 15:33:53 +01:00
else :
2017-06-23 23:46:25 +02:00
break
2015-01-09 15:33:53 +01:00
else :
break
2013-07-31 17:28:22 +02:00
2011-11-21 16:41:02 +00:00
originals . update ( variables )
2012-02-16 14:42:28 +00:00
evaluateCode ( conf . evalCode , variables )
2015-01-09 15:33:53 +01:00
2019-01-22 03:00:44 +01:00
for variable in list ( variables . keys ( ) ) :
2017-10-10 16:08:13 +02:00
if unsafeVariableNaming ( variable ) != variable :
value = variables [ variable ]
del variables [ variable ]
variables [ unsafeVariableNaming ( variable ) ] = value
2014-09-28 13:38:09 +02:00
uri = variables [ " uri " ]
2020-05-05 23:57:15 +02:00
cookie = variables [ " cookie " ]
2011-11-21 16:41:02 +00:00
for name , value in variables . items ( ) :
if name != " __builtins__ " and originals . get ( name , " " ) != value :
2020-12-04 12:28:13 +01:00
if isinstance ( value , ( int , float , six . string_types , six . binary_type ) ) :
2013-08-31 00:28:51 +02:00
found = False
2017-05-04 15:45:15 +02:00
value = getUnicode ( value , UNICODE_ENCODING )
2013-08-31 00:28:51 +02:00
2022-03-07 22:05:00 +01:00
if kb . postHint == POST_HINT . MULTIPART :
boundary = " -- %s " % re . search ( r " boundary=([^ \ s]+) " , contentType ) . group ( 1 )
if boundary :
parts = post . split ( boundary )
match = re . search ( r ' \ bname= " %s " ' % re . escape ( name ) , post )
if not match and parts :
parts . insert ( 2 , parts [ 1 ] )
parts [ 2 ] = re . sub ( r ' \ bname= " [^ " ]+ " .* ' , ' name= " %s " ' % re . escape ( name ) , parts [ 2 ] )
for i in xrange ( len ( parts ) ) :
part = parts [ i ]
if re . search ( r ' \ bname= " %s " ' % re . escape ( name ) , part ) :
match = re . search ( r " (?s) \ A.+? \ r? \ n \ r? \ n " , part )
if match :
found = True
first = match . group ( 0 )
second = part [ len ( first ) : ]
second = re . sub ( r " (?s).+?( \ r? \ n? \ -* \ Z) " , r " %s \ g<1> " % re . escape ( value ) , second )
parts [ i ] = " %s %s " % ( first , second )
post = boundary . join ( parts )
elif kb . postHint and re . search ( r " \ b %s \ b " % re . escape ( name ) , post or " " ) :
2017-03-30 10:16:35 +02:00
if kb . postHint in ( POST_HINT . XML , POST_HINT . SOAP ) :
if re . search ( r " < %s \ b " % re . escape ( name ) , post ) :
found = True
2018-06-09 23:38:00 +02:00
post = re . sub ( r " (?s)(< %s \ b[^>]*>)(.*?)(</ %s ) " % ( re . escape ( name ) , re . escape ( name ) ) , r " \ g<1> %s \ g<3> " % value . replace ( ' \\ ' , r ' \\ ' ) , post )
2017-03-30 10:16:35 +02:00
elif re . search ( r " \ b %s > " % re . escape ( name ) , post ) :
found = True
2018-06-09 23:38:00 +02:00
post = re . sub ( r " (?s)( \ b %s >)(.*?)(</[^<]* \ b %s >) " % ( re . escape ( name ) , re . escape ( name ) ) , r " \ g<1> %s \ g<3> " % value . replace ( ' \\ ' , r ' \\ ' ) , post )
2017-03-30 10:16:35 +02:00
2022-03-07 17:30:49 +01:00
elif kb . postHint in ( POST_HINT . JSON , POST_HINT . JSON_LIKE ) :
match = re . search ( r " [ ' \" ] %s [ ' \" ]: " % re . escape ( name ) , post )
if match :
quote = match . group ( 0 ) [ 0 ]
post = post . replace ( " \\ %s " % quote , BOUNDARY_BACKSLASH_MARKER )
match = re . search ( r " ( %s %s %s : \ s*)( \ d+| %s [^ %s ]* %s ) " % ( quote , re . escape ( name ) , quote , quote , quote , quote ) , post )
if match :
found = True
post = post . replace ( match . group ( 0 ) , " %s %s " % ( match . group ( 1 ) , value if value . isdigit ( ) else " %s %s %s " % ( match . group ( 0 ) [ 0 ] , value , match . group ( 0 ) [ 0 ] ) ) )
post = post . replace ( BOUNDARY_BACKSLASH_MARKER , " \\ %s " % quote )
2017-03-30 10:16:35 +02:00
regex = r " \ b( %s ) \ b([^ \ w]+)( \ w+) " % re . escape ( name )
if not found and re . search ( regex , ( post or " " ) ) :
found = True
2018-06-09 23:38:00 +02:00
post = re . sub ( regex , r " \ g<1> \ g<2> %s " % value . replace ( ' \\ ' , r ' \\ ' ) , post )
2017-02-06 13:57:33 +01:00
2014-10-28 14:02:55 +01:00
regex = r " (( \ A| %s ) %s =).+?( %s | \ Z) " % ( re . escape ( delimiter ) , re . escape ( name ) , re . escape ( delimiter ) )
2017-03-30 10:16:35 +02:00
if not found and re . search ( regex , ( post or " " ) ) :
2013-08-31 00:28:51 +02:00
found = True
2018-06-09 23:38:00 +02:00
post = re . sub ( regex , r " \ g<1> %s \ g<3> " % value . replace ( ' \\ ' , r ' \\ ' ) , post )
2013-08-31 00:28:51 +02:00
2017-03-30 10:16:35 +02:00
if re . search ( regex , ( get or " " ) ) :
2013-08-31 00:28:51 +02:00
found = True
2018-06-09 23:38:00 +02:00
get = re . sub ( regex , r " \ g<1> %s \ g<3> " % value . replace ( ' \\ ' , r ' \\ ' ) , get )
2013-08-31 00:28:51 +02:00
2015-08-25 02:03:56 +02:00
if re . search ( regex , ( query or " " ) ) :
found = True
2018-06-09 23:38:00 +02:00
uri = re . sub ( regex . replace ( r " \ A " , r " \ ? " ) , r " \ g<1> %s \ g<3> " % value . replace ( ' \\ ' , r ' \\ ' ) , uri )
2015-08-25 02:03:56 +02:00
2022-03-07 14:38:14 +01:00
regex = r " (( \ A| %s \ s*) %s =).+?( %s | \ Z) " % ( re . escape ( conf . cookieDel or DEFAULT_COOKIE_DELIMITER ) , re . escape ( name ) , re . escape ( conf . cookieDel or DEFAULT_COOKIE_DELIMITER ) )
2013-08-31 00:28:51 +02:00
if re . search ( regex , ( cookie or " " ) ) :
found = True
2018-06-09 23:38:00 +02:00
cookie = re . sub ( regex , r " \ g<1> %s \ g<3> " % value . replace ( ' \\ ' , r ' \\ ' ) , cookie )
2013-08-31 00:28:51 +02:00
if not found :
if post is not None :
2022-03-07 20:17:51 +01:00
if kb . postHint in ( POST_HINT . JSON , POST_HINT . JSON_LIKE ) :
match = re . search ( r " [ ' \" ] " , post )
if match :
quote = match . group ( 0 )
post = re . sub ( r " \ } \ Z " , " %s %s } " % ( ' , ' if re . search ( r " \ w " , post ) else " " , " %s %s %s : %s " % ( quote , name , quote , value if value . isdigit ( ) else " %s %s %s " % ( quote , value , quote ) ) ) , post )
else :
post + = " %s %s = %s " % ( delimiter , name , value )
2013-08-31 00:28:51 +02:00
elif get is not None :
get + = " %s %s = %s " % ( delimiter , name , value )
elif cookie is not None :
2014-04-06 16:50:58 +02:00
cookie + = " %s %s = %s " % ( conf . cookieDel or DEFAULT_COOKIE_DELIMITER , name , value )
2011-11-28 11:21:39 +00:00
2013-03-27 13:39:27 +01:00
if not conf . skipUrlEncode :
2013-01-15 10:14:02 +01:00
get = urlencode ( get , limit = True )
2012-11-13 10:21:11 +01:00
if post is not None :
2013-03-27 13:39:27 +01:00
if place not in ( PLACE . POST , PLACE . CUSTOM_POST ) and hasattr ( post , UNENCODED_ORIGINAL_VALUE ) :
2012-07-20 09:48:09 +02:00
post = getattr ( post , UNENCODED_ORIGINAL_VALUE )
2017-11-19 02:51:29 +01:00
elif postUrlEncode :
2013-01-19 18:06:36 +01:00
post = urlencode ( post , spaceplus = kb . postSpaceToPlus )
2011-08-29 12:50:52 +00:00
2017-02-14 13:14:35 +01:00
if timeBasedCompare and not conf . disableStats :
2016-01-09 17:32:19 +01:00
if len ( kb . responseTimes . get ( kb . responseTimeMode , [ ] ) ) < MIN_TIME_RESPONSES :
2010-12-21 01:09:39 +00:00
clearConsoleLine ( )
2016-01-09 17:32:19 +01:00
kb . responseTimes . setdefault ( kb . responseTimeMode , [ ] )
2011-11-20 23:17:57 +00:00
if conf . tor :
2012-02-01 14:49:42 +00:00
warnMsg = " it ' s highly recommended to avoid usage of switch ' --tor ' for "
2017-12-04 13:59:35 +01:00
warnMsg + = " time-based injections because of inherent high latency time "
2011-11-20 23:17:57 +00:00
singleTimeWarnMessage ( warnMsg )
2016-01-09 17:32:19 +01:00
warnMsg = " [ %s ] [WARNING] %s time-based comparison requires " % ( time . strftime ( " %X " ) , " (case) " if kb . responseTimeMode else " " )
2018-06-29 23:57:20 +02:00
warnMsg + = " %s statistical model, please wait " % ( " larger " if len ( kb . responseTimes ) == 1 else " reset of " )
2014-03-06 21:08:31 +01:00
dataToStdout ( warnMsg )
2010-12-09 07:49:18 +00:00
2016-01-09 17:32:19 +01:00
while len ( kb . responseTimes [ kb . responseTimeMode ] ) < MIN_TIME_RESPONSES :
value = kb . responseTimePayload . replace ( RANDOM_INTEGER_MARKER , str ( randomInt ( 6 ) ) ) . replace ( RANDOM_STRING_MARKER , randomStr ( ) ) if kb . responseTimePayload else kb . responseTimePayload
Connect . queryPage ( value = value , content = True , raise404 = False )
2014-03-06 21:08:31 +01:00
dataToStdout ( ' . ' )
2016-01-09 17:32:19 +01:00
dataToStdout ( " (done) \n " )
2010-12-09 07:49:18 +00:00
2013-05-18 21:30:21 +02:00
elif not kb . testMode :
2016-09-29 14:55:43 +02:00
warnMsg = " it is very important to not stress the network connection "
2014-03-06 21:08:31 +01:00
warnMsg + = " during usage of time-based payloads to prevent potential "
2016-01-09 17:32:19 +01:00
warnMsg + = " disruptions "
2013-05-18 21:30:21 +02:00
singleTimeWarnMessage ( warnMsg )
if not kb . laggingChecked :
kb . laggingChecked = True
2016-01-09 17:32:19 +01:00
deviation = stdev ( kb . responseTimes [ kb . responseTimeMode ] )
2011-08-12 13:47:38 +00:00
2022-02-14 14:36:47 +01:00
if deviation is not None and deviation > WARN_TIME_STDEV :
2012-10-09 15:19:47 +02:00
kb . adjustTimeDelay = ADJUST_TIME_DELAY . DISABLE
2011-04-19 14:28:51 +00:00
2014-09-08 14:48:31 +02:00
warnMsg = " considerable lagging has been detected "
2011-08-12 13:47:38 +00:00
warnMsg + = " in connection response(s). Please use as high "
2012-02-01 15:10:06 +00:00
warnMsg + = " value for option ' --time-sec ' as possible (e.g. "
2013-05-18 21:30:21 +02:00
warnMsg + = " 10 or more) "
2011-04-19 14:50:09 +00:00
logger . critical ( warnMsg )
2012-11-10 11:01:29 +01:00
2019-05-02 00:45:44 +02:00
if ( conf . safeFreq or 0 ) > 0 :
2010-04-16 12:44:47 +00:00
kb . queryCounter + = 1
2015-04-21 00:02:47 +02:00
if kb . queryCounter % conf . safeFreq == 0 :
2015-04-22 16:28:54 +02:00
if conf . safeUrl :
Connect . getPage ( url = conf . safeUrl , post = conf . safePost , cookie = cookie , direct = True , silent = True , ua = ua , referer = referer , host = host )
elif kb . safeReq :
Connect . getPage ( url = kb . safeReq . url , post = kb . safeReq . post , method = kb . safeReq . method , auxHeaders = kb . safeReq . headers )
2010-09-16 09:32:09 +00:00
2010-12-07 23:32:33 +00:00
start = time . time ( )
2010-12-07 23:49:00 +00:00
2010-12-20 16:45:41 +00:00
if kb . nullConnection and not content and not response and not timeBasedCompare :
2012-06-12 14:22:14 +00:00
noteResponseTime = False
2015-07-18 17:01:34 +02:00
try :
pushValue ( kb . pageCompress )
kb . pageCompress = False
2010-10-10 18:56:43 +00:00
2015-07-18 17:01:34 +02:00
if kb . nullConnection == NULLCONNECTION . HEAD :
method = HTTPMETHOD . HEAD
elif kb . nullConnection == NULLCONNECTION . RANGE :
auxHeaders [ HTTP_HEADER . RANGE ] = " bytes=-1 "
2010-10-10 18:56:43 +00:00
2015-07-18 17:01:34 +02:00
_ , headers , code = Connect . getPage ( url = uri , get = get , post = post , method = method , cookie = cookie , ua = ua , referer = referer , host = host , silent = silent , auxHeaders = auxHeaders , raise404 = raise404 , skipRead = ( kb . nullConnection == NULLCONNECTION . SKIP_READ ) )
2010-09-16 09:47:33 +00:00
2015-07-18 17:01:34 +02:00
if headers :
2018-12-31 01:03:40 +01:00
try :
if kb . nullConnection in ( NULLCONNECTION . HEAD , NULLCONNECTION . SKIP_READ ) and headers . get ( HTTP_HEADER . CONTENT_LENGTH ) :
pageLength = int ( headers [ HTTP_HEADER . CONTENT_LENGTH ] . split ( ' , ' ) [ 0 ] )
elif kb . nullConnection == NULLCONNECTION . RANGE and headers . get ( HTTP_HEADER . CONTENT_RANGE ) :
pageLength = int ( headers [ HTTP_HEADER . CONTENT_RANGE ] [ headers [ HTTP_HEADER . CONTENT_RANGE ] . find ( ' / ' ) + 1 : ] )
except ValueError :
pass
2015-07-18 17:01:34 +02:00
finally :
kb . pageCompress = popValue ( )
2013-05-17 16:04:05 +02:00
2018-12-31 01:01:19 +01:00
if pageLength is None :
2012-12-18 09:36:26 +01:00
try :
2014-11-21 10:31:55 +01:00
page , headers , code = Connect . getPage ( url = uri , get = get , post = post , method = method , cookie = cookie , ua = ua , referer = referer , host = host , silent = silent , auxHeaders = auxHeaders , response = response , raise404 = raise404 , ignoreTimeout = timeBasedCompare )
2012-12-18 09:36:26 +01:00
except MemoryError :
page , headers , code = None , None , None
warnMsg = " site returned insanely large response "
if kb . testMode :
warnMsg + = " in testing phase. This is a common "
2018-09-14 10:01:31 +02:00
warnMsg + = " behavior in custom WAF/IPS solutions "
2012-12-18 09:36:26 +01:00
singleTimeWarnMessage ( warnMsg )
2010-12-07 23:49:00 +00:00
2019-10-09 20:41:33 +03:00
if not ignoreSecondOrder :
if conf . secondUrl :
page , headers , code = Connect . getPage ( url = conf . secondUrl , cookie = cookie , ua = ua , silent = silent , auxHeaders = auxHeaders , response = response , raise404 = False , ignoreTimeout = timeBasedCompare , refreshing = True )
elif kb . secondReq and IPS_WAF_CHECK_PAYLOAD not in _urllib . parse . unquote ( value or " " ) :
def _(value):
    # Substitute the current payload at the custom injection mark (if present)
    if kb.customInjectionMark in (value or ""):
        if payload is None:
            # No payload in this request - simply strip the mark
            value = value.replace(kb.customInjectionMark, "")
        else:
            pattern = r"\w*%s" % re.escape(kb.customInjectionMark)
            try:
                value = re.sub(pattern, payload, value)
            except re.error:
                # Payload contains regex-special sequences (e.g. back-reference
                # lookalikes) - retry with the payload escaped
                value = re.sub(pattern, re.escape(payload), value)
    return value
page , headers , code = Connect . getPage ( url = _ ( kb . secondReq [ 0 ] ) , post = _ ( kb . secondReq [ 2 ] ) , method = kb . secondReq [ 1 ] , cookie = kb . secondReq [ 3 ] , silent = silent , auxHeaders = dict ( auxHeaders , * * dict ( kb . secondReq [ 4 ] ) ) , response = response , raise404 = False , ignoreTimeout = timeBasedCompare , refreshing = True )
2012-07-26 14:07:05 +02:00
2010-12-20 22:45:01 +00:00
threadData . lastQueryDuration = calculateDeltaSeconds ( start )
2010-11-08 09:44:32 +00:00
2019-03-20 11:33:10 +01:00
kb . originalCode = code if kb . originalCode is None else kb . originalCode
kb . originalPage = page if kb . originalPage is None else kb . originalPage
2012-03-15 20:17:40 +00:00
2010-12-07 13:34:06 +00:00
if kb . testMode :
kb . testQueryCount + = 1
2011-01-11 21:46:21 +00:00
2010-12-08 11:26:54 +00:00
if timeBasedCompare :
2013-01-29 20:53:11 +01:00
return wasLastResponseDelayed ( )
2010-12-08 14:33:10 +00:00
elif noteResponseTime :
2016-01-09 17:32:19 +01:00
kb . responseTimes . setdefault ( kb . responseTimeMode , [ ] )
kb . responseTimes [ kb . responseTimeMode ] . append ( threadData . lastQueryDuration )
2019-01-29 17:40:06 +01:00
if len ( kb . responseTimes [ kb . responseTimeMode ] ) > MAX_TIME_RESPONSES :
kb . responseTimes [ kb . responseTimeMode ] = kb . responseTimes [ kb . responseTimeMode ] [ - MAX_TIME_RESPONSES / / 2 : ]
2010-12-07 16:04:53 +00:00
2011-10-24 00:46:54 +00:00
if not response and removeReflection :
page = removeReflectiveValues ( page , payload )
2012-10-02 13:36:15 +02:00
kb . maxConnectionsFlag = re . search ( MAX_CONNECTIONS_REGEX , page or " " , re . I ) is not None
2018-03-16 14:20:43 +01:00
message = extractRegexResult ( PERMISSION_DENIED_REGEX , page or " " , re . I )
if message :
kb . permissionFlag = True
singleTimeWarnMessage ( " potential permission problems detected ( ' %s ' ) " % message )
2012-02-08 12:00:03 +00:00
2022-03-07 18:34:34 +01:00
headers = patchHeaders ( headers )
2019-05-02 10:22:44 +02:00
2010-10-10 18:56:43 +00:00
if content or response :
2017-06-05 16:28:19 +02:00
return page , headers , code
2011-02-25 09:22:44 +00:00
if getRatioValue :
2011-08-12 16:48:11 +00:00
return comparison ( page , headers , code , getRatioValue = False , pageLength = pageLength ) , comparison ( page , headers , code , getRatioValue = True , pageLength = pageLength )
2008-12-18 20:38:57 +00:00
else :
2013-06-10 12:20:58 +02:00
return comparison ( page , headers , code , getRatioValue , pageLength )
2013-08-20 19:35:49 +02:00
2018-03-21 14:29:54 +01:00
def setHTTPHandlers():  # Cross-referenced function
    """
    Placeholder stub; presumably rebound elsewhere at runtime (per the
    "Cross-referenced function" marker). Calling the stub itself always
    raises NotImplementedError.
    """

    raise NotImplementedError