Python: simple POST login simulation

2014-07-11 20:08

First, capture and analyze the traffic of the site you want to log into: look at the GET/POST parameters in the HTTP requests, how the password is encrypted, and so on. Capture tools include, but are not limited to, the browser developer tools, Wireshark, and tcpdump.

python3 urllib

Let's start with a simple example. The following is a minimal HTTP POST request written in Python 3, without any cookie handling. It uses the built-in urllib module; note that Python 2 needs slight changes, because Python 3 merged urllib2, urlparse, and three other modules into urllib.

#!/usr/bin/env python3
# coding=utf-8

# simulated login for site XX
import sys
import urllib.request
import urllib.parse

url = 'http://xxx'

def login():
    action = 'login'
    # these values could be moved to a config file
    username = 'xxx'
    password = 'xxx'
    data = {
        'action': action,
        'username': username,
        'password': password,
        # ...
    }

    # encode the form data; the charset must match what the server expects
    postdata = urllib.parse.urlencode(data).encode('utf-8')
    try:
        request = urllib.request.Request(url, postdata)
        response = urllib.request.urlopen(request)
        # check whether the response body contains the marker string
        if response.read().decode('utf-8').find('login_ok') != -1:
            print('login_ok')
    except Exception as e:
        print('oops! Please check the network!')
        print(e)

def logout():
    logoutdata = {'action': 'logout'}
    postdata = urllib.parse.urlencode(logoutdata).encode('utf-8')
    request = urllib.request.Request(url, postdata)
    response = urllib.request.urlopen(request)
    print(response.read().decode('utf-8'))

if __name__ == '__main__':
    if len(sys.argv) == 1:
        login()
    else:  # log out if any extra command-line argument is given
        logout()
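
As noted above, this example does not handle cookies, so the session set up by login() is not kept for later requests. Below is a minimal sketch of how cookies could be kept with the standard http.cookiejar module; the url and form fields are the same placeholders as above.

import http.cookiejar
import urllib.request
import urllib.parse

# keep cookies in memory and send them with every request made through this opener
cookie_jar = http.cookiejar.CookieJar()
opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(cookie_jar))

def login_with_cookies():
    postdata = urllib.parse.urlencode({
        'action': 'login',
        'username': 'xxx',
        'password': 'xxx',
    }).encode('utf-8')
    # the session cookie set by the server is stored in cookie_jar
    opener.open(url, postdata)
    # later requests through the same opener automatically carry that cookie
    return opener.open(url).read().decode('utf-8')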

python2 urllib, urllib2

#!/usr/bin/python2
import errno
import urllib
import urllib2
import time

# in Python 2, urllib and urllib2 share many of the same interfaces
url = 'http://xxx'
data = urllib.urlopen(url).read()
status_code = urllib.urlopen(url).getcode()  # HTTP status code
website_is_up = status_code == 200

# urllib2: retry on timeout
def retrying_urlopen(retries, *args, **kwargs):
    for i in range(retries):
        try:
            return urllib2.urlopen(*args, **kwargs)
        except urllib2.URLError as e:
            # time.sleep(1) # seconds
            if e.reason.errno == errno.EINPROGRESS:
                # retry if meet timeout error
                continue
            raise

upload_url = 'http://xxx'
# data = urllib2.urlopen(upload_url, timeout=20).read()
response = retrying_urlopen(3, upload_url, timeout=20)

# response body
data = response.read()
# a single response header value
header_val = response.info().getheader('X-Error')
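
The snippets above only issue GET requests. A rough Python 2 equivalent of the POST login shown earlier, sketched with the same placeholder url and form fields, might look like this:

# POST with urllib2: urlencode builds the body, and passing a data argument makes the request a POST
postdata = urllib.urlencode({'action': 'login', 'username': 'xxx', 'password': 'xxx'})
request = urllib2.Request(url, postdata)
response = urllib2.urlopen(request, timeout=20)
print(response.read())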

https SSL certificate errors

When accessing an https site whose certificate is not issued by a trusted CA, you may get a CERTIFICATE_VERIFY_FAILED error; the workaround is to skip certificate verification.

# 1. import the ssl module
import ssl
# 2. create a context that skips certificate verification
context = ssl._create_unverified_context()
# 3. pass the context to urlopen()
response = urllib2.urlopen(request, context=context)           # python2
response = urllib.request.urlopen(request, context=context)    # python3
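
With the requests library (covered in the next section), the rough equivalent is its verify parameter: verify=False skips certificate verification for that request.

import requests
# verify=False disables certificate verification for this single request
# (urllib3 will emit an InsecureRequestWarning)
response = requests.get('https://xxx', verify=False)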

requests

A code example with header and cookie support:

# encoding: utf-8
# conda/pip install requests
# pip install -i https://pypi.tuna.tsinghua.edu.cn/simple requests==1.2.3  (this old version works on python2.6, but its SSL support is poor)
import sys
from contextlib import closing
import requests
from requests.adapters import HTTPAdapter

# older Python versions do not support SNI for SSL connections and may throw
# "hostname doesn't match"; see https://docs.python-requests.org/zh_CN/latest/community/faq.html
assert sys.version_info >= (2, 7, 9)

session = requests.Session()
session.mount('http://', HTTPAdapter(max_retries=2))
session.mount('https://', HTTPAdapter(max_retries=2))

timeout = 5 # seconds

referer = 'https://xxx'
url = 'https://xxx'
headers = {
    'User-Agent':'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/57.0.2987.133 Safari/537.36',
    'Referer': referer,
    'Content-Type': 'application/x-www-form-urlencoded',
}
cookies = {
    'xx': 'xx',
    'yy':'yy',
}
params = {'xx': 'xx'}

def download(url, save_path):
    # stream the response so large files are not read into memory at once
    with closing(session.get(url, stream=True, headers=headers, timeout=timeout)) as r:
        rc = r.status_code
        if rc < 200 or rc > 299:
            print('returnCode%s\t%s' % (rc, url))
            return
        content_length = int(r.headers.get('content-length', '0'))
        if content_length == 0:
            print('size0\t%s' % url)
            return
        try:
            with open(save_path, 'wb') as f:
                for data in r.iter_content(1024):
                    f.write(data)
        except Exception:
            print('savefail\t%s' % url)

# the params dict is form-encoded, matching the Content-Type header above;
# pass json=params instead if the server expects a JSON body
response = requests.post(url, data=params, headers=headers, cookies=cookies)
#print(response.json())
r = response.text
print(r)
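
Since the Session created above keeps cookies between requests, a login flow can reuse the server-set cookie automatically. A minimal sketch, assuming the same placeholder url, headers, and form fields as above:

def session_login():
    # the Session stores cookies set by the login response
    r = session.post(url, data={'action': 'login', 'username': 'xxx', 'password': 'xxx'},
                     headers=headers, timeout=timeout)
    return 'login_ok' in r.text

if session_login():
    # later requests on the same Session send the login cookie automatically
    profile = session.get(url, headers=headers, timeout=timeout)
    print(profile.text)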
