自学内容网

kaggle房价预测

目录

1 数据

2 代码

3 调试记录

4 submission


1 数据

数据可以在kaggle官网下载,也可利用代码进行下载。

kaggle_house_pred_train.csv

kaggle_house_pred_test.csv

2 代码

代码包含两部分,一部分为基础版,另一部分为模型改进版。

import hashlib  # 计算SHA1哈希值,加密
import os  # 与操作系统进行交互,系统包
import tarfile  # 处理.tar文件,解压
import zipfile  # 处理.zip文件,解压
import requests  # 发送HTTP请求
import numpy as np
import pandas as pd
import torch # 开源的机器学习库
from torch import nn #用于构建神经网络的模块
from d2l import torch as d2l #深度学习

# 1. Download and cache data
# @save
DATA_HUB = dict()  # maps dataset name -> (URL, SHA-1 hash) pair
DATA_URL = 'http://d2l-data.s3-accelerate.amazonaws.com/'

# Download one dataset
def download(name, cache_dir=os.path.join('..', 'data')):  # @save
    """Download a file registered in DATA_HUB, returning the local file name.

    Args:
        name: key of the dataset inside DATA_HUB.
        cache_dir: directory where downloaded files are cached.

    Returns:
        Path of the local (possibly cached) file.
    """
    assert name in DATA_HUB, f"{name} 不存在于 {DATA_HUB}"
    url, sha1_hash = DATA_HUB[name]  # registered (URL, SHA-1) pair
    os.makedirs(cache_dir, exist_ok=True)  # create the cache dir if missing
    fname = os.path.join(cache_dir, url.split('/')[-1])  # local path from URL basename
    if os.path.exists(fname):
        # Verify the cached copy's SHA-1 before trusting it.
        sha1 = hashlib.sha1()
        with open(fname, 'rb') as f:
            while True:
                data = f.read(1048576)  # hash in 1 MiB chunks to bound memory use
                if not data:
                    break
                sha1.update(data)
        if sha1.hexdigest() == sha1_hash:
            return fname  # cache hit
    print(f'正在从{url}下载{fname}...')
    r = requests.get(url, stream=True, verify=True)
    # Fail fast on HTTP errors instead of silently caching an error page.
    r.raise_for_status()
    with open(fname, 'wb') as f:
        # Stream to disk chunk by chunk; r.content would buffer the whole
        # file in memory, defeating stream=True.
        for chunk in r.iter_content(chunk_size=1048576):
            f.write(chunk)
    return fname

# Extract an archive
def download_extract(name, folder=None):  # @save
    """Download and extract a zip/tar archive.

    Args:
        name: key of the dataset inside DATA_HUB.
        folder: optional subdirectory name to return instead of the
            archive's stem.

    Returns:
        Directory path containing the extracted data.
    """
    fname = download(name)  # fetch (or reuse the cached) archive
    base_dir = os.path.dirname(fname)  # directory holding the archive
    data_dir, ext = os.path.splitext(fname)  # split off the extension
    if ext == '.zip':
        fp = zipfile.ZipFile(fname, 'r')
    elif ext in ('.tar', '.gz'):
        fp = tarfile.open(fname, 'r')
    else:
        assert False, '只有zip/tar文件可以被解压缩'
    # Close the archive handle even if extraction raises (the original
    # leaked the open file object).
    with fp:
        fp.extractall(base_dir)
    return os.path.join(base_dir, folder) if folder else data_dir

# Populate the local cache from DATA_HUB
def download_all():  # @save
    """Download every file registered in DATA_HUB."""
    for dataset_name in DATA_HUB:
        download(dataset_name)

# 2. Access and read the data
# Training set: register its (URL, SHA-1) pair in DATA_HUB
DATA_HUB['kaggle_house_train'] = (  # @save
    DATA_URL + 'kaggle_house_pred_train.csv',
    '585e9cc93e70b39160e7921475f9bcd7d31219ce')

# Test set
DATA_HUB['kaggle_house_test'] = (  # @save
    DATA_URL + 'kaggle_house_pred_test.csv',
    'fa19780a7b011d9b009e8bff8e99922a8ee2eb90')

# Load the two CSVs with pandas (downloading them first if needed)
train_data = pd.read_csv(download('kaggle_house_train'))
test_data = pd.read_csv(download('kaggle_house_test'))

# Peek: first four rows, first four and last three columns; the last
# column of the training set is the label (real SalePrice)
#print(train_data.iloc[0:4, [0, 1, 2, 3, -3, -2, -1]])

# Same peek on the test set, which has no SalePrice column
#print(test_data.iloc[0:4, [0, 1, 2, 3, -2, -1]])

# Concatenate train and test features so the mean/std statistics below are
# computed over both. Column 0 is the Id (useless as a feature) and the
# training set's last column is the SalePrice label, so both are dropped;
# the test set has no label column to drop.
all_features = pd.concat((train_data.iloc[:, 1:-1], test_data.iloc[:, 1:]))

# 3. Data preprocessing
# NOTE(review): mean/std are computed over train+test combined here; for a
# rigorous evaluation the statistics should come from the training set only.
# pandas stores Python strings as dtype 'object'; keep only numeric columns
numeric_features = all_features.dtypes[all_features.dtypes != 'object'].index  # indices of non-object columns
# Standardize numeric columns: zero mean, unit variance
all_features[numeric_features] = all_features[numeric_features].apply(
    lambda x: (x - x.mean()) / (x.std()))
# After standardization the column means are 0, so imputing missing
# values with 0 imputes them with the mean
all_features[numeric_features] = all_features[numeric_features].fillna(0)

# One-hot encode the categorical columns; dummy_na=True treats NaN as a
# valid category and gives it its own indicator column
all_features = pd.get_dummies(all_features, dummy_na=True)
# Shape should be about (2919, 331) — 1460 train + 1459 test rows; the
# exact column count can vary with the pandas version (TODO confirm)
#print(all_features.shape)

# Workaround (not in the original d2l code): get_dummies can leave the
# underlying NumPy array with dtype 'object', which torch.tensor cannot
# convert; casting everything to float avoids that conversion error
all_features = all_features.astype(float)

n_train = train_data.shape[0]  # number of training rows
# Split the jointly preprocessed features back into train/test tensors
train_features = torch.tensor(all_features[:n_train].values, dtype=torch.float32)
test_features = torch.tensor(all_features[n_train:].values, dtype=torch.float32)
train_labels = torch.tensor(  # the test set has no labels
    train_data.SalePrice.values.reshape(-1, 1), dtype=torch.float32)  # SalePrice column as an (n, 1) tensor

# 4. Training
# Squared (MSE) loss used for optimization; evaluation uses log rmse below
loss = nn.MSELoss()
# Input width of the network (~331, but may vary with the pandas version)
in_features = train_features.shape[1]
#print(in_features)

# Baseline: single-layer linear regression
def get_net():
    """Build the baseline model: one linear layer mapping all input
    features to a single predicted price."""
    return nn.Sequential(nn.Linear(in_features, 1))

# 5. Evaluation metric
def log_rmse(net, features, labels):
    """Root-mean-squared error between log-predictions and log-labels.

    Predictions are clamped to [1, +inf) before taking the logarithm so
    log() is always well defined (real sale prices are at least 1).
    Returns the metric as a plain Python float.
    """
    preds = net(features)
    # Everything below 1 becomes 1; the upper bound is unbounded.
    clipped = torch.clamp(preds, 1, float('inf'))
    squared_log_err = loss(torch.log(clipped), torch.log(labels))
    # .item() extracts the scalar from the 0-dim tensor.
    return torch.sqrt(squared_log_err).item()

# Training loop using Adam (a smoothed variant of minibatch SGD)
def train(net, train_features, train_labels, test_features, test_labels,
          num_epochs, learning_rate, weight_decay, batch_size):
    """Train `net` and return per-epoch log-rmse histories.

    `test_features`/`test_labels` may be a validation fold (during k-fold
    tuning) or None when training on the full set for final prediction;
    the "test" name only distinguishes it from the training split.
    Returns (train_ls, test_ls); test_ls is empty when test_labels is None.
    """
    train_ls, test_ls = [], []
    data_iter = d2l.load_array((train_features, train_labels), batch_size)  # minibatch iterator
    trainer = torch.optim.Adam(net.parameters(),
                               lr=learning_rate,
                               weight_decay=weight_decay)  # L2-style regularization
    for _ in range(num_epochs):
        for X, y in data_iter:
            trainer.zero_grad()       # clear gradients from the previous step
            batch_loss = loss(net(X), y)
            batch_loss.backward()     # backpropagate
            trainer.step()            # update parameters
        # Record full-dataset log rmse once per epoch.
        train_ls.append(log_rmse(net, train_features, train_labels))
        if test_labels is not None:
            test_ls.append(log_rmse(net, test_features, test_labels))
    return train_ls, test_ls

# 6. K-fold cross-validation
# Hold out slice i as the validation fold; the rest becomes training data
def get_k_fold_data(k, i, X, y):
    """Return (X_train, y_train, X_valid, y_valid) with fold i of k held out.

    Rows beyond k * (X.shape[0] // k) are dropped, so every fold has the
    same size.
    """
    assert k > 1  # need at least two folds to split train/valid
    fold_size = X.shape[0] // k
    train_X_parts, train_y_parts = [], []
    for j in range(k):
        sl = slice(j * fold_size, (j + 1) * fold_size)  # rows of fold j
        if j == i:
            X_valid, y_valid = X[sl, :], y[sl]  # held-out fold
        else:
            train_X_parts.append(X[sl, :])
            train_y_parts.append(y[sl])
    # Stack the remaining folds along the row dimension.
    X_train = torch.cat(train_X_parts, 0)
    y_train = torch.cat(train_y_parts, 0)
    return X_train, y_train, X_valid, y_valid

# Average training and validation error over k folds
def k_fold(k, X_train, y_train, num_epochs, learning_rate, weight_decay,
           batch_size):
    """Run k-fold cross-validation with a fresh baseline net per fold.

    Returns (mean train log rmse, mean validation log rmse), each taken
    from the last epoch of its fold. Plots learning curves for fold 0.
    """
    total_train, total_valid = 0, 0
    for fold in range(k):
        fold_data = get_k_fold_data(k, fold, X_train, y_train)  # fold `fold` held out
        net = get_net()  # fresh model per fold
        train_ls, valid_ls = train(net, *fold_data, num_epochs, learning_rate,
                                   weight_decay, batch_size)
        # Accumulate only the final-epoch losses.
        total_train += train_ls[-1]
        total_valid += valid_ls[-1]
        if fold == 0:
            # Show learning curves for the first fold only.
            d2l.plot(list(range(1, num_epochs + 1)), [train_ls, valid_ls],
                     xlabel='epoch', ylabel='rmse', xlim=[1, num_epochs],
                     legend=['train', 'valid'], yscale='log')
            d2l.plt.show()
        print(f'折{fold + 1},训练log rmse{float(train_ls[-1]):f}, '
              f'验证log rmse{float(valid_ls[-1]):f}')
    return total_train / k, total_valid / k

# 7. Model selection
k, num_epochs, lr, weight_decay, batch_size = 5, 100, 10, 0, 32
train_l, valid_l = k_fold(k, train_features, train_labels, num_epochs, lr,
                          weight_decay, batch_size)
#print(f'{k}-折验证: 平均训练log rmse: {float(train_l):f}, 'f'平均验证log rmse: {float(valid_l):f}')
# Tuning workflow: adjust the hyperparameters on the line above and watch
# the average validation log rmse (the core metric) — lower is better.
# If the train and valid curves in the k-fold plot track each other
# closely, the model is not overfitting.
# A very low training error combined with a much higher k-fold error
# indicates overfitting; heavy overfitting suggests regularization could
# help, while little overfitting suggests the data could support a
# stronger model.

# 8. Predict house prices
def train_and_pred(train_features, test_features, train_labels, test_data,
                   num_epochs, lr, weight_decay, batch_size):
    """Train the baseline net on the full training set, predict the test
    set and write submission.csv.

    Side effects: shows a training-curve plot, prints the final training
    log rmse, adds a 'SalePrice' column to `test_data`, and writes
    submission.csv in the working directory.
    """
    net = get_net()  # baseline single-layer linear model
    # No held-out split here: pass None for the test_* arguments.
    train_ls, _ = train(net, train_features, train_labels, None, None,
                        num_epochs, lr, weight_decay, batch_size)
    d2l.plot(np.arange(1, num_epochs + 1), [train_ls], xlabel='epoch',
             ylabel='log rmse', xlim=[1, num_epochs], yscale='log')
    # Bug fix: the old print reported the stale global `valid_l` from the
    # earlier k-fold run as if it were this run's validation error; there
    # is no validation split here, so report only the training loss.
    print(f'训练log rmse:{float(train_ls[-1]):f}')

    # Apply the trained network to the test set; detach() drops the
    # autograd graph before converting to NumPy.
    preds = net(test_features).detach().numpy()
    # Reformat the (n, 1) predictions into a flat column for Kaggle.
    test_data['SalePrice'] = pd.Series(preds.reshape(-1))
    # Keep only the 'Id' and 'SalePrice' columns for the submission file.
    submission = pd.concat([test_data['Id'], test_data['SalePrice']], axis=1)
    submission.to_csv('submission.csv', index=False)

train_and_pred(train_features, test_features, train_labels, test_data,  # train on the full training set
               num_epochs, lr, weight_decay, batch_size)  # then predict test_features and write submission.csv

#模型改进
#参考https://blog.csdn.net/scdifsn/article/details/139561935?spm=1001.2014.3001.5506

def init_weights(m):
    """Initialize Linear-layer weights from N(0, 0.01^2); other module
    types are left untouched. Intended for use with net.apply()."""
    if type(m) is nn.Linear:
        nn.init.normal_(m.weight, std=0.01)
# Improved model: Flatten -> Linear(in_features, 512) -> ReLU -> Linear(512, 1)
def get_net_1():
    """Build the improved MLP with one 512-unit hidden layer and ReLU
    activation; every Linear layer's weights start from N(0, 0.01^2)."""
    layers = [
        nn.Flatten(),
        nn.Linear(in_features, 512),
        nn.ReLU(),
        nn.Linear(512, 1),
    ]
    net = nn.Sequential(*layers)
    net.apply(init_weights)  # apply the Gaussian init to each Linear layer
    return net

def k_fold_1(k, X_train, y_train, num_epochs, learning_rate, weight_decay,
             batch_size):
    """k-fold cross-validation for the improved MLP (get_net_1).

    Same procedure as k_fold(), but each fold trains a fresh get_net_1()
    model. Returns (mean train log rmse, mean validation log rmse).
    """
    total_train, total_valid = 0, 0
    for fold in range(k):
        fold_data = get_k_fold_data(k, fold, X_train, y_train)
        net = get_net_1()  # fresh improved model per fold
        train_ls, valid_ls = train(net, *fold_data, num_epochs, learning_rate,
                                   weight_decay, batch_size)
        total_train += train_ls[-1]
        total_valid += valid_ls[-1]
        if fold == 0:
            # Learning curves for the first fold only.
            d2l.plot(list(range(1, num_epochs + 1)), [train_ls, valid_ls],
                     xlabel='epoch', ylabel='rmse', xlim=[1, num_epochs],
                     legend=['train', 'valid'], yscale='log')
            d2l.plt.show()
        print(f'折{fold + 1},训练log rmse{float(train_ls[-1]):f}, '
              f'验证log rmse{float(valid_ls[-1]):f}')
    return total_train / k, total_valid / k

# Hyperparameters for the improved MLP: much smaller lr, strong weight decay
k, num_epochs, lr, weight_decay, batch_size = 5, 100, 0.01, 300, 32
train_l, valid_l = k_fold_1(k, train_features, train_labels, num_epochs, lr,
                                weight_decay, batch_size)
print(f'{k}-折验证: 平均训练log rmse: {float(train_l):f}, '
      f'平均验证log rmse: {float(valid_l):f}')

def train_and_pred_1(train_features, test_features, train_labels, test_data,
                     num_epochs, lr, weight_decay, batch_size):
    """Train the improved MLP on the full training set, predict the test
    set and write the improved submission file.

    Side effects: shows a training-curve plot, prints the final training
    log rmse, adds a 'SalePrice' column to `test_data`, and writes
    'submission_模型改进版.csv'.
    """
    net = get_net_1()  # MLP with one 512-unit hidden layer
    # No held-out split here: pass None for the test_* arguments.
    train_ls, _ = train(net, train_features, train_labels, None, None,
                        num_epochs, lr, weight_decay, batch_size)
    d2l.plot(np.arange(1, num_epochs + 1), [train_ls], xlabel='epoch',
             ylabel='log rmse', xlim=[1, num_epochs], yscale='log')
    # Bug fix: the old print reported the stale global `valid_l` from the
    # earlier k-fold run as this run's validation error; there is no
    # validation split here, so report only the training loss.
    print(f'训练log rmse:{float(train_ls[-1]):f}')

    # Apply the trained network to the test set; detach() drops the
    # autograd graph before converting to NumPy.
    preds = net(test_features).detach().numpy()
    # Reformat the (n, 1) predictions into a flat column for Kaggle.
    test_data['SalePrice'] = pd.Series(preds.reshape(-1))
    # Keep only the 'Id' and 'SalePrice' columns for the submission file.
    submission = pd.concat([test_data['Id'], test_data['SalePrice']], axis=1)
    submission.to_csv('submission_模型改进版.csv', index=False)

train_and_pred_1(train_features, test_features, train_labels, test_data,  # train the improved MLP on the full training set
               num_epochs, lr, weight_decay, batch_size)  # then predict test_features and write the improved submission csv

3 调试记录

下表为调参记录,每组实验的列依次为:k、num_epochs、lr、weight_decay、batch_size、K-折交叉验证的训练 log rmse、验证 log rmse。

5

100

5

0

64

0.162503

0.170357

5

100

2

0

64

0.261633

0.365835

5

100

8

0

64

0.145777

0.162572

5

100

10

0

64

0.139791

0.155917

5

100

10

0.1

64

0.148870

0.168602

5

100

10

1

64

0.335794

0.286485

5

100

10

0.01

64

0.140442

0.156684

5

100

10

0.001

64

0.140120

0.156092

5

100

5

0.001

64

0.162795

0.171169

5

100

20

0.001

64

0.128712

0.147609

5

100

8

0.001

64

0.146114

0.163414

5

100

5

0

32

0.139136

0.155708

5

100

5

0

128

0.192183

0.246566

5

100

10

0.001

64

0.139884

0.156481

5

100

5

0.001

32

0.138977

0.155641

5

100

20

0

32

0.128835

0.147962

5

100

10

0

32

0.127945

0.147207

5

100

10

0.001

32

0.128614

0.147032

8

100

10

0

32

0.128037

0.142846

5

100

10

0

16

0.124825

0.149123

5

500

10

0

32

0.121226

0.173696

5

500

5

0

32

0.123240

0.160028

5

500

10

0

64

0.122665

0.163520

5

500

5

0

64

0.124166

0.151033

模型改进后:

下表各列依次为:k、num_epochs、lr、weight_decay、batch_size、K-折交叉验证的训练 log rmse、验证 log rmse。

5

100

0.01

300

32

0.108352

0.133742

4 submission

Id, SalePrice(下表每行为 Id 与预测的 SalePrice 两列连写)
1461113785.96
1462156048.72
1463181747.12
1464203121.05
1465191368.69
1466171991.34
1467182439.89
1468166988.52
1469193168.55
1470112137.09
1471183325.45
147296528.35
147391684.69
1474145767.9
147592055.09
1476344705.5
1477251672.94
1478304370.25
1479298895.25
1480450478.4
1481310093.5
1482217878.61
1483172409.4
1484167281.73
1485196833.64
1486202284.5
1487312168.5
1488241076.8
1489192070.95
1490244489.17
1491200983.84
149289535.93
1493207221.56
1494290943.78
1495272638.06
1496243777.11
1497164740.9
1498160438.53
1499161415.48
1500153257.61
1501205820.23
1502150050.36
1503290245.22
1504239660.83
1505225869.83
1506195727.6
1507247069.6
1508198461.84
1509153404.28
1510149474.89
1511143365.73
1512179771.84
1513143153.25
1514164463.28
1515207142.9
1516165766.83
1517174731.44
1518116633.61
1519228801.88
1520120989.82
1521123662.26
1522199498.92
152394180.74
1524111497.59
1525109081.15
152698899.4
1527105529.625
1528134050.4
1529147734.1
1530221878.52
1531138699.95
1532106128.23
1533151896.75
1534120328.18
1535147092.66
1536103856.2
153762110.074
1538168197.88
1539204008.5
1540122985.71
1541146010.6
1542135174.7
1543189561.62
154481889.63
1545120721.69
1546154840.67
1547133726.47
1548137232.52
1549124801.6
1550148280.11
1551110953.45
1552148712.19
1553161177.12
1554111970.586
1555162409.33
155674531.97
1557107806.03
155896532.086
155974603.625
1560122241.56
1561125553.58
1562128135.73
1563121719.85
1564160392.33
1565157664.53
1566246291.14
156767018.25
1568244229.27
1569135903.17
1570139315.6
157198562.34
1572143781.42
1573238453.84
1574129342.39
1575234006.45
1576254747.28
1577186234.83
1578152971
1579132953.86
1580195072.14
1581160382.27
1582126756.43
1583297197.47
1584225873.42
1585143116.48
158654580.883
158796395.14
1588155891.67
1589106904.086
1590118894.31
159187651.88
1592139341.81
1593132338.28
1594126392.48
1595103907.73
1596220355.9
1597191195.94
1598203504.2
1599183896.42
1600176789.62
160142094.06
1602122158.266
160364171.242
1604279069.66
1605244725.19
1606157484.22
1607159393.73
1608221515.55
1609188232.12
1610158777.72
1611140316.42
1612182707.16
1613178433.17
1614133360.28
161592409.016
161667748.34
161781721.27
1618110459.95
1619149389.73
1620178361.98
1621129779.04
1622150538.3
1623266459.34
1624215463.56
1625112589.06
1626188526.47
1627191013.19
1628274210.66
1629172601.25
1630327413.03
1631223350.08
1632229787.77
1633184525.78
1634192057.05
1635179813.38
1636148082.97
1637192089.12
1638190096.72
1639187786.94
1640233905.73
1641180478.47
1642253303.22
1643215957.61
1644232635.14
1645214740.62
1646163198.11
1647156445.33
1648128814.695
1649133300.97
1650118352.555
1651117679.46
1652101864.19
1653101833.8
1654144298.64
1655121844.59
1656138391.4
1657153000.23
1658152091.45
1659115774.8
1660154700.66
1661410684.06
1662346532.53
1663354381.2
1664433132.28
1665307670.03
1666320085.84
1667337230.47
1668337309.9
1669308173.3
1670338583.28
1671269937.66
1672392537.28
1673296851.3
1674253212.11
1675206520.33
1676209417.66
1677223688.92
1678444021.62
1679363831.72
1680317564.28
1681265073.8
1682309510.2
1683195234.47
1684185219.67
1685183252.05
1686172749.53
1687175924.81
1688200488.12
1689203853.39
1690208397.34
1691196748.67
1692265109.97
1693173670.9
1694187821.06
1695165344.77
1696263436.56
1697178128.06
1698324574.88
1699325771.84
1700261923.92
1701270523.2
1702240113.36
1703243783.33
1704274015.34
1705259214.7
1706366531.4
1707221778.94
1708204072.95
1709260042.77
1710225288.2
1711281397.06
1712253813.03
1713270747.25
1714222348.45
1715207109.52
1716178277.83
1717183508.95
1718136119.48
1719217343.42
1720237617.86
1721163058.44
1722117638.375
1723157007.36
1724212668.86
1725234065.83
1726188661.6
1727149342.14
1728176637.75
1729169230.88
1730165350.84
1731117434.9
1732130452.85
1733109306.65
1734114385.72
1735116077.46
173690662.35
1737302848.88
1738255245.2
1739253326.4
1740221868.5
1741198871.95
1742181241.31
1743184123.56
1744327130.56
1745226101.39
1746191876.45
1747219786.3
1748213931.55
1749134470.95
1750123112.875
1751246120.11
1752109434.8
1753155650.83
1754196679.22
1755174909.73
1756136186.58
1757113033.45
1758154886.48
1759169205.94
1760171765.55
1761168009
1762184863.5
1763177031.36
1764106422.44
1765170759.22
1766197105.53
1767237577.36
1768138920.52
1769173282.45
1770167698.1
1771114726.08
1772134439.7
1773118469.81
1774157350.5
1775141197.56
1776130997.24
1777102837.67
1778147411.61
1779138353.69
1780178614.1
1781119641.63
178272614.875
1783157095.02
1784107546.42
1785124714.086
1786147805.27
1787172646.55
178846414.43
1789103228.516
179070055.96
1791211715.58
1792165670.83
1793130794.53
1794175393.11
1795125946.83
1796126382.71
1797130378.25
1798118926.41
179994789.4
1800110694.38
1801121674.73
1802136749.16
1803159302.48
1804129110.59
1805132420.53
1806119328.125
1807151045.97
1808126432.375
1809136549.48
1810144924.53
181190992.4
1812100044.91
1813123470.25
181488850.086
181544469.37
181692416.92
1817118327.24
1818162534.33
1819122726.92
182044784.066
1821107299.28
1822160550.75
182336294.516
1824147834.64
1825144273.84
182694870.12
1827104509.42
1828143733.2
1829163778.36
1830157176.83
1831146504.78
183277893.84
1833141836.33
1834114625.49
1835123055.266
1836122243.95
183781517.82
1838117950.39
1839105155.6
1840159322.58
1841125149.164
184291740.41
1843142237.55
1844134637.38
1845145289.33
1846149777.58
1847172309.42
184840584.35
1849124403.586
1850114246.6
1851142043.88
1852117352.336
1853128320.65
1854164825.03
1855141993.39
1856233386.83
1857135669.89
1858130962.3
1859107933.484
1860142895.77
1861109992.336
1862310831.3
1863299255.28
1864299268.8
1865343841.62
1866326482.66
1867227782.98
1868291853.75
1869217277.64
1870228102.27
1871262453.16
1872187227.3
1873250864.22
1874145362.61
1875194309.62
1876201614.14
1877207402.64
1878204440.56
1879124960.95
1880126347.76
1881250471.27
1882241791.14
1883186584.39
1884197893.94
1885226397.67
1886278702.28
1887224751.4
1888265969.9
1889179244.95
1890110452.34
1891120848.664
189294345.05
1893121273.2
1894117598.22
1895135010.39
1896118037.98
1897117422.47
1898107270.64
1899160116.3
1900157429.16
1901177027.53
1902156245.47
1903221259.45
1904151770.31
1905203024.02
1906161040.6
1907215120.16
1908107656.734
1909137318.7
1910121638.766
1911218927.34
1912306107.16
1913179769.19
191450562.766
1915314186.1
191638800.64
1917242084.83
1918136770.16
1919170342.33
1920163701.25
1921365463.2
1922320650.4
1923232383.84
1924237394.1
1925214693.89
1926363536.03
1927123827.24
1928171968.55
1929113323.67
1930119855.7
1931136828.95
1932131686.03
1933192087.19
1934186589.31
1935173983.94
1936198030.95
1937180854.11
1938172292.61
1939247588.83
1940192262.12
1941169245.11
1942169366.89
1943218114.98
1944341042.06
1945359764.78
1946136147.56
1947285367.66
1948176874.7
1949245946.83
1950195159.33
1951249447.81
1952209365.69
1953185571.4
1954188566.03
1955129424.836
1956290810.72
1957158060.36
1958276487.12
1959138741.22
196086343.63
1961125915.734
196294562.96
1963110964.164
1964104556.34
1965135642.03
1966132439.56
1967302895.75
1968390721.62
1969360400.16
1970377830
1971423149.1
1972364671.12
1973290348.3
1974327714.38
1975444677.72
1976286352.62
1977355486.9
1978328790.62
1979318982.03
1980197991.42
1981341432.03
1982228534.23
1983212599.06
1984181306.8
1985232146.98
1986213441.06
1987192692.75
1988184191.58
1989201366.88
1990217518.03
1991236490.97
1992233318.23
1993177581.44
1994225120.64
1995189313.48
1996282999.97
1997303970.97
1998305415.78
1999295546
2000303979.88
2001291034.28
2002249561.94
2003260253.61
2004295601.25
2005231685.67
2006218293.47
2007251289.33
2008229620.78
2009208053.84
2010197810.36
2011136664.83
2012179707.7
2013177737.77
2014190983.55
2015208944.69
2016198256.69
2017201419.83
2018113525.91
2019130375.17
202078022.016
202188089.164
2022204469.31
2023134270.03
2024279730.1
2025344480.8
2026173909
2027161354.03
2028155323.4
2029169542.48
2030261390.88
2031237724.33
2032251362.9
2033245380.56
2034171309.39
2035235745.61
2036211420.66
2037212782.61
2038298426.25
2039229864.08
2040316692.03
2041290950.4
2042210255.33
2043181058.2
2044177827.66
2045202248.98
2046129544.15
2047136158.52
2048149633.92
2049139975.11
2050163222.98
2051107814.52
2052114985.27
2053145502.86
205477206.83
2055163909.06
2056140477.45
205799820.53
2058225086.83
2059130412.16
2060185165.69
2061172888.33
2062115274.61
2063108105.77
2064139543.58
2065118941.98
2066176248.66
2067127431.64
2068143541.88
206983313.2
2070105030.2
207197115.02
2072165747.39
2073122868.086
2074182773.69
2075153989.22
2076126447.09
2077149467.39
2078118796.3
2079136468.56
2080109687.945
2081114276.14
2082130087.55
2083148315.88
2084102425.28
208596556.72
2086117271.17
2087116443.55
2088102453.48
208967236.79
2090127293.07
209195903.83
2092145303.52
2093133460.1
2094117623.69
2095144513.61
209679454.63
209796299.266
2098149284.69
209936690.09
210060520.203
2101124880.086
2102127649.29
210392314.79
2104128594.72
2105142510.14
210642127.43
2107206018.81
2108114330.36
2109102441.72
2110124475.305
2111139695.6
2112139182.97
2113106880.31
2114113849.64
2115157879.84
2116124394.836
2117156753.14
2118120575.14
2119114321.83
2120119997.695
212199331.02
2122116062.33
212386642.61
2124170669.8
2125137863.75
2126164155.19
2127176637.75
2128130764.555
212980336.766
2130129541.195
2131145423.12
2132119677.92
2133126364.08
2134123006.836
213598555.91
213656821.473
2137107099.24
2138126593.484
2139142356.4
2140140556.97
2141156097.16
2142123267.49
2143156064.64
2144106580.125
2145141462.39
2146183934.48
2147153289.62
2148137080.89
2149146522.52
2150239545.02
2151116911.35
2152179186.25
2153189603.12
2154104734.984
2155138008.84
2156247315.38
2157240962.92
2158237414.61
2159216123.8
2160198969.9
2161242800.92
2162372399.62
2163347945.56
2164247858.81
2165205804.66
2166161899.55
2167216748.14
2168184269.84
2169194392.2
2170213005.47
2171154057.2
2172138397.52
2173184006.66
2174220972.17
2175282537.7
2176298441.75
2177239556.61
2178218199.12
2179135466.19
2180218276.03
2181194842.19
2182222855.89
2183193087.56
2184121309.84
2185118380.055
2186157250.95
2187153284.58
2188162235.88
2189300597.38
219072580.46
219160497.64
219280304.18
2193119989.63
2194116577.61
219588022.945
219692677.11
2197115559.875
2198169024.36
2199192694.97
2200151424.89
2201149350.88
2202194655.25
2203146507.19
2204168743.45
2205109194.9
2206150341.62
2207226740.1
2208255365.88
2209246391.88
2210119880.42
2211121254.31
2212115540.97
2213102907.47
2214136083.2
221597332.27
2216144277.47
221760716.445
221862763.176
221963627.38
222044845.117
2221315890.84
2222290799.25
2223298718.16
2224222589.5
2225125323.4
2226186432.38
2227212397.86
2228287098.34
2229254628.03
2230152998.44
2231222075.47
2232191789.6
2233187031.58
2234257058.23
2235230272.05
2236258999.98
2237319799.84
2238214048.78
2239102065.09
2240158464.89
2241137169.53
2242124611.5
2243124103.25
224498728.26
224597152.586
2246123724.586
2247140450.61
2248121787.1
2249111147.11
2250120888.19
2251101792.74
2252198713.98
2253161833.39
2254178705.89
2255195938.16
2256170568.7
2257207306.77
2258148824.89
2259179240.61
2260145916.64
2261204387.14
2262213312.02
2263356149.7
2264424961.84
2265164165.1
2266289722.34
2267358621.78
2268381879.7
2269159660
2270195292.52
2271212519.67
2272210709.94
2273154529.36
2274187911.89
2275173502.78
2276185015.67
2277182418.69
2278155445.45
2279118111.375
228099937.516
2281170798.92
2282184701.4
2283112071.48
2284111875.59
2285136035.3
2286117776.555
2287353495.8
2288288167.7
2289338394.9
2290401648.4
2291331627.44
2292395713.53
2293420412.84
2294371483.97
2295443061.25
2296268960.22
2297349099.84
2298348978.34
2299346810.2
2300317753.56
2301323764.47
2302258243.17
2303246624.14
2304254756.1
2305208520.3
2306200543.64
2307213548.14
2308225429.28
2309290402.88
2310224727.7
2311220170.33
2312186990.95
2313172943.11
2314176099.8
2315188014.77
2316207944.2
2317190642.86
2318185961.94
2319189284.98
2320178989.8
2321242906.52
2322196483.64
2323193020.61
2324195400.62
2325216134.6
2326182904.67
2327201725.58
2328226895.67
2329192693.9
2330187529.69
2331333384.44
2332357222.97
2333306101.4
2334259598.67
2335281031.22
2336300657.84
2337192688.86
2338257907.2
2339227978.97
2340383653.2
2341223894.42
2342233720.42
2343236350.77
2344218003.81
2345234511.58
2346212130.2
2347197132.56
2348247031.4
2349195420.42
2350315577.03
2351260189.48
2352248716.33
2353254267.08
2354149484.08
2355149366.16
2356149691.28
2357194267.88
2358193633.06
2359126845.055
2360108403.15
2361157055.08
2362254361.95
2363133196.9
2364175922.94
2365222858.67
2366197914.94
2367214889.56
2368218312.94
2369214715.7
2370173079.19
2371159399.66
2372190049.03
2373290456.12
2374309481.44
2375248736.11
2376297857.22
2377320124.06
2378143417.44
2379213944.84
2380141978.1
2381174467.8
2382216474.55
2383195846.11
2384244924.88
2385164095.55
2386121780.27
2387119279.25
2388112594.83
2389122109.19
2390151334.38
2391144134.4
2392112780.625
2393175823.5
2394142334.38
2395216090.64
2396137460.3
2397213911.67
2398139653.61
239964441.945
240063638.02
2401121158.28
2402141936.4
2403145107.75
2404157222.94
2405167211.2
2406138359.61
2407121546.3
2408142174.28
2409105895.96
2410180068.67
2411109893.234
2412151322.66
2413119768.125
2414158800.58
2415135627.98
2416126582.36
2417138839.05
2418131610.03
2419121987.13
2420122637.96
2421132496.44
2422101419.58
2423132090.5
2424155953.38
2425234068
2426157920.72
2427132363.14
2428197263.84
242996575.97
2430127218.88
2431102561.63
2432150606.75
2433152024.83
2434142579.98
2435164310.14
2436105742.39
243787084.22
2438112235.22
2439106183.83
2440127022.37
2441104680.94
244294290.41
2443142541.47
2444136019.45
244581525.99
2446143845.4
2447212452.39
2448139668.45
2449117009.1
2450158053.67
2451128273.4
2452203316.9
245387981.125
2454125068
2455145942.38
2456133061.78
2457137796.1
2458135664.55
2459115650.36
2460159323.52
2461126426.85
2462134114.39
2463129103.63
2464185009.62
2465140319.31
2466115379.695
2467147227.53
246894233.48
246982320.65
2470205620.12
2471213453.47
2472170324.97
2473107433.74
247457644.57
2475218314.89
247697317.97
2477107090.484
2478151249.55
247992348.75
2480159604.58
2481124259.016
2482115308.34
248394476.89
2484132420.27
2485117582.89
2486155551.9
2487207302.52
2488177627.22
2489159450.67
2490138633.14
249197022.87
2492197976.7
2493162447.27
2494150426.5
249569021.05
2496239681.11
2497151964.94
2498109070.07
249983445.055
2500118828.17
2501131021.945
2502150892.19
2503102408.83
2504182420.45
2505224363.69
2506263016.66
2507289070.72
2508260050.22
2509224900.55
2510219527.48
2511177975.53
2512209029.47
2513229764.78
2514253553.27
2515151045.73
2516180712.31
2517148802.92
2518154810.17
2519236933.77
2520222842
2521189206.64
2522226648.27
2523106986.61
2524136700.52
2525143917.73
2526149995.61
2527106666.8
2528121268.29
2529147701.78
2530122811.99
2531253765.33
2532225002.58
2533205715.58
2534243815.28
2535271513.1
2536226802.92
2537243578.23
2538192006.38
2539192681.19
2540188746.53
2541186771.11
2542167397.25
2543114243.27
2544108207.18
2545138550.05
2546128880.4
2547153440.98
2548166619.3
2549170107.77
2550632049.94
2551136753
2552136638.28
255356785.57
255487176.836
2555113592.4
255685733.1
255797557.71
2558175308.58
2559135328.47
2560175455.47
2561154635.75
2562149361.19
2563147614.55
2564202234.88
2565174037.36
2566164823.28
2567133644.38
2568228825
2569234001.47
2570114675.64
2571189627.22
2572154509.77
2573226698.14
2574265826
2575136712.9
2576137318.16
2577164742.97
257868704.36
257936073.35
2580100195.695
2581132664.42
2582128382.836
2583290881
2584177855.16
2585200544.55
2586220444.58
2587208417.22
2588141271.05
2589145599.78
2590223740.7
2591253441.28
2592221227.72
2593271671.06
2594177183.31
2595216799.66
2596310295.6
2597206036.39
2598283113.5
2599335723.38
2600175327.52
2601142256.89
260267697.53
260385908.83
260485334.5
260563122.656
2606140956.44
2607237205.98
2608205817.58
2609169601.98
2610109925.13
2611125129.57
2612160685.05
2613124423.266
2614106642.734
2615164238.94
2616163467.12
2617207489.14
2618243113.23
2619215920.45
2620199205.5
2621175588.02
2622186382.38
2623232988.58
2624304002.75
2625271988.12
2626177953.9
2627165756.03
2628423319.97
2629442661.8
2630356875.16
2631418986.9
2632384186.6
2633296088.06
2634389720.03
2635151187.3
2636175498.06
2637196005.14
2638263965.53
2639181826.62
2640154998.8
2641100981.68
2642186425.42
2643115201.24
2644115152.695
2645107302.87
264694237.56
2647108397.9
2648134320.2
2649150222.11
2650124908.11
2651134481.92
2652368143.34
2653257552.64
2654267735.16
2655389236.1
2656329237.12
2657341846.6
2658311920.12
2659311724.7
2660352734.47
2661341331.06
2662361729.47
2663284668.28
2664281643.94
2665338483.3
2666293442.34
2667185647.17
2668188634.44
2669188881.62
2670273139.34
2671198024.66
2672203083.78
2673198801.03
2674201874.75
2675182893.9
2676200920.94
2677202844.25
2678243786.44
2679287026.38
2680281090.38
2681375324.44
2682312524.2
2683452200.62
2684307911.8
2685317881.06
2686264242.66
2687302704.84
2688211291.66
2689208083.3
2690363873.6
2691190423.53
2692151636.83
2693205895.75
2694138873.72
2695204161.86
2696188665.86
2697189169.81
2698206106.16
2699167810.44
2700146637.77
2701149884.67
2702111451.586
2703125808.8
2704151840.12
2705104957.18
270682167.07
2707121665.836
2708130133.78
2709111813.57
2710125930.42
2711286431.22
2712379128.47
2713169412.22
2714145548.45
2715177203.45
2716144775.69
2717192737.33
2718214468.48
2719142167.05
2720187678.3
2721122154.7
2722155253.38
2723146853.11
2724113770.484
2725120849.76
2726140590.27
2727178932.89
2728192774.55
2729146546.5
2730142032
2731130415.625
2732137039.8
2733169931.19
2734144169.06
2735131644.69
2736140667.9
2737117505.98
2738126876.47
2739156556.42
2740144806.52
2741138315.12
2742172950.81
2743154055.39
2744169012.78
2745147130.81
2746133334.94
2747158898.8
2748114636.55
2749135266.58
2750115382.64
2751135356.56
2752217939.88
2753164413.17
2754260829.58
2755142499.69
275690671.46
275770959
275890794.875
2759172922.94
2760130547.664
2761146075.47
2762127897.445
2763195832.22
2764156677.05
2765286615.97
2766118918.51
276783088.51
2768123890.21
2769137663.25
2770145482.55
277196845.75
2772104116.57
2773180693.47
2774139519.48
2775122477.13
2776146383.88
2777157243.62
2778114620.51
2779121435.17
278093864.81
278180680.516
278280079.45
278391807.33
2784127609.72
2785141444.27
278653422.36
2787119806.51
278861782.207
2789175729.34
2790105167.016
2791107341.74
279259601.79
2793160198.78
279493006.75
2795117950.1
279697954.26
2797195539.88
2798118966.53
2799109352.92
280068499.414
2801104069.13
2802120118.766
2803158139.36
2804136711.8
2805110068.445
280666323.52
2807166659.55
2808164967.42
2809118846.8
2810119979.68
2811184912.52
2812181788.11
2813153442.02
2814148336.8
2815114209.875
2816227653.08
2817169597.78
2818124307.695
2819194838.02
2820139595.95
2821120197.086
2822190252.94
2823308789.75
2824179535.75
2825176111.73
2826136909.27
2827130162.99
2828228956.16
2829227076.52
2830237156.56
2831193822.38
2832241089.47
2833304543.7
2834227301.06
2835229116.36
2836205684.11
2837159602.02
2838140022.38
2839172519.83
2840204046.6
2841204397.58
2842233186.67
2843148495.98
2844184657.23
2845119342.414
2846213416.17
2847217466.17
2848207878.9
2849199796.9
2850276615.72
2851210204.67
2852232797.23
2853234925.92
2854144556.6
2855205847.33
2856206303.03
2857191376.44
2858212924.3
285992595.84
2860147669.14
2861125396.03
2862210402.7
2863141268.16
2864255907.52
2865139740.16
2866146773.39
286786024.64
2868111008.71
286996480.734
2870126101.14
287183182.734
287211204.42
287392011.66
2874144366.08
2875108190.29
2876168017.5
2877141247.52
2878154234.95
2879133811.38
288091145.42
2881140921.7
2882181104.83
2883200932.38
2884208622
2885185701.48
2886244048.89
2887105363.88
2888150092.7
288949186.582
289073777.49
2891138551.23
289239044.88
289375943.32
289435523.707
2895330151.03
2896289476
2897215517.52
2898140754.28
2899213778.42
2900166029.47
2901216113.6
2902188425.56
2903318239.38
2904338552.12
290567743
2906210201.06
2907108446.34
2908114567.125
2909138720.25
291069767.32
291170828.086
2912149327.28
291366131
291465598.44
291578043.51
291674433.22
2917182310.4
2918112793.62
2919236883.72

submission改进版

IdSalePrice
1461113816.58
1462158432.16
1463182454.11
1464192929.03
1465190422.6
1466172855.08
1467175751.14
1468163481.38
1469193167.3
1470126402.25
1471172077.23
1472100014.164
147395924.97
1474147253.47
1475112133.08
1476360062.84
1477253665.28
1478308819.44
1479307205.7
1480468440.1
1481321691.5
1482211007.66
1483174627.39
1484167452.81
1485192062.83
1486192640.17
1487340076.53
1488242860.08
1489190053.6
1490248150.97
1491197189.33
149296830.69
1493208703
1494297678.34
1495283630.06
1496252711.9
1497178221.11
1498172065.66
1499166413.56
1500161685.81
1501186395.75
1502156482.38
1503295578.56
1504243695.92
1505223383.28
1506190228.94
1507247864.77
1508196947.73
1509165784.31
1510148379.52
1511153885.97
1512170884.84
1513153683.28
1514150424.06
1515192902.58
1516152284.06
1517169888.06
1518130022.69
1519218552.34
1520129843.734
1521136471.42
1522173864.75
1523115115.64
1524127944.38
1525125068.88
1526116877.484
1527108622.05
1528135576.56
1529158113.73
1530212400.77
1531119006.08
153299445.04
1533150025.28
1534127187.18
1535142190.25
1536104757.31
153742281.137
1538171003.45
1539228963.28
1540107250.85
1541141252.05
1542142563.36
1543186393.72
154488139.15
1545121677.12
1546140944.77
1547128116.05
1548145441.28
1549124092.664
1550140091.7
1551114243.33
1552149666.22
1553144947.08
1554124737.2
1555164717.94
155683980.516
1557108884.95
1558111711.51
155975388.17
1560121728.36
1561129828.51
1562129982.91
1563129359.13
1564160002.42
1565151295.53
1566249800.92
156779003.83
1568233570.56
1569130996.47
1570143398.6
1571104842.18
1572143933.97
1573234889.92
1574126394.055
1575235581.47
1576272386.1
1577182267.47
1578155133.2
1579139578.56
1580195053.27
1581157956.73
1582133506.27
1583291060.03
1584221287.3
1585140182.31
158667177.734
1587110378.914
1588164665.02
158998476.6
1590134626.98
159190614.87
1592119465.15
1593136019.95
1594134475.67
1595112631.52
1596216169.17
1597193927.72
1598214222.83
1599187795.31
1600174351
160159450.594
1602112882.19
160384042.15
1604284428.6
1605247776.88
1606158903.11
1607166864.88
1608218288.86
1609182642.75
1610155880.52
1611136896.7
1612176644.88
1613162895.92
1614135514.4
161596269.31
161678066.4
161795095.06
1618127369.94
1619146648.55
1620174000.19
1621134653.47
1622150110.19
1623279836.12
1624203076.78
1625125407.83
1626166642.25
1627190048.9
1628289153.6
1629180103.25
1630342316.75
1631220504.14
1632229783.16
1633183398.8
1634187926.25
1635178533.4
1636151924.88
1637187136.52
1638181673.34
1639183942.84
1640240894.6
1641182532.27
1642255352.44
1643212145.98
1644238555.69
1645205033.27
1646164578.72
1647167636.98
1648134947.47
1649145763.39
1650117650.8
1651118430.28
1652104233.945
1653104988.77
1654151029.75
1655135840.58
1656146376.89
1657155527.36
1658152697.47
1659128023.4
1660147926.38
1661421646.3
1662371647.78
1663367812.8
1664449628.47
1665316008.72
1666331263.66
1667357877.47
1668354535.1
1669322346.9
1670354709.06
1671255427.16
1672410737.2
1673306157.5
1674249025.23
1675192415.7
1676196134.03
1677209192.56
1678448380.6
1679385175.8
1680323507.16
1681262578.7
1682321257.7
1683187064.45
1684174971.22
1685175832.78
1686170483.97
1687170060.47
1688188909.45
1689199014.56
1690199321.95
1691191247.11
1692275479
1693175150.67
1694186700.86
1695174914.84
1696261204.62
1697178095.5
1698340676.47
1699339922.53
1700261432.12
1701272006.94
1702231502.25
1703234345.08
1704274441.38
1705258274.97
1706372177.6
1707219048.2
1708200837.83
1709260995.92
1710220209.72
1711286981.78
1712255516.08
1713278123.66
1714220333.53
1715209645.69
1716177786
1717177324.19
1718143072.16
1719217081.89
1720243194.8
1721156665.17
1722126794.15
1723156959.27
1724209271.23
1725239107.23
1726186671.1
1727155954.47
1728173363.95
1729172392.33
1730157552.53
1731124205.89
1732131420.28
1733120077.04
1734123884.02
1735127596.195
1736109564.945
1737326201.56
1738268024.3
1739262767.38
1740204033.38
1741182330.75
1742175954.53
1743177516.11
1744343044.97
1745217477.33
1746188850.58
1747214355.38
1748215919.06
1749141580.81
1750130854.21
1751248179.84
1752123228.8
1753150557.83
1754194241.89
1755166971.69
1756137711.62
1757124894.914
1758147007.83
1759159838.16
1760160256.75
1761139656.08
1762187477.11
1763179899.83
1764122384.67
1765170986.81
1766186970.39
1767222829.28
1768146644.1
1769171115.4
1770159986.94
1771126083.48
1772136690.7
1773129675.6
1774142533.9
1775148963.23
1776132469.28
1777120397.734
1778150190.16
1779123299.1
1780170639.55
1781132196.61
178287395.46
1783145947.55
1784110232.23
1785129680.1
1786155628.66
1787174866.06
178856118
1789104492
179085681.92
1791194470.22
1792159630.8
1793132274.44
1794170618.16
1795135174.39
1796134799.44
1797109880.31
1798129185.61
1799115447.45
1800139022.61
1801126720.23
1802123963.39
1803157102.27
1804134550.98
1805140822.14
1806123985.96
1807146238.61
1808124446.18
1809134975.34
1810139430.12
181187952.875
1812102070.29
1813127777.805
1814101246.07
181550006.984
1816105415.83
1817109140.87
1818157760.39
1819128615.59
182055897.164
1821107089.875
1822161574.39
182348101.84
1824137840.44
1825146580.95
1826106379.57
1827109682.55
1828137078.22
1829136742.62
1830148344.7
1831153929.2
183273530.15
1833157823.08
1834119896.89
1835105849.63
1836127193.17
183775264.2
1838124372.4
1839111513.86
1840156625.33
1841141480.1
184297375.59
1843125998.71
1844146450.6
1845147542.14
1846150473.95
1847169453.06
184849455.902
1849116357.305
1850120853.65
1851148103.12
1852123855.63
1853130548.84
1854162865.69
1855148629.12
1856233518.28
1857134982.56
1858134108.84
1859116784.16
1860140739.94
1861117962.445
1862334604.88
1863315378.8
1864315394.25
1865356731.16
1866343453.66
1867225210.62
1868284963.94
1869210019.94
1870227854.58
1871272783
1872176461.14
1873248629.7
1874149395.5
1875186675.44
1876199651.61
1877206616.03
1878203456.22
1879136586.47
1880134425.67
1881248215.47
1882235627.84
1883186074.73
1884199799.62
1885226421.22
1886284435.5
1887221257.19
1888270584.97
1889171393.53
1890117493.06
1891126577.66
1892100930.57
1893137983.6
1894123530.49
1895137550.89
1896130831.89
1897117904.81
1898108495.875
1899166101.39
1900160495.86
1901184778.23
1902161745.8
1903222748.33
1904146912.8
1905191677.9
1906156644.9
1907195271.72
1908113314.586
1909138064.83
1910127989.7
1911213920.58
1912321022.12
1913153562.53
191472273.54
1915331452.34
191652068.926
1917238008.56
1918143781.11
1919161125.34
1920152311.3
1921369272.72
1922329470.78
1923241366.27
1924218875.44
1925204128.97
1926371522.8
1927132483.22
1928158230.17
1929123059.5
1930127034.85
1931125186.836
1932135518.28
1933185867.61
1934181834
1935172029.75
1936199968.1
1937181040.34
1938171487.44
1939253830.16
1940189877.28
1941169020.28
1942174700.89
1943228672.31
1944378843.25
1945392469.3
1946129814.94
1947284670.12
1948174695.45
1949253394.2
1950192195.27
1951250084.75
1952206315.64
1953175459.33
1954185135.03
1955141416.14
1956326049.6
1957155117.8
1958285978.8
1959147326.38
1960108863.15
1961126917.914
1962100565.695
1963107844.01
1964111483.54
1965145817.39
1966144214.75
1967306016.16
1968413894.62
1969376545.22
1970398942.34
1971450446.97
1972372186.1
1973298316.72
1974338782.1
1975453315.28
1976292574.44
1977364004.22
1978345845.62
1979323377.53
1980187148.98
1981345224.88
1982218255.6
1983203554.56
1984172677.53
1985238870.61
1986212535.58
1987194141.53
1988174678.06
1989190827.8
1990207973.27
1991226095.48
1992225426.64
1993168663.44
1994225286.02
1995183634.08
1996275312.94
1997322573.8
1998306569.34
1999307092.47
2000328527.8
2001289683.66
2002248973.61
2003261665.08
2004299097
2005228268.08
2006222115.7
2007253017.11
2008217522.7
2009203901.6
2010193050.39
2011140304.66
2012176820.9
2013181247.34
2014188746.55
2015212531.94
2016194950.33
2017198835.86
2018116436.38
2019132688.55
202099979.71
2021101930.52
2022194022.38
2023139523.06
2024277496.66
2025356936.3
2026181430.03
2027167721.2
2028158330.25
2029169640.44
2030267951.66
2031239700.17
2032263797
2033256229.1
2034179830.31
2035227978.62
2036204761.97
2037211086.4
2038322123.34
2039238438.53
2040324998.2
2041305345.38
2042209690.5
2043180802.28
2044171513.06
2045208370.02
2046147048.36
2047142594.36
2048142097.92
2049142394.31
2050175071.61
2051111503.49
2052122123.47
2053152183.52
205493517.03
2055163097.4
2056147829.11
2057109119.234
2058207781.22
2059132470.1
2060176975.23
2061167580.12
2062134712.11
2063123643.02
2064138589.64
2065115030.41
2066169285.73
2067127286.08
2068151083.06
206988952.97
2070118961.67
2071101515.51
2072140298.33
2073129104.016
2074173923.36
2075154631.89
2076123441.016
2077162099.64
2078134382.67
2079135114.62
2080120152.42
2081131781.16
2082126559.78
2083152545.48
2084116902.56
2085113280.68
2086112189.19
2087115796.164
208899172.62
208989722.25
2090124407.17
2091101428.93
2092126251.3
2093121644.66
2094114269.516
2095141940.73
209684063.17
209797861.79
2098144658.33
209944223.01
210066750.16
2101110790.17
2102130570.45
2103108910.57
2104144936.27
2105134286.11
210645160.777
2107198110.92
2108120122.32
2109113533.914
2110117337.47
2111135028.1
2112144545.69
2113121424.38
2114118727.914
2115165567.44
2116120656.79
2117140580.88
2118132169.1
2119117341.516
2120110728.195
2121101284.32
2122123274.88
212396835.78
2124174725.64
2125122530.61
2126155744.34
2127168768.98
2128127745.305
212983005.28
2130137304.84
2131138872.56
2132113818.37
2133133655.34
2134120345.29
213594670.56
213671101.664
2137110236.64
2138129179.1
2139146268.56
2140147021.5
2141159648.78
2142131723.44
2143151829.06
2144127240.695
2145143676.58
2146183091.67
2147138432.25
2148138550.2
2149143770.45
2150247216.69
2151130730.055
2152171132.39
2153178200.52
2154111902.734
2155142943.14
2156255033.83
2157231639.78
2158243033.5
2159222531.44
2160184625.02
2161240548.9
2162393289.72
2163351552.84
2164251319.16
2165209541.95
2166161176.22
2167218429.67
2168187555.67
2169194737.55
2170213551.28
2171157005.78
2172131617.58
2173188666.36
2174217066.23
2175291368.12
2176314643.56
2177236666.52
2178210783.81
2179137697.14
2180205902.64
2181189809.06
2182221848.8
2183191664.77
2184118496.13
2185121904.22
2186145080.92
2187150266.2
2188161232.69
2189296636.38
219082809.55
219171310.76
219288425.18
2193117009.06
2194103539.48
2195105856.32
2196106999.266
2197125065.95
2198168102.48
2199182802.6
2200140759.62
2201147901.83
2202219184.39
2203138716.88
2204176221.27
2205120361.59
2206146867.94
2207218074.2
2208261673.97
2209258171.39
2210128679.51
2211124766.16
2212129398.68
2213109898.5
2214136358.36
2215108062.695
2216151577.4
221741640.66
221884983.37
221982679.98
222057336.902
2221332248.75
2222297581.44
2223304015.25
2224220093.08
2225129612.83
2226180572.19
2227200704.56
2228290906.94
2229244939
2230157956
2231214641.17
2232178120.25
2233178078.55
2234253836.23
2235224317.12
2236245801.81
2237312202.38
2238202499.16
2239101278.336
2240156092.5
2241137567.67
2242126049.664
2243131462.95
2244105496.57
2245113423.914
2246143241.34
2247127939.55
2248129330.84
2249124667.84
2250133573.77
2251121154.9
2252181185.06
2253163571.56
2254180182.53
2255186588.72
2256174165.08
2257210190.05
2258158404.1
2259178287.83
2260145563.22
2261204864.27
2262218912.17
2263384536.4
2264445243.25
2265167868.03
2266302303.53
2267353645.2
2268407859.66
2269154690.78
2270193957.03
2271209040.34
2272192115.95
2273162287.28
2274184813.92
2275149662.11
2276192300.77
2277179732.94
2278151006.6
2279134717.97
2280110179.75
2281158346.1
2282174898.27
2283114449.99
2284119659.17
2285148362.8
2286125459.87
2287374330.3
2288286092
2289354078.97
2290419052
2291349635.16
2292425485.12
2293442612.7
2294397385.12
2295459371.6
2296257165.53
2297369536.28
2298365585.38
2299358670.66
2300323914.6
2301339191.22
2302257773.14
2303243079.5
2304254137.92
2305190643.31
2306185085.48
2307197270.95
2308220136.48
2309290753.28
2310216837.02
2311204211.98
2312179657.16
2313168135.94
2314169910.23
2315180834.47
2316203061.39
2317191817.55
2318177367.33
2319182393.89
2320181573.75
2321252619.55
2322192489.97
2323198495.17
2324187776.56
2325211307.66
2326182654.11
2327196466.34
2328220435.55
2329193979.7
2330183299.69
2331342326.38
2332391052.6
2333324490.5
2334265112.8
2335292338.7
2336311718.16
2337188344.56
2338251137.69
2339224372.33
2340391687.53
2341214534.86
2342234436.83
2343228446.84
2344220505.11
2345230552.53
2346215707.28
2347197427.78
2348253224.02
2349195799.9
2350327060.75
2351259761.55
2352252175.88
2353254619.89
2354149722.03
2355149936.28
2356148843.1
2357188306.86
2358198336.6
2359142202.69
2360122068.38
2361150708.72
2362247082.4
2363150204.95
2364163834.03
2365219954.25
2366192207.81
2367214773.42
2368218065.34
2369211610.56
2370172103.34
2371166105.45
2372182341.78
2373297192.2
2374324295.53
2375253133.95
2376289657.2
2377333894.38
2378152381.05
2379204237.39
2380147176.34
2381172467.61
2382220246.44
2383197136.61
2384240572.64
2385164499.44
2386136833.17
2387134753.81
2388110968.04
2389117281.49
2390151502.4
2391140846.95
2392116947.23
2393169917.58
2394153555.61
2395205573.97
2396150360.98
2397206533.27
2398126279.98
239972818.24
240071322.14
2401129686.34
2402137327.97
2403146756.39
2404154330.98
2405151309.4
2406151854.06
2407131574.25
2408144814.9
2409115721.08
2410171317.77
2411121974.8
2412160726.56
2413132760.6
2414162108.78
2415130328.52
2416135343.7
2417143860.56
2418141242.03
2419127419.64
2420129849.37
2421163377.73
2422111643.7
2423122383.91
2424144792.58
2425225499.86
2426138591.94
2427136678.97
2428183659.38
2429113430.414
2430137063.95
2431113561.51
2432153972.45
2433151572.14
2434145035.22
2435161557.72
2436116302.664
2437102715.89
2438126103.8
243996591.82
2440125398.04
244199778.336
244297932.27
2443135904.86
2444121821.74
244585885.48
2446145485.69
2447211815.22
2448129222.945
2449114076.12
2450156374.14
2451114435.43
2452222595.45
245396624.28
2454122842.63
2455121348.945
2456134501.95
2457141751.47
2458146727.33
2459116910.02
2460143859.1
2461132618.11
2462129880.16
2463134224.11
2464186559.56
2465138502.6
2466118483.8
2467126863.45
246893665.586
246988803.71
2470198236.06
2471224244.11
2472161246.06
2473114601.3
247466104.6
2475204116.98
2476125858.11
2477127140.09
2478149940.97
2479106027.08
2480160687.84
2481132111.19
2482123184.72
2483112890.47
2484122489.97
2485128951.23
2486148654.94
2487180646.75
2488174067.31
2489160982.58
2490143080.02
249194024.46
2492196978
2493154928.77
2494150010.67
249589149.05
2496238773.03
2497157265.81
2498118202.01
249988147.72
2500123940.305
2501145016.12
2502143711.98
250394850.53
2504195490.58
2505223512.8
2506261058.81
2507286793.8
2508258462.72
2509221906.27
2510213150.73
2511176646.89
2512209559.72
2513221530.02
2514255595.39
2515153251.98
2516178609
2517149428.92
2518159331.88
2519247287.48
2520217945.05
2521188419.1
2522220408.23
2523117609.234
2524140594.92
2525147730.52
2526146340.33
2527130671.81
2528132177.69
2529147370.94
2530126139.04
2531257784.05
2532218978.11
2533195885.39
2534237339.34
2535272649.9
2536225914.67
2537251659.62
2538186148.67
2539189729.27
2540179237.67
2541180269.45
2542165571.72
2543117570.57
2544114610.54
2545131735.94
2546119243.71
2547142803.61
2548163151.98
2549161609.86
2550601652.06
2551135516.58
2552136533.53
255372865.49
255494654.01
2555112978.17
2556102819.47
2557102172.75
2558142799.8
2559147077.06
2560162926.69
2561165914.19
2562150986.39
2563160101.84
2564215008.94
2565155047.75
2566153758.44
2567143647.11
2568226278.08
2569233726.73
2570122256.99
2571186426.62
2572158949.83
2573212469.56
2574291310.88
2575135158.92
2576119016.02
2577154596.25
257881782.1
257951383.03
2580110637.27
2581124629.945
2582130377.04
2583289046.7
2584170571.88
2585188656.94
2586214893.97
2587201812.16
2588141840.11
2589150328.19
2590207862.7
2591233368.78
2592208596.53
2593264846.12
2594179507.44
2595215715.67
2596302486.12
2597191534.98
2598291180.06
2599349501.8
2600180779.86
2601139190.66
260283705.055
260398934.6
260493266.305
260577140.37
2606138894.8
2607233493.45
2608193336.62
2609164557.31
2610115403.32
2611126382.81
2612159627.47
2613135525.84
2614127000.37
2615159132.1
2616150348
2617184961.7
2618213836.86
2619196961.1
2620189848.1
2621176502.16
2622179428.42
2623229164.31
2624316093.84
2625265468.3
2626176227.98
2627171410.34
2628470755.38
2629495089.75
2630393298.66
2631451260.4
2632420568.22
2633305837.16
2634416986.53
2635155537.7
2636175702.02
2637185370.64
2638257548.7
2639191661.78
2640156254.28
2641108398.67
2642175592.44
2643111263.67
2644114680.36
2645116768.43
2646102252.92
2647108196.27
2648143867.12
2649155532.64
2650137735.6
2651145672.55
2652389321.84
2653257187.16
2654267012.22
2655411279.2
2656341335.7
2657346895.2
2658316990.94
2659332261.3
2660369979.1
2661354312.94
2662375499.7
2663289439.3
2664283664.94
2665348070.72
2666287234.9
2667175112.31
2668181750.22
2669181159.69
2670282026.97
2671189440.86
2672198141.03
2673198487.3
2674196260.47
2675185919.77
2676192388.66
2677201286.78
2678258150.94
2679290246.25
2680294213.9
2681405299.75
2682321328.78
2683508680.8
2684330902.4
2685331281.06
2686242294
2687310977
2688207423.61
2689212617.83
2690380654.47
2691192578.56
2692151010.28
2693200119.56
2694142326.97
2695194065.02
2696184748.23
2697184349.38
2698194030.22
2699174918.75
2700157508.53
2701154297.9
2702105564.125
2703126416.11
2704141684.62
2705120513.445
2706106173.85
2707122666.94
2708144020.61
2709116650.516
2710134149.4
2711300130.44
2712403024.47
2713167749.86
2714155467.1
2715172156.36
2716151135.48
2717188180.97
2718210707.75
2719154782.28
2720170810.66
2721136786.94
2722155734.19
2723137525.22
2724133630.1
2725137949.47
2726147697.5
2727169739.1
2728181759.98
2729151593.92
2730149981.62
2731142147.88
2732138951.66
2733157194.3
2734137362.67
2735143752.4
2736142289.44
2737128671.41
2738139429.33
2739161054
2740141852.16
2741149200.92
2742153761.55
2743153570.75
2744164076.56
2745144147.95
2746148568.83
2747154024.52
2748126629.03
2749131272
2750132642.34
2751139764.47
2752209060.42
2753144967.8
2754230085.5
2755144699.88
275698313.75
275760826.055
275896869.82
2759172173.58
2760122414.04
2761142972.36
2762145225.86
2763183214.47
2764145924.17
2765289901.25
2766127482.88
276790084.16
2768131764.67
2769143638.39
2770143414.11
2771108466.26
2772111305.04
2773186153.73
2774142050.33
2775116780.68
2776145638.97
2777144850.75
2778114150.586
2779122828.06
278094048.34
278198965.93
278297332.95
278399300.56
2784124576.95
2785138322.17
278667104.89
2787127814.27
278876001.29
2789170853.77
2790100689.85
2791106826.56
279259853.004
2793147052.72
279496232.664
2795122626.836
2796107979.84
2797191597.5
2798104082.88
2799109486.15
280070032.94
2801112445.57
2802126304.87
2803159996.97
2804134879.28
2805112202.266
280687541.875
2807166501.42
2808149769.77
2809134519.72
2810134378.12
2811167283.14
2812169449.86
2813157022.11
2814157688.19
2815101212.88
2816229828.2
2817161482.17
2818131381.7
2819182258.72
2820142963.02
2821109002.945
2822193043.33
2823320973.53
2824178535.62
2825145867.34
2826130547.46
2827136262.52
2828218970
2829218833.94
2830221957.03
2831185279.03
2832234602.19
2833321863.28
2834227306.14
2835225863.34
2836192867.17
2837160578.31
2838150346.61
2839168093.92
2840203082.12
2841203636.52
2842231202.58
2843156672.2
2844171455.25
2845134274.86
2846205142.78
2847212508
2848204855.81
2849196940.34
2850284461.16
2851204536.89
2852232994.8
2853231694.73
2854142950.17
2855203375.53
2856198280.73
2857189020.25
2858206838.31
2859101252.484
2860132006.53
2861129369.055
2862184794.3
2863140459.23
2864247967.11
2865139382.81
2866139204.3
2867102740.695
286894933.8
286991648.83
2870119731.695
287174300.68
287245479.25
287397251.53
2874145662.03
2875113528.95
2876163768.38
2877158042.22
2878156925.62
2879143938.95
2880110444.18
2881152585.5
2882184830.83
2883194566.44
2884210717.2
2885189722.42
2886239812.06
2887106130.22
2888149209.53
288953087.24
289088317.62
2891141692.03
289239564.46
289381548.22
289454232.34
2895336280.34
2896293739.4
2897224354.12
2898146859.1
2899211767.02
2900163121.3
2901219632.66
2902182716.48
2903323931.12
2904348629
290577044.79
2906197792.22
2907114987.61
2908134429.4
2909146392.9
291080375.74
291184686.83
2912153530.78
291376145.88
291475837.03
291586668.09
291681517.31
2917167503.38
2918115305.73
2919232117.42

原文地址:https://blog.csdn.net/zxqq_/article/details/144447004

免责声明:本站文章内容转载自网络资源,如本站内容侵犯了原著者的合法权益,可联系本站删除。更多内容请关注自学内容网(zxcms.com)!