InfluxDB Getting Started Guide, Part 5: Wrapping the Basic InfluxDB Operations

1. Preface

The previous articles covered using InfluxDB on Linux and Windows. Starting with this one we look at using InfluxDB from Java: this article provides a utility class that wraps the InfluxDB Java API so that you can get started right away.

2. The InfluxDB Utility Class

2.1 Importing the Dependency

Use Maven to import the following dependency:

<dependency>
        <groupId>org.influxdb</groupId>
        <artifactId>influxdb-java</artifactId>
        <version>2.10</version>
</dependency>

2.2 Writing the Utility Class

package com.common.utils.influxdb;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

import javax.annotation.PostConstruct;

import org.influxdb.BatchOptions;
import org.influxdb.InfluxDB;
import org.influxdb.InfluxDB.ConsistencyLevel;
import org.influxdb.InfluxDBFactory;
import org.influxdb.dto.BatchPoints;
import org.influxdb.dto.Point;
import org.influxdb.dto.Pong;
import org.influxdb.dto.Query;
import org.influxdb.dto.QueryResult;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

@Service("tsdbService")
public class TsdbServiceImpl implements TsdbService {

    private static final Logger logger = LoggerFactory.getLogger(TsdbServiceImpl.class);

    private static final InfluxDB.ConsistencyLevel CONSISTENCY_LEVEL = InfluxDB.ConsistencyLevel.ANY;

    private static final TimeUnit PRECISION = TimeUnit.SECONDS;

    @Value("${tsdb.server.hosts}")
    private String hosts;

    @Value("${tsdb.server.port}")
    private String port;

    /**
     * Username
     */
    @Value("${tsdb.server.username}")
    private String username;
    /**
     * Password
     */
    @Value("${tsdb.server.password}")
    private String password;
    /**
     * Database name
     */
    @Value("${tsdb.server.database}")
    private String database;
    /**
     * Default retention policy
     */
    @Value("${tsdb.server.retentionpolicy}")
    private String retentionPolicy;
    /**
     * Retention policy used as the target of the downsampling continuous query.
     * The original code references this field without declaring it, so the
     * property name below is an assumption.
     */
    @Value("${tsdb.server.extendpolicy}")
    private String extendPolicy;

    private InfluxDB influxDB;

    @PostConstruct
    public void init() {
        List<String> serverAddressList = new ArrayList<>();
        for (String host : hosts.split(",")) {
            serverAddressList.add(String.format("%s:%s", host, port));
        }
        // influxdb-java connects to a single HTTP endpoint, so use the first configured address
        influxDB = InfluxDBFactory.connect("http://" + serverAddressList.get(0), username, password);

        try {
            // If the target database does not exist, create it together with a default retention policy
            if (!this.databaseExist(database)) {
                createDatabase(database);
                createRetentionPolicy();
            }
        } catch (Exception e) {
            // The server may be behind a proxy that does not allow creating databases
            logger.error("[TsdbService] occur error when init tsdb", e);
        } finally {
            // Default database and retention policy used by write(Point) and the batch writer
            influxDB.setDatabase(database);
            influxDB.setRetentionPolicy(retentionPolicy);
        }

        influxDB.setLogLevel(InfluxDB.LogLevel.NONE);

        // Flush every 1000 points, at least every 100 ms
        // bufferLimit is the maximum number of points kept in the retry buffer
        // exceptionHandler is a consumer invoked on asynchronous write failures
        // threadFactory is the ThreadFactory used by the batch writer
        influxDB.enableBatch(BatchOptions.DEFAULTS
                .actions(1000)
                .flushDuration(100)
                .bufferLimit(10)
                .exceptionHandler((points, e) -> {
                    List<Point> target = new ArrayList<>();
                    points.forEach(target::add);
                    String detail = target.toString();
                    String msg = String.format("failed to write points: %s%n",
                            detail.substring(0, Math.min(detail.length(), 10000)));
                    logger.error(msg, e);
                })
                .threadFactory(
                        Executors.defaultThreadFactory()
                ));
    }

    /**
     * Check whether the connection to the server is alive.
     *
     * @return true if the server answers the ping
     */
    @Override
    public boolean ping() {
        boolean isConnected = false;
        Pong pong;
        try {
            pong = influxDB.ping();
            if (pong != null) {
                isConnected = true;
            }
        } catch (Exception e) {
            logger.error("[TsdbService] ping failed", e);
        }
        return isConnected;
    }

    @Override
    public void createDatabase(String database) {
        influxDB.query(new Query("CREATE DATABASE " + database, ""));
    }

    @Override
    public void dropDatabase(String database) {
        influxDB.query(new Query("DROP DATABASE " + database, ""));
    }

    @Override
    public boolean databaseExist(String database) {
        return influxDB.databaseExists(database);
    }

    @Override
    public void createRetentionPolicy() {
        String command = String.format("CREATE RETENTION POLICY \"%s\" ON \"%s\" DURATION %s REPLICATION %s DEFAULT",
                "default_policy", database, "90d", 3);
        this.query(command);
    }

    @Override
    public void createRetentionPolicy(String database, String policyName, String duration, int replication, Boolean isDefault) {
        String sql = String.format("CREATE RETENTION POLICY \"%s\" ON \"%s\" DURATION %s REPLICATION %s ", policyName,
                database, duration, replication);
        if (isDefault) {
            sql = sql.concat(" DEFAULT");
        }
        this.query(sql);
    }

    @Override
    public void dropRetentionPolicy() {
        this.dropRetentionPolicy(database, retentionPolicy);
    }

    @Override
    public void dropRetentionPolicy(String database, String retentionPolicy) {
        String sql = String.format("DROP RETENTION POLICY %s ON %s", retentionPolicy, database);
        this.query(sql);
    }

    @Override
    public void createContinuousQuery(String measurement) {
        String cqName = String.format("cq_%s", measurement);
        String originMeasurement = String.format("%s.%s.%s", database, retentionPolicy, measurement);
        String cqMeasurement = String.format("%s.%s.%s_hour", database, extendPolicy, measurement);
        String sql = String.format("CREATE CONTINUOUS QUERY \"%s\" ON %s RESAMPLE EVERY 1h FOR 2h BEGIN SELECT MEAN(*) INTO %s FROM %s GROUP BY time(1h),* FILL(none) END",
                cqName, database, cqMeasurement, originMeasurement);

        this.query(sql);
    }

    @Override
    public boolean continuousQueryExists(String measurement) {
        String cqName = String.format("cq_%s", measurement);
        return continuousQueryExists(database, cqName);
    }

    @Override
    public boolean continuousQueryExists(String database, String cqName) {
        String sql = "SHOW CONTINUOUS QUERIES";
        QueryResult result = query(sql);
        List<QueryResult.Series> seriesList = result.getResults().get(0).getSeries();
        if (seriesList != null) {
            for (QueryResult.Series series : seriesList) {
                if (database.equals(series.getName())) {
                    List<List<Object>> continuousQueryList = series.getValues();
                    if (continuousQueryList == null) {
                        return false;
                    } else {
                        for (List<Object> queryResult : continuousQueryList) {
                            if (cqName.equals(queryResult.get(0))) {
                                return true;
                            }
                        }
                    }

                }
            }
        }
        return false;
    }

    @Override
    public void dropContinuousQuery(String databaseName, String cqName) {
        String sql = String.format("DROP CONTINUOUS QUERY %s ON %s", cqName, databaseName);
        QueryResult result = query(sql);
    }

    @Override
    public boolean measurementsExists(String measurement) {
        return measurementsExists(database, measurement);
    }

    @Override
    public boolean measurementsExists(String database, String measurement) {
        String sql = String.format("SHOW MEASUREMENTS ON %s", database);
        QueryResult result = query(sql);
        if (result != null) {
            List<QueryResult.Series> seriesList = result.getResults().get(0).getSeries();
            if (seriesList != null) {
                QueryResult.Series series = seriesList.get(0);
                List<List<Object>> valueList = series.getValues();
                for (List<Object> value : valueList) {
                    if (measurement.equals(value.get(0))) {
                        return true;
                    }
                }
            }
        }
        return false;
    }

    @Override
    public QueryResult query(String command) {
        return influxDB.query(new Query(command, database));
    }

    @Override
    public QueryResult dataQuery(String command) {
        return influxDB.query(new Query(command, database), TimeUnit.MILLISECONDS);
    }

    @Override
    public void insert(Point point1) {
        influxDB.write(point1);
    }

    @Override
    public void insert(String measurement, TimeUnit timeUnit, UniteMetricData data) {
        timeUnit = timeUnit == null ? TimeUnit.MILLISECONDS : timeUnit;
        Point point = pointBuilder(measurement, data.getTags(), data.getFields(), data.getTimestamp(), timeUnit);
        influxDB.write(database, retentionPolicy, point);
    }

    @Override
    public void batchInsert(BatchPoints batchPoints) {
        influxDB.write(batchPoints);
    }

    @Override
    public Point pointBuilder(String measurement,
                              Map<String, String> tags,
                              Map<String, Object> fields,
                              long time,
                              TimeUnit timeunit) {
        Point point = Point.measurement(measurement).time(time, timeunit).tag(tags).fields(fields).build();
        return point;
    }

    @Override
    public BatchPoints buildBatchPoints() {
        return this.batchPointsBuilder(database, CONSISTENCY_LEVEL, PRECISION);
    }

    @Override
    public BatchPoints batchPointsBuilder(String database, InfluxDB.ConsistencyLevel level, TimeUnit precision) {
        return batchPointsBuilder(database, level, precision, null);
    }

    @Override
    public BatchPoints batchPointsBuilder(String database, InfluxDB.ConsistencyLevel level, TimeUnit precision, String retentionPolicy) {
        return BatchPoints.database(database).consistency(level).precision(precision).retentionPolicy(retentionPolicy).build();
    }
}
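The TsdbService interface implemented above is not shown in the original article; the following is a sketch reconstructed from the @Override methods of the implementation (the package name and the placement of boolean ping() are assumptions):

package com.common.utils.influxdb;

import java.util.Map;
import java.util.concurrent.TimeUnit;

import org.influxdb.InfluxDB;
import org.influxdb.dto.BatchPoints;
import org.influxdb.dto.Point;
import org.influxdb.dto.QueryResult;

public interface TsdbService {

    boolean ping();

    void createDatabase(String database);
    void dropDatabase(String database);
    boolean databaseExist(String database);

    void createRetentionPolicy();
    void createRetentionPolicy(String database, String policyName, String duration, int replication, Boolean isDefault);
    void dropRetentionPolicy();
    void dropRetentionPolicy(String database, String retentionPolicy);

    void createContinuousQuery(String measurement);
    boolean continuousQueryExists(String measurement);
    boolean continuousQueryExists(String database, String cqName);
    void dropContinuousQuery(String databaseName, String cqName);

    boolean measurementsExists(String measurement);
    boolean measurementsExists(String database, String measurement);

    QueryResult query(String command);
    QueryResult dataQuery(String command);

    void insert(Point point);
    void insert(String measurement, TimeUnit timeUnit, UniteMetricData data);
    void batchInsert(BatchPoints batchPoints);

    Point pointBuilder(String measurement, Map<String, String> tags, Map<String, Object> fields, long time, TimeUnit timeunit);

    BatchPoints buildBatchPoints();
    BatchPoints batchPointsBuilder(String database, InfluxDB.ConsistencyLevel level, TimeUnit precision);
    BatchPoints batchPointsBuilder(String database, InfluxDB.ConsistencyLevel level, TimeUnit precision, String retentionPolicy);
}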

3. Usage Examples

3.0 Connectivity Test

    /**
     * TSDB connectivity check; runs before all the other tests.
     */
    @Before
    public void connectionTest() {
        boolean connected = tsdbService.ping();
        assertTrue(connected);
    }

3.1 Querying Data

InfluxDB can execute several InfluxQL statements in one query; separate them with semicolons. The example below only demonstrates how to parse the result set returned for a single statement.

    @Resource
    private TsdbService tsdbService;

    // TSDB address of the self-test environment
    private static final String MASTER_URL = "10.185.3.150:8091";

    private static final String USERNAME = "root";

    private static final String PASSWORD = "root";

    private static final String DATABASE_NAME = "ncm_test_temp";

    private static final String RP_NAME = "default_policy";

    // Test measurement name, usually named after the namespace
    private static final String MEASUREMENT_NAME = "NVS";

    private static final Integer QUERY_LIMIT = 10;

    /**
     * Unit test for batch queries.
     */
    @Test
    public void batchQueryTest() {
        long beginTime = 1559613845000L;
        long endTime = 1559621045000L;

        Map<String, String> dimensionMap = new HashMap<>();
        dimensionMap.put("tag1", "tag1");

        // Build a query condition with a time range
        String condition = TSDBUtil.getQueryCondition(dimensionMap, beginTime, endTime);

        Object[] args = new Object[]{MEASUREMENT_NAME, condition, QUERY_LIMIT};
        String command = String.format("SELECT * FROM %s WHERE %s ORDER BY time ASC LIMIT %d", args);

        // Execute the query
        QueryResult results = tsdbService.dataQuery(command);
        if (results.getResults() == null) {
            System.out.println("Data is empty");
            return;
        }
        // results.getResults() contains one entry per statement when several statements are queried at once
        for (QueryResult.Result result : results.getResults()) {
            List<QueryResult.Series> series = result.getSeries();
            for (QueryResult.Series serie : series) {
                List<List<Object>> values = serie.getValues();   // row values
                List<String> colums = serie.getColumns();        // column names

                // Print the query result
                System.out.println("colums:" + colums);
                for (List<Object> value : values) {
                    System.out.println("value:" + value);
                }

                // Repackage the query result as a list of maps
                List<Map<String, Object>> dataList = new LinkedList<>();
                for (int i = 0; i < values.size(); ++i) {
                    Map<String, Object> dataMap = new HashMap<>(colums.size());
                    for (int j = 0; j < colums.size(); ++j) {
                        dataMap.put(colums.get(j), values.get(i).get(j));
                    }
                    dataList.add(dataMap);
                }

                // dataList can serve as the basic format of the data returned to the caller
                System.out.println(dataList);

            }
        }

        /**
         * Before normalization:
         *
         * colums:[time, field1, field2, tag1, tag2]
         * value:[1.550599292E12, efs, 444444.0, tag1, tag2]
         * value:[1.550595692E12, bcd, 333333.0, tag1, tag2]
         * value:[1.550592092E12, abc, 123456.0, tag1, tag2]
         *
         * After normalization:
         * [
         *     {
         *         tag1=tag1,
         *         field1=efs,
         *         time=1.550599292E12,
         *         field2=444444.0,
         *         tag2=tag2
         *     },
         *     {
         *         tag1=tag1,
         *         field1=bcd,
         *         time=1.550595692E12,
         *         field2=333333.0,
         *         tag2=tag2
         *     },
         *     {
         *         tag1=tag1,
         *         field1=abc,
         *         time=1.550592092E12,
         *         field2=123456.0,
         *         tag2=tag2
         *     }
         * ]
         */
    }
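TSDBUtil.getQueryCondition used in the test above is not included in the article. A hedged sketch of what it presumably builds, a WHERE clause made of tag filters plus a time range (the class body and formatting here are assumptions), might look like this:

import java.util.Map;

public class TSDBUtil {

    /**
     * Hypothetical reconstruction: joins tag equality filters with a time range.
     * dataQuery() uses TimeUnit.MILLISECONDS, so the bounds are epoch milliseconds
     * and carry an explicit "ms" suffix in InfluxQL.
     */
    public static String getQueryCondition(Map<String, String> tags, long beginTime, long endTime) {
        StringBuilder condition = new StringBuilder();
        for (Map.Entry<String, String> entry : tags.entrySet()) {
            // Tag values must be single-quoted in InfluxQL
            condition.append(String.format("\"%s\" = '%s' AND ", entry.getKey(), entry.getValue()));
        }
        condition.append(String.format("time >= %dms AND time <= %dms", beginTime, endTime));
        return condition.toString();
    }
}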

When reading the data back, watch out for nulls. In this example we first check that getSeries() is not null, then take the values of the series belonging to the first (and only) statement, and iterate over that list to pull the target fields out of each record.

The result set influxdb-java returns is nested fairly deeply, mainly because several statements can be sent in one query to speed things up; this is one place where usage differs from a relational database.
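To make the multi-statement case concrete, here is a minimal sketch (not from the original article; the statements are made up) showing that results.getResults() then contains one entry per statement:

    @Test
    public void multiStatementQueryTest() {
        // Two InfluxQL statements separated by a semicolon, sent in a single request
        String command = String.format("SELECT COUNT(field2) FROM %s; SELECT * FROM %s LIMIT 1",
                MEASUREMENT_NAME, MEASUREMENT_NAME);
        QueryResult results = tsdbService.query(command);
        // getResults().get(0) belongs to the first statement, get(1) to the second
        QueryResult.Result countResult = results.getResults().get(0);
        QueryResult.Result sampleResult = results.getResults().get(1);
        System.out.println(countResult);
        System.out.println(sampleResult);
    }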

3.2 Inserting a Single Point

The type of a field in InfluxDB is fixed by the type of the first value written to it; tag values are always strings and are indexed, which speeds up queries. The short sketch below shows how tags and fields end up in line protocol; the original example, which uses the InfluxDBConnection wrapper from the utility-class article in the references, follows it.
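A quick illustration (not from the original article; measurement and field names are made up) that builds a Point with influxdb-java and prints its line-protocol form, making the tag/field distinction visible:

    @Test
    public void lineProtocolDemo() {
        Point point = Point.measurement("cpu")
                .time(System.currentTimeMillis(), TimeUnit.MILLISECONDS)
                .tag("host", "server-01")     // tag values are always strings and are indexed
                .addField("usage", 2.17)      // the first write fixes this field's type as a float
                .addField("state", "ok")      // and this one's as a string
                .build();
        // Prints something like: cpu,host=server-01 state="ok",usage=2.17 <timestamp>
        System.out.println(point.lineProtocol());
    }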

public static void main(String[] args) {
        InfluxDBConnection influxDBConnection = new InfluxDBConnection("admin", "admin", "1.1.1.1", "db-test", "hour");
        Map<String, String> tags = new HashMap<String, String>();
        tags.put("tag1", "tagValue");
        Map<String, Object> fields = new HashMap<String, Object>();
        fields.put("field1", "a string value");
        // Numeric field; the field type in InfluxDB is fixed by the first value inserted
        fields.put("field2", 3.141592657);
        // Timestamps are given in milliseconds
        influxDBConnection.insert("measurement_name", tags, fields, System.currentTimeMillis(), TimeUnit.MILLISECONDS);
    }
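InfluxDBConnection is the wrapper from the utility-class article listed in the references and is not reproduced in this post. A hedged sketch that is consistent with how it is called in these examples (field names, the HTTP port, and the exact method bodies are assumptions) could be:

import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;

import org.influxdb.InfluxDB;
import org.influxdb.InfluxDB.ConsistencyLevel;
import org.influxdb.InfluxDBFactory;
import org.influxdb.dto.BatchPoints;
import org.influxdb.dto.Point;

public class InfluxDBConnection {

    private final InfluxDB influxDB;
    private final String database;
    private final String retentionPolicy;

    public InfluxDBConnection(String username, String password, String host,
                              String database, String retentionPolicy) {
        // The port (8086, InfluxDB's default HTTP port) is an assumption
        this.influxDB = InfluxDBFactory.connect("http://" + host + ":8086", username, password);
        this.database = database;
        this.retentionPolicy = retentionPolicy;
    }

    public Point pointBuilder(String measurement, long time,
                              Map<String, String> tags, Map<String, Object> fields) {
        return Point.measurement(measurement).time(time, TimeUnit.MILLISECONDS).tag(tags).fields(fields).build();
    }

    public void insert(String measurement, Map<String, String> tags, Map<String, Object> fields,
                       long time, TimeUnit timeUnit) {
        Point point = Point.measurement(measurement).time(time, timeUnit).tag(tags).fields(fields).build();
        influxDB.write(database, retentionPolicy, point);
    }

    public void batchInsert(BatchPoints batchPoints) {
        influxDB.write(batchPoints);
    }

    public void batchInsert(String database, String retentionPolicy,
                            ConsistencyLevel consistency, List<String> records) {
        // Writes pre-serialized line-protocol records in a single request
        influxDB.write(database, retentionPolicy, consistency, records);
    }
}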

Another test case:

public class UniteMetricData implements Serializable {

    private static final long serialVersionUID = 8968059029015805484L;

    private Map<String, String> tags;

    private Map<String, Object> fields;

    private long timestamp;

    public UniteMetricData(Map<String, String> tags, Map<String, Object> fields, long timestamp) {
        this.tags = tags;
        this.fields = fields;
        this.timestamp = timestamp;
    }

    public Map<String, String> getTags() {
        return tags;
    }

    public void setTags(Map<String, String> tags) {
        this.tags = tags;
    }

    public Map<String, Object> getFields() {
        return fields;
    }

    public void setFields(Map<String, Object> fields) {
        this.fields = fields;
    }

    public long getTimestamp() {
        return timestamp;
    }

    public void setTimestamp(long timestamp) {
        this.timestamp = timestamp;
    }
}

@Test
    public void writeTest() throws InterruptedException {
        Map<String, String> tags  = new HashMap<>();
        tags.put("host", "ncm-test-01");
        tags.put("projectId", "c57212bdec1345cd95107ef3109777");

        Map<String, Object> fields = new HashMap<>();
        fields.put("cpuUsage", 2.17);

        UniteMetricData data = new UniteMetricData(tags, fields, System.currentTimeMillis());

        tsdbService.insert(MEASUREMENT_NAME, TimeUnit.MILLISECONDS, data);

        Thread.sleep(1000);
        QueryResult queryResult = tsdbService.query(String.format("SELECT cpuUsage FROM %s", MEASUREMENT_NAME));

        for (QueryResult.Result result : queryResult.getResults()) {
            for (QueryResult.Series series : result.getSeries()) {
                System.out.println(series.toString());
            }
        }
    }

3.3 Several Ways to Batch-Write Data

Note: the first two approaches below require that both points are written to the same database and carry the same tags. If the tags differ, put the points into separate BatchPoints objects; otherwise the data may be written incorrectly.

3.3.1 Approach 1: Assemble the data with BatchPoints, then write each batch to the database in turn

public static void main(String[] args) {
        InfluxDBConnection influxDBConnection = new InfluxDBConnection("admin", "admin", "1.1.1.1", "db-test", "hour");
        Map<String, String> tags = new HashMap<String, String>();
        tags.put("tag1", "tagValue");
        Map<String, Object> fields1 = new HashMap<String, Object>();
        fields1.put("field1", "abc");
        // Numeric field; the field type in InfluxDB is fixed by the first value inserted
        fields1.put("field2", 123456);
        Map<String, Object> fields2 = new HashMap<String, Object>();
        fields2.put("field1", "a string value");
        fields2.put("field2", 3.141592657);
        // One record each
        Point point1 = influxDBConnection.pointBuilder("measurement_name", System.currentTimeMillis(), tags, fields1);
        Point point2 = influxDBConnection.pointBuilder("measurement_name", System.currentTimeMillis(), tags, fields2);
        // Add each record to its own BatchPoints
        BatchPoints batchPoints1 = BatchPoints.database("db-test").tag("tag1", "tagValue1").retentionPolicy("hour")
                .consistency(ConsistencyLevel.ALL).build();
        BatchPoints batchPoints2 = BatchPoints.database("db-test").tag("tag2", "tagValue2").retentionPolicy("hour")
                .consistency(ConsistencyLevel.ALL).build();
        batchPoints1.point(point1);
        batchPoints2.point(point2);
        // Write the two batches to the database one after the other
        influxDBConnection.batchInsert(batchPoints1);
        influxDBConnection.batchInsert(batchPoints2);
    }

3.3.2 Approach 2: Assemble the data with BatchPoints, serialize it, then insert everything in one request

public static void main(String[] args) {
        InfluxDBConnection influxDBConnection = new InfluxDBConnection("admin", "admin", "1.1.1.1", "db-test", "hour");
        Map<String, String> tags1 = new HashMap<String, String>();
        tags1.put("tag1", "tagValue");
        Map<String, String> tags2 = new HashMap<String, String>();
        tags2.put("tag2", "tagValue");
        Map<String, Object> fields1 = new HashMap<String, Object>();
        fields1.put("field1", "abc");
        // Numeric field; the field type in InfluxDB is fixed by the first value inserted
        fields1.put("field2", 123456);
        Map<String, Object> fields2 = new HashMap<String, Object>();
        fields2.put("field1", "a string value");
        fields2.put("field2", 3.141592657);
        // One record each
        Point point1 = influxDBConnection.pointBuilder("measurement_name", System.currentTimeMillis(), tags1, fields1);
        Point point2 = influxDBConnection.pointBuilder("measurement_name", System.currentTimeMillis(), tags2, fields2);
        BatchPoints batchPoints1 = BatchPoints.database("db-test").tag("tag1", "tagValue1")
                .retentionPolicy("hour").consistency(ConsistencyLevel.ALL).build();
        // Add the first record to batchPoints1
        batchPoints1.point(point1);
        BatchPoints batchPoints2 = BatchPoints.database("db-test").tag("tag2", "tagValue2")
                .retentionPolicy("hour").consistency(ConsistencyLevel.ALL).build();
        // Add the second record to batchPoints2
        batchPoints2.point(point2);
        // Serialize the BatchPoints to line protocol and write them in a single request to improve write speed
        List<String> records = new ArrayList<String>();
        records.add(batchPoints1.lineProtocol());
        records.add(batchPoints2.lineProtocol());
        // Batch-insert the two records into the database
        influxDBConnection.batchInsert("db-test", "hour", ConsistencyLevel.ALL, records);
    }

3.3.3 Approach 3: Call the write method directly

@Test
    public void batchWriteTest() {
        Map<String, String> tags = new HashMap<>();
        tags.put("tag1", "tag1");
        tags.put("tag2", "tag2");

        Map<String, Object> fields1 = new HashMap<>();
        fields1.put("field1", "abc");
        fields1.put("field2", 123456);

        Map<String, Object> fields2 = new HashMap<>();
        fields2.put("field1", "bcd");
        fields2.put("field2", 333333);

        Map<String, Object> fields3 = new HashMap<>();
        fields3.put("field1", "efs");
        fields3.put("field2", 444444);

        // Fabricate 10,000 test records to insert
        Random random = new Random();
        List<UniteMetricData> recordList = new ArrayList<>(10000);
        for (int i = 0; i < 10000; ++i) {
            UniteMetricData data = new UniteMetricData(tags, fields1, 1559617445000L + random.nextLong() % 1000);
            recordList.add(data);
        }

        long start = System.currentTimeMillis();
        for (UniteMetricData data : recordList) {
            tsdbService.insert(MEASUREMENT_NAME, TimeUnit.MILLISECONDS, data);
        }
        long end = System.currentTimeMillis();
        System.out.println(String.format("Time used %d ms", end - start));
    }

Because batching was enabled when the connection was created, influxdb-java buffers points locally and writes them asynchronously. In our tests the write performance of this approach is no worse than the two approaches above, and it does not require building BatchPoints by hand, so it is simpler to use. This is what we use in production and the approach we recommend.
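One caveat with asynchronous batching: points sit in the local buffer until the next flush, so they should be flushed before the application shuts down. A minimal sketch of a shutdown hook for TsdbServiceImpl (this method is not part of the original class; it assumes javax.annotation.PreDestroy is available):

    @PreDestroy
    public void destroy() {
        // flush() pushes any buffered points to the server;
        // close() disables batching and releases the underlying resources
        influxDB.flush();
        influxDB.close();
    }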

3.4 Summary

This section showed how to wrap InfluxDB's basic read and write operations behind a Java service class; hopefully the service above saves you some work when you first integrate InfluxDB.

4. References

  1. Java版InfluxDB工具类 (InfluxDB utility class in Java)